text
stringlengths 6
947k
| repo_name
stringlengths 5
100
| path
stringlengths 4
231
| language
stringclasses 1
value | license
stringclasses 15
values | size
int64 6
947k
| score
float64 0
0.34
|
---|---|---|---|---|---|---|
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
	"""Build the kimogila pet-deed Tangible template.

	Called by the engine loader with the active *kernel*; returns the
	configured Tangible instance for this deed object.
	"""
	result = Tangible()
	result.template = "object/tangible/deed/pet_deed/shared_kimogila_deed.iff"
	result.attribute_template_id = 2
	result.stfName("pet_deed","kimogila")
	#### BEGIN MODIFICATIONS ####
	# Radial menu script used when the deed sits in the datapad.
	result.setStringAttribute("radial_filename", "radials/deed_datapad.py")
	# NOTE(review): deed_pcd/deed_mobile presumably name the intangible
	# control device and the spawned creature templates — confirm against
	# the deed radial script that consumes these attributes.
	result.setStringAttribute("deed_pcd", "object/intangible/pet/shared_kimogila_hue.iff")
	result.setStringAttribute("deed_mobile", "object/mobile/shared_kimogila_hue.iff")
	#### END MODIFICATIONS ####
	return result | obi-two/Rebelion | data/scripts/templates/object/tangible/deed/pet_deed/shared_kimogila_deed.py | Python | mit | 691 | 0.037627 |
# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
#
# Copyright 2014 Michael Terry <mike@mterry.name>
#
# This file is part of duplicity.
#
# Duplicity is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 2 of the License, or (at your
# option) any later version.
#
# Duplicity is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with duplicity; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
# This is just a small override to the system gettext.py which allows us to
# always return a string with fancy unicode characters, which will notify us
# if we ever get a unicode->ascii translation by accident.
def translation(*args, **kwargs):
    """Drop-in replacement for ``gettext.translation`` used in tests.

    Returns an object mimicking a gettext translation, but every
    "translated" string gets a zero-width space appended, so that an
    accidental unicode->ascii conversion raises instead of passing
    silently (see the module comment above).
    """
    class Translation:
        # NOTE(review): the literal below should contain U+200B (ZERO WIDTH
        # SPACE); the character is invisible — verify it was not lost.
        ZWSP = u"" # ZERO WIDTH SPACE, basically an invisible space separator
        def install(self, **kwargs):
            # Python 2 style: install _() into the builtins namespace, the
            # same way gettext's Translation.install() does.
            import __builtin__
            __builtin__.__dict__['_'] = lambda x: x + self.ZWSP
        def ungettext(self, one, more, n):
            # Same plural-selection contract as gettext's ungettext().
            if n == 1: return one + self.ZWSP
            else: return more + self.ZWSP
    return Translation()
| alanfranz/duplicity | testing/overrides/gettext.py | Python | gpl-2.0 | 1,496 | 0.004016 |
'''
A simple module for Sopel (http://sopel.chat) to get horoscope
information from a JSON API and put it back into chat
All original code written by: BluDragyn. (www.bludragyn.net)
This module is released under the terms of the GPLv3
(https://www.gnu.org/licenses/gpl-3.0.en.html)
If you use and like this module, please send me an email
(dragyn@bludragyn.net) or drop in to see me on Occultus IRC
(irc.occultus.net), I hang out in #sacredpaths
'''
import json
import urllib3
from sopel import module
@module.commands('hs', 'horoscope')
def horoscope(bot, trigger):
    """Reply in-channel with today's horoscope for a zodiac sign.

    Usage: ``.hs <sign>`` / ``.horoscope <sign>``.  The sign is matched
    case-insensitively; anything unrecognized (including a missing
    argument) gets a usage hint addressed to the caller.
    """
    # One lowercase entry per sign; the input is normalized instead of
    # enumerating every capitalization variant as the old code did.
    valid_signs = {
        'aquarius', 'pisces', 'aries', 'taurus',
        'gemini', 'cancer', 'leo', 'virgo',
        'libra', 'scorpio', 'sagittarius', 'capricorn',
    }
    raw_sign = trigger.group(2)
    nick = trigger.nick
    # raw_sign is None when no argument was given; guard before .lower().
    if raw_sign and raw_sign.lower() in valid_signs:
        sign = raw_sign.lower()
        hs = get_hs(sign)
        sign = sign.capitalize()
        bot.say('Today\'s horoscope for ' + sign + ' is: ' + hs)
    else:
        bot.say(nick + ', please use a valid zodiac sign and try again.')
def get_hs(sunsign):
    """Fetch today's horoscope text for *sunsign* from the astrologer API.

    Returns the horoscope string with the API's trailing attribution
    stripped, or a human-readable error message when the request, the
    JSON decode, or the payload lookup fails.
    """
    http = urllib3.PoolManager()
    url = 'http://sandipbgt.com/theastrologer/api/horoscope/' \
        + sunsign + '/today/'
    # Narrow error handling: a network failure, bad JSON, or a missing
    # key all fall through to the friendly error message below instead
    # of crashing the bot (the old code only handled an empty payload).
    try:
        response = http.request('GET', url)
        raw = json.loads(response.data.decode('utf8'))
        hscope = raw['horoscope']
    except (urllib3.exceptions.HTTPError, ValueError, KeyError):
        hscope = None
    if not hscope:
        hscope = 'There was an error getting the horoscope right now.\
        Please try again later.'
        return hscope
    else:
        # The API appends a fixed-length attribution blurb; drop it.
        # NOTE(review): the magic 59 assumes that blurb never changes —
        # confirm against the API response format.
        hscope = hscope[:-59]
        return hscope
"""Generate pass."""
from scoff.ast.visits.syntax import (
SyntaxChecker,
SyntaxCheckerError,
SyntaxErrorDescriptor,
)
class VecgenPass(SyntaxChecker):
    """Visit AST and generate intermediate code.

    Walks the vecgen AST, resolving named value definitions and
    configuration directives, and lowers the description into a
    dictionary-based intermediate representation (returned by ``visit``).
    A time cursor is advanced while visiting to validate absolute times.
    """

    # Configuration directives we understand; anything else is ignored.
    _CONFIG_DIRECTIVES = ("register_size",)
    # Reusable error objects/descriptors raised by the visit_* methods.
    _SYNTAX_ERR_INVALID_VAL = SyntaxCheckerError("invalid value", "v001")
    _SYNTAX_ERR_INVALID_NAME = SyntaxCheckerError(
        "unknown name: '{name}'", "v002"
    )
    _SYNTAX_ERR_TIME_PAST = SyntaxErrorDescriptor(
        "t001",
        "absolute time is in the past",
        "absolute time is in the past, current time is {cur}, requested is {req}",
    )
    _SYNTAX_ERRORS = {"t001": _SYNTAX_ERR_TIME_PAST}

    def __init__(self, *args, **kwargs):
        """Initialize."""
        super().__init__(*args, **kwargs)
        self._sequence = []       # final IR, set by visit_VectorDescription
        self._definitions = {}    # name -> value from value definitions
        self._directives = {}     # directive name -> value
        self._current_time = 0    # time cursor advanced while visiting

    def visit_ConfigurationDirective(self, node):
        """Visit configuration directive.

        Stores known directive values; unknown directives are dropped.
        """
        if node.directive not in self._CONFIG_DIRECTIVES:
            # unknown, ignore for now
            return
        if node.directive in self._directives:
            # re-define, warning (not reported yet — later value wins)
            pass
        # store
        self._directives[node.directive] = node.value

    def visit_ValueDefinition(self, node):
        """Visit value definition.

        Records ``name -> value`` for later symbol lookups.
        """
        if node.name in self._definitions:
            # re-define, warning (not reported yet — later value wins)
            pass
        self._definitions[node.name] = node.value

    def visit_InitialElement(self, node):
        """Visit initial element.

        Resolves a symbolic value if necessary (error v002 when the name
        is unknown) and advances the time cursor by one step.
        """
        if isinstance(node.val, str):
            # symbol lookup — mutates node.val in place with the value
            if node.val not in self._definitions:
                raise self.get_error_from_code(node, "v002", name=node.val)
            node.val = self._definitions[node.val]
        self._current_time += 1
        return {"event": "initial", "value": node.val}

    def visit_SequenceElement(self, node):
        """Visit sequence element.

        Resolves symbolic masks (error v002 when unknown), normalizes the
        element's time specification, and advances the time cursor.
        Absolute times earlier than the cursor raise error t001.
        """
        if isinstance(node.mask, str):
            # symbol lookup — mutates node.mask in place with the value
            if node.mask not in self._definitions:
                raise self.get_error_from_code(node, "v002", name=node.mask)
            node.mask = self._definitions[node.mask]
        if node.time is None:
            self._current_time += 1
            # insert relative time (implicit "one step later")
            time = {"mode": "rel", "delta": 1}
        else:
            if node.time["mode"] == "rel":
                self._current_time += node.time["delta"]
            else:
                abs_time = node.time["time"]
                if abs_time < self._current_time:
                    # time is in the past, cannot be
                    raise self.get_error_from_code(
                        node, "t001", cur=self._current_time, req=abs_time
                    )
                self._current_time = abs_time
            time = node.time
        return {"event": node.event, "mask": node.mask, "time": time}

    def visit_HexValue(self, node):
        """Visit hexadecimal value.

        Raises error v001 when node.val is not valid base-16 text.
        """
        try:
            value = int(node.val, 16)
        except ValueError:
            raise self._SYNTAX_ERR_INVALID_VAL
        return value

    def visit_BinValue(self, node):
        """Visit binary value.

        Raises error v001 when node.val is not valid base-2 text.
        """
        try:
            value = int(node.val, 2)
        except ValueError:
            raise self._SYNTAX_ERR_INVALID_VAL
        return value

    def visit_AbsTimeValue(self, node):
        """Visit absolute time value (must be non-negative)."""
        if node.time < 0:
            raise self._SYNTAX_ERR_INVALID_VAL
        return {"mode": "abs", "time": node.time}

    def visit_RelTimeValue(self, node):
        """Visit relative time value (must be non-negative)."""
        if node.time < 0:
            raise self._SYNTAX_ERR_INVALID_VAL
        return {"mode": "rel", "delta": node.time}

    def visit_VectorDescription(self, node):
        """Visit AST root.

        Assembles the final IR: the collected directives plus the
        flattened sequence (initial element first).
        """
        ir = self._directives
        ir["sequence"] = [node.initial] + node.sequence
        # NOTE(review): original comment read "doesnt return, not sure
        # why" — the return value is discarded by the dispatcher; visit()
        # below returns self._sequence instead, so it is stored here.
        self._sequence = ir
        return ir

    def visit_BitwiseBinOperation(self, node):
        """Visit binary bitwise operation.

        Folds the (already-visited, numeric) operands into a constant.
        """
        if node.op == "<<":
            return node.lhs << node.rhs
        elif node.op == ">>":
            return node.lhs >> node.rhs
        elif node.op == "|":
            return node.lhs | node.rhs
        elif node.op == "&":
            return node.lhs & node.rhs
        elif node.op == "^":
            return node.lhs ^ node.rhs
        else:
            # cannot happen! (grammar only produces the operators above)
            return None

    def visit_BitwiseNegate(self, node):
        """Visit negation (bitwise NOT of an already-folded value)."""
        return ~node.val

    def visit_BooleanExpr(self, node):
        """Visit boolean expression (passes the operator through as-is)."""
        return node.op

    def visit_Comment(self, node):
        """Visit comment — comments produce no IR."""

    def visit(self, node):
        """Perform visit.

        Delegates to the base-class dispatcher, then returns the IR
        stored by visit_VectorDescription; the dispatcher's own return
        value is discarded (see note there).
        """
        ret = super().visit(node)
        return self._sequence
| brunosmmm/hdltools | hdltools/vecgen/generate.py | Python | mit | 4,978 | 0.000201 |
# -*- coding: utf-8 -*-
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
class ModuleDocFragment(object):
    """Reusable Ansible doc fragment: shared F5 ``provider`` options.

    Modules reference this fragment via ``extends_documentation_fragment``;
    Ansible splices the DOCUMENTATION string below into their docs.
    """

    # Standard F5 documentation fragment
    DOCUMENTATION = r'''
options:
  provider:
    description:
      - A dict object containing connection details.
    type: dict
    version_added: "1.0.0"
    suboptions:
      password:
        description:
          - The password for the user account used to connect to the BIG-IP.
          - You may omit this option by setting the environment variable C(F5_PASSWORD).
        type: str
        required: true
        aliases: [ pass, pwd ]
      server:
        description:
          - The BIG-IP host.
          - You may omit this option by setting the environment variable C(F5_SERVER).
        type: str
        required: true
      server_port:
        description:
          - The BIG-IP server port.
          - You may omit this option by setting the environment variable C(F5_SERVER_PORT).
        type: int
        default: 22
      user:
        description:
          - The username to connect to the BIG-IP with. This user must have
            administrative privileges on the device.
          - You may omit this option by setting the environment variable C(F5_USER).
        type: str
        required: true
      validate_certs:
        description:
          - If C(no), SSL certificates are not validated. Use this only
            on personally controlled sites using self-signed certificates.
          - You may omit this option by setting the environment variable C(F5_VALIDATE_CERTS).
        type: bool
        default: yes
      timeout:
        description:
          - Specifies the timeout in seconds for communicating with the network device
            for either connecting or sending commands. If the timeout is
            exceeded before the operation is completed, the module will error.
        type: int
      ssh_keyfile:
        description:
          - Specifies the SSH keyfile to use to authenticate the connection to
            the remote device. This argument is only used for I(cli) transports.
          - You may omit this option by setting the environment variable C(ANSIBLE_NET_SSH_KEYFILE).
        type: path
      transport:
        description:
          - Configures the transport connection to use when connecting to the
            remote device.
        type: str
        choices: ['cli']
        default: cli
      no_f5_teem:
        description:
          - If C(yes), TEEM telemetry data is not sent to F5.
          - You may omit this option by setting the environment variable C(F5_TELEMETRY_OFF).
          - Previously used variable C(F5_TEEM) is deprecated as its name was confusing.
        default: no
        type: bool
      auth_provider:
        description:
          - Configures the auth provider for to obtain authentication tokens from the remote device.
          - This option is really used when working with BIG-IQ devices.
        type: str
notes:
  - For more information on using Ansible to manage F5 Networks devices see U(https://www.ansible.com/integrations/networks/f5).
  - Requires BIG-IP software version >= 12.
  - The F5 modules only manipulate the running configuration of the F5 product. To ensure that BIG-IP
    specific configuration persists to disk, be sure to include at least one task that uses the
    M(f5networks.f5_modules.bigip_config) module to save the running configuration. Refer to the module's documentation for
    the correct usage of the module to save your running configuration.
'''
| F5Networks/f5-ansible | ansible_collections/f5networks/f5_modules/plugins/doc_fragments/f5ssh.py | Python | gpl-3.0 | 3,698 | 0.004327 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from formencode.variabledecode import variable_encode
from allura.tests import TestController
from allura.tests import decorators as td
from allura.lib import helpers as h
class TestFeeds(TestController):
    # Functional smoke tests: every feed endpoint (project, wiki, ticket)
    # must render in both RSS and Atom without erroring.

    def setUp(self):
        TestController.setUp(self)
        self._setUp()

    # Fixture setup lives in a separate method so the tool-mounting
    # decorators run after the base TestController.setUp().
    @td.with_wiki
    @td.with_tracker
    def _setUp(self):
        # Create one ticket and one wiki page (unicode title) as feed data.
        self.app.get('/wiki/')
        self.app.get('/bugs/')
        self.app.post(
            '/bugs/save_ticket',
            params=variable_encode(dict(
                ticket_form=dict(
                    ticket_num='',
                    labels='',
                    assigned_to='',
                    milestone='',
                    summary='This is a ticket',
                    status='open',
                    description='This is a description'))),
            status=302)
        # Non-ASCII title exercises URL quoting in the feed links.
        title = 'Descri\xe7\xe3o e Arquitetura'
        self.app.post(
            h.urlquote('/wiki/%s/update' % title),
            params=dict(
                title=title.encode('utf-8'),
                text="Nothing much",
                labels='',
            ),
            status=302)
        self.app.get(h.urlquote('/wiki/%s/' % title))

    def test_project_feed(self):
        self.app.get('/feed.rss')
        self.app.get('/feed.atom')

    @td.with_wiki
    def test_wiki_feed(self):
        self.app.get('/wiki/feed.rss')
        self.app.get('/wiki/feed.atom')

    @td.with_wiki
    def test_wiki_page_feed(self):
        self.app.post('/wiki/Root/update', params={
            'title': 'Root',
            'text': '',
            'labels': '',
        })
        self.app.get('/wiki/Root/feed.rss')
        self.app.get('/wiki/Root/feed.atom')

    @td.with_tracker
    def test_ticket_list_feed(self):
        self.app.get('/bugs/feed.rss')
        self.app.get('/bugs/feed.atom')

    @td.with_tracker
    def test_ticket_feed(self):
        self.app.get('/bugs/1/feed.rss')
        r = self.app.get('/bugs/1/feed.atom')
        # Update the ticket so the feed gains a change entry with a diff.
        self.app.post('/bugs/1/update_ticket', params=dict(
            assigned_to='',
            ticket_num='',
            labels='',
            summary='This is a new ticket',
            status='unread',
            milestone='',
            description='This is another description'), extra_environ=dict(username='root'))
        r = self.app.get('/bugs/1/feed.atom')
        # Feed body must contain the escaped diff markers of the change.
        assert '=&gt' in r
        assert '\n+' in r
| apache/allura | Allura/allura/tests/functional/test_feeds.py | Python | apache-2.0 | 3,296 | 0.000303 |
# (c) 2015 J Miguel Farto, jmfarto@gmail.com
r'''
Aditional static paths
'''
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import os
from flask import Blueprint
from flask import current_app as app
from flask.helpers import send_from_directory
from ..config import JMOL_PATH
from ..config import JSMOL_PATH
from ..config import J2S_PATH
from ..config import THREEJS_PATH
static_paths = Blueprint('static_paths', __name__)
@static_paths.route('/css/<path:filename>')
def css(filename):
    """Serve a stylesheet from the app's static ``sage/css`` tree.

    ``send_static_file`` secures *filename* against path traversal.
    """
    # Build the path with explicit forward slashes: Flask static paths
    # are URL-style, and os.path.join would produce backslashes on
    # Windows, breaking the lookup.
    return app.send_static_file('sage/css/' + filename)
@static_paths.route('/images/<path:filename>')
@static_paths.route('/favicon.ico', defaults={'filename': 'favicon.ico'})
def images(filename):
    """Serve an image from the app's static ``sage/images`` tree.

    Also answers ``/favicon.ico`` by mapping it to ``images/favicon.ico``.
    ``send_static_file`` secures *filename* against path traversal.
    """
    # Explicit forward slashes instead of os.path.join: static paths are
    # URL-style and backslashes (Windows) would make the lookup fail.
    return app.send_static_file('sage/images/' + filename)
@static_paths.route('/javascript/<path:filename>')
@static_paths.route('/java/<path:filename>')
def static_file(filename):
    """Serve a file from the static folder for /javascript and /java URLs.

    ``send_static_file`` secures *filename* against path traversal.
    """
    return app.send_static_file(filename)
@static_paths.route('/java/jmol/<path:filename>')
def jmol(filename):
    """Serve Jmol files from the external JMOL_PATH directory."""
    return send_from_directory(JMOL_PATH, filename)
@static_paths.route('/jsmol/<path:filename>')
def jsmol(filename):
    """Serve JSmol files from the external JSMOL_PATH directory."""
    return send_from_directory(JSMOL_PATH, filename)
@static_paths.route('/j2s/<path:filename>')
def j2s(filename):
    """Serve JSmol j2s resources from the external J2S_PATH directory."""
    return send_from_directory(J2S_PATH, filename)
@static_paths.route('/threejs/<path:filename>')
def threejs(filename):
    """Serve three.js assets from the external THREEJS_PATH directory."""
    return send_from_directory(THREEJS_PATH, filename)
| damahou/sagewui | sagewui/blueprints/static_paths.py | Python | gpl-3.0 | 1,622 | 0 |
# -*- coding: utf-8 -*-
#
# Copyright 2010-2019 University of Oslo, Norway
#
# This file is part of pybofh.
#
# pybofh is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pybofh is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with pybofh; if not, see <https://www.gnu.org/licenses/>.
"""
This module consists of formatting utils for displaying responses from the
XMLRPC server in a human readable form.
Most notable is the parsing and formatting according to the hints
(format suggestions) given by the server.
Not all commands will have format suggestions. An XMLRPC command will either:
- Not use format suggestions and return a pre-formatted string
- Use format suggestions and return a dictionary or list of dictionaries.
For commands with format suggestions, the formatting class
:py:class:`SuggestionFormatter` is used. For all other commands,
:py:class:`StringFormatter` is used.
Format suggestions
------------------
A format suggestion is a dictionary with the following keys and values:
hdr
An optional header line (string)
str_vars
Either a string, a tuple or a list of tuples.
If str_vars is a string, it will be outputted directly.
If str_vars is a tuple, it should contain two or three items:
1. A format string (e.g. "foo=%s, bar=%s")
2. A list of keys from the bofh response to use for formatting the
string (e.g. ['foo_value', 'bar_value'])
3. An optional sub header
If str_vars is a list of tuples, each tuple should be on the format
mentioned. Each tuple is only formatted and added to the output if the
keys in the tuple exists in the bofh response.
"""
from __future__ import absolute_import, unicode_literals
import abc
import logging
import six
from six.moves import reduce
logger = logging.getLogger(__name__)
class FieldRef(object):
    """
    A field reference for format suggestions.

    Field references from format suggestions are strings that identify the
    expected names and types of fields in result sets.

    Each reference follows one of two syntaxes:

    - <name>
    - <name>:<type>:<params>

    The only currently supported <type> is "date", whose <params> is a
    java SimpleDateFormat pattern.
    """

    def __init__(self, field_name, field_type=None, field_params=None):
        # Normalize falsy type/params (e.g. empty strings) to None.
        self.name = field_name
        self.type = field_type if field_type else None
        self.params = field_params if field_params else None

    def __repr__(self):
        return '<%s %s>' % (type(self).__name__, self.name)

    @classmethod
    def from_str(cls, field_ref):
        """Parse a textual field reference into a FieldRef."""
        parts = field_ref.split(":", 2)
        if len(parts) == 3:
            name, ref_type, ref_params = parts
        else:
            # Fewer than two colons: the whole string is the field name.
            name, ref_type, ref_params = field_ref, None, None
        return cls(name, field_type=ref_type, field_params=ref_params)
def sdf2strftime(sdf_string):
    """
    Convert java SimpleDateFormat to strftime.

    The bofhd server returns date formatting hints in a
    `java.text.SimpleDateFormat` syntax, because reasons.

    :param str sdf_string: pattern in SimpleDateFormat syntax
    :returns: the equivalent strftime pattern
    """
    conversions = (
        # (java pattern letters, strftime directive)
        ("yyyy", "%Y"),
        ("MM", "%m"),
        ("dd", "%d"),
        ("HH", "%H"),
        ("mm", "%M"),
        ("ss", "%S"),
    )
    # Plain sequential replace (in the listed order) — clearer than the
    # previous six.moves.reduce + lambda and needs no compat shim.
    result = sdf_string
    for java_token, directive in conversions:
        result = result.replace(java_token, directive)
    return result
def get_formatted_field(field_ref, data_set):
    """
    Format a single field value from a data set.

    :type field_ref: FieldRef
    :param field_ref: a reference to a field in the data set
    :type data_set: dict
    :param data_set: a data set in the result from running a command
    :raises ValueError: if the reference has an unsupported type
    :returns: the (possibly date-formatted) value, or "<not set>" for None
    """
    raw_value = data_set[field_ref.name]
    if field_ref.type == 'date':
        # Render dates using the server-provided SimpleDateFormat hint.
        strftime_fmt = str(sdf2strftime(field_ref.params))
        if raw_value:
            raw_value = raw_value.strftime(strftime_fmt)
    elif field_ref.type is not None:
        raise ValueError("invalid field_ref type %r" % (field_ref.type, ))
    return "<not set>" if raw_value is None else raw_value
class FormatItem(object):
    """
    Formatter for a bofh response data set.

    Bundles a %-style format string, the FieldRef references that fill
    it, and an optional header line.
    """

    def __init__(self, format_str, fields=None, header=None):
        """
        :param str format_str: a format string, e.g. ``"foo: %s, bar: %s"``
        :param fields: FieldRef references to insert into the format string
        :param str header: an optional header for the format string
        """
        self.format_str = format_str
        self.fields = tuple(fields) if fields else ()
        self.header = header

    def __repr__(self):
        field_names = tuple(ref.name for ref in self.fields)
        return '<FormatItem fields=%r>' % (field_names,)

    def mismatches(self, data_set):
        """
        Get the field names this formatter needs that *data_set* lacks.

        :rtype: tuple
        """
        missing = []
        for ref in self.fields:
            if ref.name not in data_set:
                missing.append(ref.name)
        return tuple(missing)

    def match(self, data_set):
        """
        Check whether *data_set* contains every referenced field.

        :rtype: bool
        """
        return not self.mismatches(data_set)

    def format(self, data_set):
        """
        Render *data_set* with this item's format string.

        :rtype: six.text_type
        """
        rendered = [get_formatted_field(ref, data_set)
                    for ref in self.fields]
        return self.format_str % tuple(rendered)
class FormatSuggestion(object):
    """
    Format suggestion for a bofh command.

    A thin collection of :py:class:`FormatItem` formatters for the items
    (usually dicts) of a bofhd server response.
    """

    # Keys used by the bofhd format suggestion dict.
    key_header = "hdr"
    key_string_vars = "str_vars"

    def __init__(self, items, header=None):
        self.items = items
        self.header = header

    def __len__(self):
        return len(self.items)

    def __iter__(self):
        return iter(self.items)

    @staticmethod
    def _iter_format_strings(string_vars):
        """Generate FormatItems from a sequence of str_vars."""
        if isinstance(string_vars, six.string_types):
            # A bare format string rather than (format, vars) tuples.
            yield FormatItem(string_vars, None, None)
            return
        for entry in string_vars:
            sub_header = None
            if len(entry) == 2:
                format_str, field_refs = entry
            elif len(entry) == 3:
                format_str, field_refs, sub_header = entry
                # Workaround for suggestions with swapped sub_header and
                # format_str: a "%" marks the header as the real format.
                if "%" in sub_header:
                    format_str, sub_header = sub_header, None
            else:
                raise ValueError("invalid tuple length (%d)" % (len(entry), ))
            fields = [FieldRef.from_str(ref) for ref in (field_refs or ())]
            yield FormatItem(format_str, fields=fields, header=sub_header)

    @classmethod
    def from_dict(cls, suggestion_response):
        """
        Create a FormatSuggestion() from a bofhd format suggestion response.

        :type suggestion_response: dict
        :param suggestion_response:
            The format suggestion given by a bofhd server; must contain a
            'str_vars' key and may contain a 'hdr' key.

        :rtype: FormatSuggestion
        """
        string_vars = suggestion_response.get(cls.key_string_vars)
        items = tuple(cls._iter_format_strings(string_vars))
        return cls(items, header=suggestion_response.get(cls.key_header))
@six.add_metaclass(abc.ABCMeta)
class ResponseFormatter(object):
    """ Abstract response formatter. """

    @abc.abstractmethod
    def __call__(self, response):
        """
        Format server response

        :param response:
            A response from the server. The response should be *washed* before
            given to formatters.
        """
        # Subclasses must override and return the formatted text.
        raise NotImplementedError()
class StringFormatter(ResponseFormatter):
    """
    Fallback formatter for commands without a format suggestion.

    String responses are passed through untouched; anything else is
    rendered with repr() so it is at least visible.
    """

    def __call__(self, response):
        if isinstance(response, six.string_types):
            return response
        return repr(response)
class SuggestionFormatter(ResponseFormatter):
    """
    Response formatter for commands with a format suggestion.

    Applies each FormatItem of the suggestion to every matching item of
    the response and joins the resulting lines.
    """

    def __init__(self, format_suggestion):
        self.suggestion = format_suggestion

    def _generate_lines(self, response):
        """Yield output lines for an already list-wrapped *response*."""
        if self.suggestion.header:
            yield self.suggestion.header
        total = len(self.suggestion)
        for fmt_no, fmt_item in enumerate(self.suggestion, 1):
            logger.info('processing formatter %d/%d: %r',
                        fmt_no, total, fmt_item)
            if fmt_item.header:
                yield fmt_item.header
            for part, data_item in enumerate(response, 1):
                if isinstance(data_item, six.string_types):
                    # Pre-formatted strings pass through unchanged.
                    yield data_item
                elif not fmt_item.mismatches(data_item):
                    # Only format items providing every referenced field;
                    # a formatting failure is logged, not fatal.
                    try:
                        yield fmt_item.format(data_item)
                    except Exception:
                        logger.error("unable to format response part %d",
                                     part, exc_info=True)

    def __call__(self, response):
        if not isinstance(response, (list, tuple)):
            response = [response]
        logger.info('formatting response with %d part(s)', len(response))
        return "\n".join(self._generate_lines(response))
def get_formatter(format_spec):
    """
    Get an appropriate formatter for a command's format suggestion.

    Falls back to StringFormatter when there is no suggestion or when
    the suggestion cannot be parsed.
    """
    logger.debug('get_formatter(%r)', format_spec)
    default = StringFormatter()
    if not format_spec:
        return default
    try:
        suggestion = FormatSuggestion.from_dict(format_spec)
        return SuggestionFormatter(suggestion)
    except Exception:
        # A malformed suggestion must not break output entirely.
        logger.error("unable to get SuggestionFormatter", exc_info=True)
        return default
| unioslo/pybofh | bofh/formatting.py | Python | gpl-3.0 | 11,183 | 0 |
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
import functools
import re
from typing import Dict, Optional, Sequence, Tuple, Type, Union
import pkg_resources
from google.api_core.client_options import ClientOptions
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore
try:
OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
except AttributeError: # pragma: NO COVER
OptionalRetry = Union[retries.Retry, object] # type: ignore
from google.cloud.dialogflowcx_v3beta1.services.pages import pagers
from google.cloud.dialogflowcx_v3beta1.types import fulfillment
from google.cloud.dialogflowcx_v3beta1.types import page
from google.cloud.dialogflowcx_v3beta1.types import page as gcdc_page
from google.protobuf import field_mask_pb2 # type: ignore
from .transports.base import PagesTransport, DEFAULT_CLIENT_INFO
from .transports.grpc_asyncio import PagesGrpcAsyncIOTransport
from .client import PagesClient
class PagesAsyncClient:
"""Service for managing
[Pages][google.cloud.dialogflow.cx.v3beta1.Page].
"""
_client: PagesClient
DEFAULT_ENDPOINT = PagesClient.DEFAULT_ENDPOINT
DEFAULT_MTLS_ENDPOINT = PagesClient.DEFAULT_MTLS_ENDPOINT
entity_type_path = staticmethod(PagesClient.entity_type_path)
parse_entity_type_path = staticmethod(PagesClient.parse_entity_type_path)
flow_path = staticmethod(PagesClient.flow_path)
parse_flow_path = staticmethod(PagesClient.parse_flow_path)
intent_path = staticmethod(PagesClient.intent_path)
parse_intent_path = staticmethod(PagesClient.parse_intent_path)
page_path = staticmethod(PagesClient.page_path)
parse_page_path = staticmethod(PagesClient.parse_page_path)
transition_route_group_path = staticmethod(PagesClient.transition_route_group_path)
parse_transition_route_group_path = staticmethod(
PagesClient.parse_transition_route_group_path
)
webhook_path = staticmethod(PagesClient.webhook_path)
parse_webhook_path = staticmethod(PagesClient.parse_webhook_path)
common_billing_account_path = staticmethod(PagesClient.common_billing_account_path)
parse_common_billing_account_path = staticmethod(
PagesClient.parse_common_billing_account_path
)
common_folder_path = staticmethod(PagesClient.common_folder_path)
parse_common_folder_path = staticmethod(PagesClient.parse_common_folder_path)
common_organization_path = staticmethod(PagesClient.common_organization_path)
parse_common_organization_path = staticmethod(
PagesClient.parse_common_organization_path
)
common_project_path = staticmethod(PagesClient.common_project_path)
parse_common_project_path = staticmethod(PagesClient.parse_common_project_path)
common_location_path = staticmethod(PagesClient.common_location_path)
parse_common_location_path = staticmethod(PagesClient.parse_common_location_path)
@classmethod
def from_service_account_info(cls, info: dict, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
info.
Args:
info (dict): The service account private key info.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
PagesAsyncClient: The constructed client.
"""
return PagesClient.from_service_account_info.__func__(PagesAsyncClient, info, *args, **kwargs) # type: ignore
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
file.
Args:
filename (str): The path to the service account private key json
file.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
PagesAsyncClient: The constructed client.
"""
return PagesClient.from_service_account_file.__func__(PagesAsyncClient, filename, *args, **kwargs) # type: ignore
from_service_account_json = from_service_account_file
@classmethod
def get_mtls_endpoint_and_cert_source(
cls, client_options: Optional[ClientOptions] = None
):
"""Return the API endpoint and client cert source for mutual TLS.
The client cert source is determined in the following order:
(1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
client cert source is None.
(2) if `client_options.client_cert_source` is provided, use the provided one; if the
default client cert source exists, use the default one; otherwise the client cert
source is None.
The API endpoint is determined in the following order:
(1) if `client_options.api_endpoint` if provided, use the provided one.
(2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the
default mTLS endpoint; if the environment variabel is "never", use the default API
endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
use the default API endpoint.
More details can be found at https://google.aip.dev/auth/4114.
Args:
client_options (google.api_core.client_options.ClientOptions): Custom options for the
client. Only the `api_endpoint` and `client_cert_source` properties may be used
in this method.
Returns:
Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
client cert source to use.
Raises:
google.auth.exceptions.MutualTLSChannelError: If any errors happen.
"""
return PagesClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore
@property
def transport(self) -> PagesTransport:
"""Returns the transport used by the client instance.
Returns:
PagesTransport: The transport used by the client instance.
"""
return self._client.transport
get_transport_class = functools.partial(
type(PagesClient).get_transport_class, type(PagesClient)
)
def __init__(
self,
*,
credentials: ga_credentials.Credentials = None,
transport: Union[str, PagesTransport] = "grpc_asyncio",
client_options: ClientOptions = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiates the pages client.
Args:
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
transport (Union[str, ~.PagesTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
client_options (ClientOptions): Custom options for the client. It
won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
environment variable can also be used to override the endpoint:
"always" (always use the default mTLS endpoint), "never" (always
use the default regular endpoint) and "auto" (auto switch to the
default mTLS endpoint if client certificate is present, this is
the default value). However, the ``api_endpoint`` property takes
precedence if provided.
(2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
is "true", then the ``client_cert_source`` property can be used
to provide client certificate for mutual TLS transport. If
not provided, the default SSL client certificate will be used if
present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
set, no client certificate will be used.
Raises:
google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
creation failed for any reason.
"""
self._client = PagesClient(
credentials=credentials,
transport=transport,
client_options=client_options,
client_info=client_info,
)
async def list_pages(
self,
request: Union[page.ListPagesRequest, dict] = None,
*,
parent: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> pagers.ListPagesAsyncPager:
r"""Returns the list of all pages in the specified flow.
.. code-block:: python
from google.cloud import dialogflowcx_v3beta1
def sample_list_pages():
# Create a client
client = dialogflowcx_v3beta1.PagesClient()
# Initialize request argument(s)
request = dialogflowcx_v3beta1.ListPagesRequest(
parent="parent_value",
)
# Make the request
page_result = client.list_pages(request=request)
# Handle the response
for response in page_result:
print(response)
Args:
request (Union[google.cloud.dialogflowcx_v3beta1.types.ListPagesRequest, dict]):
The request object. The request message for
[Pages.ListPages][google.cloud.dialogflow.cx.v3beta1.Pages.ListPages].
parent (:class:`str`):
Required. The flow to list all pages for. Format:
``projects/<Project ID>/locations/<Location ID>/agents/<Agent ID>/flows/<Flow ID>``.
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.dialogflowcx_v3beta1.services.pages.pagers.ListPagesAsyncPager:
The response message for
[Pages.ListPages][google.cloud.dialogflow.cx.v3beta1.Pages.ListPages].
Iterating over this object will yield results and
resolve additional pages automatically.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = page.ListPagesRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.list_pages,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# This method is paged; wrap the response in a pager, which provides
# an `__aiter__` convenience method.
response = pagers.ListPagesAsyncPager(
method=rpc, request=request, response=response, metadata=metadata,
)
# Done; return the response.
return response
async def get_page(
self,
request: Union[page.GetPageRequest, dict] = None,
*,
name: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> page.Page:
r"""Retrieves the specified page.
.. code-block:: python
from google.cloud import dialogflowcx_v3beta1
def sample_get_page():
# Create a client
client = dialogflowcx_v3beta1.PagesClient()
# Initialize request argument(s)
request = dialogflowcx_v3beta1.GetPageRequest(
name="name_value",
)
# Make the request
response = client.get_page(request=request)
# Handle the response
print(response)
Args:
request (Union[google.cloud.dialogflowcx_v3beta1.types.GetPageRequest, dict]):
The request object. The request message for
[Pages.GetPage][google.cloud.dialogflow.cx.v3beta1.Pages.GetPage].
name (:class:`str`):
Required. The name of the page. Format:
``projects/<Project ID>/locations/<Location ID>/agents/<Agent ID>/flows/<Flow ID>/pages/<Page ID>``.
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.dialogflowcx_v3beta1.types.Page:
A Dialogflow CX conversation (session) can be described and visualized as a
state machine. The states of a CX session are
represented by pages.
For each flow, you define many pages, where your
combined pages can handle a complete conversation on
the topics the flow is designed for. At any given
moment, exactly one page is the current page, the
current page is considered active, and the flow
associated with that page is considered active. Every
flow has a special start page. When a flow initially
becomes active, the start page page becomes the
current page. For each conversational turn, the
current page will either stay the same or transition
to another page.
You configure each page to collect information from
the end-user that is relevant for the conversational
state represented by the page.
For more information, see the [Page
guide](\ https://cloud.google.com/dialogflow/cx/docs/concept/page).
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = page.GetPageRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.get_page,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
async def create_page(
self,
request: Union[gcdc_page.CreatePageRequest, dict] = None,
*,
parent: str = None,
page: gcdc_page.Page = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> gcdc_page.Page:
r"""Creates a page in the specified flow.
.. code-block:: python
from google.cloud import dialogflowcx_v3beta1
def sample_create_page():
# Create a client
client = dialogflowcx_v3beta1.PagesClient()
# Initialize request argument(s)
page = dialogflowcx_v3beta1.Page()
page.display_name = "display_name_value"
request = dialogflowcx_v3beta1.CreatePageRequest(
parent="parent_value",
page=page,
)
# Make the request
response = client.create_page(request=request)
# Handle the response
print(response)
Args:
request (Union[google.cloud.dialogflowcx_v3beta1.types.CreatePageRequest, dict]):
The request object. The request message for
[Pages.CreatePage][google.cloud.dialogflow.cx.v3beta1.Pages.CreatePage].
parent (:class:`str`):
Required. The flow to create a page for. Format:
``projects/<Project ID>/locations/<Location ID>/agents/<Agent ID>/flows/<Flow ID>``.
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
page (:class:`google.cloud.dialogflowcx_v3beta1.types.Page`):
Required. The page to create.
This corresponds to the ``page`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.dialogflowcx_v3beta1.types.Page:
A Dialogflow CX conversation (session) can be described and visualized as a
state machine. The states of a CX session are
represented by pages.
For each flow, you define many pages, where your
combined pages can handle a complete conversation on
the topics the flow is designed for. At any given
moment, exactly one page is the current page, the
current page is considered active, and the flow
associated with that page is considered active. Every
flow has a special start page. When a flow initially
becomes active, the start page page becomes the
current page. For each conversational turn, the
current page will either stay the same or transition
to another page.
You configure each page to collect information from
the end-user that is relevant for the conversational
state represented by the page.
For more information, see the [Page
guide](\ https://cloud.google.com/dialogflow/cx/docs/concept/page).
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent, page])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = gcdc_page.CreatePageRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
if page is not None:
request.page = page
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.create_page,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
async def update_page(
self,
request: Union[gcdc_page.UpdatePageRequest, dict] = None,
*,
page: gcdc_page.Page = None,
update_mask: field_mask_pb2.FieldMask = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> gcdc_page.Page:
r"""Updates the specified page.
.. code-block:: python
from google.cloud import dialogflowcx_v3beta1
def sample_update_page():
# Create a client
client = dialogflowcx_v3beta1.PagesClient()
# Initialize request argument(s)
page = dialogflowcx_v3beta1.Page()
page.display_name = "display_name_value"
request = dialogflowcx_v3beta1.UpdatePageRequest(
page=page,
)
# Make the request
response = client.update_page(request=request)
# Handle the response
print(response)
Args:
request (Union[google.cloud.dialogflowcx_v3beta1.types.UpdatePageRequest, dict]):
The request object. The request message for
[Pages.UpdatePage][google.cloud.dialogflow.cx.v3beta1.Pages.UpdatePage].
page (:class:`google.cloud.dialogflowcx_v3beta1.types.Page`):
Required. The page to update.
This corresponds to the ``page`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`):
The mask to control which fields get
updated. If the mask is not present, all
fields will be updated.
This corresponds to the ``update_mask`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.dialogflowcx_v3beta1.types.Page:
A Dialogflow CX conversation (session) can be described and visualized as a
state machine. The states of a CX session are
represented by pages.
For each flow, you define many pages, where your
combined pages can handle a complete conversation on
the topics the flow is designed for. At any given
moment, exactly one page is the current page, the
current page is considered active, and the flow
associated with that page is considered active. Every
flow has a special start page. When a flow initially
becomes active, the start page page becomes the
current page. For each conversational turn, the
current page will either stay the same or transition
to another page.
You configure each page to collect information from
the end-user that is relevant for the conversational
state represented by the page.
For more information, see the [Page
guide](\ https://cloud.google.com/dialogflow/cx/docs/concept/page).
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([page, update_mask])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = gcdc_page.UpdatePageRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if page is not None:
request.page = page
if update_mask is not None:
request.update_mask = update_mask
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.update_page,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata(
(("page.name", request.page.name),)
),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
async def delete_page(
self,
request: Union[page.DeletePageRequest, dict] = None,
*,
name: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> None:
r"""Deletes the specified page.
.. code-block:: python
from google.cloud import dialogflowcx_v3beta1
def sample_delete_page():
# Create a client
client = dialogflowcx_v3beta1.PagesClient()
# Initialize request argument(s)
request = dialogflowcx_v3beta1.DeletePageRequest(
name="name_value",
)
# Make the request
client.delete_page(request=request)
Args:
request (Union[google.cloud.dialogflowcx_v3beta1.types.DeletePageRequest, dict]):
The request object. The request message for
[Pages.DeletePage][google.cloud.dialogflow.cx.v3beta1.Pages.DeletePage].
name (:class:`str`):
Required. The name of the page to delete. Format:
``projects/<Project ID>/locations/<Location ID>/agents/<Agent ID>/Flows/<flow ID>/pages/<Page ID>``.
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = page.DeletePageRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.delete_page,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
await rpc(
request, retry=retry, timeout=timeout, metadata=metadata,
)
    async def __aenter__(self):
        # Support ``async with`` so the transport is closed deterministically.
        return self
    async def __aexit__(self, exc_type, exc, tb):
        # Close the underlying transport (and its channel) on context exit.
        await self.transport.close()
# Attach the installed package version to the client-info header sent with
# every request.
try:
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
        gapic_version=pkg_resources.get_distribution(
            "google-cloud-dialogflowcx",
        ).version,
    )
except pkg_resources.DistributionNotFound:
    # Package metadata unavailable (e.g. running from a source checkout);
    # fall back to a client-info header without a gapic version.
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
__all__ = ("PagesAsyncClient",)
| googleapis/python-dialogflow-cx | google/cloud/dialogflowcx_v3beta1/services/pages/async_client.py | Python | apache-2.0 | 32,025 | 0.001624 |
import os
import sys
import random
import pygame
from Engine import *
from Montag import *
from Character import Character
from pygame.locals import *
class AICharacter(Character):
    """A computer-controlled :class:`Character`.

    Movement modes (``movement_state``):

    * ``"waypoints"`` -- patrol a fixed waypoint list, restarting from the
      first waypoint once the list is exhausted.
    * ``"random_walk"`` -- wander to random points inside a rectangular
      area given as ``[x, y, width, height]``.
    * ``None`` -- hold position.

    The character may also carry a dialog that is started when clicked.
    """

    def __init__(self, screen, **kwargs):
        super().__init__(screen, **kwargs)
        self.enemy = kwargs.get("enemy", None)
        self.movement_state = kwargs.get("movement_state", None)
        self.waypoints = kwargs.get("waypoints", None)
        self.area = kwargs.get("random_walk_area", None)
        self.obstaclemap = kwargs.get("obstaclemap", None)
        # NOTE(review): ``obstaclemap`` is effectively required -- its
        # ``grid`` attribute is read unconditionally here, so omitting it
        # raises AttributeError.
        self.pathfinding_grid = self.obstaclemap.grid
        self.dialog = kwargs.get("dialog", None)
        self.dialogmanager = kwargs.get("dialogmanager", None)
        if self.waypoints:
            # Start on the first waypoint and begin walking the list.
            self.remaining_waypoints = self.waypoints.copy()
            self.grid_pos = self.remaining_waypoints[0].copy()
            self.walk_to_points = [self.remaining_waypoints.pop(0)]
            self.movement_state = "waypoints"
            self.state = "walk"
        elif self.area:
            self.movement_state = "random_walk"
        # Milliseconds to stand still between walks.
        self.pause_time = kwargs.get("pause_time", 1000)
        self.pause_time_passed = 0

    def click(self):
        """Start this character's dialog (if any) when it is clicked."""
        if self.dialog:
            self.dialogmanager.start_dialog(self.dialog)

    def hold_position(self):
        """Stop all autonomous movement."""
        self.movement_state = None

    def update(self, current_time=None, event=None):
        """Advance AI movement, then delegate to the base class update.

        Args:
            current_time: Current tick in milliseconds; fetched from
                pygame when omitted.
            event: Optional pygame event forwarded to the base class.
        """
        # BUG FIX: ``if not current_time`` also treated a legitimate tick
        # value of 0 as "missing"; compare against None explicitly.
        if current_time is None:
            current_time = pygame.time.get_ticks()
        if self.state == "stand":
            # Accumulate time spent standing since the previous update.
            # ``self.current_time`` is maintained by the Character base
            # class -- presumably the previous update's timestamp; confirm.
            time_change = current_time - self.current_time
            self.pause_time_passed += time_change
        else:
            self.pause_time_passed = 0
        if not self.dead:
            if not self.movement_temporarily_suppressed:
                # Only pick a new destination once the pause has elapsed
                # and no walk is already in progress.
                if not self.walk_to_points and self.pause_time_passed >= self.pause_time:
                    if self.movement_state == "random_walk":
                        # Pick a random target inside the configured area.
                        self.walk_to_points = self.pathfinding_grid.find_path(self.grid_pos, [
                            random.uniform(self.area[0], self.area[0] + self.area[2]),
                            random.uniform(self.area[1], self.area[1] + self.area[3])])
                        self.frame = 0
                    elif self.movement_state == "waypoints":
                        # Restart the patrol once every waypoint was visited.
                        if len(self.remaining_waypoints) == 0:
                            self.remaining_waypoints = self.waypoints.copy()
                        self.walk_to_points = [self.remaining_waypoints.pop(0)]
        super().update(current_time, event)
super().update(current_time, event)
if __name__ == "__main__":
    # Manual smoke test: let one AI guard wander a small test map.
    pygame.init()
    clock = pygame.time.Clock()
    screen_info = pygame.display.Info()
    screen_size = [screen_info.current_w, screen_info.current_h]
    screen = pygame.display.set_mode(screen_size, RESIZABLE)
    chars = []
    b = Engine(screen)
    b.load_tilemap("TheMap/map.floor", 0)
    b.load_obstaclemap("TheMap/map.obstacles", 0)
    # NOTE(review): AICharacter.__init__ accepts only (screen, **kwargs),
    # so the two positional atlas/config paths below would raise TypeError
    # unless handled elsewhere -- verify against Character's signature.
    # Also, __init__ reads kwargs["obstaclemap"].grid, while this call
    # passes ``pathfinding_grid`` instead -- confirm the expected kwarg.
    montag = AICharacter(screen, "graphics/droids/blue_guard/atlas.txt", "graphics/droids/red_guard/config.txt", pathfinding_grid=b.obstacles.grid, pos=[3, 0], movement_state="random_walk", area=[5, 0, 10, 5])
    while True:
        current_time = pygame.time.get_ticks()
        clock.tick(60)
        screen.fill((0, 0, 0))
        for event in pygame.event.get():
            if event.type == QUIT:
                pygame.quit()
                sys.exit()
            elif event.type == VIDEORESIZE:
                screen_size = event.dict["size"]
                screen = pygame.display.set_mode(screen_size, RESIZABLE)
            else:
                # Forward gameplay events to the character.
                montag.update(current_time, event)
        b.update()
        b.draw([0, 0])
        #chars.sort(key=lambda x: (x.pos[1], x.pos[0]))
        montag.update(current_time)
        montag.draw()
        pygame.display.update()
| lumidify/fahrenheit451 | AICharacter.py | Python | gpl-2.0 | 3,797 | 0.003687 |
from setuptools import setup, find_packages
# Trove classifiers describing the project for PyPI.
CLASSIFIERS = (
    'Development Status :: 4 - Beta',
    'Environment :: Console',
    'Environment :: Web Environment',
    'Framework :: Django',
    'Intended Audience :: Developers',
    'Intended Audience :: Science/Research',
    'Intended Audience :: System Administrators',
    'License :: OSI Approved :: BSD License',
    'Natural Language :: English',
    'Operating System :: OS Independent',
    'Topic :: Artistic Software',
    'Topic :: Internet :: WWW/HTTP',
    'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
    'Topic :: Internet :: WWW/HTTP :: Dynamic Content :: CGI Tools/Libraries',
    'Topic :: Multimedia',
    'Topic :: Multimedia :: Graphics',
    'Topic :: Scientific/Engineering :: Visualization',
    'Topic :: Software Development :: Libraries :: Python Modules',
    'Topic :: Utilities',
)

# Long description rendered on the PyPI project page.
DESCRIPTION = """Second generation Python wrapper for the `Google Chart Image API <http://code.google.com/apis/chart/image/>`_.
Chart instances can render the URL of the actual Google chart and quickly insert into webpages on the fly or save images for later use.
Made for dynamic Python websites (Django, Zope, CGI, etc.) that need on the fly, dynamic chart image generation. Works for Python versions 2.3 to 3.2.
"""

setup(
    name='google-chartwrapper',
    version='1.0.0',
    description='Python Google Chart Wrapper',
    long_description=DESCRIPTION,
    author="Justin Quick",
    author_email='justquick@gmail.com',
    url='https://github.com/justquick/google-chartwrapper',
    classifiers=CLASSIFIERS,
    # Second positional argument of find_packages is ``exclude``.
    packages=find_packages('.', ('examples',)),
)
import re
import abc
import asyncio
import contextlib
import urllib.parse as urlparse
import aiohttp
import pyquery
from pycrawl.utils import Queue
from pycrawl.http import Request
from pycrawl.http import Response
from pycrawl.middleware import CrawlerMiddlewareManager
class Spider(metaclass=abc.ABCMeta):
def __init__(self, middlewares=None, loop=None, **config):
self.config = config
self._context = {}
self._loop = loop or asyncio.get_event_loop()
self._connector = aiohttp.TCPConnector(loop=self._loop)
self._middlewares = CrawlerMiddlewareManager(self, middlewares)
def enqueue_request(self, **kwargs):
context = self._context[self.task]
max_depth = self.config.get('max_depth')
if max_depth and context['request'].depth > max_depth:
return
request = Request(referer=context['response'], **kwargs)
if request.url in self._seen:
return
if not self._url_allowed(request):
return
request.depth = context['response'].request.depth + 1
self._queue.put_nowait(request)
def _url_allowed(self, request):
return next(
(
True for domain in self.config['domains']
if request.furl.host.endswith(domain)
),
False,
)
@asyncio.coroutine
def start(self):
self._seen = set()
self._queue = Queue(loop=self._loop)
for url in self.config['urls']:
self._queue.put_nowait(Request(url))
workers = [asyncio.Task(self._work()) for _ in range(self.config['concurrency'])]
yield from self._queue.join()
for worker in workers:
worker.cancel()
@asyncio.coroutine
def _work(self):
while True:
request = yield from self._queue.get()
yield from self._fetch(request)
self._queue.task_done()
@asyncio.coroutine
def _fetch(self, request):
for callback in self._middlewares['before_request']:
request = callback(request)
resp = yield from aiohttp.request('GET', request.url, loop=self._loop)
body = yield from resp.read_and_close()
response = Response(request, resp, body)
for callback in self._middlewares['after_response']:
response = callback(response)
with self._request_context(self._loop, request, response):
self.parse(response)
@property
def _task(self):
return asyncio.get_current_task(loop=self._loop)
@contextlib.contextmanager
def _request_context(self, request, response):
self._context[self.task] = {'request': request, 'response': response}
try:
yield
finally:
del self._context[self.task]
@abc.abstractmethod
def parse(self, response):
pass
class Route:
    """Pairs a URL regular expression with the callback handling matches."""

    def __init__(self, pattern, callback):
        self.pattern = re.compile(pattern)
        self.callback = callback

    def filter_urls(self, urls):
        """Yield only the URLs matching this route's pattern."""
        matches = self.pattern.search
        return (url for url in urls if matches(url))
class RouteSpider(Spider):
    """A :class:`Spider` that dispatches responses to pattern-based routes."""

    def __init__(self, middlewares=None, routes=None, **config):
        super().__init__(middlewares=middlewares, **config)
        self._routes = routes or []

    def route(self, pattern):
        """Decorator registering a callback for URLs matching *pattern*."""
        def wrapper(callback):
            # BUG FIX: arguments were passed as ``Route(callback, pattern)``,
            # so ``Route.__init__`` tried ``re.compile(callback)`` and every
            # registration raised TypeError; Route expects (pattern, callback).
            self._routes.append(Route(pattern, callback))
            return callback
        return wrapper

    def parse(self, response):
        # Dispatch to the route that enqueued this request, if any.
        route = response.request.meta.get('route')
        if route:
            route.callback(self, response)
        parsed = pyquery.PyQuery(response.content)
        elms = parsed('a[href]')
        # ``this`` is injected into the callback's globals by pyquery's map().
        hrefs = elms.map(lambda: urlparse.urljoin(response.request.url, pyquery.PyQuery(this).attr('href')))
        # Enqueue every discovered link that matches a registered route.
        for route in self._routes:
            for url in route.filter_urls(hrefs):
                self.enqueue_request(url=url, route=route)
| jmcarp/pycrawl | pycrawl/crawl.py | Python | bsd-3-clause | 3,966 | 0.000504 |
# -*- coding: utf-8 -*-
# © 2016 Lorenzo Battistini - Agile Business Group
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from . import stock
| houssine78/addons | stock_picking_back2draft/models/__init__.py | Python | agpl-3.0 | 161 | 0 |
import os
import sys
import stat
from ._compat import open_stream, text_type, filename_to_ui, get_streerror
from .exceptions import BadParameter
from .utils import safecall, LazyFile
class ParamType(object):
    """Base class for converting parameter values through types.

    A valid type implementation:

    * has a descriptive :attr:`name`
    * passes ``None`` through unchanged
    * converts from a string
    * passes values of its result type through unchanged (idempotent)
    * tolerates ``param`` and ``ctx`` being `None` (e.g. prompt input)
    """

    #: the descriptive name of this type
    name = None

    #: Splitter used when a list of this type is pulled from a string
    #: environment variable.  `None` means "split on any whitespace".
    #: Paths and files instead split on ``os.path.pathsep`` (":" on Unix,
    #: ";" on Windows) by default.
    envvar_list_splitter = None

    def __call__(self, value, param=None, ctx=None):
        # The missing value (None) is never converted.
        if value is None:
            return None
        return self.convert(value, param, ctx)

    def get_metavar(self, param):
        """Returns the metavar default for this param if it provides one."""

    def get_missing_message(self, param):
        """Optionally might return extra information about a missing
        parameter.

        .. versionadded:: 2.0
        """

    def convert(self, value, param, ctx):
        """Converts the value.  Not invoked for values that are `None`
        (the missing value).
        """
        return value

    def split_envvar_value(self, rv):
        """Split an environment variable value into chunks using
        :attr:`envvar_list_splitter`.  With the default `None` splitter,
        leading and trailing whitespace is ignored; explicit splitters keep
        empty items at the edges.
        """
        return (rv or '').split(self.envvar_list_splitter)

    def fail(self, message, param=None, ctx=None):
        """Helper method to fail with an invalid value message."""
        raise BadParameter(message, ctx=ctx, param=param)
class FuncParamType(ParamType):
    """Wraps an arbitrary one-argument conversion function as a type."""

    def __init__(self, func):
        # The function's own name doubles as the type name in messages.
        self.name = func.__name__
        self.func = func

    def convert(self, value, param, ctx):
        try:
            return self.func(value)
        except ValueError:
            # Build a best-effort text representation of the failing value
            # for the error message.  The str().decode() fallback is a
            # Python 2 path (bytes -> unicode); it is unreachable on
            # Python 3, where str has no decode().
            try:
                value = text_type(value)
            except UnicodeError:
                value = str(value).decode('utf-8', 'replace')
            self.fail(value, param, ctx)
class StringParamType(ParamType):
    """The default type: passes text through, decoding bytes when needed."""
    name = 'text'

    def convert(self, value, param, ctx):
        if isinstance(value, bytes):
            # Decode bytes using, in order of preference: stdin's encoding,
            # the filesystem encoding, and finally UTF-8 with replacement
            # characters -- so conversion never raises.
            try:
                enc = getattr(sys.stdin, 'encoding', None)
                if enc is not None:
                    value = value.decode(enc)
            except UnicodeError:
                try:
                    value = value.decode(sys.getfilesystemencoding())
                except UnicodeError:
                    value = value.decode('utf-8', 'replace')
            return value
        return value

    def __repr__(self):
        return 'STRING'
class Choice(ParamType):
    """A type that restricts a value to a fixed set of supported string
    choices.

    See :ref:`choice-opts` for an example.
    """
    name = 'choice'

    def __init__(self, choices):
        self.choices = choices

    def get_metavar(self, param):
        return '[%s]' % '|'.join(self.choices)

    def get_missing_message(self, param):
        return 'Choose from %s.' % ', '.join(self.choices)

    def convert(self, value, param, ctx):
        # Fast path: the raw value matches a choice exactly.
        if value in self.choices:
            return value
        # Otherwise compare under the context's token normalization, when
        # one is configured.  The error below intentionally reports the
        # normalized value.
        if ctx is not None and ctx.token_normalize_func is not None:
            value = ctx.token_normalize_func(value)
            for choice in self.choices:
                if ctx.token_normalize_func(choice) == value:
                    return choice
        self.fail('invalid choice: %s. (choose from %s)' %
                  (value, ', '.join(self.choices)), param, ctx)

    def __repr__(self):
        return 'Choice(%r)' % list(self.choices)
class IntParamType(ParamType):
    name = 'integer'

    def convert(self, value, param, ctx):
        """Convert *value* to ``int``, failing with a friendly message."""
        try:
            converted = int(value)
        except ValueError:
            self.fail('%s is not a valid integer' % value, param, ctx)
        else:
            return converted

    def __repr__(self):
        return 'INT'
class IntRange(IntParamType):
    """Works like :data:`click.INT` but restricts the value to a range.
    By default a value outside the range fails; with ``clamp`` it is
    silently clamped to the nearest edge instead.

    See :ref:`ranges` for an example.
    """
    name = 'integer range'

    def __init__(self, min=None, max=None, clamp=False):
        self.min = min
        self.max = max
        self.clamp = clamp

    def convert(self, value, param, ctx):
        rv = IntParamType.convert(self, value, param, ctx)
        if self.clamp:
            # Clamp out-of-range values to the nearest configured edge.
            if self.min is not None and rv < self.min:
                return self.min
            if self.max is not None and rv > self.max:
                return self.max
            return rv
        too_small = self.min is not None and rv < self.min
        too_big = self.max is not None and rv > self.max
        if too_small or too_big:
            if self.min is None:
                self.fail('%s is bigger than the maximum valid value '
                          '%s.' % (rv, self.max), param, ctx)
            elif self.max is None:
                self.fail('%s is smaller than the minimum valid value '
                          '%s.' % (rv, self.min), param, ctx)
            else:
                self.fail('%s is not in the valid range of %s to %s.'
                          % (rv, self.min, self.max), param, ctx)
        return rv

    def __repr__(self):
        return 'IntRange(%r, %r)' % (self.min, self.max)
class BoolParamType(ParamType):
    name = 'boolean'

    def convert(self, value, param, ctx):
        """Map common truthy/falsy spellings onto ``bool``."""
        if isinstance(value, bool):
            return bool(value)
        normalized = value.lower()
        if normalized in ('true', '1', 'yes', 'y'):
            return True
        if normalized in ('false', '0', 'no', 'n'):
            return False
        # Report the lowercased value, matching the original behavior.
        self.fail('%s is not a valid boolean' % normalized, param, ctx)

    def __repr__(self):
        return 'BOOL'
class FloatParamType(ParamType):
    name = 'float'

    def convert(self, value, param, ctx):
        """Convert *value* to ``float``, failing with a friendly message."""
        try:
            converted = float(value)
        except ValueError:
            self.fail('%s is not a valid floating point value' %
                      value, param, ctx)
        else:
            return converted

    def __repr__(self):
        return 'FLOAT'
class UUIDParameterType(ParamType):
    name = 'uuid'

    def convert(self, value, param, ctx):
        """Parse *value* into a :class:`uuid.UUID`."""
        # Imported lazily so the common non-UUID case pays no import cost.
        import uuid
        try:
            return uuid.UUID(value)
        except ValueError:
            self.fail('%s is not a valid UUID value' % value, param, ctx)

    def __repr__(self):
        return 'UUID'
class File(ParamType):
    """Declares a parameter to be a file for reading or writing.  The file
    is automatically closed once the context tears down (after the command
    finished working).

    Files can be opened for reading or writing.  The special value ``-``
    indicates stdin or stdout depending on the mode.

    By default, the file is opened for reading text data, but it can also be
    opened in binary mode or for writing.  The encoding parameter can be used
    to force a specific encoding.

    The `lazy` flag controls if the file should be opened immediately or
    upon first IO.  The default is to be non lazy for standard input and
    output streams as well as files opened for reading, lazy otherwise.

    Starting with Click 2.0, files can also be opened atomically in which
    case all writes go into a separate file in the same folder and upon
    completion the file will be moved over to the original location.  This
    is useful if a file regularly read by other users is modified.

    See :ref:`file-args` for more information.
    """
    name = 'filename'
    # Filenames taken from an environment variable are split like PATH.
    envvar_list_splitter = os.path.pathsep
    def __init__(self, mode='r', encoding=None, errors='strict', lazy=None,
                 atomic=False):
        # mode/encoding/errors mirror the built-in open() parameters.
        self.mode = mode
        self.encoding = encoding
        self.errors = errors
        # lazy is tri-state: None means "decide per value" in
        # resolve_lazy_flag below.
        self.lazy = lazy
        self.atomic = atomic
    def resolve_lazy_flag(self, value):
        # Explicit lazy flag always wins; otherwise stdio ('-') is opened
        # eagerly and files opened for writing are opened lazily so an
        # output file is not created/truncated before it is needed.
        if self.lazy is not None:
            return self.lazy
        if value == '-':
            return False
        elif 'w' in self.mode:
            return True
        return False
    def convert(self, value, param, ctx):
        """Return an open file object for *value* (or *value* itself if it
        already looks like a file), registering cleanup on *ctx*."""
        try:
            # Already a file-like object: pass it through untouched.
            if hasattr(value, 'read') or hasattr(value, 'write'):
                return value
            lazy = self.resolve_lazy_flag(value)
            if lazy:
                f = LazyFile(value, self.mode, self.encoding, self.errors,
                             atomic=self.atomic)
                if ctx is not None:
                    # close_intelligently avoids closing stdio streams.
                    ctx.call_on_close(f.close_intelligently)
                return f
            f, should_close = open_stream(value, self.mode,
                                          self.encoding, self.errors,
                                          atomic=self.atomic)
            # If a context is provided, we automatically close the file
            # at the end of the context execution (or flush out). If a
            # context does not exist, it's the caller's responsibility to
            # properly close the file. This for instance happens when the
            # type is used with prompts.
            if ctx is not None:
                if should_close:
                    ctx.call_on_close(safecall(f.close))
                else:
                    # Borrowed streams (e.g. stdout) are flushed, not closed.
                    ctx.call_on_close(safecall(f.flush))
            return f
        except (IOError, OSError) as e:
            # get_streerror (sic: helper spelled without the second "r")
            # renders the OS error in a user-presentable way.
            self.fail('Could not open file: %s: %s' % (
                filename_to_ui(value),
                get_streerror(e),
            ), param, ctx)
class Path(ParamType):
    """The path type is similar to the :class:`File` type but it performs
    different checks.  First of all, instead of returning a open file
    handle it returns just the filename.  Secondly, it can perform various
    basic checks about what the file or directory should be.

    :param exists: if set to true, the file or directory needs to exist for
                   this value to be valid.  If this is not required and a
                   file does indeed not exist, then all further checks are
                   silently skipped.
    :param file_okay: controls if a file is a possible value.
    :param dir_okay: controls if a directory is a possible value.
    :param writable: if true, a writable check is performed.
    :param readable: if true, a readable check is performed.
    :param resolve_path: if this is true, then the path is fully resolved
                         before the value is passed onwards.  This means
                         that it's absolute and symlinks are resolved.
    """
    # Paths taken from an environment variable are split like PATH.
    envvar_list_splitter = os.path.pathsep

    def __init__(self, exists=False, file_okay=True, dir_okay=True,
                 writable=False, readable=True, resolve_path=False):
        self.exists = exists
        self.file_okay = file_okay
        self.dir_okay = dir_okay
        self.writable = writable
        self.readable = readable
        self.resolve_path = resolve_path

        # Pick a human-readable name for error messages.  BUG FIX: this
        # must be an if/elif/else chain.  Previously the second test was a
        # separate ``if`` statement, so for file-only parameters its
        # ``else`` branch immediately overwrote name/path_type with the
        # generic "path"/"Path" values.
        if self.file_okay and not self.dir_okay:
            self.name = 'file'
            self.path_type = 'File'
        elif self.dir_okay and not self.file_okay:
            self.name = 'directory'
            self.path_type = 'Directory'
        else:
            self.name = 'path'
            self.path_type = 'Path'

    def convert(self, value, param, ctx):
        """Validate *value* as a filesystem path and return it (resolved
        when ``resolve_path`` is set); fail with a UI message otherwise."""
        rv = value
        if self.resolve_path:
            rv = os.path.realpath(rv)
        try:
            st = os.stat(rv)
        except OSError:
            if not self.exists:
                # Nonexistent paths are acceptable unless existence is
                # required; all further checks are skipped in that case.
                return rv
            self.fail('%s "%s" does not exist.' % (
                self.path_type,
                filename_to_ui(value)
            ), param, ctx)
        if not self.file_okay and stat.S_ISREG(st.st_mode):
            self.fail('%s "%s" is a file.' % (
                self.path_type,
                filename_to_ui(value)
            ), param, ctx)
        if not self.dir_okay and stat.S_ISDIR(st.st_mode):
            self.fail('%s "%s" is a directory.' % (
                self.path_type,
                filename_to_ui(value)
            ), param, ctx)
        if self.writable and not os.access(value, os.W_OK):
            self.fail('%s "%s" is not writable.' % (
                self.path_type,
                filename_to_ui(value)
            ), param, ctx)
        if self.readable and not os.access(value, os.R_OK):
            self.fail('%s "%s" is not readable.' % (
                self.path_type,
                filename_to_ui(value)
            ), param, ctx)
        return rv
def convert_type(ty, default=None):
    """Convert a callable or plain Python type into the most appropriate
    :class:`ParamType` instance.

    If *ty* is ``None`` the type is inferred from *default*; inferred
    types are handled more conservatively (see the bool note below).
    """
    # Already a param type: nothing to convert.
    if isinstance(ty, ParamType):
        return ty

    inferred = False
    if ty is None and default is not None:
        # No explicit type given; fall back to the default value's type.
        ty = type(default)
        inferred = True

    if ty in (text_type, str, None):
        return STRING
    if ty is int:
        return INT
    # Booleans are only okay if not guessed.  This is done because for
    # flags the default value is actually a bit of a lie in that it
    # indicates which of the flags is the one we want.  See get_default()
    # for more information.
    if ty is bool and not inferred:
        return BOOL
    if ty is float:
        return FLOAT
    if inferred:
        return STRING

    # Catch a common mistake: passing the ParamType subclass itself
    # instead of an instance of it.
    if __debug__:
        try:
            if issubclass(ty, ParamType):
                raise AssertionError('Attempted to use an uninstantiated '
                                     'parameter type (%s).' % ty)
        except TypeError:
            pass
    return FuncParamType(ty)
# Singleton instances of the built-in parameter types; ``convert_type``
# maps plain Python types onto these.
#: A unicode string parameter type which is the implicit default. This
#: can also be selected by using ``str`` as type.
STRING = StringParamType()
#: An integer parameter. This can also be selected by using ``int`` as
#: type.
INT = IntParamType()
#: A floating point value parameter. This can also be selected by using
#: ``float`` as type.
FLOAT = FloatParamType()
#: A boolean parameter. This is the default for boolean flags. This can
#: also be selected by using ``bool`` as a type.
BOOL = BoolParamType()
#: A UUID parameter.
UUID = UUIDParameterType()
| patrickwind/My_Blog | venv/lib/python2.7/site-packages/click/types.py | Python | gpl-2.0 | 15,176 | 0 |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import os.path
import createTasks
import csv
import json
# PyBossa/crowdcrafting server that receives the generated tasks.
SERVER = "http://crowdcrafting.org"
# Base URL of the S3 bucket that hosts the drill-pad imagery.
URL_ROOT = "https://s3-us-west-2.amazonaws.com/drillpadcat/"
def dictreader(rows):
    """Yield one dict per data row, keyed by the header row.

    *rows* is any iterable whose first element is the sequence of column
    names; every following row is zipped against that header.  Extra
    trailing cells are dropped by ``zip`` and short rows simply omit the
    trailing keys.
    """
    rows = iter(rows)
    # Bug fix: use the next() builtin (Python 2.6+ and Python 3) instead
    # of the Python-2-only ``rows.next()`` method.
    header = next(rows)
    for row in rows:
        yield dict(zip(header, row))
# Command-line driver.  NOTE: Python 2 only -- this uses the ``print``
# statement and the generator above relies on py2-era csv handling.
if len(sys.argv) != 4:
    print """Usage: drillpadcatimgtaskload.py frackfinder 00000000-0000-0000-0000-000000000000 somefile.csv
Replace the zeroes with your access key
The csv should contain at least the following columns:
latitude,longitude,path
Path is the path relative to teh root of the drillpadcat s3 bucket.
"""
else:
    app, accesskey, csvfile = sys.argv[1:]
    with open(csvfile) as f:
        for row in dictreader(csv.reader(f)):
            # Rewrite the relative S3 path into a full image URL.
            row['url'] = URL_ROOT + row.pop("path")
            # createTasks.CreateTasks expects an options object shaped
            # like its argparse namespace; a throwaway class body is used
            # here to fake one per CSV row.
            class options:
                api_url = SERVER
                api_key = accesskey
                create_app = False
                update_template = False
                update_tasks = False
                app_root = app
                # One task per row; the whole row (with url) is the payload.
                create_task = json.dumps(row)
                n_answers = 30
                app_name = None
                verbose = False
            createTasks.CreateTasks(options)
| SkyTruth/pybossa_tools | drillpadcatimgtaskload.py | Python | agpl-3.0 | 1,281 | 0.003123 |
"""
Updates start offsets, stop offsets and indicators everywhere in score.
.. note:: This is probably the most important part of Abjad to optimize. Use the
profiler to figure out how many unnecessary updates are happening. Then reimplement.
As a hint, the update manager implements a weird version of the "observer pattern."
It may make sense to revisit a textbook example of the observer pattern and review
the implementation of the update manager.
"""
from . import duration as _duration
from . import indicators as _indicators
from . import iterate as iterate_
from . import math as _math
from . import obgc as _obgc
from . import parentage as _parentage
from . import score as _score
from . import sequence as _sequence
from . import timespan as _timespan
def _get_after_grace_leaf_offsets(leaf):
    # Offsets of an after-grace leaf are anchored to the STOP offset of
    # the main leaf carrying the after-grace container, with a negative
    # displacement so the grace music notionally sounds just before the
    # next main onset.
    container = leaf._parent
    main_leaf = container._main_leaf
    main_leaf_stop_offset = main_leaf._stop_offset
    assert main_leaf_stop_offset is not None
    # Back up over this leaf's own duration ...
    displacement = -leaf._get_duration()
    # ... and over every later leaf in the same grace container.
    sibling = leaf._sibling(1)
    while sibling is not None and sibling._parent is container:
        displacement -= sibling._get_duration()
        sibling = sibling._sibling(1)
    if leaf._parent is not None and leaf._parent._main_leaf is not None:
        main_leaf = leaf._parent._main_leaf
        sibling = main_leaf._sibling(1)
        # If the NEXT main leaf carries a before-grace container, that
        # grace music also fits in the same gap, so push further back.
        if (
            sibling is not None
            and hasattr(sibling, "_before_grace_container")
            and sibling._before_grace_container is not None
        ):
            before_grace_container = sibling._before_grace_container
            duration = before_grace_container._get_duration()
            displacement -= duration
    start_offset = _duration.Offset(main_leaf_stop_offset, displacement=displacement)
    # Stop offset: same anchor, displacement advanced by the leaf duration.
    displacement += leaf._get_duration()
    stop_offset = _duration.Offset(main_leaf_stop_offset, displacement=displacement)
    return start_offset, stop_offset
def _get_before_grace_leaf_offsets(leaf):
    """Return ``(start_offset, stop_offset)`` for a leaf in a before-grace
    container, anchored on the main leaf's start offset with a negative
    displacement.
    """
    grace_container = leaf._parent
    anchor_start = grace_container._main_leaf._start_offset
    assert anchor_start is not None
    # Back up over this leaf plus every later leaf in the container: all
    # of them sound before the main leaf begins.
    offset_from_anchor = -leaf._get_duration()
    nxt = leaf._sibling(1)
    while nxt is not None and nxt._parent is grace_container:
        offset_from_anchor -= nxt._get_duration()
        nxt = nxt._sibling(1)
    start_offset = _duration.Offset(anchor_start, displacement=offset_from_anchor)
    offset_from_anchor += leaf._get_duration()
    stop_offset = _duration.Offset(anchor_start, displacement=offset_from_anchor)
    return start_offset, stop_offset
def _get_measure_start_offsets(component):
    # Collect every time-signature wrapper in the whole score containing
    # *component*, then walk forward generating one start offset per
    # measure implied by those signatures.
    wrappers = []
    prototype = _indicators.TimeSignature
    root = _parentage.Parentage(component).root
    for component_ in _iterate_entire_score(root):
        wrappers_ = component_._get_indicators(prototype, unwrap=False)
        wrappers.extend(wrappers_)
    pairs = []
    for wrapper in wrappers:
        component = wrapper.component
        start_offset = component._get_timespan().start_offset
        time_signature = wrapper.indicator
        pair = start_offset, time_signature
        pairs.append(pair)
    # A score with no signature at offset zero gets an implicit 4/4.
    offset_zero = _duration.Offset(0)
    default_time_signature = _indicators.TimeSignature((4, 4))
    default_pair = (offset_zero, default_time_signature)
    # NOTE(review): this compares the whole (offset, signature) pair with
    # offset_zero rather than pairs[0][0]; whether that equality behaves
    # as intended depends on Offset.__eq__ -- confirm upstream.
    if pairs and not pairs[0] == offset_zero:
        pairs.insert(0, default_pair)
    elif not pairs:
        pairs = [default_pair]
    pairs.sort(key=lambda x: x[0])
    # Dummy sentinel so the last real signature generates measures up to
    # the end of the score.
    score_stop_offset = root._get_timespan().stop_offset
    dummy_last_pair = (score_stop_offset, None)
    pairs.append(dummy_last_pair)
    measure_start_offsets = []
    at_first_measure = True
    for current_pair, next_pair in _sequence.nwise(pairs):
        current_start_offset, current_time_signature = current_pair
        next_start_offset, next_time_signature = next_pair
        measure_start_offset = current_start_offset
        while measure_start_offset < next_start_offset:
            measure_start_offsets.append(measure_start_offset)
            partial = current_time_signature.partial
            # A pickup (partial) measure only applies at the very first
            # measure of the score.
            if at_first_measure and partial is not None:
                measure_start_offset += partial
                measure_start_offsets.append(measure_start_offset)
                at_first_measure = False
            measure_start_offset += current_time_signature.duration
    return measure_start_offsets
def _get_on_beat_grace_leaf_offsets(leaf):
    # On-beat grace leaves are anchored on the START offset of the anchor
    # leaf, with a POSITIVE displacement equal to the duration of all
    # earlier leaves in the same grace container.
    container = leaf._parent
    anchor_leaf = container._get_on_beat_anchor_leaf()
    anchor_leaf_start_offset = anchor_leaf._start_offset
    assert anchor_leaf_start_offset is not None
    # Re-wrap to drop any displacement carried by the anchor offset.
    anchor_leaf_start_offset = _duration.Offset(anchor_leaf_start_offset.pair)
    start_displacement = _duration.Duration(0)
    # Sum durations of all preceding leaves in this container.
    sibling = leaf._sibling(-1)
    while sibling is not None and sibling._parent is container:
        start_displacement += sibling._get_duration()
        sibling = sibling._sibling(-1)
    stop_displacement = start_displacement + leaf._get_duration()
    # The first grace leaf sits exactly on the beat: no displacement.
    if start_displacement == 0:
        start_displacement = None
    start_offset = _duration.Offset(
        anchor_leaf_start_offset.pair, displacement=start_displacement
    )
    stop_offset = _duration.Offset(
        anchor_leaf_start_offset.pair, displacement=stop_displacement
    )
    return start_offset, stop_offset
def _get_score_tree_state_flags(parentage):
offsets_are_current = True
indicators_are_current = True
offsets_in_seconds_are_current = True
for component in parentage:
if offsets_are_current:
if not component._offsets_are_current:
offsets_are_current = False
if indicators_are_current:
if not component._indicators_are_current:
indicators_are_current = False
if offsets_in_seconds_are_current:
if not component._offsets_in_seconds_are_current:
offsets_in_seconds_are_current = False
return (
offsets_are_current,
indicators_are_current,
offsets_in_seconds_are_current,
)
def _iterate_entire_score(root):
    """
    NOTE: RETURNS GRACE NOTES LAST (AND OUT-OF-ORDER).
    """
    # Main components first, then all grace components appended afterwards.
    result = list(iterate_.components(root, grace=False))
    result.extend(iterate_.components(root, grace=True))
    return result
def _make_metronome_mark_map(root):
    # Build a TimespanList mapping score offsets to clocktime.  Each
    # timespan covers the reach of one (precise) metronome mark; its
    # annotation is (clocktime_start_offset, clocktime_duration).
    pairs = []
    all_stop_offsets = set()
    for component in _iterate_entire_score(root):
        indicators = component._get_indicators(_indicators.MetronomeMark)
        if len(indicators) == 1:
            metronome_mark = indicators[0]
            # Imprecise marks (e.g. textual tempo) cannot be mapped.
            if not metronome_mark.is_imprecise:
                pair = (component._start_offset, metronome_mark)
                pairs.append(pair)
        if component._stop_offset is not None:
            all_stop_offsets.add(component._stop_offset)
    pairs.sort(key=lambda _: _[0])
    # Bail out when there are no marks, or none at the very beginning:
    # a partial map would be meaningless.
    if not pairs:
        return
    if pairs[0][0] != 0:
        return
    score_stop_offset = max(all_stop_offsets)
    timespans = _timespan.TimespanList()
    clocktime_start_offset = _duration.Offset(0)
    # wrapped=True makes the final pair (last_mark, first_mark); its
    # "stop offset" of 0 is patched to the score stop offset below.
    for left, right in _sequence.nwise(pairs, wrapped=True):
        metronome_mark = left[-1]
        start_offset = left[0]
        stop_offset = right[0]
        # last timespan
        if stop_offset == 0:
            stop_offset = score_stop_offset
        duration = stop_offset - start_offset
        # seconds per whole note = 60 / units-per-minute, scaled by the
        # number of reference durations in this span.
        multiplier = _duration.Multiplier(60, metronome_mark.units_per_minute)
        clocktime_duration = duration / metronome_mark.reference_duration
        clocktime_duration *= multiplier
        timespan = _timespan.Timespan(
            start_offset=start_offset,
            stop_offset=stop_offset,
            annotation=(clocktime_start_offset, clocktime_duration),
        )
        timespans.append(timespan)
        clocktime_start_offset += clocktime_duration
    return timespans
# TODO: reimplement with some type of bisection
def _to_measure_number(component, measure_start_offsets):
    """Map *component* to its 1-indexed measure number.

    Score-initial grace music (negative displacement at offset zero) maps
    to measure 0.  Raises ValueError when the component's start offset
    falls outside every measure.
    """
    component_start_offset = component._get_timespan().start_offset
    displacement = component_start_offset.displacement
    if displacement is not None:
        component_start_offset = _duration.Offset(
            component_start_offset, displacement=None
        )
        # score-initial grace music only.  Bug fix: this check is nested
        # under the ``is not None`` guard; previously it ran
        # unconditionally and ``None < 0`` raises TypeError on Python 3
        # for every component without a displacement.
        if displacement < 0 and component_start_offset == 0:
            measure_number = 0
            return measure_number
    # Append an infinite sentinel so the last measure's half-open range
    # has an upper bound.
    measure_start_offsets = measure_start_offsets[:]
    measure_start_offsets.append(_math.Infinity())
    pairs = _sequence.nwise(measure_start_offsets)
    for measure_index, pair in enumerate(pairs):
        if pair[0] <= component_start_offset < pair[-1]:
            measure_number = measure_index + 1
            return measure_number
    message = f"can not find measure number for {repr(component)}:\n"
    message += f"   {repr(measure_start_offsets)}"
    raise ValueError(message)
def _update_all_indicators(root):
    """
    Updating indicators does not update offsets.
    On the other hand, getting an effective indicator does update
    offsets when at least one indicator of the appropriate type
    attaches to score.
    """
    for component in _iterate_entire_score(root):
        # Only contexted wrappers need their effective context refreshed.
        for wrapper in component._get_indicators(unwrap=False):
            if wrapper.context is not None:
                wrapper._update_effective_context()
        component._indicators_are_current = True
def _update_all_offsets(root):
    """
    Updating offsets does not update indicators.
    Updating offsets does not update offsets in seconds.
    """
    # On-beat grace music is anchored on its host leaf's offsets, so it
    # must be updated only after everything else is current.
    deferred = []
    for node in _iterate_entire_score(root):
        in_obgc = isinstance(node._parent, _obgc.OnBeatGraceContainer)
        if isinstance(node, _obgc.OnBeatGraceContainer) or in_obgc:
            deferred.append(node)
            continue
        _update_component_offsets(node)
        node._offsets_are_current = True
    for node in deferred:
        _update_component_offsets(node)
        node._offsets_are_current = True
def _update_all_offsets_in_seconds(root):
    """Refresh clocktime (seconds) offsets for every component under *root*.

    Plain offsets are refreshed first because the metronome-mark map is
    built from them.
    """
    _update_all_offsets(root)
    mark_map = _make_metronome_mark_map(root)
    for node in _iterate_entire_score(root):
        _update_clocktime_offsets(node, mark_map)
        node._offsets_in_seconds_are_current = True
def _update_clocktime_offsets(component, timespans):
    # Translate the component's score offsets into seconds using the
    # metronome-mark map built by _make_metronome_mark_map; each timespan
    # annotation is (clocktime_start_offset, clocktime_duration).
    if not timespans:
        return
    for timespan in timespans:
        if timespan.start_offset <= component._start_offset < timespan.stop_offset:
            pair = timespan.annotation
            clocktime_start_offset, clocktime_duration = pair
            # Interpolate linearly within the timespan.
            local_offset = component._start_offset - timespan.start_offset
            multiplier = local_offset / timespan.duration
            duration = multiplier * clocktime_duration
            offset = clocktime_start_offset + duration
            component._start_offset_in_seconds = _duration.Offset(offset)
        if timespan.start_offset <= component._stop_offset < timespan.stop_offset:
            pair = timespan.annotation
            clocktime_start_offset, clocktime_duration = pair
            local_offset = component._stop_offset - timespan.start_offset
            multiplier = local_offset / timespan.duration
            duration = multiplier * clocktime_duration
            offset = clocktime_start_offset + duration
            component._stop_offset_in_seconds = _duration.Offset(offset)
        # NOTE(review): this return sits at loop-body level, so only the
        # FIRST timespan is ever examined; upstream abjad nests it inside
        # the stop-offset branch above -- confirm intended placement.
        return
    if component._stop_offset == timespans[-1].stop_offset:
        # Component ends exactly at the end of the final timespan.
        pair = timespans[-1].annotation
        clocktime_start_offset, clocktime_duration = pair
        offset = clocktime_start_offset + clocktime_duration
        component._stop_offset_in_seconds = _duration.Offset(offset)
        return
    # NOTE(review): ``offset`` may be unbound here (it is only assigned
    # inside the loop branches), which would raise NameError instead of
    # this Exception -- confirm the intended diagnostic.
    raise Exception(f"can not find {offset} in {timespans}.")
def _update_component_offsets(component):
    # Compute and cache start/stop offsets for one component.  Grace
    # containers (before-, on-beat- and after-grace) and their children
    # use anchor-relative offsets; everything else chains off the
    # previous sibling's stop offset.
    if isinstance(component, _score.BeforeGraceContainer):
        # Container spans from its first child's start to its last
        # child's stop.
        pair = _get_before_grace_leaf_offsets(component[0])
        start_offset = pair[0]
        pair = _get_before_grace_leaf_offsets(component[-1])
        stop_offset = pair[-1]
    elif isinstance(component._parent, _score.BeforeGraceContainer):
        pair = _get_before_grace_leaf_offsets(component)
        start_offset, stop_offset = pair
    elif isinstance(component, _obgc.OnBeatGraceContainer):
        pair = _get_on_beat_grace_leaf_offsets(component[0])
        start_offset = pair[0]
        pair = _get_on_beat_grace_leaf_offsets(component[-1])
        stop_offset = pair[-1]
    elif isinstance(component._parent, _obgc.OnBeatGraceContainer):
        pair = _get_on_beat_grace_leaf_offsets(component)
        start_offset, stop_offset = pair
    elif isinstance(component, _score.AfterGraceContainer):
        pair = _get_after_grace_leaf_offsets(component[0])
        start_offset = pair[0]
        pair = _get_after_grace_leaf_offsets(component[-1])
        stop_offset = pair[-1]
    elif isinstance(component._parent, _score.AfterGraceContainer):
        pair = _get_after_grace_leaf_offsets(component)
        start_offset, stop_offset = pair
    else:
        # Ordinary component: continue where the previous sibling ended,
        # or start at zero when there is none.
        previous = component._sibling(-1)
        if previous is not None:
            start_offset = previous._stop_offset
        else:
            start_offset = _duration.Offset(0)
        # on-beat anchor leaf:
        if (
            component._parent is not None
            and _obgc.OnBeatGraceContainer._is_on_beat_anchor_voice(component._parent)
            and component is component._parent[0]
        ):
            # The first leaf of an on-beat anchor voice is displaced by
            # the total duration of its sibling grace container.
            anchor_voice = component._parent
            assert _obgc.OnBeatGraceContainer._is_on_beat_anchor_voice(anchor_voice)
            on_beat_grace_container = None
            on_beat_wrapper = anchor_voice._parent
            assert _obgc.OnBeatGraceContainer._is_on_beat_wrapper(on_beat_wrapper)
            # The wrapper holds exactly two children: the grace container
            # and the anchor voice, in either order.
            index = on_beat_wrapper.index(anchor_voice)
            if index == 0:
                on_beat_grace_container = on_beat_wrapper[1]
            else:
                on_beat_grace_container = on_beat_wrapper[0]
            if on_beat_grace_container is not None:
                durations = [_._get_duration() for _ in on_beat_grace_container]
                start_displacement = sum(durations)
                start_offset = _duration.Offset(
                    start_offset, displacement=start_displacement
                )
        stop_offset = start_offset + component._get_duration()
    # Cache on both the component and its timespan.
    component._start_offset = start_offset
    component._stop_offset = stop_offset
    component._timespan.start_offset = start_offset
    component._timespan.stop_offset = stop_offset
def _update_measure_numbers(component):
    """Assign a measure number to every component in the score that
    contains *component*."""
    starts = _get_measure_start_offsets(component)
    score_root = _parentage.Parentage(component).root
    for node in _iterate_entire_score(score_root):
        node._measure_number = _to_measure_number(node, starts)
def _update_now(component, offsets=False, offsets_in_seconds=False, indicators=False):
    # Entry point of the "observer" machinery: refresh whichever kinds of
    # cached state the caller asks for, but only if some component in the
    # parentage reports them stale.
    assert offsets or offsets_in_seconds or indicators
    parentage = component._get_parentage()
    # Any forbidden component in the chain vetoes the whole update.
    for component_ in parentage:
        if component_._is_forbidden_to_update:
            return
    (
        offsets_are_current,
        indicators_are_current,
        offsets_in_seconds_are_current,
    ) = _get_score_tree_state_flags(parentage)
    # Updates always run from the root of the score tree.
    root = parentage[-1]
    if offsets and not offsets_are_current:
        _update_all_offsets(root)
    if offsets_in_seconds and not offsets_in_seconds_are_current:
        _update_all_offsets_in_seconds(root)
    if indicators and not indicators_are_current:
        _update_all_indicators(root)
# -*- coding: utf-8 -*-
"""
Created on Wed Sep 09 13:04:53 2015
* If TimerTool.exe is running, kill the process.
* If input parameter is given, start TimerTool and set clock resolution
Starts TimerTool.exe and sets the clock resolution to argv[0] ms
Ex: python set_clock_resolution 0.5
@author: marcus
"""
import time, datetime
from socket import gethostname, gethostbyname
import os
import numpy as np
def main():
    # NOTE: Python 2 only -- uses xrange below, and time.clock() which
    # was removed in Python 3.8.
    my_path = os.path.join('C:',os.sep,'Share','sync_clocks')
    os.chdir(my_path)
    # Initial timestamps
    t1 = time.clock()
    t2 = time.time()
    t3 = datetime.datetime.now()
    # Collect 100 elapsed-time samples from each of the three clocks,
    # nominally 1 ms apart; the diffs below estimate clock resolution.
    td1 = []
    td2 = []
    td3 = []
    for i in xrange(100):
        td1.append(time.clock()-t1)
        td2.append(time.time() -t2)
        td3.append((datetime.datetime.now()-t3).total_seconds())
        time.sleep(0.001)
    # Create text file and write header
    t = datetime.datetime.now()
    # Last octet of the local IP identifies the machine in the filename.
    ip = gethostbyname(gethostname()).split('.')[-1]
    f_name = '_'.join([ip,'test_clock_res',str(t.year),str(t.month),str(t.day),
                       str(t.hour),str(t.minute),str(t.second)])
    f = open(f_name+'.txt','w')
    f.write('%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\n' %
            ('mean_clock','median_clock','sd_clock',
             'mean_time','median_time','sd_time',
             'mean_datetime','median_datetime','sd_datetime',))
    # Write results to text file
    # All statistics are over successive differences, converted to ms.
    f.write('%.2f\t%.2f\t%.2f\t%.2f\t%.2f\t%.2f\t%.2f\t%.2f\t%.2f\n' %
            (np.mean(np.diff(td1))*1000, np.median(np.diff(td1))*1000,np.std(np.diff(td1))*1000,
             np.mean(np.diff(td2))*1000, np.median(np.diff(td2))*1000,np.std(np.diff(td2))*1000,
             np.mean(np.diff(td3))*1000, np.median(np.diff(td3))*1000,np.std(np.diff(td3))*1000))
    f.close()
if __name__ == "__main__":
    main()
import _plotly_utils.basevalidators
class SurfacecolorValidator(_plotly_utils.basevalidators.DataArrayValidator):
    """Validator for the ``surface.surfacecolor`` data-array property."""

    def __init__(self, plotly_name="surfacecolor", parent_name="surface", **kwargs):
        # Pop defaults out of kwargs before forwarding the remainder.
        edit_type = kwargs.pop("edit_type", "calc")
        role = kwargs.pop("role", "data")
        super(SurfacecolorValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            edit_type=edit_type,
            role=role,
            **kwargs
        )
| plotly/python-api | packages/python/plotly/plotly/validators/surface/_surfacecolor.py | Python | mit | 459 | 0.002179 |
"""A setuptools based setup module.
See:
https://packaging.python.org/en/latest/distributing.html
https://github.com/pypa/sampleproject
"""
# Always prefer setuptools over distutils
from setuptools import setup, find_packages
# To use a consistent encoding
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
# (codecs.open is used above so the encoding argument also works on
# Python 2).
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
    long_description = f.read()
setup(
    name='adafruit-circuitpython-register',
    # Version is derived from git tags via setuptools_scm.
    use_scm_version=True,
    setup_requires=['setuptools_scm'],
    description='CircuitPython data descriptor classes to represent hardware registers on I2C and SPI devices.',
    long_description=long_description,
    # The project's main homepage.
    url='https://github.com/adafruit/Adafruit_CircuitPython_Register',
    # Author details
    author='Adafruit Industries',
    author_email='support@adafruit.com',
    # Choose your license
    license='MIT',
    # See https://pypi.python.org/pypi?%3Aaction=list_classifiers
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Intended Audience :: Developers',
        'Topic :: Software Development :: Libraries',
        'Topic :: System :: Hardware',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
    ],
    # What does your project relate to?
    keywords='adafruit register micropython circuitpython',
    # You can just specify the packages manually here if your project is
    # simple. Or you can use find_packages().
    packages=['adafruit_register'],
)
| rosalindfdt/huzzahbadge | huzzah/register/setup.py | Python | artistic-2.0 | 1,740 | 0.000575 |
#! /usr/bin/env python
"""Convert a LaTeX .toc file to some PDFTeX magic to create that neat outline.
The output file has an extension of '.bkm' instead of '.out', since hyperref
already uses that extension.
"""
import getopt
import os
import re
import string
import sys
# Each item in an entry is a tuple of:
#
# Section #, Title String, Page #, List of Sub-entries
#
# The return value of parse_toc() is such a tuple.
cline_re = r"""^
\\contentsline\ \{([a-z]*)} # type of section in $1
\{(?:\\numberline\ \{([0-9.A-Z]+)})? # section number
(.*)} # title string
\{(\d+)}$""" # page number
cline_rx = re.compile(cline_re, re.VERBOSE)
OUTER_TO_INNER = -1
_transition_map = {
('chapter', 'section'): OUTER_TO_INNER,
('section', 'subsection'): OUTER_TO_INNER,
('subsection', 'subsubsection'): OUTER_TO_INNER,
('subsubsection', 'subsection'): 1,
('subsection', 'section'): 1,
('section', 'chapter'): 1,
('subsection', 'chapter'): 2,
('subsubsection', 'section'): 2,
('subsubsection', 'chapter'): 3,
}
INCLUDED_LEVELS = ("chapter", "section", "subsection", "subsubsection")
class BadSectionNesting(Exception):
    """Raised for a section-level transition the parser cannot handle."""

    def __init__(self, level, newsection, path, lineno):
        self.level = level
        self.newsection = newsection
        self.path = path
        self.lineno = lineno

    def __str__(self):
        return "illegal transition from %s to %s at %s (line %s)" % (
            self.level, self.newsection, self.path, self.lineno)
def parse_toc(fp, bigpart=None):
    """Parse a LaTeX .toc file into a nested tree of entries.

    Each entry is a tuple ``(type, number, title, pageno, subentries)``.
    *bigpart* names the outermost sectioning level (default 'chapter').
    Raises BadSectionNesting for unsupported level transitions.
    """
    toc = top = []
    stack = [toc]
    level = bigpart or 'chapter'
    lineno = 0
    while 1:
        line = fp.readline()
        if not line:
            break
        lineno = lineno + 1
        m = cline_rx.match(line)
        if m:
            stype, snum, title, pageno = m.group(1, 2, 3, 4)
            title = clean_title(title)
            entry = (stype, snum, title, int(pageno), [])
            if stype == level:
                # Same level as before: append to the current list.
                toc.append(entry)
            else:
                if stype not in INCLUDED_LEVELS:
                    # we don't want paragraphs & subparagraphs
                    continue
                try:
                    direction = _transition_map[(level, stype)]
                except KeyError:
                    raise BadSectionNesting(level, stype, fp.name, lineno)
                if direction == OUTER_TO_INNER:
                    # Descend into the sub-entry list of the last entry.
                    toc = toc[-1][-1]
                    stack.insert(0, toc)
                    toc.append(entry)
                else:
                    # Pop one stack frame per level we come back out.
                    for i in range(direction):
                        del stack[0]
                    toc = stack[0]
                    toc.append(entry)
            level = stype
        else:
            # Bug fix: the original wrote the literal text "l.%s: "
            # because the line number was never interpolated.
            sys.stderr.write("l.%d: %s" % (lineno, line))
    return top
# Regexes used by clean_title() to strip LaTeX markup out of titles.
hackscore_rx = re.compile(r"\\hackscore\s*{[^}]*}")
raisebox_rx = re.compile(r"\\raisebox\s*{[^}]*}")
title_rx = re.compile(r"\\([a-zA-Z])+\s+")
# Identity table for Python 2 str.translate (used only for deletions).
title_trans = string.maketrans("", "")
def clean_title(title):
    # Strip LaTeX markup from a title: drop \raisebox{...} groups, turn
    # \hackscore{...} into a literal escaped underscore, then remove any
    # other backslash command followed by whitespace -- except
    # \textunderscore, which must survive into the PDF bookmark.
    title = raisebox_rx.sub("", title)
    title = hackscore_rx.sub(r"\\_", title)
    pos = 0
    while 1:
        m = title_rx.search(title, pos)
        if m:
            start = m.start()
            if title[start:start+15] != "\\textunderscore":
                title = title[:start] + title[m.end():]
            # Advance past this position either way to avoid re-matching.
            pos = start + 1
        else:
            break
    # Python 2 str.translate: the second argument deletes '{' and '}'.
    title = title.translate(title_trans, "{}")
    return title
def write_toc(toc, fp):
    """Write every top-level entry of *toc* (recursively) to *fp*."""
    for item in toc:
        write_toc_entry(item, fp, 0)
def write_toc_entry(entry, fp, layer):
    """Emit one \\pdfoutline command for *entry*, then recurse into its
    sub-entries (*layer* tracks recursion depth only)."""
    stype, snum, title, pageno, children = entry
    line = "\\pdfoutline goto name{page%03d}" % pageno
    if children:
        # A negative count makes the outline node collapsed by default.
        line = "%s count -%d" % (line, len(children))
    label = "%s %s" % (snum, title) if snum else title
    line = "%s {%s}\n" % (line, label)
    fp.write(line)
    for child in children:
        write_toc_entry(child, fp, layer + 1)
def process(ifn, ofn, bigpart=None):
    """Convert the .toc file *ifn* into PDF bookmark commands in *ofn*.

    Bug fix: both files are now closed deterministically via ``with``;
    previously they were left to the garbage collector, which can lose
    buffered output data on some interpreters.
    """
    with open(ifn) as infp:
        toc = parse_toc(infp, bigpart)
    with open(ofn, "w") as outfp:
        write_toc(toc, outfp)
def main():
    # -c LEVEL overrides the outermost sectioning level (e.g. 'section'
    # for documents built without chapters).
    bigpart = None
    opts, args = getopt.getopt(sys.argv[1:], "c:")
    if opts:
        bigpart = opts[0][1]
    if not args:
        # NOTE(review): usage() is not defined anywhere in this file as
        # shown -- confirm it exists before relying on this error path.
        usage()
        sys.exit(2)
    for filename in args:
        base, ext = os.path.splitext(filename)
        ext = ext or ".toc"
        # Output goes next to the input, with a .bkm extension.
        process(base + ext, base + ".bkm", bigpart)
if __name__ == "__main__":
    main()
| TathagataChakraborti/resource-conflicts | PLANROB-2015/seq-sat-lama/Python-2.5.2/Doc/tools/toc2bkm.py | Python | mit | 4,520 | 0.000442 |
# proxy module
from __future__ import absolute_import
from mayavi.core.api import *
| enthought/etsproxy | enthought/mayavi/core/api.py | Python | bsd-3-clause | 84 | 0 |
#!/usr/bin/env python
"""
@package mi.core.test.test_persistent_store
@file <git-workspace>/ooi/edex/com.raytheon.uf.ooi.plugin.instrumentagent/utility/edex_static/base/ooi/instruments/mi-instrument/mi/core/test/test_persistent_store.py
@author Johnathon Rusk
@brief Unit tests for PersistentStoreDict module
"""
# Note: Execute via, "nosetests -a UNIT -v mi/core/test/test_persistent_store.py"
__author__ = 'Johnathon Rusk'
__license__ = 'Apache 2.0'
from nose.plugins.attrib import attr
from mi.core.unit_test import MiUnitTest
import sys
from mi.core.persistent_store import PersistentStoreDict
@attr('UNIT', group='mi')
class TestPersistentStoreDict(MiUnitTest):
    def setUp(self):
        """Create fixture keys/values of every supported type and a fresh
        PersistentStoreDict.  Keys deliberately mix 'str' and u'unicode'
        to exercise both accepted key types (Python 2 only: uses the
        unicode literal prefix and sys.maxint)."""
        self.UNICODE_KEY = "UNICODE_KEY" # Test 'str' type key
        self.UNICODE_VALUES = [u"this is a unicode string", u"this is another unicode string"]
        self.INT_KEY = u"INT_KEY"
        self.INT_VALUES = [1234, 5678]
        self.LONG_KEY = "LONG_KEY" # Test 'str' type key
        # sys.maxint + 1 guarantees genuine 'long' values on Python 2.
        self.LONG_VALUES = [sys.maxint + 1, sys.maxint + 2]
        self.FLOAT_KEY = u"FLOAT_KEY"
        self.FLOAT_VALUES = [56.78, 12.34]
        self.BOOL_KEY = "BOOL_KEY" # Test 'str' type key
        self.BOOL_VALUES = [True, False]
        self.DICT_KEY = u"DICT_KEY"
        self.DICT_VALUES = [{u"KEY_1":1, u"KEY_2":2, u"KEY_3":3}, {u"KEY_4":4, u"KEY_5":5, u"KEY_6":6}]
        self.LIST_KEY = "LIST_KEY" # Test 'str' type key
        self.LIST_VALUES = [[1, 2, 3, 4, 5], [6, 7, 8, 9, 0]]
        self.persistentStoreDict = PersistentStoreDict("unit_test", "GI01SUMO-00001")
    def tearDown(self):
        """Remove all records created by the test just run."""
        self.persistentStoreDict.clear() # NOTE: This technically assumes the delete functionality works.
    def helper_get(self, key, expectedValue, expectedValueType):
        """Read *key* from the store and assert both the value and its
        exact runtime type round-tripped."""
        self.assertIn(type(key), [str, unicode])
        value = self.persistentStoreDict[key]
        self.assertIs(type(value), expectedValueType)
        self.assertEqual(value, expectedValue)
    def helper_set(self, key, value, valueType, shouldAddKey):
        """Store *value* under *key* and assert the key count changed only
        when *shouldAddKey* says a new key was expected."""
        self.assertIn(type(key), [str, unicode])
        self.assertIs(type(value), valueType)
        self.assertIs(type(shouldAddKey), bool)
        initialKeyCount = len(self.persistentStoreDict.keys())
        self.persistentStoreDict[key] = value
        self.assertEqual(len(self.persistentStoreDict.keys()), (initialKeyCount + 1) if shouldAddKey else initialKeyCount)
    def helper_del(self, key):
        """Delete *key* from the store and assert the key count dropped by
        exactly one."""
        self.assertIn(type(key), [str, unicode])
        initialKeyCount = len(self.persistentStoreDict.keys())
        del self.persistentStoreDict[key]
        self.assertEqual(len(self.persistentStoreDict.keys()), initialKeyCount - 1)
    # One creation test per supported value type; each delegates the
    # actual assertions to helper_set.
    def test_createRecords_success_unicode(self):
        self.helper_set(self.UNICODE_KEY, self.UNICODE_VALUES[0], unicode, True)
    def test_createRecords_success_int(self):
        self.helper_set(self.INT_KEY, self.INT_VALUES[0], int, True)
    def test_createRecords_success_long(self):
        self.helper_set(self.LONG_KEY, self.LONG_VALUES[0], long, True)
    def test_createRecords_success_float(self):
        self.helper_set(self.FLOAT_KEY, self.FLOAT_VALUES[0], float, True)
    def test_createRecords_success_bool(self):
        self.helper_set(self.BOOL_KEY, self.BOOL_VALUES[0], bool, True)
    def test_createRecords_success_dict(self):
        self.helper_set(self.DICT_KEY, self.DICT_VALUES[0], dict, True)
    def test_createRecords_success_list(self):
        self.helper_set(self.LIST_KEY, self.LIST_VALUES[0], list, True)
    # Negative creation tests: unsupported key/value types must raise
    # TypeError with the store's documented messages.
    def test_createRecords_fail_badKeyType(self):
        key = 0
        value = u"this will fail"
        self.assertNotIn(type(key), [str, unicode])
        self.assertIn(type(value), [unicode, int, long, float, bool, dict, list])
        with self.assertRaises(TypeError) as contextManager:
            self.persistentStoreDict[key] = value
        self.assertEqual(contextManager.exception.args[0], "Key must be of type 'str' or 'unicode'.")
    def test_createRecords_fail_badItemType(self):
        key = u"this will fail"
        # complex is deliberately outside the supported type set.
        value = 2+3j
        self.assertIn(type(key), [str, unicode])
        self.assertNotIn(type(value), [unicode, int, long, float, bool, dict, list])
        with self.assertRaises(TypeError) as contextManager:
            self.persistentStoreDict[key] = value
        self.assertEqual(contextManager.exception.args[0], "Value must be of type: 'unicode', 'int', 'long', 'float', 'bool', 'dict', or 'list'")
    def test_createRecords_fail_badItemType_nested(self):
        key = u"this will fail"
        # The bad type is buried inside a nested container: the store must
        # type-check recursively.
        value = {u"KEY_1":[1, 2, 3], u"KEY_2":[1+2j, 3+4j, 5+6j]}
        self.assertIn(type(key), [str, unicode])
        self.assertIn(type(value), [unicode, int, long, float, bool, dict, list])
        self.assertNotIn(type(value[u'KEY_2'][0]), [unicode, int, long, float, bool, dict, list])
        with self.assertRaises(TypeError) as contextManager:
            self.persistentStoreDict[key] = value
        self.assertEqual(contextManager.exception.args[0], "Value must be of type: 'unicode', 'int', 'long', 'float', 'bool', 'dict', or 'list'")
    # GET: a stored record comes back with both its value and its exact type.
    def test_getRecords_success_unicode(self):
        self.helper_set(self.UNICODE_KEY, self.UNICODE_VALUES[0], unicode, True)
        self.helper_get(self.UNICODE_KEY, self.UNICODE_VALUES[0], unicode)
    def test_getRecords_success_int(self):
        self.helper_set(self.INT_KEY, self.INT_VALUES[0], int, True)
        self.helper_get(self.INT_KEY, self.INT_VALUES[0], int)
    def test_getRecords_success_long(self):
        self.helper_set(self.LONG_KEY, self.LONG_VALUES[0], long, True)
        self.helper_get(self.LONG_KEY, self.LONG_VALUES[0], long)
    def test_getRecords_success_float(self):
        self.helper_set(self.FLOAT_KEY, self.FLOAT_VALUES[0], float, True)
        self.helper_get(self.FLOAT_KEY, self.FLOAT_VALUES[0], float)
    def test_getRecords_success_bool(self):
        self.helper_set(self.BOOL_KEY, self.BOOL_VALUES[0], bool, True)
        self.helper_get(self.BOOL_KEY, self.BOOL_VALUES[0], bool)
    def test_getRecords_success_dict(self):
        self.helper_set(self.DICT_KEY, self.DICT_VALUES[0], dict, True)
        self.helper_get(self.DICT_KEY, self.DICT_VALUES[0], dict)
    def test_getRecords_success_list(self):
        self.helper_set(self.LIST_KEY, self.LIST_VALUES[0], list, True)
        self.helper_get(self.LIST_KEY, self.LIST_VALUES[0], list)
    def test_getRecords_fail_badKeyType(self):
        # Non-string keys are rejected before any lookup happens.
        key = 0
        self.assertNotIn(type(key), [str, unicode])
        with self.assertRaises(TypeError) as contextManager:
            value = self.persistentStoreDict[key]
        self.assertEqual(contextManager.exception.args[0], "Key must be of type 'str' or 'unicode'.")
    def test_getRecords_fail_keyNotFound(self):
        # A missing key raises KeyError carrying a descriptive message.
        key = u"this will fail"
        self.assertIn(type(key), [str, unicode])
        with self.assertRaises(KeyError) as contextManager:
            value = self.persistentStoreDict[key]
        self.assertEqual(contextManager.exception.args[0], "No item found with key: '{0}'".format(key))
    # UPDATE: overwriting an existing key replaces the value without adding a
    # new record (shouldAddKey=False on the second write).
    def test_updateRecords_success_unicode(self):
        self.helper_set(self.UNICODE_KEY, self.UNICODE_VALUES[0], unicode, True)
        self.helper_get(self.UNICODE_KEY, self.UNICODE_VALUES[0], unicode)
        self.helper_set(self.UNICODE_KEY, self.UNICODE_VALUES[1], unicode, False)
        self.helper_get(self.UNICODE_KEY, self.UNICODE_VALUES[1], unicode)
    def test_updateRecords_success_int(self):
        self.helper_set(self.INT_KEY, self.INT_VALUES[0], int, True)
        self.helper_get(self.INT_KEY, self.INT_VALUES[0], int)
        self.helper_set(self.INT_KEY, self.INT_VALUES[1], int, False)
        self.helper_get(self.INT_KEY, self.INT_VALUES[1], int)
    def test_updateRecords_success_long(self):
        self.helper_set(self.LONG_KEY, self.LONG_VALUES[0], long, True)
        self.helper_get(self.LONG_KEY, self.LONG_VALUES[0], long)
        self.helper_set(self.LONG_KEY, self.LONG_VALUES[1], long, False)
        self.helper_get(self.LONG_KEY, self.LONG_VALUES[1], long)
    def test_updateRecords_success_float(self):
        self.helper_set(self.FLOAT_KEY, self.FLOAT_VALUES[0], float, True)
        self.helper_get(self.FLOAT_KEY, self.FLOAT_VALUES[0], float)
        self.helper_set(self.FLOAT_KEY, self.FLOAT_VALUES[1], float, False)
        self.helper_get(self.FLOAT_KEY, self.FLOAT_VALUES[1], float)
    def test_updateRecords_success_bool(self):
        self.helper_set(self.BOOL_KEY, self.BOOL_VALUES[0], bool, True)
        self.helper_get(self.BOOL_KEY, self.BOOL_VALUES[0], bool)
        self.helper_set(self.BOOL_KEY, self.BOOL_VALUES[1], bool, False)
        self.helper_get(self.BOOL_KEY, self.BOOL_VALUES[1], bool)
    def test_updateRecords_success_dict(self):
        self.helper_set(self.DICT_KEY, self.DICT_VALUES[0], dict, True)
        self.helper_get(self.DICT_KEY, self.DICT_VALUES[0], dict)
        self.helper_set(self.DICT_KEY, self.DICT_VALUES[1], dict, False)
        self.helper_get(self.DICT_KEY, self.DICT_VALUES[1], dict)
    def test_updateRecords_success_list(self):
        self.helper_set(self.LIST_KEY, self.LIST_VALUES[0], list, True)
        self.helper_get(self.LIST_KEY, self.LIST_VALUES[0], list)
        self.helper_set(self.LIST_KEY, self.LIST_VALUES[1], list, False)
        self.helper_get(self.LIST_KEY, self.LIST_VALUES[1], list)
    # DELETE: a stored record of each type can be removed; bad keys and missing
    # keys raise the same errors as lookup.
    def test_removeRecords_success_unicode(self):
        self.helper_set(self.UNICODE_KEY, self.UNICODE_VALUES[0], unicode, True)
        self.helper_del(self.UNICODE_KEY)
    def test_removeRecords_success_int(self):
        self.helper_set(self.INT_KEY, self.INT_VALUES[0], int, True)
        self.helper_del(self.INT_KEY)
    def test_removeRecords_success_long(self):
        self.helper_set(self.LONG_KEY, self.LONG_VALUES[0], long, True)
        self.helper_del(self.LONG_KEY)
    def test_removeRecords_success_float(self):
        self.helper_set(self.FLOAT_KEY, self.FLOAT_VALUES[0], float, True)
        self.helper_del(self.FLOAT_KEY)
    def test_removeRecords_success_bool(self):
        self.helper_set(self.BOOL_KEY, self.BOOL_VALUES[0], bool, True)
        self.helper_del(self.BOOL_KEY)
    def test_removeRecords_success_dict(self):
        self.helper_set(self.DICT_KEY, self.DICT_VALUES[0], dict, True)
        self.helper_del(self.DICT_KEY)
    def test_removeRecords_success_list(self):
        self.helper_set(self.LIST_KEY, self.LIST_VALUES[0], list, True)
        self.helper_del(self.LIST_KEY)
    def test_removeRecords_fail_badKeyType(self):
        # Non-string keys are rejected before any deletion happens.
        key = 0
        self.assertNotIn(type(key), [str, unicode])
        with self.assertRaises(TypeError) as contextManager:
            del self.persistentStoreDict[key]
        self.assertEqual(contextManager.exception.args[0], "Key must be of type 'str' or 'unicode'.")
    def test_removeRecords_fail_keyNotFound(self):
        # Deleting a missing key raises KeyError with a descriptive message.
        key = u"this will fail"
        self.assertIn(type(key), [str, unicode])
        with self.assertRaises(KeyError) as contextManager:
            del self.persistentStoreDict[key]
        self.assertEqual(contextManager.exception.args[0], "No item found with key: '{0}'".format(key))
| janeen666/mi-instrument | mi/core/test/test_persistent_store.py | Python | bsd-2-clause | 11,196 | 0.003483 |
from insights.parsers.hostname import Hostname
from insights.tests import context_wrap
HOSTNAME = "rhel7.example.com"
HOSTNAME_SHORT = "rhel7"
def test_hostname():
    """Parse a fully-qualified, a short, and an empty hostname."""
    # Fully-qualified name: split into short hostname and domain.
    parsed = Hostname(context_wrap(HOSTNAME))
    assert parsed.fqdn == "rhel7.example.com"
    assert parsed.hostname == "rhel7"
    assert parsed.domain == "example.com"
    assert "{0}".format(parsed) == "<hostname: rhel7, domain: example.com>"

    # Short name: fqdn equals the hostname and the domain is empty.
    parsed = Hostname(context_wrap(HOSTNAME_SHORT))
    assert parsed.fqdn == "rhel7"
    assert parsed.hostname == "rhel7"
    assert parsed.domain == ""

    # No input at all: every field is None.
    parsed = Hostname(context_wrap(""))
    assert parsed.fqdn is None
    assert parsed.hostname is None
    assert parsed.domain is None
| wcmitchell/insights-core | insights/parsers/tests/test_hostname.py | Python | apache-2.0 | 685 | 0 |
# -*- coding: utf-8 -*-
# Copyright (C) 2014-2016 Andrey Antukh <niwi@niwi.nz>
# Copyright (C) 2014-2016 Jesús Espino <jespinog@gmail.com>
# Copyright (C) 2014-2016 David Barragán <bameda@dbarragan.com>
# Copyright (C) 2014-2016 Alejandro Alonso <alejandro.alonso@kaleidos.net>
# Copyright (C) 2014-2016 Anler Hernández <hello@anler.me>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.core.urlresolvers import reverse
from taiga.base.utils import json
from tests import factories as f
from tests.utils import helper_test_http_method, disconnect_signals, reconnect_signals
from unittest import mock
import pytest
pytestmark = pytest.mark.django_db
def setup_module(module):
    # Detach Taiga's model signal handlers so factory-created rows do not
    # trigger side effects during these permission tests.
    disconnect_signals()
def teardown_module(module):
    # Restore the signal handlers for the rest of the test suite.
    reconnect_signals()
@pytest.fixture
def data():
    """Build the users and application token shared by the permission tests."""
    models = type("Models", (object,), {})
    models.registered_user = f.UserFactory.create()
    models.token = f.ApplicationTokenFactory(state="random-state")
    models.registered_user_with_token = models.token.user
    return models
def test_application_tokens_create(client, data):
    # POST to the token list endpoint is not allowed for anyone (405).
    url = reverse('application-tokens-list')
    users = [
        None,
        data.registered_user,
        data.registered_user_with_token
    ]
    # NOTE: rebinding `data` shadows the fixture object for the rest of the
    # function; all fixture attributes are read before the rebind.
    data = json.dumps({"application": data.token.application.id})
    results = helper_test_http_method(client, "post", url, data, users)
    assert results == [405, 405, 405]
def test_applications_retrieve_token(client, data):
    # Anonymous users get 401; any authenticated user may read the token
    # endpoint of an application.
    url=reverse('applications-token', kwargs={"pk": data.token.application.id})
    users = [
        None,
        data.registered_user,
        data.registered_user_with_token
    ]
    results = helper_test_http_method(client, "get", url, None, users)
    assert results == [401, 200, 200]
def test_application_tokens_retrieve(client, data):
    # Only the token owner can retrieve it; other users get 404 (not 403),
    # so token existence is not leaked.
    url = reverse('application-tokens-detail', kwargs={"pk": data.token.id})
    users = [
        None,
        data.registered_user,
        data.registered_user_with_token
    ]
    results = helper_test_http_method(client, "get", url, None, users)
    assert results == [401, 404, 200]
def test_application_tokens_authorize(client, data):
    # Any authenticated user may request authorization for an application;
    # anonymous requests are rejected.
    url=reverse('application-tokens-authorize')
    users = [
        None,
        data.registered_user,
        data.registered_user_with_token
    ]
    # NOTE: rebinding `data` shadows the fixture object from here on.
    data = json.dumps({
        "application": data.token.application.id,
        "state": "random-state-123123",
    })
    results = helper_test_http_method(client, "post", url, data, users)
    assert results == [401, 200, 200]
def test_application_tokens_validate(client, data):
    # Validation with a matching auth code and state succeeds even for
    # anonymous callers.
    url=reverse('application-tokens-validate')
    users = [
        None,
        data.registered_user,
        data.registered_user_with_token
    ]
    # NOTE: rebinding `data` shadows the fixture object from here on.
    data = json.dumps({
        "application": data.token.application.id,
        "auth_code": data.token.auth_code,
        "state": data.token.state
    })
    results = helper_test_http_method(client, "post", url, data, users)
    assert results == [200, 200, 200]
def test_application_tokens_update(client, data):
    # PATCH is not allowed on tokens for anyone (405).
    url = reverse('application-tokens-detail', kwargs={"pk": data.token.id})
    users = [
        None,
        data.registered_user,
        data.registered_user_with_token
    ]
    patch_data = json.dumps({"application": data.token.application.id})
    results = helper_test_http_method(client, "patch", url, patch_data, users)
    assert results == [405, 405, 405]
def test_application_tokens_delete(client, data):
    # Only the owner may delete a token (204); other users are forbidden (403).
    url = reverse('application-tokens-detail', kwargs={"pk": data.token.id})
    users = [
        None,
        data.registered_user,
        data.registered_user_with_token
    ]
    results = helper_test_http_method(client, "delete", url, None, users)
    assert results == [401, 403, 204]
def test_application_tokens_list(client, data):
    # Listing requires authentication; anonymous callers get 401.
    url = reverse('application-tokens-list')
    users = [
        None,
        data.registered_user,
        data.registered_user_with_token
    ]
    results = helper_test_http_method(client, "get", url, None, users)
    assert results == [401, 200, 200]
| xdevelsistemas/taiga-back-community | tests/integration/resources_permissions/test_application_tokens_resources.py | Python | agpl-3.0 | 4,691 | 0.000853 |
# Copyright 2012 Lee Verberne <lee@blarg.org>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import ConfigParser
import logging
import os, os.path
from fabric.api import local, prompt
NAME = 'tgz'
log = logging.getLogger(NAME)
def _get_config(configfile='package.ini'):
    """Parse *configfile* and return the resulting SafeConfigParser."""
    parser = ConfigParser.SafeConfigParser()
    parser.read(configfile)
    return parser
def untar(version, config, env):
    """Download the configured source tarball and untar it into the build tree.

    :param version: package version string; only the part before the first
        '-' is substituted into the 'source' URL template.
    :param config: a ConfigParser loaded from package.ini, or falsy to load
        the default via _get_config().
    :param env: fabric-style env object with ``builddir`` and ``rootdir``
        attributes.
    """
    if not version:
        version = ''
    if not config:
        config = _get_config()
    # Default to the build directory; the [tgz] section may redirect the
    # unpack into the root tree and/or below a configured prefix.
    destdir = env.builddir
    try:
        if config.get(NAME, 'destination') == 'root':
            destdir = env.rootdir
        destdir = os.path.join(destdir, config.get(NAME, 'prefix').lstrip('/'))
    except ConfigParser.NoOptionError:
        # Both options are optional; fall back to the plain directory.
        pass
    # Python 2 ConfigParser.get(section, option, raw, vars): interpolate the
    # 'source' template with the version (pre-release suffix stripped).
    sourceurl = config.get(NAME, 'source', False,
                           {'version': version.split('-',1)[0],})
    log.debug('Using source URL of %s' % sourceurl)
    # For now just use system tools for this
    if not os.path.exists('src.tgz'):
        local("curl -f -o src.tgz " + sourceurl, capture=False)
    if not os.path.exists(destdir):
        os.makedirs(destdir)
    local("tar -C %s -xvf src.tgz" % destdir, capture=False)
| kafana/ubik | lib/ubik/fab/tgz.py | Python | gpl-3.0 | 1,827 | 0.004926 |
from citrination_client.search.pif.query.chemical.chemical_field_operation import ChemicalFieldOperation
from citrination_client.search.pif.query.core.base_object_query import BaseObjectQuery
from citrination_client.search.pif.query.core.field_operation import FieldOperation
class CompositionQuery(BaseObjectQuery):
    """Query against the fields of a PIF Composition object."""

    def __init__(self, element=None, actual_weight_percent=None, actual_atomic_percent=None,
                 ideal_weight_percent=None, ideal_atomic_percent=None, logic=None, tags=None,
                 length=None, offset=None):
        """Constructor.

        :param element: One or more :class:`ChemicalFieldOperation` operations
            against the element field.
        :param actual_weight_percent: One or more :class:`FieldOperation`
            operations against the actual weight percent field.
        :param actual_atomic_percent: One or more :class:`FieldOperation`
            operations against the actual atomic percent field.
        :param ideal_weight_percent: One or more :class:`FieldOperation`
            operations against the ideal weight percent field.
        :param ideal_atomic_percent: One or more :class:`FieldOperation`
            operations against the ideal atomic percent field.
        :param logic: Logic for this filter. Must be equal to one of "MUST",
            "MUST_NOT", "SHOULD", or "OPTIONAL".
        :param tags: One or more :class:`FieldOperation` operations against
            the tags field.
        :param length: One or more :class:`FieldOperation` operations against
            the length field.
        :param offset: One or more :class:`FieldOperation` operations against
            the offset field.
        """
        super(CompositionQuery, self).__init__(logic=logic, tags=tags, length=length, offset=offset)
        # Initialize the backing fields first, then assign through the
        # property setters so each value is wrapped in the appropriate
        # operation object.
        self._element = None
        self._actual_weight_percent = None
        self._actual_atomic_percent = None
        self._ideal_weight_percent = None
        self._ideal_atomic_percent = None
        self.element = element
        self.actual_weight_percent = actual_weight_percent
        self.actual_atomic_percent = actual_atomic_percent
        self.ideal_weight_percent = ideal_weight_percent
        self.ideal_atomic_percent = ideal_atomic_percent

    # Each query field below follows the same pattern: the getter returns the
    # backing attribute, the setter wraps the assigned value via
    # BaseObjectQuery._get_object, and the deleter resets it to None.

    def _get_element(self):
        return self._element

    def _set_element(self, element):
        self._element = self._get_object(ChemicalFieldOperation, element)

    def _del_element(self):
        self._element = None

    element = property(_get_element, _set_element, _del_element,
                       doc="One or more operations against the element field.")

    def _get_actual_weight_percent(self):
        return self._actual_weight_percent

    def _set_actual_weight_percent(self, actual_weight_percent):
        self._actual_weight_percent = self._get_object(FieldOperation, actual_weight_percent)

    def _del_actual_weight_percent(self):
        self._actual_weight_percent = None

    actual_weight_percent = property(
        _get_actual_weight_percent, _set_actual_weight_percent, _del_actual_weight_percent,
        doc="One or more operations against the actual weight percent field.")

    def _get_actual_atomic_percent(self):
        return self._actual_atomic_percent

    def _set_actual_atomic_percent(self, actual_atomic_percent):
        self._actual_atomic_percent = self._get_object(FieldOperation, actual_atomic_percent)

    def _del_actual_atomic_percent(self):
        self._actual_atomic_percent = None

    actual_atomic_percent = property(
        _get_actual_atomic_percent, _set_actual_atomic_percent, _del_actual_atomic_percent,
        doc="One or more operations against the actual atomic percent field.")

    def _get_ideal_weight_percent(self):
        return self._ideal_weight_percent

    def _set_ideal_weight_percent(self, ideal_weight_percent):
        self._ideal_weight_percent = self._get_object(FieldOperation, ideal_weight_percent)

    def _del_ideal_weight_percent(self):
        self._ideal_weight_percent = None

    ideal_weight_percent = property(
        _get_ideal_weight_percent, _set_ideal_weight_percent, _del_ideal_weight_percent,
        doc="One or more operations against the ideal weight percent field.")

    def _get_ideal_atomic_percent(self):
        return self._ideal_atomic_percent

    def _set_ideal_atomic_percent(self, ideal_atomic_percent):
        self._ideal_atomic_percent = self._get_object(FieldOperation, ideal_atomic_percent)

    def _del_ideal_atomic_percent(self):
        self._ideal_atomic_percent = None

    ideal_atomic_percent = property(
        _get_ideal_atomic_percent, _set_ideal_atomic_percent, _del_ideal_atomic_percent,
        doc="One or more operations against the ideal atomic percent field.")
| calfonso/python-citrination-client | citrination_client/search/pif/query/chemical/composition_query.py | Python | apache-2.0 | 4,113 | 0.004619 |
# -*- coding: utf-8 -*-
"""
Django settings for ad-manage project.
For more information on this file, see
https://docs.djangoproject.com/en/dev/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/dev/ref/settings/
"""
from __future__ import absolute_import, unicode_literals
from oscar.defaults import *
from oscar import OSCAR_MAIN_TEMPLATE_DIR, get_core_apps
import os
import sys
import environ
from path import Path as path
ROOT_DIR = environ.Path(__file__) - 3 # (ad_manage/config/settings/common.py - 3 = ad_manage/)
PROJECT_ROOT = path(__file__).abspath().dirname().dirname().dirname() # Root del proyecto en str
APPS_DIR = ROOT_DIR.path('ad_manage')
APPS_DIR_STR = PROJECT_ROOT / 'ad_manage' / 'djangoapps' #Directorio de aplicaciones django
sys.path.append(APPS_DIR_STR) #Definicion de carpeta para encontrar las apps
env = environ.Env()
environ.Env.read_env()
# APP CONFIGURATION
# ------------------------------------------------------------------------------
DJANGO_APPS = (
# Default Django apps:
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.flatpages',
'rest_framework',
'paypal',
'django_extensions',
'django_messages',
# Useful template tags:
# 'django.contrib.humanize',
# Admin
'django.contrib.admin',
'jsonify',
)
THIRD_PARTY_APPS = (
'crispy_forms', # Form layouts
'compressor',
'widget_tweaks',
'social.apps.django_app.default',
'django_countries', # paises para db
'geopy',
'corsheaders',
)
OSCAR_APPS = tuple(get_core_apps(['catalogue', 'search', 'partner','customer']))
# Apps specific for this project go here.
LOCAL_APPS = (
# Your stuff: custom apps go here
'searchapi',
'course',
'document_manager',
'third_party_auth',
)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#installed-apps
INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS + OSCAR_APPS
# MIDDLEWARE CONFIGURATION
# ------------------------------------------------------------------------------
MIDDLEWARE_CLASSES = (
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'corsheaders.middleware.CorsMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'oscar.apps.basket.middleware.BasketMiddleware',
'django.contrib.flatpages.middleware.FlatpageFallbackMiddleware',
'social.apps.django_app.middleware.SocialAuthExceptionMiddleware',
)
# MIGRATIONS CONFIGURATION
# ------------------------------------------------------------------------------
# DEBUG
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#debug
DEBUG = env.bool('DJANGO_DEBUG', False)
# FIXTURE CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#std:setting-FIXTURE_DIRS
FIXTURE_DIRS = (
str(APPS_DIR.path('fixtures')),
)
# EMAIL CONFIGURATION
# ------------------------------------------------------------------------------
EMAIL_BACKEND = env('DJANGO_EMAIL_BACKEND', default='django.core.mail.backends.smtp.EmailBackend')
# MANAGER CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#admins
ADMINS = (
("""dilosung""", 'santosa@dilosung.com'),
)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#managers
MANAGERS = ADMINS
# DATABASE CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#databases
DATABASES = {
# Raises ImproperlyConfigured exception if DATABASE_URL not in os.environ
'default': env.db('DATABASE_URL', default='postgres:///admanage'),
}
DATABASES['default']['ATOMIC_REQUESTS'] = True
# GENERAL CONFIGURATION
# ------------------------------------------------------------------------------
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
LANGUAGES = (
('en', ('English')),
('es', ('Spanish')),
)
TIME_ZONE = 'UTC'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#language-code
LANGUAGE_CODE = 'es-mx'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#site-id
SITE_ID = 1
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-i18n
USE_I18N = True
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-l10n
USE_L10N = True
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-tz
USE_TZ = True
# TEMPLATE CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#templates
TEMPLATES = [
{
# See: https://docs.djangoproject.com/en/dev/ref/settings/#std:setting-TEMPLATES-BACKEND
'BACKEND': 'django.template.backends.django.DjangoTemplates',
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-dirs
'DIRS': [
str(APPS_DIR.path('templates')),
OSCAR_MAIN_TEMPLATE_DIR
],
'OPTIONS': {
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-debug
'debug': DEBUG,
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-loaders
# https://docs.djangoproject.com/en/dev/ref/templates/api/#loader-types
'loaders': [
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
],
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-context-processors
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.template.context_processors.i18n',
'django.template.context_processors.media',
'django.template.context_processors.static',
'django.template.context_processors.tz',
'django.contrib.messages.context_processors.messages',
# Your stuff: custom template context processors go here
'oscar.apps.search.context_processors.search_form',
'oscar.apps.promotions.context_processors.promotions',
'oscar.apps.checkout.context_processors.checkout',
'oscar.apps.customer.notifications.context_processors.notifications',
'oscar.core.context_processors.metadata',
# django-social-auth
'social.apps.django_app.context_processors.backends',
'social.apps.django_app.context_processors.login_redirect',
],
},
},
]
# See: http://django-crispy-forms.readthedocs.io/en/latest/install.html#template-packs
CRISPY_TEMPLATE_PACK = 'bootstrap3'
# STATIC FILE CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#static-root
STATIC_ROOT = str(ROOT_DIR('staticfiles'))
# See: https://docs.djangoproject.com/en/dev/ref/settings/#static-url
STATIC_URL = '/static/'
# See: https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#std:setting-STATICFILES_DIRS
STATICFILES_DIRS = (
str(APPS_DIR.path('static')),
)
# See: https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#staticfiles-finders
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
# MEDIA CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#media-root
MEDIA_ROOT = str(APPS_DIR('media'))
# See: https://docs.djangoproject.com/en/dev/ref/settings/#media-url
MEDIA_URL = '/media/'
# URL Configuration
# ------------------------------------------------------------------------------
ROOT_URLCONF = 'config.urls'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#wsgi-application
WSGI_APPLICATION = 'config.wsgi.application'
# AUTHENTICATION CONFIGURATION
# ------------------------------------------------------------------------------
AUTHENTICATION_BACKENDS = (
'oscar.apps.customer.auth_backends.EmailBackend',
'social.backends.google.GoogleOAuth2',
'social.backends.twitter.TwitterOAuth',
'social.backends.facebook.FacebookOAuth2',
'social.backends.linkedin.LinkedinOAuth',
'django.contrib.auth.backends.ModelBackend',
)
# SLUGLIFIER
AUTOSLUG_SLUGIFY_FUNCTION = 'slugify.slugify'
# Location of root django.contrib.admin URL, use {% url 'admin:index' %}
ADMIN_URL = r'^admin/'
# Your common stuff: Below this line define 3rd party library settings
HAYSTACK_CONNECTIONS = {
'default': {
'ENGINE': 'haystack.backends.elasticsearch_backend.ElasticsearchSearchEngine',
'URL': env("ELASTICSEARCH_SERVER_URL", default='http://127.0.0.1:9200/'),
'INDEX_NAME': 'ad_manage',
},
}
HAYSTACK_SIGNAL_PROCESSOR = 'haystack.signals.RealtimeSignalProcessor'
# REST AUTH CONFIGURATION
# ------------------------------------------------------------------------------
REST_AUTH_SERIALIZERS = {
'USER_DETAILS_SERIALIZER': 'users.serializers.UserDetailsSerializer'
}
# SECURITY NOTE(review): live OAuth client IDs and secrets are hard-coded in
# version control. They should be read from the environment (this module
# already uses django-environ's `env`) and the committed values rotated.
SOCIAL_AUTH_FACEBOOK_KEY = '473625016174858'
SOCIAL_AUTH_FACEBOOK_SECRET = 'cd56815ec4dbbd3f29b7c20938e2510a'
SOCIAL_AUTH_GOOGLE_OAUTH2_KEY = '724697221459-8792b05mvdtju5makgtvh9a8ctplom9d.apps.googleusercontent.com'
SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET = 'dPf6pnj0UIkgLwSJ24o5nErA'
SOCIAL_AUTH_LINKEDIN_KEY = '75lbs3827ex6w1'
SOCIAL_AUTH_LINKEDIN_SECRET = '9nCbCqnpGxszrNfw'
SOCIAL_AUTH_TWITTER_KEY = 'VClYhRXoMY7YhWHRyhKgfYh8D'
SOCIAL_AUTH_TWITTER_SECRET = 'kpFMyKQ3ufWHzJZ3Slp4Ru4OjCdPGACJI1Jz6w7pjkxv3ow0EL'
# PAY PAL CONFIGURATION
# ------------------------------------------------------------------------------
# SECURITY NOTE(review): PayPal API credentials are committed in source
# control; load them from the environment and rotate the committed values.
PAYPAL_API_USERNAME = 'omar.miranda_api1.me.com'
PAYPAL_API_PASSWORD = '8QEN2EC2ZC48F373'
PAYPAL_API_SIGNATURE = 'AFcWxV21C7fd0v3bYYYRCpSSRl31A-8GLBUs91.2XXT3PXmeNJQ0E1gr'
PAYPAL_SOLUTION_TYPE = 'Mark'
# NOTE(review): this is the string 'True', not the boolean True -- presumably
# the consuming integration expects the string form; confirm before changing.
PAYPAL_SANDBOX_MODE = 'True'
# OSCAR CONFIGURATION
# ------------------------------------------------------------------------------
OSCAR_MISSING_IMAGE_URL = MEDIA_URL + 'image_not_found.jpg'
OSCAR_DEFAULT_CURRENCY = 'MXN'
OSCAR_SHOP_NAME = 'eCharli'
OSCAR_SHOP_TAGLINE = 'Teaching the world'
############################# MENTOR RECEIPT UPLOAD #############################
# MENTOR RECEIPT CONFIG
# WARNING: Certain django storage backends do not support atomic
# file overwrites (including the default, OverwriteStorage) - instead
# there are separate calls to delete and then write a new file in the
# storage backend. This introduces the risk of a race condition
# occurring when a user uploads a new profile image to replace an
# earlier one (the file will temporarily be deleted).
MENTOR_RECEIPT_BACKEND = {
'class': 'storages.backends.overwrite.OverwriteStorage',
'options': {
'location': os.path.join(MEDIA_ROOT, 'mentor-receipt/'),
'base_url': os.path.join(MEDIA_URL, 'mentor-receipt/'),
},
}
MENTOR_RECEIPT_DEFAULT_FILENAME = 'documents/mentor/default'
MENTOR_RECEIPT_DEFAULT_FILE_EXTENSION = 'pdf'
OSCAR_SEND_REGISTRATION_EMAIL = True
# This secret key is used in generating unguessable URLs to users'
# mentor documents. Once it has been set, changing it will make the
# platform unaware of current document URLs, resulting in reverting all
# users' mentor documents to the default placeholder document.
MENTOR_RECEIPT_SECRET_KEY = 'placeholder secret key'
MENTOR_RECEIPT_MAX_BYTES = 1024 * 1024
MENTOR_RECEIPT_MIN_BYTES = 100
SOCIAL_AUTH_PIPELINE = (
'social.pipeline.social_auth.social_details',
'social.pipeline.social_auth.social_uid',
'social.pipeline.social_auth.auth_allowed',
'social.pipeline.social_auth.social_user',
'social.pipeline.user.get_username',
'social.pipeline.user.create_user',
'third_party_auth.pipeline.save_profile', # add data to CustomerProfile
'social.pipeline.social_auth.associate_user',
'social.pipeline.social_auth.load_extra_data',
'social.pipeline.user.user_details',
)
LOGIN_REDIRECT_URL = '/'
#https://github.com/ottoyiu/django-cors-headers
CORS_ORIGIN_ALLOW_ALL = True
CORS_ORIGIN_REGEX_WHITELIST = ('^(https?://)?(\w+\.)?dilosung\.com$', )
CORS_ALLOW_HEADERS = (
'Access-Control-Allow-Origin',
'x-requested-with',
'content-type',
'accept',
'origin',
'authorization',
'x-csrftoken'
)
| dilosung/ad-manage | config/settings/common.py | Python | mit | 13,364 | 0.002469 |
# -*- coding: utf-8 -*-
#
# Copyright (C) 2012-2016 Ben Kurtovic <ben.kurtovic@gmail.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from __future__ import unicode_literals
from . import Node
from ..compat import htmlentities, py3k, str
__all__ = ["HTMLEntity"]
class HTMLEntity(Node):
"""Represents an HTML entity, like `` ``, either named or unnamed."""
    def __init__(self, value, named=None, hexadecimal=False, hex_char="x"):
        """Initialize the entity, guessing its form when *named* is None.

        The guess: a value parseable as base 10 is an unnamed decimal
        entity, one parseable as base 16 is an unnamed hexadecimal entity,
        and anything else is treated as a named entity.
        """
        super(HTMLEntity, self).__init__()
        self._value = value
        if named is None: # Try to guess whether or not the entity is named
            try:
                int(value)
                # Parses as a decimal integer -> unnamed, decimal entity.
                self._named = False
                self._hexadecimal = False
            except ValueError:
                try:
                    int(value, 16)
                    # Parses as hexadecimal -> unnamed, hexadecimal entity.
                    self._named = False
                    self._hexadecimal = True
                except ValueError:
                    # Not numeric at all -> assume a named entity.
                    self._named = True
                    self._hexadecimal = False
        else:
            self._named = named
            self._hexadecimal = hexadecimal
        self._hex_char = hex_char
def __unicode__(self):
if self.named:
return "&{0};".format(self.value)
if self.hexadecimal:
return "&#{0}{1};".format(self.hex_char, self.value)
return "&#{0};".format(self.value)
def __strip__(self, **kwargs):
if kwargs.get("normalize"):
return self.normalize()
return self
    if not py3k:
        @staticmethod
        def _unichr(value):
            """Implement builtin unichr() with support for non-BMP code points.
            On wide Python builds, this functions like the normal unichr(). On
            narrow builds, this returns the value's encoded surrogate pair.
            """
            try:
                return unichr(value)
            except ValueError:
                # Test whether we're on the wide or narrow Python build. Check
                # the length of a non-BMP code point
                # (U+1F64A, SPEAK-NO-EVIL MONKEY):
                if len("\U0001F64A") == 1: # pragma: no cover
                    raise
                # Ensure this is within the range we can encode:
                if value > 0x10FFFF:
                    raise ValueError("unichr() arg not in range(0x110000)")
                code = value - 0x10000
                if value < 0: # Invalid code point
                    raise
                # UTF-16 surrogate pair: the lead surrogate carries the top 10
                # bits of (value - 0x10000), the trail the bottom 10 bits.
                lead = 0xD800 + (code >> 10)
                trail = 0xDC00 + (code % (1 << 10))
                return unichr(lead) + unichr(trail)
@property
def value(self):
"""The string value of the HTML entity."""
return self._value
@property
def named(self):
"""Whether the entity is a string name for a codepoint or an integer.
For example, ``Σ``, ``Σ``, and ``Σ`` refer to the same
character, but only the first is "named", while the others are integer
representations of the codepoint.
"""
return self._named
@property
def hexadecimal(self):
"""If unnamed, this is whether the value is hexadecimal or decimal."""
return self._hexadecimal
@property
def hex_char(self):
"""If the value is hexadecimal, this is the letter denoting that.
For example, the hex_char of ``"ሴ"`` is ``"x"``, whereas the
hex_char of ``"ሴ"`` is ``"X"``. Lowercase and uppercase ``x``
are the only values supported.
"""
return self._hex_char
@value.setter
def value(self, newval):
newval = str(newval)
try:
int(newval)
except ValueError:
try:
int(newval, 16)
except ValueError:
if newval not in htmlentities.entitydefs:
raise ValueError("entity value is not a valid name")
self._named = True
self._hexadecimal = False
else:
if int(newval, 16) < 0 or int(newval, 16) > 0x10FFFF:
raise ValueError("entity value is not in range(0x110000)")
self._named = False
self._hexadecimal = True
else:
test = int(newval, 16 if self.hexadecimal else 10)
if test < 0 or test > 0x10FFFF:
raise ValueError("entity value is not in range(0x110000)")
self._named = False
self._value = newval
@named.setter
def named(self, newval):
newval = bool(newval)
if newval and self.value not in htmlentities.entitydefs:
raise ValueError("entity value is not a valid name")
if not newval:
try:
int(self.value, 16)
except ValueError:
err = "current entity value is not a valid Unicode codepoint"
raise ValueError(err)
self._named = newval
@hexadecimal.setter
def hexadecimal(self, newval):
newval = bool(newval)
if newval and self.named:
raise ValueError("a named entity cannot be hexadecimal")
self._hexadecimal = newval
@hex_char.setter
def hex_char(self, newval):
newval = str(newval)
if newval not in ("x", "X"):
raise ValueError(newval)
self._hex_char = newval
def normalize(self):
"""Return the unicode character represented by the HTML entity."""
chrfunc = chr if py3k else HTMLEntity._unichr
if self.named:
return chrfunc(htmlentities.name2codepoint[self.value])
if self.hexadecimal:
return chrfunc(int(self.value, 16))
return chrfunc(int(self.value))
| gencer/mwparserfromhell | mwparserfromhell/nodes/html_entity.py | Python | mit | 6,771 | 0.000148 |
#!/usr/bin/python
# encoding: utf-8
# license: MIT
# Raspberry Pi-powered cat feeder
# Ryan Matthews
# mhaddy@gmail.com
#import schedule
import time
import datetime
import logging
import RPi.GPIO as GPIO
from twython import Twython
import configvars as cv
from random import randint
import pygame
import pygame.camera
from pygame.locals import *
import requests
# cronitor
requests.get(
'https://cronitor.link/{}/run'.format(cv.cronitor_hash),
timeout=10
)
logging.basicConfig(filename=cv.log_dir+cv.log_filename,format='%(asctime)s : %(levelname)s : %(message)s',level=logging.INFO)
# we're using randint() here to prevent Twitter deleting tweets it feels are duplicates
# Twython
APP_KEY = cv.APPKEY
APP_SECRET = cv.APPSECRET
OAUTH_TOKEN = cv.ACCESSTOKEN
OAUTH_TOKEN_SECRET = cv.ACCESSTOKENSECRET
twitter = Twython(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
logging.info('----------------------------')
logging.info('Initiated FeedGoo routine for {}'.format(datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S %Z%z")))
GPIO.setmode(GPIO.BCM)
GPIO.setup(cv.servo_pin, GPIO.OUT)
GPIO.setup(cv.buzz_pin, GPIO.OUT)
GPIO.setup(cv.butt_switch_pin, GPIO.IN)
GPIO.setup(cv.butt_led_pin, GPIO.OUT)
GPIO.output(cv.buzz_pin, False)
GPIO.output(cv.butt_led_pin, False)
# Functions that make the world, err, feeder wheels go 'round
# Rotate feeder wheel clockwise
def servo_cw():
servo = GPIO.PWM(cv.servo_pin, 50)
try:
servo.start(cv.rotate_time_cw)
time.sleep(cv.sleep_time_servo)
servo.stop()
except KeyboardInterrupt:
logging.info("CTRL+C pressed, servo operation stopped")
except:
logging.info("Servo operation interrupted")
finally:
GPIO.cleanup()
# Call the appropriate servo_XXX function
def feed_goo():
#not yet implemented
#GPIO.output(butt_led_pin, False)
for x in range(0,10):
GPIO.output(cv.buzz_pin, True)
time.sleep(cv.sleep_time_buzz)
GPIO.output(cv.buzz_pin, False)
#not yet implemented
#GPIO.output(butt_led_pin, True)
logging.debug("Servo rotate CW start")
servo_cw()
logging.debug("Servo rotate CW finish")
# TODO: Hook this into IFTTT
def manual_feed():
feed_goo()
logging.info("Goo has been manually fed!")
# take a picture 2 seconds after servo stops
pygame.init()
pygame.camera.init()
cam = pygame.camera.Camera('/dev/video0',(640,480))
cam.start()
image = cam.get_image()
pygame.image.save(image,"{}/image.jpg".format(cv.img_path))
photo = open("{}/image.jpg".format(cv.img_path),"rb")
response = twitter.upload_media(media=photo)
twitter.update_status(status="Goo has been fed! /{}".format(randint(0,10000)), media_ids=[response['media_id']])
# call the feeding routine
# scheduled via cron (schedule was too unreliable)
manual_feed()
# cronitor
requests.get(
'https://cronitor.link/{}/complete'.format(cv.cronitor_hash),
timeout=10
)
| mhaddy/FeedGoo | man_feedgoo.py | Python | gpl-3.0 | 2,896 | 0.020718 |
# @MUNTJAC_COPYRIGHT@
# @MUNTJAC_LICENSE@
from unittest import TestCase
from muntjac.ui.label import Label
from muntjac.ui.tab_sheet import TabSheet
class TestTabSheet(TestCase):
def testAddExistingComponent(self):
c = Label('abc')
tabSheet = TabSheet()
tabSheet.addComponent(c)
tabSheet.addComponent(c)
itr = tabSheet.getComponentIterator()
self.assertEquals(c, itr.next())
self.assertRaises(StopIteration, itr.next)
self.assertNotEquals(tabSheet.getTab(c), None)
def testGetComponentFromTab(self):
c = Label('abc')
tabSheet = TabSheet()
tab = tabSheet.addTab(c)
self.assertEquals(c, tab.getComponent())
def testAddTabWithComponentOnly(self):
tabSheet = TabSheet()
tab1 = tabSheet.addTab(Label('aaa'))
tab2 = tabSheet.addTab(Label('bbb'))
tab3 = tabSheet.addTab(Label('ccc'))
# Check right order of tabs
self.assertEquals(0, tabSheet.getTabPosition(tab1))
self.assertEquals(1, tabSheet.getTabPosition(tab2))
self.assertEquals(2, tabSheet.getTabPosition(tab3))
# Calling addTab with existing component does not move tab
tabSheet.addTab(tab1.getComponent())
# Check right order of tabs
self.assertEquals(0, tabSheet.getTabPosition(tab1))
self.assertEquals(1, tabSheet.getTabPosition(tab2))
self.assertEquals(2, tabSheet.getTabPosition(tab3))
def testAddTabWithComponentAndIndex(self):
tabSheet = TabSheet()
tab1 = tabSheet.addTab(Label('aaa'))
tab2 = tabSheet.addTab(Label('bbb'))
tab3 = tabSheet.addTab(Label('ccc'))
tab4 = tabSheet.addTab(Label('ddd'), 1)
tab5 = tabSheet.addTab(Label('eee'), 3)
self.assertEquals(0, tabSheet.getTabPosition(tab1))
self.assertEquals(1, tabSheet.getTabPosition(tab4))
self.assertEquals(2, tabSheet.getTabPosition(tab2))
self.assertEquals(3, tabSheet.getTabPosition(tab5))
self.assertEquals(4, tabSheet.getTabPosition(tab3))
# Calling addTab with existing component does not move tab
tabSheet.addTab(tab1.getComponent(), 3)
self.assertEquals(0, tabSheet.getTabPosition(tab1))
self.assertEquals(1, tabSheet.getTabPosition(tab4))
self.assertEquals(2, tabSheet.getTabPosition(tab2))
self.assertEquals(3, tabSheet.getTabPosition(tab5))
self.assertEquals(4, tabSheet.getTabPosition(tab3))
def testAddTabWithAllParameters(self):
tabSheet = TabSheet()
tab1 = tabSheet.addTab(Label('aaa'))
tab2 = tabSheet.addTab(Label('bbb'))
tab3 = tabSheet.addTab(Label('ccc'))
tab4 = tabSheet.addTab(Label('ddd'), 'ddd', None, 1)
tab5 = tabSheet.addTab(Label('eee'), 'eee', None, 3)
self.assertEquals(0, tabSheet.getTabPosition(tab1))
self.assertEquals(1, tabSheet.getTabPosition(tab4))
self.assertEquals(2, tabSheet.getTabPosition(tab2))
self.assertEquals(3, tabSheet.getTabPosition(tab5))
self.assertEquals(4, tabSheet.getTabPosition(tab3))
# Calling addTab with existing component does not move tab
tabSheet.addTab(tab1.getComponent(), 'xxx', None, 3)
self.assertEquals(0, tabSheet.getTabPosition(tab1))
self.assertEquals(1, tabSheet.getTabPosition(tab4))
self.assertEquals(2, tabSheet.getTabPosition(tab2))
self.assertEquals(3, tabSheet.getTabPosition(tab5))
self.assertEquals(4, tabSheet.getTabPosition(tab3))
def testGetTabByPosition(self):
tabSheet = TabSheet()
tab1 = tabSheet.addTab(Label('aaa'))
tab2 = tabSheet.addTab(Label('bbb'))
tab3 = tabSheet.addTab(Label('ccc'))
self.assertEquals(tab1, tabSheet.getTab(0))
self.assertEquals(tab2, tabSheet.getTab(1))
self.assertEquals(tab3, tabSheet.getTab(2))
| rwl/muntjac | muntjac/test/server/component/test_tab_sheet.py | Python | apache-2.0 | 3,921 | 0.001275 |
import logging
import requests
HUE_IP = '192.168.86.32'
HUE_USERNAME = '7KcxItfntdF0DuWV9t0GPMeToEBlvHTgqWNZqxu6'
logger = logging.getLogger('hue')
def getLights():
url = 'http://{0}/api/{1}/lights'.format(HUE_IP, HUE_USERNAME)
try:
r = requests.get(url)
except:
logger.error('Failed getting status for all lights')
return
if r.status_code == 200:
data = r.json()
return data
def getStatus(id):
url = 'http://{0}/api/{1}/lights/{2}'.format(HUE_IP, HUE_USERNAME, id)
try:
r = requests.get(url)
except:
logger.error('Failed getting status for light {0}'.format (id))
return
if r.status_code == 200:
data = r.json()
return data
| twotymz/lucy | hue/lights.py | Python | mit | 705 | 0.025532 |
#!/usr/bin/python2.5
#
# Copyright 2009 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Timeline (Model) query functions.
"""
__authors__ = [
'"Sverre Rabbelier" <sverre@rabbelier.nl>',
]
from soc.logic.models import base
from soc.logic.models import sponsor as sponsor_logic
import soc.models.timeline
class Logic(base.Logic):
"""Logic methods for the Timeline model.
"""
def __init__(self, model=soc.models.timeline.Timeline,
base_model=None, scope_logic=sponsor_logic):
"""Defines the name, key_name and model for this entity.
"""
super(Logic, self).__init__(model=model, base_model=base_model,
scope_logic=scope_logic)
logic = Logic()
| jamslevy/gsoc | app/soc/logic/models/timeline.py | Python | apache-2.0 | 1,237 | 0.003234 |
"""
Abstract base classes define the primitives that renderers and
graphics contexts must implement to serve as a matplotlib backend
:class:`RendererBase`
An abstract base class to handle drawing/rendering operations.
:class:`FigureCanvasBase`
The abstraction layer that separates the
:class:`matplotlib.figure.Figure` from the backend specific
details like a user interface drawing area
:class:`GraphicsContextBase`
An abstract base class that provides color, line styles, etc...
:class:`Event`
The base class for all of the matplotlib event
   handling. Derived classes such as :class:`KeyEvent` and
:class:`MouseEvent` store the meta data like keys and buttons
pressed, x and y locations in pixel and
:class:`~matplotlib.axes.Axes` coordinates.
"""
from __future__ import division
import os, warnings, time
import numpy as np
import matplotlib.cbook as cbook
import matplotlib.colors as colors
import matplotlib.transforms as transforms
import matplotlib.widgets as widgets
from matplotlib import rcParams
class RendererBase:
    """An abstract base class to handle drawing/rendering operations.

    The following methods *must* be implemented in the backend:

    * :meth:`draw_path`
    * :meth:`draw_image`
    * :meth:`draw_text`
    * :meth:`get_text_width_height_descent`

    The following methods *should* be implemented in the backend for
    optimization reasons:

    * :meth:`draw_markers`
    * :meth:`draw_path_collection`
    * :meth:`draw_quad_mesh`
    """
    def __init__(self):
        # The TexManager is expensive to construct; it is created lazily
        # by get_texmanager() and cached here.
        self._texmanager = None
    def open_group(self, s):
        """
        Open a grouping element with label *s*. Is only currently used by
        :mod:`~matplotlib.backends.backend_svg`
        """
        pass
    def close_group(self, s):
        """
        Close a grouping element with label *s*
        Is only currently used by :mod:`~matplotlib.backends.backend_svg`
        """
        pass
    def draw_path(self, gc, path, transform, rgbFace=None):
        """
        Draws a :class:`~matplotlib.path.Path` instance using the
        given affine transform.

        Must be implemented by concrete backends.
        """
        raise NotImplementedError
    def draw_markers(self, gc, marker_path, marker_trans, path, trans, rgbFace=None):
        """
        Draws a marker at each of the vertices in path.  This includes
        all vertices, including control points on curves.  To avoid
        that behavior, those vertices should be removed before calling
        this function.

        *gc*
            the :class:`GraphicsContextBase` instance

        *marker_trans*
            is an affine transform applied to the marker.

        *trans*
            is an affine transform applied to the path.

        This provides a fallback implementation of draw_markers that
        makes multiple calls to :meth:`draw_path`.  Some backends may
        want to override this method in order to draw the marker only
        once and reuse it multiple times.
        """
        tpath = trans.transform_path(path)
        for vertices, codes in tpath.iter_segments():
            if len(vertices):
                # vertices is a flat sequence; the final (x, y) pair is
                # the last two values.  The marker is stamped there.
                x,y = vertices[-2:]
                self.draw_path(gc, marker_path,
                               marker_trans + transforms.Affine2D().translate(x, y),
                               rgbFace)
    def draw_path_collection(self, master_transform, cliprect, clippath,
                             clippath_trans, paths, all_transforms, offsets,
                             offsetTrans, facecolors, edgecolors, linewidths,
                             linestyles, antialiaseds, urls):
        """
        Draws a collection of paths, selecting drawing properties from
        the lists *facecolors*, *edgecolors*, *linewidths*,
        *linestyles* and *antialiaseds*. *offsets* is a list of
        offsets to apply to each of the paths.  The offsets in
        *offsets* are first transformed by *offsetTrans* before
        being applied.

        This provides a fallback implementation of
        :meth:`draw_path_collection` that makes multiple calls to
        draw_path.  Some backends may want to override this in order
        to render each set of path data only once, and then reference
        that path multiple times with the different offsets, colors,
        styles etc.  The generator methods
        :meth:`_iter_collection_raw_paths` and
        :meth:`_iter_collection` are provided to help with (and
        standardize) the implementation across backends.  It is highly
        recommended to use those generators, so that changes to the
        behavior of :meth:`draw_path_collection` can be made globally.
        """
        # First materialize the base (path, transform) pairs so they can
        # be re-used for every (offset, style) combination below.
        path_ids = []
        for path, transform in self._iter_collection_raw_paths(
            master_transform, paths, all_transforms):
            path_ids.append((path, transform))
        for xo, yo, path_id, gc, rgbFace in self._iter_collection(
            path_ids, cliprect, clippath, clippath_trans,
            offsets, offsetTrans, facecolors, edgecolors,
            linewidths, linestyles, antialiaseds, urls):
            path, transform = path_id
            # Apply the per-item offset on top of the base transform.
            transform = transforms.Affine2D(transform.get_matrix()).translate(xo, yo)
            self.draw_path(gc, path, transform, rgbFace)
    def draw_quad_mesh(self, master_transform, cliprect, clippath,
                       clippath_trans, meshWidth, meshHeight, coordinates,
                       offsets, offsetTrans, facecolors, antialiased,
                       showedges):
        """
        This provides a fallback implementation of
        :meth:`draw_quad_mesh` that generates paths and then calls
        :meth:`draw_path_collection`.
        """
        # Imported here to avoid a circular import at module load time.
        from matplotlib.collections import QuadMesh
        paths = QuadMesh.convert_mesh_to_paths(
            meshWidth, meshHeight, coordinates)
        if showedges:
            # Black, fully-opaque 1pt edges when edge display is requested.
            edgecolors = np.array([[0.0, 0.0, 0.0, 1.0]], np.float_)
            linewidths = np.array([1.0], np.float_)
        else:
            # Edges take the face color so they are invisible seams.
            edgecolors = facecolors
            linewidths = np.array([0.0], np.float_)
        return self.draw_path_collection(
            master_transform, cliprect, clippath, clippath_trans,
            paths, [], offsets, offsetTrans, facecolors, edgecolors,
            linewidths, [], [antialiased], [None])
    def _iter_collection_raw_paths(self, master_transform, paths, all_transforms):
        """
        This is a helper method (along with :meth:`_iter_collection`) to make
        it easier to write a space-efficent :meth:`draw_path_collection`
        implementation in a backend.

        This method yields all of the base path/transform
        combinations, given a master transform, a list of paths and
        list of transforms.

        The arguments should be exactly what is passed in to
        :meth:`draw_path_collection`.

        The backend should take each yielded path and transform and
        create an object that can be referenced (reused) later.
        """
        Npaths = len(paths)
        Ntransforms = len(all_transforms)
        # Both lists are cycled (via modulo below) up to the length of
        # the longer one.
        N = max(Npaths, Ntransforms)
        if Npaths == 0:
            return
        transform = transforms.IdentityTransform()
        for i in xrange(N):
            path = paths[i % Npaths]
            if Ntransforms:
                transform = all_transforms[i % Ntransforms]
            yield path, transform + master_transform
    def _iter_collection(self, path_ids, cliprect, clippath, clippath_trans,
                         offsets, offsetTrans, facecolors, edgecolors,
                         linewidths, linestyles, antialiaseds, urls):
        """
        This is a helper method (along with
        :meth:`_iter_collection_raw_paths`) to make it easier to write
        a space-efficent :meth:`draw_path_collection` implementation in a
        backend.

        This method yields all of the path, offset and graphics
        context combinations to draw the path collection.  The caller
        should already have looped over the results of
        :meth:`_iter_collection_raw_paths` to draw this collection.

        The arguments should be the same as that passed into
        :meth:`draw_path_collection`, with the exception of
        *path_ids*, which is a list of arbitrary objects that the
        backend will use to reference one of the paths created in the
        :meth:`_iter_collection_raw_paths` stage.

        Each yielded result is of the form::

           xo, yo, path_id, gc, rgbFace

        where *xo*, *yo* is an offset; *path_id* is one of the elements of
        *path_ids*; *gc* is a graphics context and *rgbFace* is a color to
        use for filling the path.
        """
        Npaths = len(path_ids)
        Noffsets = len(offsets)
        N = max(Npaths, Noffsets)
        Nfacecolors = len(facecolors)
        Nedgecolors = len(edgecolors)
        Nlinewidths = len(linewidths)
        Nlinestyles = len(linestyles)
        Naa = len(antialiaseds)
        Nurls = len(urls)
        # Nothing to draw if there are no colors at all or no paths.
        if (Nfacecolors == 0 and Nedgecolors == 0) or Npaths == 0:
            return
        if Noffsets:
            toffsets = offsetTrans.transform(offsets)
        # A single gc is reused for every item; its properties are
        # updated in place on each iteration.
        gc = self.new_gc()
        gc.set_clip_rectangle(cliprect)
        if clippath is not None:
            clippath = transforms.TransformedPath(clippath, clippath_trans)
            gc.set_clip_path(clippath)
        if Nfacecolors == 0:
            rgbFace = None
        if Nedgecolors == 0:
            gc.set_linewidth(0.0)
        xo, yo = 0, 0
        for i in xrange(N):
            # Each property list is cycled independently via modulo,
            # broadcasting short lists across all items.
            path_id = path_ids[i % Npaths]
            if Noffsets:
                xo, yo = toffsets[i % Noffsets]
            if Nfacecolors:
                rgbFace = facecolors[i % Nfacecolors]
            if Nedgecolors:
                gc.set_foreground(edgecolors[i % Nedgecolors])
            if Nlinewidths:
                gc.set_linewidth(linewidths[i % Nlinewidths])
            if Nlinestyles:
                gc.set_dashes(*linestyles[i % Nlinestyles])
            if rgbFace is not None and len(rgbFace)==4:
                # RGBA face: transfer the alpha channel onto the gc.
                # NOTE(review): the gc alpha set here is not reset for
                # subsequent RGB-only items -- confirm this is intended.
                gc.set_alpha(rgbFace[-1])
                rgbFace = rgbFace[:3]
            gc.set_antialiased(antialiaseds[i % Naa])
            if Nurls:
                gc.set_url(urls[i % Nurls])
            yield xo, yo, path_id, gc, rgbFace
    def get_image_magnification(self):
        """
        Get the factor by which to magnify images passed to :meth:`draw_image`.
        Allows a backend to have images at a different resolution to other
        artists.
        """
        return 1.0
    def draw_image(self, x, y, im, bbox, clippath=None, clippath_trans=None):
        """
        Draw the image instance into the current axes;

        *x*
            is the distance in pixels from the left hand side of the canvas.

        *y*
            the distance from the origin.  That is, if origin is
            upper, y is the distance from top.  If origin is lower, y
            is the distance from bottom

        *im*
            the :class:`matplotlib._image.Image` instance

        *bbox*
            a :class:`matplotlib.transforms.Bbox` instance for clipping, or
            None
        """
        raise NotImplementedError
    def option_image_nocomposite(self):
        """
        overwrite this method for renderers that do not necessarily
        want to rescale and composite raster images. (like SVG)
        """
        return False
    def draw_tex(self, gc, x, y, s, prop, angle, ismath='TeX!'):
        # Backends supporting usetex must override this to render a raw
        # TeX string.
        raise NotImplementedError
    def draw_text(self, gc, x, y, s, prop, angle, ismath=False):
        """
        Draw the text instance

        *gc*
            the :class:`GraphicsContextBase` instance

        *x*
            the x location of the text in display coords

        *y*
            the y location of the text in display coords

        *s*
             a :class:`matplotlib.text.Text` instance

        *prop*
          a :class:`matplotlib.font_manager.FontProperties` instance

        *angle*
            the rotation angle in degrees

        **backend implementers note**

        When you are trying to determine if you have gotten your bounding box
        right (which is what enables the text layout/alignment to work
        properly), it helps to change the line in text.py::

            if 0: bbox_artist(self, renderer)

        to if 1, and then the actual bounding box will be blotted along with
        your text.
        """
        raise NotImplementedError
    def flipy(self):
        """
        Return true if y small numbers are top for renderer Is used
        for drawing text (:mod:`matplotlib.text`) and images
        (:mod:`matplotlib.image`) only
        """
        return True
    def get_canvas_width_height(self):
        'return the canvas width and height in display coords'
        return 1, 1
    def get_texmanager(self):
        """
        return the :class:`matplotlib.texmanager.TexManager` instance
        """
        if self._texmanager is None:
            # Lazy import/creation: TexManager is only needed for usetex.
            from matplotlib.texmanager import TexManager
            self._texmanager = TexManager()
        return self._texmanager
    def get_text_width_height_descent(self, s, prop, ismath):
        """
        get the width and height, and the offset from the bottom to the
        baseline (descent), in display coords of the string s with
        :class:`~matplotlib.font_manager.FontProperties` prop
        """
        raise NotImplementedError
    def new_gc(self):
        """
        Return an instance of a :class:`GraphicsContextBase`
        """
        return GraphicsContextBase()
    def points_to_pixels(self, points):
        """
        Convert points to display units

        *points*
            a float or a numpy array of float

        return points converted to pixels

        You need to override this function (unless your backend
        doesn't have a dpi, eg, postscript or svg).  Some imaging
        systems assume some value for pixels per inch::

            points to pixels = points * pixels_per_inch/72.0 * dpi/72.0
        """
        return points
    def strip_math(self, s):
        # Delegates mathtext stripping to the cbook helper.
        return cbook.strip_math(s)
    def start_rasterizing(self):
        # Hook for mixed-mode renderers; no-op in the base class.
        pass
    def stop_rasterizing(self):
        # Hook for mixed-mode renderers; no-op in the base class.
        pass
class GraphicsContextBase:
    """
    An abstract base class that provides color, line styles, etc...

    Backends subclass this to carry per-draw state (color, clip,
    dashes, line width) between the figure and the low-level drawing
    calls.
    """

    # a mapping from dash styles to suggested offset, dash pairs
    dashd = {
        'solid' : (None, None),
        'dashed' : (0, (6.0, 6.0)),
        'dashdot' : (0, (3.0, 5.0, 1.0, 5.0)),
        'dotted' : (0, (1.0, 3.0)),
        }

    def __init__(self):
        self._alpha = 1.0
        self._antialiased = 1 # use 0,1 not True, False for extension code
        self._capstyle = 'butt'
        self._cliprect = None
        self._clippath = None
        self._dashes = None, None
        self._joinstyle = 'miter'
        self._linestyle = 'solid'
        self._linewidth = 1
        self._rgb = (0.0, 0.0, 0.0)
        self._hatch = None
        self._url = None
        self._snap = None

    def copy_properties(self, gc):
        'Copy properties from gc to self'
        self._alpha = gc._alpha
        self._antialiased = gc._antialiased
        self._capstyle = gc._capstyle
        self._cliprect = gc._cliprect
        self._clippath = gc._clippath
        self._dashes = gc._dashes
        self._joinstyle = gc._joinstyle
        self._linestyle = gc._linestyle
        self._linewidth = gc._linewidth
        self._rgb = gc._rgb
        self._hatch = gc._hatch
        self._url = gc._url
        self._snap = gc._snap

    def get_alpha(self):
        """
        Return the alpha value used for blending - not supported on
        all backends
        """
        return self._alpha

    def get_antialiased(self):
        "Return true if the object should try to do antialiased rendering"
        return self._antialiased

    def get_capstyle(self):
        """
        Return the capstyle as a string in ('butt', 'round', 'projecting')
        """
        return self._capstyle

    def get_clip_rectangle(self):
        """
        Return the clip rectangle as a :class:`~matplotlib.transforms.Bbox` instance
        """
        return self._cliprect

    def get_clip_path(self):
        """
        Return the clip path in the form (path, transform), where path
        is a :class:`~matplotlib.path.Path` instance, and transform is
        an affine transform to apply to the path before clipping.

        Returns (None, None) if no clip path is set.
        """
        if self._clippath is not None:
            return self._clippath.get_transformed_path_and_affine()
        return None, None

    def get_dashes(self):
        """
        Return the dash information as an offset dashlist tuple.

        The dash list is a even size list that gives the ink on, ink
        off in pixels.

        See p107 of to PostScript `BLUEBOOK
        <http://www-cdf.fnal.gov/offline/PostScript/BLUEBOOK.PDF>`_
        for more info.

        Default value is None
        """
        return self._dashes

    def get_joinstyle(self):
        """
        Return the line join style as one of ('miter', 'round', 'bevel')
        """
        return self._joinstyle

    def get_linestyle(self, style):
        """
        Return the linestyle: one of ('solid', 'dashed', 'dashdot',
        'dotted').

        The *style* argument is unused; it is retained for
        backward compatibility with existing callers.
        """
        return self._linestyle

    def get_linewidth(self):
        """
        Return the line width in points as a scalar
        """
        return self._linewidth

    def get_rgb(self):
        """
        returns a tuple of three floats from 0-1.  color can be a
        matlab format string, a html hex color string, or a rgb tuple
        """
        return self._rgb

    def get_url(self):
        """
        returns a url if one is set, None otherwise
        """
        return self._url

    def get_snap(self):
        """
        returns the snap setting which may be:

          * True: snap vertices to the nearest pixel center

          * False: leave vertices as-is

          * None: (auto) If the path contains only rectilinear line
            segments, round to the nearest pixel center
        """
        return self._snap

    def set_alpha(self, alpha):
        """
        Set the alpha value used for blending - not supported on
        all backends
        """
        self._alpha = alpha

    def set_antialiased(self, b):
        """
        True if object should be drawn with antialiased rendering
        """
        # use 0, 1 to make life easier on extension code trying to read the gc
        if b: self._antialiased = 1
        else: self._antialiased = 0

    def set_capstyle(self, cs):
        """
        Set the capstyle as a string in ('butt', 'round', 'projecting')

        Raises ValueError for any other value.
        """
        if cs in ('butt', 'round', 'projecting'):
            self._capstyle = cs
        else:
            raise ValueError('Unrecognized cap style. Found %s' % cs)

    def set_clip_rectangle(self, rectangle):
        """
        Set the clip rectangle with sequence (left, bottom, width, height)
        """
        self._cliprect = rectangle

    def set_clip_path(self, path):
        """
        Set the clip path and transformation.  Path should be a
        :class:`~matplotlib.transforms.TransformedPath` instance.
        """
        assert path is None or isinstance(path, transforms.TransformedPath)
        self._clippath = path

    def set_dashes(self, dash_offset, dash_list):
        """
        Set the dash style for the gc.

        *dash_offset*
            is the offset (usually 0).

        *dash_list*
            specifies the on-off sequence as points.  ``(None, None)`` specifies a solid line
        """
        self._dashes = dash_offset, dash_list

    def set_foreground(self, fg, isRGB=False):
        """
        Set the foreground color.  fg can be a matlab format string, a
        html hex color string, an rgb unit tuple, or a float between 0
        and 1.  In the latter case, grayscale is used.

        The :class:`GraphicsContextBase` converts colors to rgb
        internally.  If you know the color is rgb already, you can set
        ``isRGB=True`` to avoid the performace hit of the conversion
        """
        if isRGB:
            self._rgb = fg
        else:
            self._rgb = colors.colorConverter.to_rgba(fg)

    def set_graylevel(self, frac):
        """
        Set the foreground color to be a gray level with *frac*
        """
        self._rgb = (frac, frac, frac)

    def set_joinstyle(self, js):
        """
        Set the join style to be one of ('miter', 'round', 'bevel')

        Raises ValueError for any other value.
        """
        if js in ('miter', 'round', 'bevel'):
            self._joinstyle = js
        else:
            raise ValueError('Unrecognized join style. Found %s' % js)

    def set_linewidth(self, w):
        """
        Set the linewidth in points
        """
        self._linewidth = w

    def set_linestyle(self, style):
        """
        Set the linestyle to be one of ('solid', 'dashed', 'dashdot',
        'dotted').

        Also updates the dash pattern (see :meth:`set_dashes`) to the
        suggested offset/dash pair for *style*.  Raises ValueError for
        an unrecognized style.
        """
        try:
            offset, dashes = self.dashd[style]
        # Catch only the missing-key case; the previous bare ``except:``
        # also swallowed KeyboardInterrupt/SystemExit.
        except KeyError:
            raise ValueError('Unrecognized linestyle: %s' % style)
        self._linestyle = style
        self.set_dashes(offset, dashes)

    def set_url(self, url):
        """
        Sets the url for links in compatible backends
        """
        self._url = url

    def set_snap(self, snap):
        """
        Sets the snap setting which may be:

          * True: snap vertices to the nearest pixel center

          * False: leave vertices as-is

          * None: (auto) If the path contains only rectilinear line
            segments, round to the nearest pixel center
        """
        self._snap = snap

    def set_hatch(self, hatch):
        """
        Sets the hatch style for filling
        """
        self._hatch = hatch

    def get_hatch(self):
        """
        Gets the current hatch style
        """
        return self._hatch
class Event:
    """
    Base class for all matplotlib events.

    Backends may attach additional attributes as described in
    :meth:`FigureCanvasBase.mpl_connect`.  The following attributes
    are defined and shown with their default values:

    *name*
        the event name

    *canvas*
        the FigureCanvas instance generating the event

    *guiEvent*
        the GUI event that triggered the matplotlib event
    """
    def __init__(self, name, canvas, guiEvent=None):
        # Record the event's provenance; subclasses add the payload.
        self.guiEvent = guiEvent
        self.canvas = canvas
        self.name = name
class IdleEvent(Event):
    """
    An event triggered by the GUI backend when it is idle -- useful
    for passive animation.

    Carries no attributes beyond those of :class:`Event`.
    """
    pass
class DrawEvent(Event):
    """
    An event triggered by a draw operation on the canvas.

    In addition to the :class:`Event` attributes, the following event
    attributes are defined:

    *renderer*
        the :class:`RendererBase` instance for the draw event
    """
    def __init__(self, name, canvas, renderer):
        # Stash the renderer first, then initialize the common fields.
        self.renderer = renderer
        Event.__init__(self, name, canvas)
class ResizeEvent(Event):
    """
    An event triggered by a canvas resize.

    In addition to the :class:`Event` attributes, the following event
    attributes are defined:

    *width*
        width of the canvas in pixels

    *height*
        height of the canvas in pixels
    """
    def __init__(self, name, canvas):
        Event.__init__(self, name, canvas)
        # Snapshot the canvas dimensions at the time of the event.
        size = canvas.get_width_height()
        self.width, self.height = size
class LocationEvent(Event):
    """
    A event that has a screen location

    The following additional attributes are defined and shown with
    their default values

    In addition to the :class:`Event` attributes, the following event attributes are defined:

    *x*
        x position - pixels from left of canvas

    *y*
        y position - pixels from bottom of canvas

    *inaxes*
        the :class:`~matplotlib.axes.Axes` instance if mouse is over axes

    *xdata*
        x coord of mouse in data coords

    *ydata*
        y coord of mouse in data coords
    """
    x = None # x position - pixels from left of canvas
    y = None # y position - pixels from bottom of canvas
    inaxes = None # the Axes instance if mouse is over axes
    xdata = None # x coord of mouse in data coords
    ydata = None # y coord of mouse in data coords
    # the last event that was triggered before this one; class-level so
    # enter/leave transitions can be detected across events
    lastevent = None
    def __init__(self, name, canvas, x, y,guiEvent=None):
        """
        *x*, *y* in figure coords, 0,0 = bottom, left
        """
        Event.__init__(self, name, canvas,guiEvent=guiEvent)
        self.x = x
        self.y = y
        if x is None or y is None:
            # cannot check if event was in axes if no x,y info
            self.inaxes = None
            self._update_enter_leave()
            return
        # Find all axes containing the mouse
        axes_list = [a for a in self.canvas.figure.get_axes() if a.in_axes(self)]
        if len(axes_list) == 0: # None found
            self.inaxes = None
            self._update_enter_leave()
            return
        elif (len(axes_list) > 1): # Overlap, get the highest zorder
            # NOTE: uses the Python-2-only builtin cmp() for sorting
            axCmp = lambda _x,_y: cmp(_x.zorder, _y.zorder)
            axes_list.sort(axCmp)
            self.inaxes = axes_list[-1] # Use the highest zorder
        else: # Just found one hit
            self.inaxes = axes_list[0]
        try:
            # Convert display coords to data coords of the hit axes
            xdata, ydata = self.inaxes.transData.inverted().transform_point((x, y))
        except ValueError:
            # transform can fail (e.g. singular transform); leave data
            # coords unset rather than propagating
            self.xdata = None
            self.ydata = None
        else:
            self.xdata = xdata
            self.ydata = ydata
        self._update_enter_leave()
    def _update_enter_leave(self):
        'process the figure/axes enter leave events'
        if LocationEvent.lastevent is not None:
            last = LocationEvent.lastevent
            if last.inaxes!=self.inaxes:
                # process axes enter/leave events
                if last.inaxes is not None:
                    last.canvas.callbacks.process('axes_leave_event', last)
                if self.inaxes is not None:
                    self.canvas.callbacks.process('axes_enter_event', self)
        else:
            # process a figure enter event
            if self.inaxes is not None:
                self.canvas.callbacks.process('axes_enter_event', self)
        # remember this event for the next enter/leave comparison
        LocationEvent.lastevent = self
class MouseEvent(LocationEvent):
    """
    A mouse event ('button_press_event', 'button_release_event',
    'scroll_event', 'motion_notify_event').

    In addition to the :class:`Event` and :class:`LocationEvent`
    attributes, the following attributes are defined:

    *button*
        button pressed: None, 1, 2, 3, 'up', 'down' ('up' and 'down'
        are used for scroll events)

    *key*
        the key pressed: None, chr(range(255), 'shift', 'win', or 'control'

    *step*
        number of scroll steps (positive for 'up', negative for 'down')

    Example usage::

        def on_press(event):
            print 'you pressed', event.button, event.xdata, event.ydata

        cid = fig.canvas.mpl_connect('button_press_event', on_press)
    """
    # Class-level defaults so unset attributes read as None/0-ish values.
    x = None       # x position - pixels from left of canvas
    y = None       # y position - pixels from right of canvas
    button = None  # button pressed None, 1, 2, 3
    inaxes = None  # the Axes instance if mouse is over axes
    xdata = None   # x coord of mouse in data coords
    ydata = None   # y coord of mouse in data coords
    step = None    # scroll steps for scroll events
    def __init__(self, name, canvas, x, y, button=None, key=None,
                 step=0, guiEvent=None):
        """
        *x*, *y* are in figure (canvas) coords: 0,0 is bottom left.
        *button* is None, 1, 2, 3, 'up' or 'down'.
        """
        # Location bookkeeping (x, y, inaxes, xdata, ydata) is handled
        # by the base class; we only record button/key/scroll state.
        LocationEvent.__init__(self, name, canvas, x, y, guiEvent=guiEvent)
        self.button = button
        self.key = key
        self.step = step
class PickEvent(Event):
    """
    A pick event, fired when the user picks a location on the canvas
    sufficiently close to an artist.

    Attrs: all the :class:`Event` attributes plus

    *mouseevent*
        the :class:`MouseEvent` that generated the pick

    *artist*
        the :class:`~matplotlib.artist.Artist` picked

    other
        extra class dependent attrs -- eg a
        :class:`~matplotlib.lines.Line2D` pick may define different
        extra attributes than a
        :class:`~matplotlib.collections.PatchCollection` pick event

    Example usage::

        line, = ax.plot(rand(100), 'o', picker=5)  # 5 points tolerance

        def on_pick(event):
            thisline = event.artist
            xdata, ydata = thisline.get_data()
            ind = event.ind
            print 'on pick line:', zip(xdata[ind], ydata[ind])

        cid = fig.canvas.mpl_connect('pick_event', on_pick)
    """
    def __init__(self, name, canvas, mouseevent, artist, guiEvent=None, **kwargs):
        Event.__init__(self, name, canvas, guiEvent)
        # Keep handles on both the artist that was hit and the mouse
        # event that triggered the hit test.
        self.artist = artist
        self.mouseevent = mouseevent
        # Artist-specific extras (e.g. ``ind`` for Line2D picks) become
        # plain attributes of the event.
        self.__dict__.update(kwargs)
class KeyEvent(LocationEvent):
    """
    A key event (key press, key release).

    Attach additional attributes as defined in
    :meth:`FigureCanvasBase.mpl_connect`.

    In addition to the :class:`Event` and :class:`LocationEvent`
    attributes, the following attributes are defined:

    *key*
        the key pressed: None, chr(range(255), shift, win, or control

    This interface may change slightly when better support for
    modifier keys is included.

    Example usage::

        def on_key(event):
            print 'you pressed', event.key, event.xdata, event.ydata

        cid = fig.canvas.mpl_connect('key_press_event', on_key)
    """
    def __init__(self, name, canvas, key, x=0, y=0, guiEvent=None):
        # Delegate location bookkeeping to the base class; x/y default
        # to 0 because some GUI toolkits do not report a position with
        # keyboard events.
        LocationEvent.__init__(self, name, canvas, x, y, guiEvent=guiEvent)
        # The key that was pressed or released (may be None).
        self.key = key
class FigureCanvasBase:
    """
    The canvas the figure renders into.

    Public attributes

    *figure*
        A :class:`matplotlib.figure.Figure` instance
    """
    # Names of every event this canvas can dispatch; used to seed the
    # CallbackRegistry in __init__.
    events = [
        'resize_event',
        'draw_event',
        'key_press_event',
        'key_release_event',
        'button_press_event',
        'button_release_event',
        'scroll_event',
        'motion_notify_event',
        'pick_event',
        'idle_event',
        'figure_enter_event',
        'figure_leave_event',
        'axes_enter_event',
        'axes_leave_event'
        ]
    def __init__(self, figure):
        figure.set_canvas(self)
        self.figure = figure
        # a dictionary from event name to a dictionary that maps cid->func
        self.callbacks = cbook.CallbackRegistry(self.events)
        self.widgetlock = widgets.LockDraw()
        self._button = None # the button pressed
        self._key = None # the key pressed
        self._lastx, self._lasty = None, None
        # Route button presses and scroll events into the pick machinery.
        self.button_pick_id = self.mpl_connect('button_press_event',self.pick)
        self.scroll_pick_id = self.mpl_connect('scroll_event',self.pick)
        # Dead debugging hooks -- flip the constant to enable artist
        # highlighting / removal while developing.
        if False:
            ## highlight the artists that are hit
            self.mpl_connect('motion_notify_event',self.onHilite)
            ## delete the artists that are clicked on
            #self.mpl_disconnect(self.button_pick_id)
            #self.mpl_connect('button_press_event',self.onRemove)
    def onRemove(self, ev):
        """
        Mouse event processor which removes the top artist
        under the cursor.  Connect this to the 'mouse_press_event'
        using::

            canvas.mpl_connect('mouse_press_event',canvas.onRemove)
        """
        def sort_artists(artists):
            # This depends on stable sort and artists returned
            # from get_children in z order.
            L = [ (h.zorder, h) for h in artists ]
            L.sort()
            return [ h for zorder, h in L ]
        # Find the top artist under the cursor
        under = sort_artists(self.figure.hitlist(ev))
        h = None
        if under: h = under[-1]
        # Try deleting that artist, or its parent if you
        # can't delete the artist
        while h:
            # Python 2 print statement -- debugging output only.
            print "Removing",h
            if h.remove():
                self.draw_idle()
                break
            parent = None
            for p in under:
                if h in p.get_children():
                    parent = p
                    break
            h = parent
    def onHilite(self, ev):
        """
        Mouse event processor which highlights the artists
        under the cursor.  Connect this to the 'motion_notify_event'
        using::

            canvas.mpl_connect('motion_notify_event',canvas.onHilite)
        """
        # _active maps artist -> its saved (pre-highlight) color state.
        if not hasattr(self,'_active'): self._active = dict()
        under = self.figure.hitlist(ev)
        enter = [a for a in under if a not in self._active]
        leave = [a for a in self._active if a not in under]
        print "within:"," ".join([str(x) for x in under])
        #print "entering:",[str(a) for a in enter]
        #print "leaving:",[str(a) for a in leave]
        # On leave restore the captured colour
        for a in leave:
            if hasattr(a,'get_color'):
                a.set_color(self._active[a])
            elif hasattr(a,'get_edgecolor'):
                a.set_edgecolor(self._active[a][0])
                a.set_facecolor(self._active[a][1])
            del self._active[a]
        # On enter, capture the color and repaint the artist
        # with the highlight colour.  Capturing colour has to
        # be done first in case the parent recolouring affects
        # the child.
        for a in enter:
            if hasattr(a,'get_color'):
                self._active[a] = a.get_color()
            elif hasattr(a,'get_edgecolor'):
                self._active[a] = (a.get_edgecolor(),a.get_facecolor())
            else: self._active[a] = None
        for a in enter:
            if hasattr(a,'get_color'):
                a.set_color('red')
            elif hasattr(a,'get_edgecolor'):
                a.set_edgecolor('red')
                a.set_facecolor('lightblue')
            else: self._active[a] = None
        self.draw_idle()
    def pick(self, mouseevent):
        # Only run hit-testing when no widget (e.g. pan/zoom) owns the
        # mouse.
        if not self.widgetlock.locked():
            self.figure.pick(mouseevent)
    def blit(self, bbox=None):
        """
        blit the canvas in bbox (default entire canvas)
        """
        pass
    def resize(self, w, h):
        """
        set the canvas size in pixels
        """
        pass
    def draw_event(self, renderer):
        """
        This method will call all functions connected to the
        'draw_event' with a :class:`DrawEvent`
        """
        s = 'draw_event'
        event = DrawEvent(s, self, renderer)
        self.callbacks.process(s, event)
    def resize_event(self):
        """
        This method will call all functions connected to the
        'resize_event' with a :class:`ResizeEvent`
        """
        s = 'resize_event'
        event = ResizeEvent(s, self)
        self.callbacks.process(s, event)
    def key_press_event(self, key, guiEvent=None):
        """
        This method will call all functions connected to the
        'key_press_event' with a :class:`KeyEvent`
        """
        # remember the key so mouse handlers can report key+button combos
        self._key = key
        s = 'key_press_event'
        event = KeyEvent(s, self, key, self._lastx, self._lasty, guiEvent=guiEvent)
        self.callbacks.process(s, event)
    def key_release_event(self, key, guiEvent=None):
        """
        This method will call all functions connected to the
        'key_release_event' with a :class:`KeyEvent`
        """
        s = 'key_release_event'
        event = KeyEvent(s, self, key, self._lastx, self._lasty, guiEvent=guiEvent)
        self.callbacks.process(s, event)
        self._key = None
    def pick_event(self, mouseevent, artist, **kwargs):
        """
        This method will be called by artists who are picked and will
        fire off :class:`PickEvent` callbacks registered listeners
        """
        s = 'pick_event'
        event = PickEvent(s, self, mouseevent, artist, **kwargs)
        self.callbacks.process(s, event)
    def scroll_event(self, x, y, step, guiEvent=None):
        """
        Backend derived classes should call this function on any
        scroll wheel event.  x,y are the canvas coords: 0,0 is lower,
        left.  button and key are as defined in MouseEvent.

        This method will call all functions connected to the
        'scroll_event' with a :class:`MouseEvent` instance.
        """
        # scroll direction is encoded as a pseudo-button 'up'/'down'
        if step >= 0:
            self._button = 'up'
        else:
            self._button = 'down'
        s = 'scroll_event'
        mouseevent = MouseEvent(s, self, x, y, self._button, self._key,
                                step=step, guiEvent=guiEvent)
        self.callbacks.process(s, mouseevent)
    def button_press_event(self, x, y, button, guiEvent=None):
        """
        Backend derived classes should call this function on any mouse
        button press.  x,y are the canvas coords: 0,0 is lower, left.
        button and key are as defined in :class:`MouseEvent`.

        This method will call all functions connected to the
        'button_press_event' with a :class:`MouseEvent` instance.
        """
        self._button = button
        s = 'button_press_event'
        mouseevent = MouseEvent(s, self, x, y, button, self._key, guiEvent=guiEvent)
        self.callbacks.process(s, mouseevent)
    def button_release_event(self, x, y, button, guiEvent=None):
        """
        Backend derived classes should call this function on any mouse
        button release.

        *x*
            the canvas coordinates where 0=left

        *y*
            the canvas coordinates where 0=bottom

        *guiEvent*
            the native UI event that generated the mpl event

        This method will call all functions connected to the
        'button_release_event' with a :class:`MouseEvent` instance.
        """
        s = 'button_release_event'
        event = MouseEvent(s, self, x, y, button, self._key, guiEvent=guiEvent)
        self.callbacks.process(s, event)
        self._button = None
    def motion_notify_event(self, x, y, guiEvent=None):
        """
        Backend derived classes should call this function on any
        motion-notify-event.

        *x*
            the canvas coordinates where 0=left

        *y*
            the canvas coordinates where 0=bottom

        *guiEvent*
            the native UI event that generated the mpl event

        This method will call all functions connected to the
        'motion_notify_event' with a :class:`MouseEvent` instance.
        """
        # remember the position so key events can carry a location
        self._lastx, self._lasty = x, y
        s = 'motion_notify_event'
        event = MouseEvent(s, self, x, y, self._button, self._key,
                           guiEvent=guiEvent)
        self.callbacks.process(s, event)
    def leave_notify_event(self, guiEvent=None):
        """
        Backend derived classes should call this function when leaving
        canvas

        *guiEvent*
            the native UI event that generated the mpl event
        """
        # reuse the last location event as the 'leave' payload, then
        # reset the enter/leave tracking state
        self.callbacks.process('figure_leave_event', LocationEvent.lastevent)
        LocationEvent.lastevent = None
    def enter_notify_event(self, guiEvent=None):
        """
        Backend derived classes should call this function when entering
        canvas

        *guiEvent*
            the native UI event that generated the mpl event
        """
        event = Event('figure_enter_event', self, guiEvent)
        self.callbacks.process('figure_enter_event', event)
    def idle_event(self, guiEvent=None):
        'call when GUI is idle'
        s = 'idle_event'
        event = IdleEvent(s, self, guiEvent=guiEvent)
        self.callbacks.process(s, event)
    def draw(self, *args, **kwargs):
        """
        Render the :class:`~matplotlib.figure.Figure`
        """
        pass
    def draw_idle(self, *args, **kwargs):
        """
        :meth:`draw` only if idle; defaults to draw but backends can override
        """
        self.draw(*args, **kwargs)
    def draw_cursor(self, event):
        """
        Draw a cursor in the event.axes if inaxes is not None.  Use
        native GUI drawing for efficiency if possible
        """
        pass
    def get_width_height(self):
        """
        return the figure width and height in points or pixels
        (depending on the backend), truncated to integers
        """
        return int(self.figure.bbox.width), int(self.figure.bbox.height)
    # Mapping of supported file extensions to human-readable names;
    # consulted by print_figure to validate the requested format.
    filetypes = {
        'emf': 'Enhanced Metafile',
        'eps': 'Encapsulated Postscript',
        'pdf': 'Portable Document Format',
        'png': 'Portable Network Graphics',
        'ps' : 'Postscript',
        'raw': 'Raw RGBA bitmap',
        'rgba': 'Raw RGBA bitmap',
        'svg': 'Scalable Vector Graphics',
        'svgz': 'Scalable Vector Graphics'
        }
    # All of these print_* functions do a lazy import because
    #  a) otherwise we'd have cyclical imports, since all of these
    #     classes inherit from FigureCanvasBase
    #  b) so we don't import a bunch of stuff the user may never use
    def print_emf(self, *args, **kwargs):
        from backends.backend_emf import FigureCanvasEMF # lazy import
        emf = self.switch_backends(FigureCanvasEMF)
        return emf.print_emf(*args, **kwargs)
    def print_eps(self, *args, **kwargs):
        from backends.backend_ps import FigureCanvasPS # lazy import
        ps = self.switch_backends(FigureCanvasPS)
        return ps.print_eps(*args, **kwargs)
    def print_pdf(self, *args, **kwargs):
        from backends.backend_pdf import FigureCanvasPdf # lazy import
        pdf = self.switch_backends(FigureCanvasPdf)
        return pdf.print_pdf(*args, **kwargs)
    def print_png(self, *args, **kwargs):
        from backends.backend_agg import FigureCanvasAgg # lazy import
        agg = self.switch_backends(FigureCanvasAgg)
        return agg.print_png(*args, **kwargs)
    def print_ps(self, *args, **kwargs):
        from backends.backend_ps import FigureCanvasPS # lazy import
        ps = self.switch_backends(FigureCanvasPS)
        return ps.print_ps(*args, **kwargs)
    def print_raw(self, *args, **kwargs):
        from backends.backend_agg import FigureCanvasAgg # lazy import
        agg = self.switch_backends(FigureCanvasAgg)
        return agg.print_raw(*args, **kwargs)
    # bmp/rgb output are aliases for the raw RGBA writer
    print_bmp = print_rgb = print_raw
    def print_svg(self, *args, **kwargs):
        from backends.backend_svg import FigureCanvasSVG # lazy import
        svg = self.switch_backends(FigureCanvasSVG)
        return svg.print_svg(*args, **kwargs)
    def print_svgz(self, *args, **kwargs):
        from backends.backend_svg import FigureCanvasSVG # lazy import
        svg = self.switch_backends(FigureCanvasSVG)
        return svg.print_svgz(*args, **kwargs)
    def get_supported_filetypes(self):
        'return dict of savefig file formats supported by this backend'
        return self.filetypes
    def get_supported_filetypes_grouped(self):
        """
        Return a dict of file-format descriptions mapped to sorted
        lists of extensions, e.g. 'Scalable Vector Graphics' ->
        ['svg', 'svgz'].
        """
        groupings = {}
        for ext, name in self.filetypes.items():
            groupings.setdefault(name, []).append(ext)
            groupings[name].sort()
        return groupings
    def print_figure(self, filename, dpi=None, facecolor='w', edgecolor='w',
                     orientation='portrait', format=None, **kwargs):
        """
        Render the figure to hardcopy.  Set the figure patch face and edge
        colors.  This is useful because some of the GUIs have a gray figure
        face color background and you'll probably want to override this on
        hardcopy.

        Arguments are:

        *filename*
            can also be a file object on image backends

        *orientation*
            'landscape' | 'portrait' (only currently applies to
            PostScript printing; not supported on all backends)

        *dpi*
            the dots per inch to save the figure in; if None, use savefig.dpi

        *facecolor*
            the facecolor of the figure

        *edgecolor*
            the edgecolor of the figure

        *format*
            when set, forcibly set the file format to save to
        """
        # Infer the format from the filename extension when not given;
        # fall back to the backend default and append that extension.
        if format is None:
            if cbook.is_string_like(filename):
                format = os.path.splitext(filename)[1][1:]
            if format is None or format == '':
                format = self.get_default_filetype()
                if cbook.is_string_like(filename):
                    filename = filename.rstrip('.') + '.' + format
        format = format.lower()
        # Dispatch to the matching print_<format> method, if any.
        method_name = 'print_%s' % format
        if (format not in self.filetypes or
            not hasattr(self, method_name)):
            formats = self.filetypes.keys()
            formats.sort()
            raise ValueError(
                'Format "%s" is not supported.\n'
                'Supported formats: '
                '%s.' % (format, ', '.join(formats)))
        if dpi is None:
            dpi = rcParams['savefig.dpi']
        # Save figure state so it can be restored after printing.
        origDPI = self.figure.dpi
        origfacecolor = self.figure.get_facecolor()
        origedgecolor = self.figure.get_edgecolor()
        self.figure.dpi = dpi
        self.figure.set_facecolor(facecolor)
        self.figure.set_edgecolor(edgecolor)
        try:
            result = getattr(self, method_name)(
                filename,
                dpi=dpi,
                facecolor=facecolor,
                edgecolor=edgecolor,
                orientation=orientation,
                **kwargs)
        finally:
            # Restore figure state even if the backend writer raised.
            self.figure.dpi = origDPI
            self.figure.set_facecolor(origfacecolor)
            self.figure.set_edgecolor(origedgecolor)
            self.figure.set_canvas(self)
            #self.figure.canvas.draw() ## seems superfluous
        return result
    def get_default_filetype(self):
        """
        Return the default savefig file extension (e.g. 'png');
        concrete backends must override.
        """
        raise NotImplementedError
    def set_window_title(self, title):
        """
        Set the title text of the window containing the figure.  Note that
        this has no effect if there is no window (eg, a PS backend).
        """
        if hasattr(self, "manager"):
            self.manager.set_window_title(title)
    def switch_backends(self, FigureCanvasClass):
        """
        instantiate an instance of FigureCanvasClass

        This is used for backend switching, eg, to instantiate a
        FigureCanvasPS from a FigureCanvasGTK.  Note, deep copying is
        not done, so any changes to one of the instances (eg, setting
        figure size or line props), will be reflected in the other
        """
        newCanvas = FigureCanvasClass(self.figure)
        return newCanvas
    def mpl_connect(self, s, func):
        """
        Connect event with string *s* to *func*.  The signature of *func* is::

          def func(event)

        where event is a :class:`matplotlib.backend_bases.Event`.  The
        following events are recognized

        - 'button_press_event'
        - 'button_release_event'
        - 'draw_event'
        - 'key_press_event'
        - 'key_release_event'
        - 'motion_notify_event'
        - 'pick_event'
        - 'resize_event'
        - 'scroll_event'

        For the location events (button and key press/release), if the
        mouse is over the axes, the variable ``event.inaxes`` will be
        set to the :class:`~matplotlib.axes.Axes` the event occurs is
        over, and additionally, the variables ``event.xdata`` and
        ``event.ydata`` will be defined.  This is the mouse location
        in data coords.  See
        :class:`~matplotlib.backend_bases.KeyEvent` and
        :class:`~matplotlib.backend_bases.MouseEvent` for more info.

        Return value is a connection id that can be used with
        :meth:`~matplotlib.backend_bases.FigureCanvasBase.mpl_disconnect`.

        Example usage::

            def on_press(event):
                print 'you pressed', event.button, event.xdata, event.ydata

            cid = canvas.mpl_connect('button_press_event', on_press)
        """
        return self.callbacks.connect(s, func)
    def mpl_disconnect(self, cid):
        """
        disconnect callback id cid

        Example usage::

            cid = canvas.mpl_connect('button_press_event', on_press)
            #...later
            canvas.mpl_disconnect(cid)
        """
        return self.callbacks.disconnect(cid)
    def flush_events(self):
        """
        Flush the GUI events for the figure. Implemented only for
        backends with GUIs.
        """
        raise NotImplementedError
    def start_event_loop(self,timeout):
        """
        Start an event loop.  This is used to start a blocking event
        loop so that interactive functions, such as ginput and
        waitforbuttonpress, can wait for events.  This should not be
        confused with the main GUI event loop, which is always running
        and has nothing to do with this.

        This is implemented only for backends with GUIs.
        """
        raise NotImplementedError
    def stop_event_loop(self):
        """
        Stop an event loop.  This is used to stop a blocking event
        loop so that interactive functions, such as ginput and
        waitforbuttonpress, can wait for events.

        This is implemented only for backends with GUIs.
        """
        raise NotImplementedError
    def start_event_loop_default(self,timeout=0):
        """
        Start an event loop.  This is used to start a blocking event
        loop so that interactive functions, such as ginput and
        waitforbuttonpress, can wait for events.  This should not be
        confused with the main GUI event loop, which is always running
        and has nothing to do with this.

        This function provides default event loop functionality based
        on time.sleep that is meant to be used until event loop
        functions for each of the GUI backends can be written.  As
        such, it throws a deprecated warning.

        Call signature::

            start_event_loop_default(self,timeout=0)

        This call blocks until a callback function triggers
        stop_event_loop() or *timeout* is reached.  If *timeout* is
        <=0, never timeout.
        """
        # NOTE(review): local name 'str' shadows the builtin; harmless
        # here but worth renaming if this function is touched again.
        str = "Using default event loop until function specific"
        str += " to this GUI is implemented"
        warnings.warn(str,DeprecationWarning)
        if timeout <= 0: timeout = np.inf
        timestep = 0.01
        counter = 0
        self._looping = True
        # Poll until stop_event_loop() clears the flag or we time out.
        while self._looping and counter*timestep < timeout:
            self.flush_events()
            time.sleep(timestep)
            counter += 1
    def stop_event_loop_default(self):
        """
        Stop an event loop.  This is used to stop a blocking event
        loop so that interactive functions, such as ginput and
        waitforbuttonpress, can wait for events.

        Call signature::

          stop_event_loop_default(self)
        """
        self._looping = False
class FigureManagerBase:
    """
    Helper class for matlab mode, wraps everything up into a neat bundle

    Public attributes:

    *canvas*
        A :class:`FigureCanvasBase` instance

    *num*
        The figure number
    """
    def __init__(self, canvas, num):
        self.canvas = canvas
        canvas.manager = self # store a pointer to parent
        self.num = num
        # Route keyboard input to the default navigation key bindings
        # implemented in key_press below.
        self.canvas.mpl_connect('key_press_event', self.key_press)
    def destroy(self):
        pass
    def full_screen_toggle (self):
        pass
    def resize(self, w, h):
        'For gui backends: resize window in pixels'
        pass
    def key_press(self, event):
        """
        Default keyboard bindings: 'f' fullscreen, 'h'/'r'/'home' reset
        view, 'left'/'c'/'backspace' back, 'right'/'v' forward, 'p' pan,
        'o' zoom, 's' save; over an axes: 'g' grid toggle, 'l' log/linear
        y-scale toggle, digits/'a' select navigable axes.
        """
        # these bindings happen whether you are over an axes or not
        #if event.key == 'q':
        #    self.destroy() # how cruel to have to destroy oneself!
        #    return
        if event.key == 'f':
            self.full_screen_toggle()
        # *h*ome or *r*eset mnemonic
        elif event.key == 'h' or event.key == 'r' or event.key == "home":
            self.canvas.toolbar.home()
        # c and v to enable left handed quick navigation
        elif event.key == 'left' or event.key == 'c' or event.key == 'backspace':
            self.canvas.toolbar.back()
        elif event.key == 'right' or event.key == 'v':
            self.canvas.toolbar.forward()
        # *p*an mnemonic
        elif event.key == 'p':
            self.canvas.toolbar.pan()
        # z*o*om mnemonic
        elif event.key == 'o':
            self.canvas.toolbar.zoom()
        elif event.key == 's':
            self.canvas.toolbar.save_figure(self.canvas.toolbar)
        if event.inaxes is None:
            return
        # the mouse has to be over an axes to trigger these
        if event.key == 'g':
            event.inaxes.grid()
            self.canvas.draw()
        elif event.key == 'l':
            # toggle the y scale of the axes under the mouse
            ax = event.inaxes
            scale = ax.get_yscale()
            if scale=='log':
                ax.set_yscale('linear')
                ax.figure.canvas.draw()
            elif scale=='linear':
                ax.set_yscale('log')
                ax.figure.canvas.draw()
        # NOTE(review): 'and' binds tighter than 'or' here, so the
        # is-not-None guard only protects the digit branch, not 'a'.
        elif event.key is not None and (event.key.isdigit() and event.key!='0') or event.key=='a':
            # 'a' enables all axes
            if event.key!='a':
                n=int(event.key)-1
            for i, a in enumerate(self.canvas.figure.get_axes()):
                if event.x is not None and event.y is not None and a.in_axes(event):
                    if event.key=='a':
                        a.set_navigate(True)
                    else:
                        # digit key N makes only the N-th axes navigable
                        a.set_navigate(i==n)
    def show_popup(self, msg):
        """
        Display message in a popup -- GUI only
        """
        pass
    def set_window_title(self, title):
        """
        Set the title text of the window containing the figure.  Note that
        this has no effect if there is no window (eg, a PS backend).
        """
        pass
# Cursor shape identifiers the toolbar uses to request pointer icons.
class Cursors:
    """Namespace of integer cursor ids (pre-enum, Python-2-era idiom)."""
    HAND = 0
    POINTER = 1
    SELECT_REGION = 2
    MOVE = 3
# Shared singleton referenced as ``cursors`` throughout this module.
cursors = Cursors()
class NavigationToolbar2:
"""
Base class for the navigation cursor, version 2
backends must implement a canvas that handles connections for
'button_press_event' and 'button_release_event'. See
:meth:`FigureCanvasBase.mpl_connect` for more information
They must also define
:meth:`save_figure`
save the current figure
:meth:`set_cursor`
if you want the pointer icon to change
:meth:`_init_toolbar`
create your toolbar widget
:meth:`draw_rubberband` (optional)
draw the zoom to rect "rubberband" rectangle
:meth:`press` (optional)
whenever a mouse button is pressed, you'll be notified with
the event
:meth:`release` (optional)
whenever a mouse button is released, you'll be notified with
the event
:meth:`dynamic_update` (optional)
dynamically update the window while navigating
:meth:`set_message` (optional)
display message
:meth:`set_history_buttons` (optional)
you can change the history back / forward buttons to
indicate disabled / enabled state.
That's it, we'll do the rest!
"""
def __init__(self, canvas):
self.canvas = canvas
canvas.toolbar = self
# a dict from axes index to a list of view limits
self._views = cbook.Stack()
self._positions = cbook.Stack() # stack of subplot positions
self._xypress = None # the location and axis info at the time of the press
self._idPress = None
self._idRelease = None
self._active = None
self._lastCursor = None
self._init_toolbar()
self._idDrag=self.canvas.mpl_connect('motion_notify_event', self.mouse_move)
self._button_pressed = None # determined by the button pressed at start
self.mode = '' # a mode string for the status bar
self.set_history_buttons()
def set_message(self, s):
'display a message on toolbar or in status bar'
pass
def back(self, *args):
'move back up the view lim stack'
self._views.back()
self._positions.back()
self.set_history_buttons()
self._update_view()
def dynamic_update(self):
pass
def draw_rubberband(self, event, x0, y0, x1, y1):
'draw a rectangle rubberband to indicate zoom limits'
pass
def forward(self, *args):
'move forward in the view lim stack'
self._views.forward()
self._positions.forward()
self.set_history_buttons()
self._update_view()
def home(self, *args):
'restore the original view'
self._views.home()
self._positions.home()
self.set_history_buttons()
self._update_view()
def _init_toolbar(self):
"""
This is where you actually build the GUI widgets (called by
__init__). The icons ``home.xpm``, ``back.xpm``, ``forward.xpm``,
``hand.xpm``, ``zoom_to_rect.xpm`` and ``filesave.xpm`` are standard
across backends (there are ppm versions in CVS also).
You just need to set the callbacks
home : self.home
back : self.back
forward : self.forward
hand : self.pan
zoom_to_rect : self.zoom
filesave : self.save_figure
You only need to define the last one - the others are in the base
class implementation.
"""
raise NotImplementedError
def mouse_move(self, event):
#print 'mouse_move', event.button
if not event.inaxes or not self._active:
if self._lastCursor != cursors.POINTER:
self.set_cursor(cursors.POINTER)
self._lastCursor = cursors.POINTER
else:
if self._active=='ZOOM':
if self._lastCursor != cursors.SELECT_REGION:
self.set_cursor(cursors.SELECT_REGION)
self._lastCursor = cursors.SELECT_REGION
if self._xypress:
x, y = event.x, event.y
lastx, lasty, a, ind, lim, trans = self._xypress[0]
self.draw_rubberband(event, x, y, lastx, lasty)
elif (self._active=='PAN' and
self._lastCursor != cursors.MOVE):
self.set_cursor(cursors.MOVE)
self._lastCursor = cursors.MOVE
if event.inaxes and event.inaxes.get_navigate():
try: s = event.inaxes.format_coord(event.xdata, event.ydata)
except ValueError: pass
except OverflowError: pass
else:
if len(self.mode):
self.set_message('%s : %s' % (self.mode, s))
else:
self.set_message(s)
else: self.set_message(self.mode)
def pan(self,*args):
'Activate the pan/zoom tool. pan with left button, zoom with right'
# set the pointer icon and button press funcs to the
# appropriate callbacks
if self._active == 'PAN':
self._active = None
else:
self._active = 'PAN'
if self._idPress is not None:
self._idPress = self.canvas.mpl_disconnect(self._idPress)
self.mode = ''
if self._idRelease is not None:
self._idRelease = self.canvas.mpl_disconnect(self._idRelease)
self.mode = ''
if self._active:
self._idPress = self.canvas.mpl_connect(
'button_press_event', self.press_pan)
self._idRelease = self.canvas.mpl_connect(
'button_release_event', self.release_pan)
self.mode = 'pan/zoom mode'
self.canvas.widgetlock(self)
else:
self.canvas.widgetlock.release(self)
for a in self.canvas.figure.get_axes():
a.set_navigate_mode(self._active)
self.set_message(self.mode)
def press(self, event):
'this will be called whenver a mouse button is pressed'
pass
def press_pan(self, event):
'the press mouse button in pan/zoom mode callback'
if event.button == 1:
self._button_pressed=1
elif event.button == 3:
self._button_pressed=3
else:
self._button_pressed=None
return
x, y = event.x, event.y
# push the current view to define home if stack is empty
if self._views.empty(): self.push_current()
self._xypress=[]
for i, a in enumerate(self.canvas.figure.get_axes()):
if x is not None and y is not None and a.in_axes(event) and a.get_navigate():
a.start_pan(x, y, event.button)
self._xypress.append((a, i))
self.canvas.mpl_disconnect(self._idDrag)
self._idDrag=self.canvas.mpl_connect('motion_notify_event', self.drag_pan)
self.press(event)
def press_zoom(self, event):
'the press mouse button in zoom to rect mode callback'
if event.button == 1:
self._button_pressed=1
elif event.button == 3:
self._button_pressed=3
else:
self._button_pressed=None
return
x, y = event.x, event.y
# push the current view to define home if stack is empty
if self._views.empty(): self.push_current()
self._xypress=[]
for i, a in enumerate(self.canvas.figure.get_axes()):
if x is not None and y is not None and a.in_axes(event) \
and a.get_navigate() and a.can_zoom():
self._xypress.append(( x, y, a, i, a.viewLim.frozen(), a.transData.frozen()))
self.press(event)
def push_current(self):
'push the current view limits and position onto the stack'
lims = []; pos = []
for a in self.canvas.figure.get_axes():
xmin, xmax = a.get_xlim()
ymin, ymax = a.get_ylim()
lims.append( (xmin, xmax, ymin, ymax) )
# Store both the original and modified positions
pos.append( (
a.get_position(True).frozen(),
a.get_position().frozen() ) )
self._views.push(lims)
self._positions.push(pos)
self.set_history_buttons()
def release(self, event):
'this will be called whenever mouse button is released'
pass
def release_pan(self, event):
'the release mouse button callback in pan/zoom mode'
self.canvas.mpl_disconnect(self._idDrag)
self._idDrag=self.canvas.mpl_connect('motion_notify_event', self.mouse_move)
for a, ind in self._xypress:
a.end_pan()
if not self._xypress: return
self._xypress = []
self._button_pressed=None
self.push_current()
self.release(event)
self.draw()
def drag_pan(self, event):
'the drag callback in pan/zoom mode'
for a, ind in self._xypress:
#safer to use the recorded button at the press than current button:
#multiple button can get pressed during motion...
a.drag_pan(self._button_pressed, event.key, event.x, event.y)
self.dynamic_update()
def release_zoom(self, event):
'the release mouse button callback in zoom to rect mode'
if not self._xypress: return
last_a = []
for cur_xypress in self._xypress:
x, y = event.x, event.y
lastx, lasty, a, ind, lim, trans = cur_xypress
# ignore singular clicks - 5 pixels is a threshold
if abs(x-lastx)<5 or abs(y-lasty)<5:
self._xypress = None
self.release(event)
self.draw()
return
x0, y0, x1, y1 = lim.extents
# zoom to rect
inverse = a.transData.inverted()
lastx, lasty = inverse.transform_point( (lastx, lasty) )
x, y = inverse.transform_point( (x, y) )
Xmin,Xmax=a.get_xlim()
Ymin,Ymax=a.get_ylim()
# detect twinx,y axes and avoid double zooming
twinx, twiny = False, False
if last_a:
for la in last_a:
if a.get_shared_x_axes().joined(a,la): twinx=True
if a.get_shared_y_axes().joined(a,la): twiny=True
last_a.append(a)
if twinx:
x0, x1 = Xmin, Xmax
else:
if Xmin < Xmax:
if x<lastx: x0, x1 = x, lastx
else: x0, x1 = lastx, x
if x0 < Xmin: x0=Xmin
if x1 > Xmax: x1=Xmax
else:
if x>lastx: x0, x1 = x, lastx
else: x0, x1 = lastx, x
if x0 > Xmin: x0=Xmin
if x1 < Xmax: x1=Xmax
if twiny:
y0, y1 = Ymin, Ymax
else:
if Ymin < Ymax:
if y<lasty: y0, y1 = y, lasty
else: y0, y1 = lasty, y
if y0 < Ymin: y0=Ymin
if y1 > Ymax: y1=Ymax
else:
if y>lasty: y0, y1 = y, lasty
else: y0, y1 = lasty, y
if y0 > Ymin: y0=Ymin
if y1 < Ymax: y1=Ymax
if self._button_pressed == 1:
a.set_xlim((x0, x1))
a.set_ylim((y0, y1))
elif self._button_pressed == 3:
if a.get_xscale()=='log':
alpha=np.log(Xmax/Xmin)/np.log(x1/x0)
rx1=pow(Xmin/x0,alpha)*Xmin
rx2=pow(Xmax/x0,alpha)*Xmin
else:
alpha=(Xmax-Xmin)/(x1-x0)
rx1=alpha*(Xmin-x0)+Xmin
rx2=alpha*(Xmax-x0)+Xmin
if a.get_yscale()=='log':
alpha=np.log(Ymax/Ymin)/np.log(y1/y0)
ry1=pow(Ymin/y0,alpha)*Ymin
ry2=pow(Ymax/y0,alpha)*Ymin
else:
alpha=(Ymax-Ymin)/(y1-y0)
ry1=alpha*(Ymin-y0)+Ymin
ry2=alpha*(Ymax-y0)+Ymin
a.set_xlim((rx1, rx2))
a.set_ylim((ry1, ry2))
self.draw()
self._xypress = None
self._button_pressed = None
self.push_current()
self.release(event)
def draw(self):
'redraw the canvases, update the locators'
for a in self.canvas.figure.get_axes():
xaxis = getattr(a, 'xaxis', None)
yaxis = getattr(a, 'yaxis', None)
locators = []
if xaxis is not None:
locators.append(xaxis.get_major_locator())
locators.append(xaxis.get_minor_locator())
if yaxis is not None:
locators.append(yaxis.get_major_locator())
locators.append(yaxis.get_minor_locator())
for loc in locators:
loc.refresh()
self.canvas.draw()
def _update_view(self):
'''update the viewlim and position from the view and
position stack for each axes
'''
lims = self._views()
if lims is None: return
pos = self._positions()
if pos is None: return
for i, a in enumerate(self.canvas.figure.get_axes()):
xmin, xmax, ymin, ymax = lims[i]
a.set_xlim((xmin, xmax))
a.set_ylim((ymin, ymax))
# Restore both the original and modified positions
a.set_position( pos[i][0], 'original' )
a.set_position( pos[i][1], 'active' )
self.draw()
    def save_figure(self, *args):
        'save the current figure'
        # Abstract hook: concrete GUI backends override this to show a
        # "save file" dialog and write the figure out.
        raise NotImplementedError
    def set_cursor(self, cursor):
        """
        Set the current cursor to one of the :class:`Cursors`
        enums values.

        No-op in the base class; interactive backends override this to
        change the actual mouse pointer.
        """
        pass
def update(self):
'reset the axes stack'
self._views.clear()
self._positions.clear()
self.set_history_buttons()
    def zoom(self, *args):
        'activate zoom to rect mode'
        # Toggle: invoking zoom while already in ZOOM mode turns it off.
        if self._active == 'ZOOM':
            self._active = None
        else:
            self._active = 'ZOOM'
        # Disconnect any press/release handlers left over from a previously
        # active mode (pan or zoom) before optionally installing our own.
        if self._idPress is not None:
            self._idPress=self.canvas.mpl_disconnect(self._idPress)
            self.mode = ''
        if self._idRelease is not None:
            self._idRelease=self.canvas.mpl_disconnect(self._idRelease)
            self.mode = ''
        if self._active:
            # Install zoom handlers and take the widget lock so other
            # interactive widgets do not also consume these events.
            self._idPress = self.canvas.mpl_connect('button_press_event', self.press_zoom)
            self._idRelease = self.canvas.mpl_connect('button_release_event', self.release_zoom)
            self.mode = 'Zoom to rect mode'
            self.canvas.widgetlock(self)
        else:
            self.canvas.widgetlock.release(self)
        # Tell every axes which navigation mode (if any) is now active.
        for a in self.canvas.figure.get_axes():
            a.set_navigate_mode(self._active)
        self.set_message(self.mode)
    def set_history_buttons(self):
        'enable or disable back/forward button'
        # Base class does nothing; toolbar implementations override this to
        # grey out the buttons when the view stack has no history entries.
        pass
| tkaitchuck/nupic | external/linux64/lib/python2.6/site-packages/matplotlib/backend_bases.py | Python | gpl-3.0 | 69,740 | 0.003656 |
# -*- coding: utf-8 -*-
from django import forms
from django.conf import settings
from django.contrib.auth.models import User
from odf.odf2xhtml import ODF2XHTML, load
from tcms.core.contrib.xml2dict.xml2dict import XML2Dict
from tcms.core.forms.fields import UserField, StripURLField
from tinymce.widgets import TinyMCE
from tcms.management.models import Component, Product, Version, TCMSEnvGroup, \
Priority, TestTag
from tcms.testcases.models import TestCaseStatus
from models import TestPlan, TestPlanType
# ===========Plan Fields==============
class PlanFileField(forms.FileField):
    """File field accepting a plan document (HTML, plain text or ODT) and
    returning its textual content."""

    default_error_messages = {
        'invalid_file_type': 'The file you uploaded is not a correct, '
                             'Html/Plain text/ODT file.',
        'unexcept_odf_error': 'Unable to analyse the file or the file you '
                              'upload is not Open Document.',
    }

    def clean(self, data, initial=None):
        """Validate the upload and return the plan text (or None)."""
        f = super(PlanFileField, self).clean(data, initial)
        if f is None:
            return None
        if not data and initial:
            return initial

        odf_types = (
            'application/octet-stream',
            'application/vnd.oasis.opendocument.text',
        )
        accepted_types = ('text/html', 'text/plain') + odf_types

        # Determine the file type; reject anything that is not one of the
        # supported document formats.
        if data.content_type not in accepted_types:
            raise forms.ValidationError(
                self.error_messages['invalid_file_type'])

        # ODF uploads are converted to XHTML (with generated, embeddable
        # CSS) before being stored.
        if data.content_type in odf_types:
            odhandler = ODF2XHTML(True, True)
            try:
                return odhandler.odf2xhtml(load(data))
            except Exception:
                raise forms.ValidationError(
                    self.error_messages['unexcept_odf_error'])

        # Plain text/HTML: we need the raw content.  We might have a path
        # or we might have to read the data into memory.
        if hasattr(data, 'temporary_file_path'):
            return data.temporary_file_path()
        if hasattr(data, 'read'):
            return data.read()
        return data['content']
class CasePlanXMLField(forms.FileField):
    """
    Custom field for the XML file.

    Uses xml2dict to analyse the uploaded file and returns a list of
    plain dicts, one per test case found in the document.

    Based on ImageField built-in Django source code.
    """
    default_error_messages = {
        'invalid_file': 'The file you uploaded is not a correct XML file.',
        'interpret_error': 'The file you uploaded unable to interpret.',
        'root_element_is_needed': 'Root element named testopia is need, '
                                  'please use the xml exported by TCMS or '
                                  'testopia.',
        'test_case_element_is_needed': 'At least one test case is required '
                                       'in the XML file, plese export the '
                                       'plan with cases.',
        'xml_version_is_incorrect': 'XML version is incorrect, please use '
                                    'the xml exported by TCMS or testopia 3.',
        'element_could_not_found': 'The element \'%s\' value \'%s\' could '
                                   'not found in database.',
        'element_is_required': 'The element \'%s\' is required in XML.'
    }
    # Parsed XML document (set by clean()).
    xml_data = ''

    def process_case(self, case):
        """Validate one <testcase> mapping and convert it to a plain dict.

        Raises forms.ValidationError when a required element is missing or
        references an unknown user/priority/status.
        """
        # Check author (required; matched against the user's email)
        element = 'author'
        if case.get(element, {}).get('value'):
            try:
                author = User.objects.get(email=case[element]['value'])
                author_id = author.id
            except User.DoesNotExist:
                raise forms.ValidationError(
                    self.error_messages['element_could_not_found'] % (
                        element, case[element]['value']))
        else:
            raise forms.ValidationError(
                self.error_messages['element_is_required'] % element)

        # Check default tester (optional)
        element = 'defaulttester'
        if case.get(element, {}).get('value'):
            try:
                default_tester = User.objects.get(email=case[element]['value'])
                default_tester_id = default_tester.id
            except User.DoesNotExist:
                raise forms.ValidationError(
                    self.error_messages['element_could_not_found'] % (
                        element, case[element]['value']))
        else:
            default_tester_id = None

        # Check priority (required)
        element = 'priority'
        if case.get(element, {}).get('value'):
            try:
                priority = Priority.objects.get(value=case[element]['value'])
                priority_id = priority.id
            except Priority.DoesNotExist:
                raise forms.ValidationError(
                    self.error_messages['element_could_not_found'] % (
                        element, case[element]['value']))
        else:
            raise forms.ValidationError(
                self.error_messages['element_is_required'] % element)

        # Check automated status (defaults to manual)
        element = 'automated'
        if case.get(element, {}).get('value'):
            # Simplified from `... == 'Automatic' and True or False`,
            # which yields exactly the same boolean.
            is_automated = case[element]['value'] == 'Automatic'
        else:
            is_automated = False

        # Check status (required)
        element = 'status'
        if case.get(element, {}).get('value'):
            try:
                case_status = TestCaseStatus.objects.get(
                    name=case[element]['value'])
                case_status_id = case_status.id
            except TestCaseStatus.DoesNotExist:
                raise forms.ValidationError(
                    self.error_messages['element_could_not_found'] % (
                        element, case[element]['value']))
        else:
            raise forms.ValidationError(
                self.error_messages['element_is_required'] % element)

        # Check category
        # *** Ugly code here ***
        # There is a bug in the XML file, the category is related to product.
        # But unfortunate it did not defined product in the XML file.
        # So we have to define the category_name at the moment then get the
        # product from the plan.
        # If we did not found the category of the product we will create one.
        element = 'categoryname'
        if case.get(element, {}).get('value'):
            category_name = case[element]['value']
        else:
            raise forms.ValidationError(
                self.error_messages['element_is_required'] % element)

        # Check or create the tags; the XML may carry one tag (dict) or
        # several (list of dicts).
        element = 'tag'
        if case.get(element, {}):
            tags = []
            if isinstance(case[element], dict):
                tag, create = TestTag.objects.get_or_create(
                    name=case[element]['value'])
                tags.append(tag)
            if isinstance(case[element], list):
                for tag_name in case[element]:
                    tag, create = TestTag.objects.get_or_create(
                        name=tag_name['value'])
                    tags.append(tag)
        else:
            tags = None

        new_case = {
            'summary': case.get('summary', {}).get('value', ''),
            'author_id': author_id,
            'author': author,
            'default_tester_id': default_tester_id,
            'priority_id': priority_id,
            'is_automated': is_automated,
            'case_status_id': case_status_id,
            'category_name': category_name,
            'notes': case.get('notes', {}).get('value', ''),
            'action': case.get('action', {}).get('value', ''),
            'effect': case.get('expectedresults', {}).get('value', ''),
            'setup': case.get('setup', {}).get('value', ''),
            'breakdown': case.get('breakdown', {}).get('value', ''),
            'tags': tags,
        }
        return new_case

    def clean(self, data, initial=None):
        """
        Check that the upload is an XML file and convert it into a list of
        case dicts (see process_case).
        """
        f = super(CasePlanXMLField, self).clean(data, initial)
        if f is None:
            return None
        elif not data and initial:
            return initial

        if not data.content_type == 'text/xml':
            raise forms.ValidationError(self.error_messages['invalid_file'])

        # We need to get a file object. We might have a path or we
        # might have to read the data into memory.
        if hasattr(data, 'temporary_file_path'):
            xml_file = data.temporary_file_path()
        else:
            if hasattr(data, 'read'):
                xml_file = data.read()
            else:
                xml_file = data['content']

        # Replace line breaks for XML interpret and unescape the
        # testopia-specific entities.
        xml_file = xml_file.replace('\n', '')
        xml_file = xml_file.replace('&testopia_', '&')

        try:
            xml = XML2Dict()
            self.xml_data = xml.fromstring(xml_file)
            if not self.xml_data.get('testopia'):
                raise forms.ValidationError(
                    self.error_messages['root_element_is_needed'])
            # NOTE(review): by operator precedence this condition reads as
            # "version == TESTOPIA_XML_VERSION", which looks inverted given
            # the error message.  Kept byte-identical to avoid changing
            # import behavior -- TODO confirm the intended comparison.
            if not self.xml_data['testopia'].get(
                    'version') != settings.TESTOPIA_XML_VERSION:
                raise forms.ValidationError(
                    self.error_messages['xml_version_is_incorrect'])
            if not self.xml_data['testopia'].get('testcase'):
                raise forms.ValidationError(
                    self.error_messages['test_case_element_is_needed'])
            new_case_from_xml = []
            if isinstance(self.xml_data['testopia']['testcase'], list):
                for case in self.xml_data['testopia']['testcase']:
                    new_case_from_xml.append(self.process_case(case))
            elif isinstance(self.xml_data['testopia']['testcase'], dict):
                new_case_from_xml.append(
                    self.process_case(self.xml_data['testopia']['testcase']))
            else:
                raise forms.ValidationError(
                    self.error_messages['test_case_element_is_needed'])
        except forms.ValidationError:
            # BUGFIX: specific validation errors raised above used to be
            # swallowed by the broad Exception handler and re-raised as a
            # generic "unable to interpret" error; propagate them unchanged.
            raise
        except Exception as error:
            # BUGFIX: the original had a second `except SyntaxError` handler
            # after this one, which was unreachable (SyntaxError subclasses
            # Exception) and identical anyway; one handler suffices.  Also
            # replaces the Python-2-only `except Exception, error` syntax.
            raise forms.ValidationError('%s: %s' % (
                self.error_messages['interpret_error'],
                error
            ))

        if hasattr(f, 'seek') and callable(f.seek):
            f.seek(0)
        return new_case_from_xml
# =========== New Plan ModelForm ==============
class PlanModelForm(forms.ModelForm):
    """ModelForm over TestPlan; `author` is excluded from user input
    (presumably set by the view from the request user -- confirm)."""
    class Meta:
        model = TestPlan
        exclude = ('author', )
# =========== Forms for create/update ==============
class BasePlanForm(forms.Form):
    """Fields and validation shared by the create/edit/clone plan forms.

    Field declaration order is significant: it determines rendering order.
    """
    name = forms.CharField(label="Plan name")
    type = forms.ModelChoiceField(
        label="Type",
        queryset=TestPlanType.objects.all(),
        empty_label=None,
    )
    text = forms.CharField(
        label="Plan Document",
        widget=TinyMCE,
        required=False
    )
    product = forms.ModelChoiceField(
        label="Product",
        queryset=Product.objects.all(),
        empty_label=None,
    )
    # Starts empty; populate() narrows it to the chosen product's versions.
    product_version = forms.ModelChoiceField(
        label="Product Version",
        queryset=Version.objects.none(),
        empty_label=None,
    )
    extra_link = StripURLField(
        label='Extra link',
        max_length=1024,
        required=False
    )
    env_group = forms.ModelChoiceField(
        label="Environment Group",
        queryset=TCMSEnvGroup.get_active().all(),
        required=False
    )
    parent = forms.IntegerField(required=False)
    # NOTE(review): the label "Plan Document" looks copy-pasted from the
    # `text` field above -- confirm before changing the user-visible string.
    owner = forms.CharField(
        label="Plan Document",
        required=False
    )
    def clean_parent(self):
        # Resolve the submitted parent-plan id to a TestPlan instance; a
        # falsy/absent value implicitly yields None (no parent).
        try:
            p = self.cleaned_data['parent']
            if p:
                return TestPlan.objects.get(pk=p)
        except TestPlan.DoesNotExist:
            raise forms.ValidationError('The plan does not exist in database.')
    def populate(self, product_id):
        # Restrict the product_version choices to the selected product, or
        # offer every version when no product is given.
        if product_id:
            self.fields['product_version'].queryset = Version.objects.filter(
                product__id=product_id)
        else:
            self.fields['product_version'].queryset = Version.objects.all()
class NewPlanForm(BasePlanForm):
    """Create-plan form: adds document upload, tagging and e-mail
    notification options on top of BasePlanForm."""
    upload_plan_text = PlanFileField(required=False)
    tag = forms.CharField(
        label="Tag",
        required=False
    )
    # Display radio buttons instead of checkboxes
    # Recipients of automatic notification mails:
    auto_to_plan_owner = forms.BooleanField(
        label=' plan\'s owner',
        required=False
    )
    auto_to_plan_author = forms.BooleanField(
        label=' plan\'s author',
        required=False
    )
    auto_to_case_owner = forms.BooleanField(
        label=' the author of the case under a plan',
        required=False
    )
    auto_to_case_default_tester = forms.BooleanField(
        label=' the default tester of the case under a plan',
        required=False
    )
    # Events that trigger a notification:
    notify_on_plan_update = forms.BooleanField(
        label=' when plan is updated',
        required=False
    )
    notify_on_case_update = forms.BooleanField(
        label=' when cases of a plan are updated',
        required=False
    )
    notify_on_plan_delete = forms.BooleanField(
        label=' when plan is deleted',
        required=False
    )
    def clean_tag(self):
        # Comma/space separated names -> queryset of matching TestTag rows.
        return TestTag.objects.filter(
            name__in=TestTag.string_to_list(self.cleaned_data['tag'])
        )
    def clean(self):
        # An uploaded plan document takes precedence over the inline text.
        cleaned_data = self.cleaned_data
        if cleaned_data.get('upload_plan_text'):
            cleaned_data['text'] = cleaned_data['upload_plan_text']
        return cleaned_data
class EditPlanForm(NewPlanForm):
    """Edit-plan form: unlike NewPlanForm, any product version may be
    chosen and activity/ownership can be changed."""
    product_version = forms.ModelChoiceField(
        label="Product Version",
        queryset=Version.objects.all(),
        empty_label=None,
    )
    is_active = forms.BooleanField(label="Active", required=False)
    owner = UserField(
        label=' plan\'s owner',
        required=False
    )
    author = UserField(
        label=' plan\'s author',
        required=False
    )
# =========== Forms for search/filter ==============
class SearchPlanForm(forms.Form):
    """Search/filter form for plan listings.

    Every field is optional; most field names map directly onto queryset
    filter keyword arguments (hence the double-underscore names).
    """
    pk = forms.IntegerField(required=False)
    pk__in = forms.CharField(required=False)
    parent__pk = forms.IntegerField(required=False)
    search = forms.CharField(label="Search", required=False)
    plan_id = forms.IntegerField(label="Plan ID", required=False)
    name__icontains = forms.CharField(label="Plan name", required=False)
    product = forms.ModelChoiceField(
        label="Product",
        queryset=Product.objects.all(),
        required=False
    )
    product_version = forms.ModelChoiceField(
        label="Product Version",
        queryset=Version.objects.none(),
        required=False
    )
    type = forms.ModelChoiceField(
        label="Type",
        queryset=TestPlanType.objects.all(),
        required=False,
    )
    env_group = forms.ModelChoiceField(
        label="Environment Group",
        queryset=TCMSEnvGroup.get_active().all(),
        required=False
    )
    author__username__startswith = forms.CharField(required=False)
    author__email__startswith = forms.CharField(required=False)
    owner__username__startswith = forms.CharField(required=False)
    case__default_tester__username__startswith = forms.CharField(
        required=False)
    tag__name__in = forms.CharField(required=False)
    is_active = forms.BooleanField(required=False)
    create_date__gte = forms.DateTimeField(
        label='Create after', required=False,
        widget=forms.DateInput(attrs={
            'class': 'vDateField',
        })
    )
    create_date__lte = forms.DateTimeField(
        label='Create before', required=False,
        widget=forms.DateInput(attrs={
            'class': 'vDateField',
        })
    )
    def clean_pk__in(self):
        """Parse a comma/space separated id list into a list of ints."""
        from tcms.core.utils import string_to_list
        results = string_to_list(self.cleaned_data['pk__in'])
        try:
            return [int(r) for r in results]
        # BUGFIX: was the Python-2-only `except Exception, e`; narrowed to
        # the errors int() can actually raise and modern `as` syntax.
        except (ValueError, TypeError) as e:
            raise forms.ValidationError(str(e))
    def clean_tag__name__in(self):
        # Split the free-text tag filter into individual tag names.
        return TestTag.string_to_list(self.cleaned_data['tag__name__in'])
    def populate(self, product_id=None):
        # Restrict version choices to the selected product, if any.
        if product_id:
            self.fields['product_version'].queryset = Version.objects.filter(
                product__id=product_id)
        else:
            self.fields['product_version'].queryset = Version.objects.all()
class ClonePlanForm(BasePlanForm):
    """Options controlling how an existing plan is cloned: what to copy
    versus link, and who owns the copies.

    NOTE(review): "orignal" misspellings below are user-visible strings
    and field names referenced by callers; kept as-is.
    """
    name = forms.CharField(label="Plan name", required=False)
    type = forms.ModelChoiceField(
        label="Type",
        queryset=TestPlanType.objects.all(),
        required=False,
    )
    keep_orignal_author = forms.BooleanField(
        label='Keep orignal author',
        help_text='Unchecking will make me the author of the copied plan',
        required=False,
    )
    copy_texts = forms.BooleanField(
        label='Copy Plan Document',
        help_text='Check it to copy texts of the plan.',
        required=False,
    )
    copy_attachements = forms.BooleanField(
        label='Copy Plan Attachments',
        help_text='Check it to copy attachments of the plan.',
        required=False
    )
    copy_environment_group = forms.BooleanField(
        label='Copy environment group',
        help_text='Check it on to copy environment group of the plan.',
        required=False
    )
    link_testcases = forms.BooleanField(
        label='All Test Cases',
        required=False
    )
    copy_testcases = forms.BooleanField(
        label='Create a copy',
        help_text='Unchecking will create a link to selected plans',
        required=False
    )
    maintain_case_orignal_author = forms.BooleanField(
        label='Maintain original authors',
        help_text='Unchecking will make me the author of the copied cases',
        required=False
    )
    keep_case_default_tester = forms.BooleanField(
        label='Keep Default Tester',
        help_text='Unchecking will make me the default tester of copied cases',
        required=False
    )
    set_parent = forms.BooleanField(
        label='Set source plan as parent',
        help_text='Check it to set the source plan as parent of new cloned '
                  'plan.',
        required=False
    )
# =========== Forms for XML-RPC functions ==============
class XMLRPCNewPlanForm(EditPlanForm):
    # XML-RPC plan creation requires the plan document text, so the field
    # is made mandatory here (it is optional on the web form).
    text = forms.CharField()
class XMLRPCEditPlanForm(EditPlanForm):
    """XML-RPC variant of EditPlanForm: every field is optional so API
    callers may update just a subset of plan attributes."""
    name = forms.CharField(
        label="Plan name", required=False
    )
    type = forms.ModelChoiceField(
        label="Type",
        queryset=TestPlanType.objects.all(),
        required=False
    )
    product = forms.ModelChoiceField(
        label="Product",
        queryset=Product.objects.all(),
        required=False,
    )
    product_version = forms.ModelChoiceField(
        label="Product Version",
        queryset=Version.objects.none(),
        required=False
    )
# =========== Mist forms ==============
class ImportCasesViaXMLForm(forms.Form):
    """Upload form for importing test cases from a TCMS/Testopia XML file."""
    # NOTE(review): presumably the hidden "action" parameter expected by the
    # plan views -- confirm against the consuming view before renaming.
    a = forms.CharField(widget=forms.HiddenInput)
    xml_file = CasePlanXMLField(
        label='Upload XML file:',
        help_text='XML file is export with TCMS or Testopia.'
    )
class PlanComponentForm(forms.Form):
    """Form linking one or more plans to product components."""
    plan = forms.ModelMultipleChoiceField(
        label='',
        queryset=TestPlan.objects.none(),
        widget=forms.Select(attrs={'style': 'display:none;'}),
    )
    component = forms.ModelMultipleChoiceField(
        queryset=Component.objects.none(),
        required=False,
    )

    def __init__(self, tps, **kwargs):
        """Bind plan/component choices to the given TestPlan queryset."""
        plan_pks = tps.values_list('pk', flat=True)
        product_pks = list(set(tps.values_list('product_id', flat=True)))
        initial = kwargs.get('initial')
        if initial:
            # Pre-select every plan in the queryset.
            initial['plan'] = plan_pks
        super(PlanComponentForm, self).__init__(**kwargs)
        self.fields['plan'].queryset = tps
        # Only components belonging to the plans' products are selectable.
        self.fields['component'].queryset = Component.objects.filter(
            product__pk__in=product_pks
        )
| ShaolongHu/Nitrate | tcms/testplans/forms.py | Python | gpl-2.0 | 20,537 | 0 |
import logging
import requests
import tarfile
from lxml import etree
from . import discover
from . import template
# Module-level logger.
log = logging.getLogger(__name__)
# Canonical download location for Ubuntu precise cloud images (not used in
# this module directly; presumably kept for external consumers -- confirm).
URLPREFIX = 'https://cloud-images.ubuntu.com/precise/current/'
# Volume-name prefixes per image flavor; '{release}' is filled in via
# str.format when the prefix is used.
PREFIXES = dict(
    server='{release}-server-cloudimg-amd64.',
    desktop='{release}-desktop-cloudimg-amd64.',
)
# All cloud-image volume names end with this suffix.
SUFFIX = '.img'
def list_cloud_images(pool, release, flavor):
    """
    List all Ubuntu 12.04 Cloud image in the libvirt pool.

    Yields the matching volume names.
    """
    wanted_prefix = PREFIXES[flavor].format(release=release)
    for candidate in pool.listVolumes():
        log.debug('Considering image: %s', candidate)
        # A usable image is prefix + build serial + suffix; names without
        # a serial number in the middle are rejected.
        has_serial = (
            candidate.startswith(wanted_prefix)
            and candidate.endswith(SUFFIX)
            and len(candidate) > len(wanted_prefix) + len(SUFFIX)
        )
        if has_serial:
            # found one!
            log.debug('Saw image: %s', candidate)
            yield candidate
def find_cloud_image(pool, release, flavor):
    """
    Find an Ubuntu 12.04 Cloud image in the libvirt pool.

    Returns the name of the newest matching volume, or None.
    """
    # Materialize the generator first: max([]) raises ValueError, and we
    # really don't want to confuse that with exceptions raised from inside
    # the generator.
    names = list(list_cloud_images(pool, release=release, flavor=flavor))
    if not names:
        log.debug('No cloud images found.')
        return None
    # The build serial is zero-padded, hence alphabetically sortable; the
    # max is therefore the latest image.
    return max(names)
def upload_volume(vol, fp):
    """
    Upload the contents of file-like ``fp`` into libvirt volume ``vol``.
    """
    stream = vol.connect().newStream(flags=0)
    vol.upload(stream=stream, offset=0, length=0, flags=0)

    def feed(stream, nbytes, _):
        # libvirt pulls data chunk by chunk; an empty read signals EOF.
        return fp.read(nbytes)

    stream.sendAll(feed, None)
    stream.finish()
def make_volume(
    pool,
    fp,
    release,
    flavor,
    serial,
    suffix,
):
    """Create a libvirt volume named after flavor/release/serial/suffix,
    fill it from ``fp`` and return it.
    """
    # volumes have no atomic completion marker; this will forever be
    # racy!
    volume_name = '{0}{1}{2}'.format(
        PREFIXES[flavor].format(release=release),
        serial,
        suffix,
    )
    log.debug('Creating libvirt volume %s ...', volume_name)
    volxml = template.volume(
        name=volume_name,
        # TODO we really should feed in a capacity, but we don't know
        # what it should be.. libvirt pool refresh figures it out, but
        # that's probably expensive
        # capacity=2*1024*1024,
    )
    # TODO this fails if the image exists already, which means
    # there's no clean way to continue after errors, currently
    new_vol = pool.createXML(etree.tostring(volxml), flags=0)
    upload_volume(vol=new_vol, fp=fp)
    return new_vol
def ensure_cloud_image(conn, release, flavor):
    """
    Ensure that the Ubuntu 12.04 Cloud image is in the libvirt pool.

    Downloads and unpacks the cloud-image tarball if no matching volume
    exists yet.  Returns the volume holding the main disk image.
    """
    log.debug('Opening libvirt pool...')
    pool = conn.storagePoolLookupByName('default')
    log.debug('Listing cloud image in libvirt...')
    name = find_cloud_image(pool=pool, release=release, flavor=flavor)
    if name is not None:
        # all done
        log.debug('Already have cloud image: %s', name)
        return pool.storageVolLookupByName(name)

    log.debug('Discovering cloud images...')
    image = discover.get(release=release, flavor=flavor)
    log.debug('Will fetch serial number: %s', image['serial'])
    url = image['url']
    log.info('Downloading image: %s', url)
    r = requests.get(url, stream=True)
    t = tarfile.open(fileobj=r.raw, mode='r|*', bufsize=1024*1024)

    def _skip(member_name):
        # Docs, the root filesystem tarball, boot loaders, kernels and
        # initrds are not turned into libvirt volumes.
        # BUGFIX: the original checked `endswith("-root.tar.gz")` twice;
        # the duplicate (dead) branch is removed here.
        return (
            member_name.startswith("README")
            or member_name.endswith("-root.tar.gz")
            or member_name.endswith("-loader")
            or "-vmlinuz-" in member_name
            or "-initrd-" in member_name
        )

    # reference to the main volume of this vm template
    vol = None
    for ti in t:
        if not ti.isfile():
            continue
        if _skip(ti.name):
            continue
        f = t.extractfile(ti)
        if ti.name.endswith("-disk1.img"):
            vol = make_volume(
                pool=pool,
                fp=f,
                release=release,
                flavor=flavor,
                serial=image['serial'],
                suffix="-disk1.img",
            )
        elif ti.name.endswith(".img"):
            vol = make_volume(
                pool=pool,
                fp=f,
                release=release,
                flavor=flavor,
                serial=image['serial'],
                suffix=".img",
            )
        elif ti.name.endswith("-floppy"):
            make_volume(
                pool=pool,
                fp=f,
                release=release,
                flavor=flavor,
                serial=image['serial'],
                suffix="-floppy.img",
            )
        else:
            # BUGFIX: log.warn is a deprecated alias of log.warning.
            log.warning("Unknown file in cloud-image tarball: %s", ti.name)
            continue

    # TODO only here to autodetect capacity
    pool.refresh(flags=0)
    return vol
| tv42/downburst | downburst/image.py | Python | mit | 5,246 | 0.000191 |
# encoding: utf-8
from yast import import_module
import_module('UI')
from yast import *
class TableSortingClient:
    """Minimal libyui example: show a sortable two-column table."""
    def main(self):
        # Build a dialog with a heading, a table (min size 30x10) and an
        # OK button.  The "Shelf" column deliberately mixes ints, signed
        # numbers and arbitrary strings to exercise the widget's sorting.
        UI.OpenDialog(
            VBox(
                Label("Library"),
                MinSize(
                    30,
                    10,
                    Table(
                        Header("Book Title", "Shelf"),
                        [
                            Item(Id(1), "3 Trees", " -6"),
                            Item(Id(2), "missing", None),
                            Item(Id(3), "just another book", " 8a"),
                            Item(Id(4), "Here comes Fred", 12),
                            Item(Id(5), "Zoo", 25),
                            Item(Id(6), "Lions", "balbla"),
                            Item(Id(7), "Elephants ", "8b"),
                            Item(Id(8), "wild animals", "a7"),
                            Item(Id(9), "Weather forecast", "15yxc"),
                            Item(Id(10), "my first Book", 1),
                            Item(Id(11), "this is yours", 95),
                            Item(Id(12), "Terra X", " 34 sdf"),
                            Item(Id(13), "Programming", "dfsdf34"),
                            Item(Id(14), "More programming", 0)
                        ]
                    )
                ),
                PushButton("&OK")
            )
        )
        # Block until the user reacts (presses OK), then tear down.
        UI.UserInput()
        UI.CloseDialog()
# Entry point: build the dialog, wait for input, then close it.
TableSortingClient().main()
| yast/yast-python-bindings | examples/Table-sorting.py | Python | gpl-2.0 | 1,203 | 0.004988 |
r"""Fixer for unicode.
* Changes unicode to str and unichr to chr.
* If "...\u..." is not unicode literal change it into "...\\u...".
* Change u"..." into "...".
"""
from ..pgen2 import token
from .. import fixer_base
# Renames applied to bare NAME tokens: Python 2 builtins -> Python 3 builtins.
_mapping = {"unichr" : "chr", "unicode" : "str"}
class FixUnicode(fixer_base.BaseFix):
    BM_compatible = True
    # Matches any string literal plus the names `unicode` and `unichr`.
    PATTERN = "STRING | 'unicode' | 'unichr'"

    def start_tree(self, tree, filename):
        """Record whether `unicode_literals` is in effect for this module."""
        super(FixUnicode, self).start_tree(tree, filename)
        self.unicode_literals = 'unicode_literals' in tree.future_features

    def transform(self, node, results):
        if node.type == token.NAME:
            # unicode -> str, unichr -> chr
            new = node.clone()
            new.value = _mapping[node.value]
            return new
        elif node.type == token.STRING:
            val = node.value
            if not self.unicode_literals and val[0] in '\'"' and '\\' in val:
                # Non-unicode literal: double the backslash of \u / \U so
                # the escape stays inert ("...\u..." -> "...\\u...").
                # Splitting on r'\\' first keeps backslashes that were
                # already escaped untouched.
                val = r'\\'.join([
                    v.replace('\\u', r'\\u').replace('\\U', r'\\U')
                    for v in val.split(r'\\')
                ])
            if val[0] in 'uU':
                # Drop the now-meaningless u/U string prefix.
                val = val[1:]
            if val == node.value:
                # Nothing changed; reuse the existing node.
                return node
            new = node.clone()
            new.value = val
            return new
| Microvellum/Fluid-Designer | win64-vc/2.78/python/lib/lib2to3/fixes/fix_unicode.py | Python | gpl-3.0 | 1,256 | 0.002389 |
from __future__ import unicode_literals
from django.apps import AppConfig
class ProvVoConfig(AppConfig):
    """Django application configuration for the prov_vo app."""
    name = 'prov_vo'
| kristinriebe/django-prov_vo | prov_vo/apps.py | Python | apache-2.0 | 129 | 0 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import re
import markdown
from django.template.loader import render_to_string
from wiki.plugins.images import models, settings
# Matches "[image:<id>]" markers with optional "align:left|right" and
# "size:default|small|medium|large|orig" attributes; group 1 captures the
# whole marker so it can be located and removed from the line.
IMAGE_RE = re.compile(
    r'.*(\[image\:(?P<id>[0-9]+)(\s+align\:(?P<align>right|left))?(\s+size\:(?P<size>default|small|medium|large|orig))?\s*\]).*',
    re.IGNORECASE)
class ImageExtension(markdown.Extension):
    """ Images plugin markdown extension for django-wiki. """
    def extendMarkdown(self, md, md_globals):
        """ Insert ImagePreprocessor before ReferencePreprocessor. """
        md.preprocessors.add('dw-images', ImagePreprocessor(md), '>html_block')
        # The postprocessor strips the <p> wrappers Markdown puts around
        # our <figure> output (see ImagePostprocessor).
        md.postprocessors.add('dw-images-cleanup', ImagePostprocessor(md), '>raw_html')
class ImagePreprocessor(markdown.preprocessors.Preprocessor):
    """
    django-wiki image preprocessor

    Parse text for [image:id align:left|right|center] references.

    For instance:

    [image:id align:left|right|center]
        This is the caption text maybe with [a link](...)

    So: Remember that the caption text is fully valid markdown!
    """
    def run(self, lines):  # NOQA
        new_text = []
        previous_line = ""
        line_index = None
        previous_line_was_image = False
        image = None
        image_id = None
        alignment = None
        size = settings.THUMBNAIL_SIZES['default']
        caption_lines = []
        for line in lines:
            m = IMAGE_RE.match(line)
            if m:
                # Start of an image marker: remember its attributes and
                # strip the marker from the line.
                previous_line_was_image = True
                image_id = m.group('id').strip()
                alignment = m.group('align')
                if m.group('size'):
                    size = settings.THUMBNAIL_SIZES[m.group('size')]
                try:
                    image = models.Image.objects.get(
                        article=self.markdown.article,
                        id=image_id,
                        current_revision__deleted=False)
                except models.Image.DoesNotExist:
                    pass
                line_index = line.find(m.group(1))
                line = line.replace(m.group(1), "")
                previous_line = line
                caption_lines = []
            elif previous_line_was_image:
                if line.startswith("    "):
                    # Indented lines after the marker are caption markdown.
                    caption_lines.append(line[4:])
                    line = None
                else:
                    # Caption finished: render the figure HTML, stash it and
                    # splice the placeholders around the caption text.
                    caption_placeholder = "{{{IMAGECAPTION}}}"
                    width = size.split("x")[0] if size else None
                    html = render_to_string(
                        "wiki/plugins/images/render.html",
                        context={
                            'image': image,
                            'caption': caption_placeholder,
                            'align': alignment,
                            'size': size,
                            'width': width
                        })
                    html_before, html_after = html.split(caption_placeholder)
                    placeholder_before = self.markdown.htmlStash.store(
                        html_before,
                        safe=True)
                    placeholder_after = self.markdown.htmlStash.store(
                        html_after,
                        safe=True)
                    new_line = placeholder_before + "\n".join(
                        caption_lines) + placeholder_after + "\n"
                    previous_line_was_image = False
                    # BUGFIX: these two comparisons used `is not ""`, an
                    # identity test against a str literal that only works
                    # through CPython's interning of "" and emits a
                    # SyntaxWarning on Python >= 3.8; `!=` is the correct
                    # equality test.
                    if previous_line != "":
                        if previous_line[line_index:] != "":
                            new_line = new_line[0:-1]
                        new_text[-1] = (previous_line[0:line_index] +
                                        new_line +
                                        previous_line[line_index:] +
                                        "\n" +
                                        line)
                        line = None
                    else:
                        line = new_line + line
            if line is not None:
                new_text.append(line)
        return new_text
class ImagePostprocessor(markdown.postprocessors.Postprocessor):
    def run(self, text):
        """
        This cleans up after Markdown's well-intended placing of image tags
        inside <p> elements.  Images are inline, so Markdown wraps them in
        <p>; because we wrap them in <figure> ourselves, the surrounding
        <p> markup has to be stripped again afterwards.
        """
        replacements = (
            ("<p><figure", "<figure"),
            ("</figure>\n</p>", "</figure>"),
        )
        for old, new in replacements:
            text = text.replace(old, new)
        return text
| cXhristian/django-wiki | src/wiki/plugins/images/markdown_extensions.py | Python | gpl-3.0 | 4,717 | 0.000424 |
#!/usr/bin/python
#
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Fetches all advertisers in a DFA account.
This example displays advertiser name, ID and spotlight configuration ID for
the given search criteria. Results are limited to first 10 records.
The LoadFromStorage method is pulling credentials and properties from a
"googleads.yaml" file. By default, it looks for this file in your home
directory. For more information, see the "Caching authentication information"
section of our README.
Tags: advertiser.getAdvertisers
"""
__author__ = 'Joseph DiLallo'
import googleads.dfa
def main(client):
  """Page through all advertisers and print name, ID and spotlight config ID.

  Args:
    client: an initialized googleads.dfa.DfaClient (or any object whose
      GetService(...) returns a service exposing getAdvertisers).
  """
  # Initialize appropriate service.
  advertiser_service = client.GetService(
      'advertiser', 'v1.20', 'https://advertisersapitest.doubleclick.net')

  # Create advertiser search criteria structure.
  page_number = 1
  advertiser_search_criteria = {
      'pageSize': '100',
      'pageNumber': str(page_number)
  }

  while True:
    # Get advertiser record set.
    results = advertiser_service.getAdvertisers(advertiser_search_criteria)

    # Display advertiser names, IDs and spotlight configuration IDs.
    if results['records']:
      for advertiser in results['records']:
        print ('Advertiser with name \'%s\', ID \'%s\', and spotlight '
               'configuration id \'%s\' was found.'
               % (advertiser['name'], advertiser['id'], advertiser['spotId']))
    page_number += 1
    advertiser_search_criteria['pageNumber'] = str(page_number)
    if page_number > int(results['totalNumberOfPages']):
      break

  # BUGFIX: was a Python-2-only print *statement* with a trailing %
  # expression; parenthesized so it is valid as both a py2 statement and a
  # py3 function call (matching the parenthesized print used above).
  print('Number of results found: %s' % results['totalNumberOfRecords'])
if __name__ == '__main__':
  # Initialize client object.
  # Credentials and properties are read from "googleads.yaml" in the home
  # directory (see the module docstring).
  dfa_client = googleads.dfa.DfaClient.LoadFromStorage()
  main(dfa_client)
| coxmediagroup/googleads-python-lib | examples/dfa/v1_20/get_advertisers.py | Python | apache-2.0 | 2,314 | 0.005618 |
# coding=utf-8
# Copyright 2022 The Uncertainty Baselines Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for (Fashion) MNIST."""
import numpy as np
import scipy
def one_hot(a, num_classes):
  """Return the one-hot encoding of the integer class labels in `a`.

  Indexes an identity matrix with the flattened labels, then squeezes any
  singleton dimensions (so a single label yields a 1-D vector).
  """
  flat_labels = a.reshape(-1)
  identity = np.eye(num_classes)
  return np.squeeze(identity[flat_labels])
def brier_score(y, p):
  """Compute the Brier score.

  Brier Score: see
  https://www.stat.washington.edu/raftery/Research/PDF/Gneiting2007jasa.pdf,
  page 363, Example 1

  Args:
    y: one-hot encoding of the true classes, size (?, num_classes)
    p: numpy array, size (?, num_classes)
       containing the output predicted probabilities
  Returns:
    bs: Brier score.
  """
  squared_diff = (p - y) ** 2
  return np.mean(squared_diff)
def calibration(y, p_mean, num_bins=10):
  """Compute the calibration.

  References:
  https://arxiv.org/abs/1706.04599
  https://arxiv.org/abs/1807.00263

  Args:
    y: one-hot encoding of the true classes, size (?, num_classes)
    p_mean: numpy array, size (?, num_classes)
            containing the mean output predicted probabilities
    num_bins: number of bins

  Returns:
    ece: Expected Calibration Error
    mce: Maximum Calibration Error
  """
  # Compute for every test sample x, the predicted class.
  class_pred = np.argmax(p_mean, axis=1)
  # and the confidence (probability) associated with it.
  conf = np.max(p_mean, axis=1)
  # Convert y from one-hot encoding to the number of the class
  y = np.argmax(y, axis=1)
  # Storage
  acc_tab = np.zeros(num_bins)  # empirical (true) confidence
  mean_conf = np.zeros(num_bins)  # predicted confidence
  nb_items_bin = np.zeros(num_bins)  # number of items in the bins
  tau_tab = np.linspace(0, 1, num_bins+1)  # confidence bins
  # NOTE(review): bins are half-open [tau_i, tau_{i+1}), so samples with
  # confidence exactly 1.0 fall into no bin -- kept as-is to preserve
  # published numbers; confirm before changing.
  for i in np.arange(num_bins):  # iterate over the bins
    # select the items where the predicted max probability falls in the bin
    # [tau_tab[i], tau_tab[i + 1)]
    sec = (tau_tab[i + 1] > conf) & (conf >= tau_tab[i])
    nb_items_bin[i] = np.sum(sec)  # Number of items in the bin
    # select the predicted classes, and the true classes
    class_pred_sec, y_sec = class_pred[sec], y[sec]
    # average of the predicted max probabilities
    mean_conf[i] = np.mean(conf[sec]) if nb_items_bin[i] > 0 else np.nan
    # compute the empirical confidence
    acc_tab[i] = np.mean(
        class_pred_sec == y_sec) if nb_items_bin[i] > 0 else np.nan

  # Cleaning: drop empty bins so the weighted average below is well-defined.
  mean_conf = mean_conf[nb_items_bin > 0]
  acc_tab = acc_tab[nb_items_bin > 0]
  nb_items_bin = nb_items_bin[nb_items_bin > 0]

  # Expected Calibration Error
  # BUGFIX: `np.float` was a deprecated alias of the builtin `float` and
  # was removed in NumPy 1.24; using the builtin keeps identical dtype
  # semantics (float64) on all NumPy versions.
  ece = np.average(
      np.absolute(mean_conf - acc_tab),
      weights=nb_items_bin.astype(float) / np.sum(nb_items_bin))
  # Maximum Calibration Error
  mce = np.max(np.absolute(mean_conf - acc_tab))
  return ece, mce
def ensemble_metrics(x,
                     y,
                     model,
                     log_likelihood_fn,
                     n_samples=1,
                     weight_files=None):
  """Evaluate metrics of an ensemble.

  Args:
    x: numpy array of inputs
    y: numpy array of labels
    model: tf.keras.Model.
    log_likelihood_fn: keras function of log likelihood. For classification
      tasks, log_likelihood_fn(...)[1] should return the logits
    n_samples: number of Monte Carlo samples to draw per ensemble member (each
      weight file).
    weight_files: to draw samples from multiple weight sets, specify a list of
      weight files to load. These files must have been generated through
      keras's model.save_weights(...).

  Returns:
    metrics_dict: dictionary containing the metrics
  """
  if weight_files is None:
    # Single weight set: draw n_samples stochastic forward passes from the
    # model's current weights.
    ensemble_logprobs = [log_likelihood_fn([x, y])[0] for _ in range(n_samples)]
    metric_values = [model.evaluate(x, y, verbose=0)
                     for _ in range(n_samples)]
    ensemble_logits = [log_likelihood_fn([x, y])[1] for _ in range(n_samples)]
  else:
    # Multiple weight sets: load each checkpoint in turn and draw n_samples
    # from each, pooling everything into one flat ensemble.
    ensemble_logprobs = []
    metric_values = []
    ensemble_logits = []
    for filename in weight_files:
      model.load_weights(filename)
      ensemble_logprobs.extend([log_likelihood_fn([x, y])[0]
                                for _ in range(n_samples)])
      ensemble_logits.extend([log_likelihood_fn([x, y])[1]
                              for _ in range(n_samples)])
      metric_values.extend([model.evaluate(x, y, verbose=0)
                            for _ in range(n_samples)])

  # Average the keras-reported metrics over all ensemble members.
  metric_values = np.mean(np.array(metric_values), axis=0)
  results = {}
  for m, name in zip(metric_values, model.metrics_names):
    results[name] = m

  # Mixture log likelihood log[(1/S) sum_s p_s], computed via logsumexp with
  # uniform weights b=1/S for numerical stability. If the per-sample log
  # probs carry an extra trailing axis, it is summed out first (axis=2).
  ensemble_logprobs = np.array(ensemble_logprobs)
  probabilistic_log_likelihood = np.mean(
      scipy.special.logsumexp(
          np.sum(ensemble_logprobs, axis=2)
          if len(ensemble_logprobs.shape) > 2 else ensemble_logprobs,
          b=1. / ensemble_logprobs.shape[0],
          axis=0),
      axis=0)
  results['probabilistic_log_likelihood'] = probabilistic_log_likelihood

  # Predictive distribution: mean of the per-sample softmax probabilities
  # (logits assumed shaped (samples, batch, classes) -- softmax over axis 2).
  ensemble_logits = np.array(ensemble_logits)
  probs = np.mean(scipy.special.softmax(ensemble_logits, axis=2), axis=0)
  class_pred = np.argmax(probs, axis=1)
  probabilistic_accuracy = np.mean(np.equal(y, class_pred))
  results['probabilistic_accuracy'] = probabilistic_accuracy
  # Calibration and Brier score need one-hot labels (`one_hot` and
  # `brier_score`/`calibration` are defined elsewhere in this module).
  results['ece'], results['mce'] = calibration(
      one_hot(y, probs.shape[1]), probs)
  results['brier_score'] = brier_score(one_hot(y, probs.shape[1]), probs)
  return results
| google/uncertainty-baselines | baselines/mnist/utils.py | Python | apache-2.0 | 5,866 | 0.008012 |
""" rewrite of lambdify - This stuff is not stable at all.
It is for internal use in the new plotting module.
It may (will! see the Q'n'A in the source) be rewritten.
It's completely self contained. Especially it does not use lambdarepr.
It does not aim to replace the current lambdify. Most importantly it will never
ever support anything else than sympy expressions (no Matrices, dictionaries
and so on).
"""
from __future__ import print_function, division
import re
from sympy import Symbol, NumberSymbol, I, zoo, oo
from sympy.core.compatibility import exec_
from sympy.utilities.iterables import numbered_symbols
# We parse the expression string into a tree that identifies functions. Then
# we translate the names of the functions and we translate also some strings
# that are not names of functions (all this according to translation
# dictionaries).
# If the translation goes to another module (like numpy) the
# module is imported and 'func' is translated to 'module.func'.
# If a function can not be translated, the inner nodes of that part of the
# tree are not translated. So if we have Integral(sqrt(x)), sqrt is not
# translated to np.sqrt and the Integral does not crash.
# A namespace for all this is generated by crawling the (func, args) tree of
# the expression. The creation of this namespace involves many ugly
# workarounds.
# The namespace consists of all the names needed for the sympy expression and
# all the name of modules used for translation. Those modules are imported only
# as a name (import numpy as np) in order to keep the namespace small and
# manageable.
# Please, if there is a bug, do not try to fix it here! Rewrite this by using
# the method proposed in the last Q'n'A below. That way the new function will
# work just as well, be just as simple, but it won't need any new workarounds.
# If you insist on fixing it here, look at the workarounds in the function
# sympy_expression_namespace and in lambdify.
# Q: Why are you not using python abstract syntax tree?
# A: Because it is more complicated and not much more powerful in this case.
# Q: What if I have Symbol('sin') or g=Function('f')?
# A: You will break the algorithm. We should use srepr to defend against this?
# The problem with Symbol('sin') is that it will be printed as 'sin'. The
# parser will distinguish it from the function 'sin' because functions are
# detected thanks to the opening parenthesis, but the lambda expression won't
# understand the difference if we have also the sin function.
# The solution (complicated) is to use srepr and maybe ast.
# The problem with the g=Function('f') is that it will be printed as 'f' but in
# the global namespace we have only 'g'. But as the same printer is used in the
# constructor of the namespace there will be no problem.
# Q: What if some of the printers are not printing as expected?
# A: The algorithm won't work. You must use srepr for those cases. But even
# srepr may not print well. All problems with printers should be considered
# bugs.
# Q: What about _imp_ functions?
# A: Those are taken care for by evalf. A special case treatment will work
# faster but it's not worth the code complexity.
# Q: Will ast fix all possible problems?
# A: No. You will always have to use some printer. Even srepr may not work in
# some cases. But if the printer does not work, that should be considered a
# bug.
# Q: Is there some way to fix all possible problems?
# A: Probably by constructing our strings ourself by traversing the (func,
# args) tree and creating the namespace at the same time. That actually sounds
# good.
from sympy.external import import_module
import warnings
#TODO debugging output
class vectorized_lambdify(object):
    """ Return a sufficiently smart, vectorized and lambdified function.

    Returns only reals.

    This function uses experimental_lambdify to create a lambdified
    expression ready to be used with numpy. Many of the functions in sympy
    are not implemented in numpy so in some cases we resort to python cmath or
    even to evalf.

    The following translations are tried:
      only numpy complex
      - on errors raised by sympy trying to work with ndarray:
          only python cmath and then vectorize complex128

    When using python cmath there is no need for evalf or float/complex
    because python cmath calls those.

    This function never tries to mix numpy directly with evalf because numpy
    does not understand sympy Float. If this is needed one can use the
    float_wrap_evalf/complex_wrap_evalf options of experimental_lambdify or
    better one can be explicit about the dtypes that numpy works with.
    Check numpy bug http://projects.scipy.org/numpy/ticket/1013 to know what
    types of errors to expect.
    """
    def __init__(self, args, expr):
        self.args = args
        self.expr = expr
        # First attempt: a pure-numpy lambda operating on complex arrays.
        self.lambda_func = experimental_lambdify(args, expr, use_np=True)
        self.vector_func = self.lambda_func
        self.failure = False

    def __call__(self, *args):
        np = import_module('numpy')
        # Raise (instead of warn) on invalid floating point operations so
        # the fallback paths below get a chance to run.
        np_old_err = np.seterr(invalid='raise')
        try:
            # NOTE: the builtin `complex` is used as the dtype; the old
            # `np.complex` spelling was an alias of it, deprecated in
            # NumPy 1.20 and removed in 1.24.
            temp_args = (np.array(a, dtype=complex) for a in args)
            results = self.vector_func(*temp_args)
            # Keep only (numerically) real results; mask the rest.
            results = np.ma.masked_where(
                np.abs(results.imag) > 1e-7 * np.abs(results),
                results.real, copy=False)
        except Exception as e:
            #DEBUG: print 'Error', type(e), e
            if ((isinstance(e, TypeError)
                 and 'unhashable type: \'numpy.ndarray\'' in str(e))
                or
                (isinstance(e, ValueError)
                 and ('Invalid limits given:' in str(e)
                      or 'negative dimensions are not allowed' in str(e)  # XXX
                      or 'sequence too large; must be smaller than 32' in str(e)))):  # XXX
                # Almost all functions were translated to numpy, but some were
                # left as sympy functions. They received an ndarray as an
                # argument and failed.
                #   sin(ndarray(...)) raises "unhashable type"
                #   Integral(x, (x, 0, ndarray(...))) raises "Invalid limits"
                #   other ugly exceptions that are not well understood (marked with XXX)
                # TODO: Cleanup the ugly special cases marked with xxx above.
                # Solution: use cmath and vectorize the final lambda.
                self.lambda_func = experimental_lambdify(
                    self.args, self.expr, use_python_cmath=True)
                self.vector_func = np.vectorize(
                    self.lambda_func, otypes=[complex])
                results = self.vector_func(*args)
                results = np.ma.masked_where(
                    np.abs(results.imag) > 1e-7 * np.abs(results),
                    results.real, copy=False)
            else:
                # Complete failure. One last try with no translations, only
                # wrapping in complex((...).evalf()) and returning the real
                # part.
                if self.failure:
                    raise e
                else:
                    self.failure = True
                    self.lambda_func = experimental_lambdify(
                        self.args, self.expr, use_evalf=True,
                        complex_wrap_evalf=True)
                    self.vector_func = np.vectorize(
                        self.lambda_func, otypes=[complex])
                    results = self.vector_func(*args)
                    results = np.ma.masked_where(
                        np.abs(results.imag) > 1e-7 * np.abs(results),
                        results.real, copy=False)
                    warnings.warn('The evaluation of the expression is'
                                  ' problematic. We are trying a failback method'
                                  ' that may still work. Please report this as a bug.')
        finally:
            # Always restore the caller's numpy error state.
            np.seterr(**np_old_err)

        return results
class lambdify(object):
    """Returns the lambdified function.

    This function uses experimental_lambdify to create a lambdified
    expression. It uses cmath to lambdify the expression. If the function
    is not implemented in python cmath, python cmath calls evalf on those
    functions.
    """

    def __init__(self, args, expr):
        self.args = args
        self.expr = expr
        # First attempt: evaluate through python's cmath, with evalf wrapping
        # for functions cmath does not implement.
        self.lambda_func = experimental_lambdify(args, expr, use_evalf=True,
                                                 use_python_cmath=True)
        self.failure = False

    def __call__(self, args):
        # Scalar evaluation: coerce the input to complex, evaluate, and
        # return the real part, or None when the result is not (numerically)
        # real or cannot be computed.
        args = complex(args)
        try:
            #The result can be sympy.Float. Hence wrap it with complex type.
            result = complex(self.lambda_func(args))
            if abs(result.imag) > 1e-7 * abs(result):
                return None
            else:
                return result.real
        except Exception as e:
            # The exceptions raised by sympy, cmath are not consistent and
            # hence it is not possible to specify all the exceptions that
            # are to be caught. Presently there are no cases for which the code
            # reaches this block other than ZeroDivisionError and complex
            # comparison. Also the exception is caught only once. If the
            # exception repeats itself,
            # then it is not caught and the corresponding error is raised.
            # XXX: Remove catching all exceptions once the plotting module
            # is heavily tested.
            if isinstance(e, ZeroDivisionError):
                return None
            elif isinstance(e, TypeError) and ('no ordering relation is'
                                               ' defined for complex numbers'
                                               in str(e) or 'unorderable '
                                               'types' in str(e) or "not "
                                               "supported between instances of"
                                               in str(e)):
                # The expression compared complex numbers (which Python cannot
                # order): retry with real-only python math on the real part.
                self.lambda_func = experimental_lambdify(self.args, self.expr,
                                                         use_evalf=True,
                                                         use_python_math=True)
                result = self.lambda_func(args.real)
                return result
            else:
                if self.failure:
                    raise e
                #Failure
                #Try wrapping it with complex(..).evalf()
                self.failure = True
                self.lambda_func = experimental_lambdify(self.args, self.expr,
                                                         use_evalf=True,
                                                         complex_wrap_evalf=True)
                result = self.lambda_func(args)
                warnings.warn('The evaluation of the expression is'
                              ' problematic. We are trying a failback method'
                              ' that may still work. Please report this as a bug.')
                if abs(result.imag) > 1e-7 * abs(result):
                    return None
                else:
                    return result.real
def experimental_lambdify(*args, **kwargs):
    """Build and return a :class:`Lambdifier` callable for the given
    arguments and expression (thin factory wrapper)."""
    return Lambdifier(*args, **kwargs)
class Lambdifier(object):
    """Translate a sympy expression into a plain Python lambda for one of
    several numeric backends (numpy, math, cmath, intervalmath).

    The printed form of the expression is parsed into a (head, (func, args),
    tail) tree, function names and special constants are rewritten via the
    translation tables below, and the resulting string is exec'd into a
    lambda. Untranslatable sub-expressions are left as sympy calls and
    evaluated through evalf.
    """

    def __init__(self, args, expr, print_lambda=False, use_evalf=False,
                 float_wrap_evalf=False, complex_wrap_evalf=False,
                 use_np=False, use_python_math=False, use_python_cmath=False,
                 use_interval=False):
        """Build the lambda for ``expr`` over the Symbols in ``args``.

        The ``use_*`` flags select the backend module(s); the
        ``*_wrap_evalf`` flags wrap untranslated sympy calls in
        ``float(...)``/``complex(...)`` around ``.evalf()``.
        ``print_lambda`` echoes the generated expression string (debugging).
        """

        self.print_lambda = print_lambda
        self.use_evalf = use_evalf
        self.float_wrap_evalf = float_wrap_evalf
        self.complex_wrap_evalf = complex_wrap_evalf
        self.use_np = use_np
        self.use_python_math = use_python_math
        self.use_python_cmath = use_python_cmath
        self.use_interval = use_interval

        # Constructing the argument string
        # - check
        if not all([isinstance(a, Symbol) for a in args]):
            raise ValueError('The arguments must be Symbols.')
        # - use numbered symbols
        # Rename the arguments to fresh numbered symbols so user-chosen names
        # cannot collide with anything in the generated namespace.
        syms = numbered_symbols(exclude=expr.free_symbols)
        newargs = [next(syms) for i in args]
        expr = expr.xreplace(dict(zip(args, newargs)))
        argstr = ', '.join([str(a) for a in newargs])
        del syms, newargs, args

        # Constructing the translation dictionaries and making the translation
        self.dict_str = self.get_dict_str()
        self.dict_fun = self.get_dict_fun()
        exprstr = str(expr)
        # the & and | operators don't work on tuples, see discussion #12108
        exprstr = exprstr.replace(" & "," and ").replace(" | "," or ")
        newexpr = self.tree2str_translate(self.str2tree(exprstr))

        # Constructing the namespaces
        namespace = {}
        namespace.update(self.sympy_atoms_namespace(expr))
        namespace.update(self.sympy_expression_namespace(expr))
        # XXX Workaround
        # Ugly workaround because Pow(a,Half) prints as sqrt(a)
        # and sympy_expression_namespace can not catch it.
        from sympy import sqrt
        namespace.update({'sqrt': sqrt})
        namespace.update({'Eq': lambda x, y: x == y})
        # End workaround.
        if use_python_math:
            namespace.update({'math': __import__('math')})
        if use_python_cmath:
            namespace.update({'cmath': __import__('cmath')})
        if use_np:
            try:
                namespace.update({'np': __import__('numpy')})
            except ImportError:
                raise ImportError(
                    'experimental_lambdify failed to import numpy.')
        if use_interval:
            namespace.update({'imath': __import__(
                'sympy.plotting.intervalmath', fromlist=['intervalmath'])})
            namespace.update({'math': __import__('math')})

        # Construct the lambda
        if self.print_lambda:
            print(newexpr)
        eval_str = 'lambda %s : ( %s )' % (argstr, newexpr)
        self.eval_str = eval_str
        exec_("from __future__ import division; MYNEWLAMBDA = %s" % eval_str, namespace)
        self.lambda_func = namespace['MYNEWLAMBDA']

    def __call__(self, *args, **kwargs):
        """Evaluate the generated lambda."""
        return self.lambda_func(*args, **kwargs)


    ##############################################################################
    # Dicts for translating from sympy to other modules
    ##############################################################################
    ###
    # builtins
    ###
    # Functions with different names in builtins
    builtin_functions_different = {
        'Min': 'min',
        'Max': 'max',
        'Abs': 'abs',
    }

    # Strings that should be translated
    builtin_not_functions = {
        'I': '1j',
        # 'oo': '1e400',
    }

    ###
    # numpy
    ###

    # Functions that are the same in numpy
    numpy_functions_same = [
        'sin', 'cos', 'tan', 'sinh', 'cosh', 'tanh', 'exp', 'log',
        'sqrt', 'floor', 'conjugate',
    ]

    # Functions with different names in numpy
    numpy_functions_different = {
        "acos": "arccos",
        "acosh": "arccosh",
        "arg": "angle",
        "asin": "arcsin",
        "asinh": "arcsinh",
        "atan": "arctan",
        "atan2": "arctan2",
        "atanh": "arctanh",
        "ceiling": "ceil",
        "im": "imag",
        "ln": "log",
        "Max": "amax",
        "Min": "amin",
        "re": "real",
        "Abs": "abs",
    }

    # Strings that should be translated
    numpy_not_functions = {
        'pi': 'np.pi',
        'oo': 'np.inf',
        'E': 'np.e',
    }

    ###
    # python math
    ###

    # Functions that are the same in math
    math_functions_same = [
        'sin', 'cos', 'tan', 'asin', 'acos', 'atan', 'atan2',
        'sinh', 'cosh', 'tanh', 'asinh', 'acosh', 'atanh',
        'exp', 'log', 'erf', 'sqrt', 'floor', 'factorial', 'gamma',
    ]

    # Functions with different names in math
    math_functions_different = {
        'ceiling': 'ceil',
        'ln': 'log',
        'loggamma': 'lgamma'
    }

    # Strings that should be translated
    math_not_functions = {
        'pi': 'math.pi',
        'E': 'math.e',
    }

    ###
    # python cmath
    ###

    # Functions that are the same in cmath
    cmath_functions_same = [
        'sin', 'cos', 'tan', 'asin', 'acos', 'atan',
        'sinh', 'cosh', 'tanh', 'asinh', 'acosh', 'atanh',
        'exp', 'log', 'sqrt',
    ]

    # Functions with different names in cmath
    cmath_functions_different = {
        'ln': 'log',
        'arg': 'phase',
    }

    # Strings that should be translated
    cmath_not_functions = {
        'pi': 'cmath.pi',
        'E': 'cmath.e',
    }

    ###
    # intervalmath
    ###

    interval_not_functions = {
        'pi': 'math.pi',
        'E': 'math.e'
    }

    interval_functions_same = [
        'sin', 'cos', 'exp', 'tan', 'atan', 'log',
        'sqrt', 'cosh', 'sinh', 'tanh', 'floor',
        'acos', 'asin', 'acosh', 'asinh', 'atanh',
        'Abs', 'And', 'Or'
    ]

    interval_functions_different = {
        'Min': 'imin',
        'Max': 'imax',
        'ceiling': 'ceil',
    }

    ###
    # mpmath, etc
    ###
    #TODO

    ###
    # Create the final ordered tuples of dictionaries
    ###

    # For strings
    def get_dict_str(self):
        """Merge the non-function (constant) translation tables for the
        enabled backends; later backends override earlier ones."""
        dict_str = dict(self.builtin_not_functions)
        if self.use_np:
            dict_str.update(self.numpy_not_functions)
        if self.use_python_math:
            dict_str.update(self.math_not_functions)
        if self.use_python_cmath:
            dict_str.update(self.cmath_not_functions)
        if self.use_interval:
            dict_str.update(self.interval_not_functions)
        return dict_str

    # For functions
    def get_dict_fun(self):
        """Merge the function-name translation tables for the enabled
        backends, qualifying names with the backend module prefix."""
        dict_fun = dict(self.builtin_functions_different)
        if self.use_np:
            for s in self.numpy_functions_same:
                dict_fun[s] = 'np.' + s
            for k, v in self.numpy_functions_different.items():
                dict_fun[k] = 'np.' + v
        if self.use_python_math:
            for s in self.math_functions_same:
                dict_fun[s] = 'math.' + s
            for k, v in self.math_functions_different.items():
                dict_fun[k] = 'math.' + v
        if self.use_python_cmath:
            for s in self.cmath_functions_same:
                dict_fun[s] = 'cmath.' + s
            for k, v in self.cmath_functions_different.items():
                dict_fun[k] = 'cmath.' + v
        if self.use_interval:
            for s in self.interval_functions_same:
                dict_fun[s] = 'imath.' + s
            for k, v in self.interval_functions_different.items():
                dict_fun[k] = 'imath.' + v
        return dict_fun

    ##############################################################################
    # The translator functions, tree parsers, etc.
    ##############################################################################

    def str2tree(self, exprstr):
        """Converts an expression string to a tree.

        Functions are represented by ('func_name(', tree_of_arguments).
        Other expressions are (head_string, mid_tree, tail_str).
        Expressions that do not contain functions are directly returned.

        Examples
        ========

        >>> from sympy.abc import x, y, z
        >>> from sympy import Integral, sin
        >>> from sympy.plotting.experimental_lambdify import Lambdifier
        >>> str2tree = Lambdifier([x], x).str2tree

        >>> str2tree(str(Integral(x, (x, 1, y))))
        ('', ('Integral(', 'x, (x, 1, y)'), ')')
        >>> str2tree(str(x+y))
        'x + y'
        >>> str2tree(str(x+y*sin(z)+1))
        ('x + y*', ('sin(', 'z'), ') + 1')
        >>> str2tree('sin(y*(y + 1.1) + (sin(y)))')
        ('', ('sin(', ('y*(y + 1.1) + (', ('sin(', 'y'), '))')), ')')
        """
        #matches the first 'function_name('
        first_par = re.search(r'(\w+\()', exprstr)
        if first_par is None:
            return exprstr
        else:
            start = first_par.start()
            end = first_par.end()
            head = exprstr[:start]
            func = exprstr[start:end]
            tail = exprstr[end:]
            count = 0
            # Scan for the parenthesis that closes this function call:
            # count goes to -1 at the ')' matching the '(' in `func`.
            for i, c in enumerate(tail):
                if c == '(':
                    count += 1
                elif c == ')':
                    count -= 1
                if count == -1:
                    break
            func_tail = self.str2tree(tail[:i])
            tail = self.str2tree(tail[i:])
            return (head, (func, func_tail), tail)

    @classmethod
    def tree2str(cls, tree):
        """Converts a tree to string without translations.

        Examples
        ========

        >>> from sympy.abc import x, y, z
        >>> from sympy import Integral, sin
        >>> from sympy.plotting.experimental_lambdify import Lambdifier
        >>> str2tree = Lambdifier([x], x).str2tree
        >>> tree2str = Lambdifier([x], x).tree2str

        >>> tree2str(str2tree(str(x+y*sin(z)+1)))
        'x + y*sin(z) + 1'
        """
        if isinstance(tree, str):
            return tree
        else:
            return ''.join(map(cls.tree2str, tree))

    def tree2str_translate(self, tree):
        """Converts a tree to string with translations.

        Function names are translated by translate_func.
        Other strings are translated by translate_str.
        """
        if isinstance(tree, str):
            return self.translate_str(tree)
        elif isinstance(tree, tuple) and len(tree) == 2:
            # (func_name_with_paren, argument_subtree); drop the trailing '('.
            return self.translate_func(tree[0][:-1], tree[1])
        else:
            return ''.join([self.tree2str_translate(t) for t in tree])

    def translate_str(self, estr):
        """Translate substrings of estr using in order the dictionaries in
        dict_tuple_str."""
        for pattern, repl in self.dict_str.items():
            estr = re.sub(pattern, repl, estr)
        return estr

    def translate_func(self, func_name, argtree):
        """Translate function names and the tree of arguments.

        If the function name is not in the dictionaries of dict_tuple_fun then the
        function is surrounded by a float((...).evalf()).

        The use of float is necessary as np.<function>(sympy.Float(..)) raises an
        error."""
        if func_name in self.dict_fun:
            new_name = self.dict_fun[func_name]
            argstr = self.tree2str_translate(argtree)
            return new_name + '(' + argstr
        else:
            # Untranslatable function: keep it as a sympy call, evaluated
            # through evalf and optionally wrapped in float()/complex().
            template = '(%s(%s)).evalf(' if self.use_evalf else '%s(%s'
            if self.float_wrap_evalf:
                template = 'float(%s)' % template
            elif self.complex_wrap_evalf:
                template = 'complex(%s)' % template

            # Wrapping should only happen on the outermost expression, which
            # is the only thing we know will be a number.
            float_wrap_evalf = self.float_wrap_evalf
            complex_wrap_evalf = self.complex_wrap_evalf
            self.float_wrap_evalf = False
            self.complex_wrap_evalf = False
            ret = template % (func_name, self.tree2str_translate(argtree))
            self.float_wrap_evalf = float_wrap_evalf
            self.complex_wrap_evalf = complex_wrap_evalf
            return ret

    ##############################################################################
    # The namespace constructors
    ##############################################################################

    @classmethod
    def sympy_expression_namespace(cls, expr):
        """Traverses the (func, args) tree of an expression and creates a sympy
        namespace. All other modules are imported only as a module name. That way
        the namespace is not polluted and rests quite small. It probably causes much
        more variable lookups and so it takes more time, but there are no tests on
        that for the moment."""
        if expr is None:
            return {}
        else:
            funcname = str(expr.func)
            # XXX Workaround
            # Here we add an ugly workaround because str(func(x))
            # is not always the same as str(func). Eg
            # >>> str(Integral(x))
            # "Integral(x)"
            # >>> str(Integral)
            # "<class 'sympy.integrals.integrals.Integral'>"
            # >>> str(sqrt(x))
            # "sqrt(x)"
            # >>> str(sqrt)
            # "<function sqrt at 0x3d92de8>"
            # >>> str(sin(x))
            # "sin(x)"
            # >>> str(sin)
            # "sin"
            # Either one of those can be used but not all at the same time.
            # The code considers the sin example as the right one.
            regexlist = [
                r'<class \'sympy[\w.]*?.([\w]*)\'>$',
                # the example Integral
                r'<function ([\w]*) at 0x[\w]*>$',  # the example sqrt
            ]
            for r in regexlist:
                m = re.match(r, funcname)
                if m is not None:
                    funcname = m.groups()[0]
            # End of the workaround
            # XXX debug: print funcname
            args_dict = {}
            for a in expr.args:
                if (isinstance(a, Symbol) or
                    isinstance(a, NumberSymbol) or
                        a in [I, zoo, oo]):
                    # Atoms are handled by sympy_atoms_namespace.
                    continue
                else:
                    args_dict.update(cls.sympy_expression_namespace(a))
            args_dict.update({funcname: expr.func})
            return args_dict

    @staticmethod
    def sympy_atoms_namespace(expr):
        """For no real reason this function is separated from
        sympy_expression_namespace. It can be moved to it."""
        atoms = expr.atoms(Symbol, NumberSymbol, I, zoo, oo)
        d = {}
        for a in atoms:
            # XXX debug: print 'atom:' + str(a)
            d[str(a)] = a
        return d
| wxgeo/geophar | wxgeometrie/sympy/plotting/experimental_lambdify.py | Python | gpl-2.0 | 26,133 | 0.001378 |
# -*- coding: utf-8 -*-
import os
import sys
import random
import time
import json
import wikiquote
import tuitear
from threading import Thread
# Default bots-configuration filename, looked up next to this script.
# NOTE(review): "CONGIG" is a typo of "CONFIG"; it is also referenced in
# main(), so it is only flagged here rather than renamed.
CONGIG_JSON = 'bots.json'
# Local polling step in seconds; the actual tweet interval comes from each
# bot's "intervalo" value in the configuration file.
INTERVALO = 1
# Global flag checked by the bot threads; set to True to stop them.
stop = False
def start_bot(bot):
    """Thread body: run the bot described by the configuration dict `bot`.

    Loads all quotes from the bot's wikiquote pages up front, then tweets a
    random one every `bot['intervalo']` seconds until the global `stop`
    flag is set.
    """
    # Preload (quote, page) pairs from every configured wikiquote page.
    citas = []
    for pagina in bot['paginas']:
        print 'Cargando', pagina
        quotes = wikiquote.get_quotes(pagina.encode('utf8'))
        quotes = [(q, pagina) for q in quotes]
        citas += quotes
    tiempo = 0
    while not stop:
        if tiempo >= bot['intervalo']:
            quote, pagina = random.choice(citas)
            # Fill the bot's format template with the page name and quote
            # (all utf-8 encoded byte strings, Python 2 style).
            tweet = bot['format'].encode('utf8') % dict(pagina = \
                pagina.encode('utf8'), frase = quote.encode('utf8'))
            if len(tweet) > 138:
                # Too long for a tweet: skip it. NOTE(review): `tiempo` is
                # not reset here, so another quote is tried on the next tick.
                #print 'tweet largo'
                continue
            print "%s: %s" % (bot['name'], tweet.decode('utf8'))
            tuitear.tuitear(tweet, bot['consumer_key'], bot['consumer_secret'],
                bot['access_token'], bot['access_token_secret'])
            tiempo = 0
        tiempo += INTERVALO
        time.sleep(INTERVALO)
    print 'Thread para', bot['name'], 'detenido'
def main():
path = os.path.dirname(__file__)
if len(sys.argv) == 2:
filename = sys.argv[1]
else:
filename = os.path.join(path, CONGIG_JSON)
print 'Cargando bots en', filename
j = json.load(file(filename))
for bot in j['bots']:
if bot.get('disabled'):
continue
thread = Thread(target = start_bot, args=[bot])
thread.daemon = True
thread.start()
print 'Thread para', bot['name'], 'iniciado'
while True:
# Para que no terminen los hilos
pass
# Run the bot launcher only when executed as a script, not on import.
if __name__ == '__main__':
    main()
| sh4r3m4n/twitter-wikiquote-bot | bot.py | Python | gpl-3.0 | 1,879 | 0.009052 |
#!/usr/bin/env python
#############################################################################
##
## Copyright (C) 2010 Hans-Peter Jansen <hpj@urpla.net>.
## Copyright (C) 2010 Nokia Corporation and/or its subsidiary(-ies).
## All rights reserved.
##
## This file is part of the examples of PyQt.
##
## $QT_BEGIN_LICENSE:BSD$
## You may use this file under the terms of the BSD license as follows:
##
## "Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are
## met:
## * Redistributions of source code must retain the above copyright
## notice, this list of conditions and the following disclaimer.
## * Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in
## the documentation and/or other materials provided with the
## distribution.
## * Neither the name of Nokia Corporation and its Subsidiary(-ies) nor
## the names of its contributors may be used to endorse or promote
## products derived from this software without specific prior written
## permission.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
## "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
## LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
## A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
## OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
## SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
## LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
## DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
## THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
## (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
## OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
## $QT_END_LICENSE$
##
#############################################################################
# This is only needed for Python v2 but is harmless for Python v3.
import sip
sip.setapi('QString', 2)
from PyQt4 import QtCore, QtGui, QtSvg
import delayedencoding_rc
class MimeData(QtCore.QMimeData):
    """QMimeData subclass supporting delayed (lazy) encoding.

    It advertises 'image/png' as an available format and emits
    `dataRequested` the moment a drop target actually asks for data, so the
    owner can render/encode the payload just-in-time via setData().
    """

    # Emitted with the requested mime type when a consumer calls
    # retrieveData(); connected by SourceWidget.startDrag().
    dataRequested = QtCore.pyqtSignal(str)

    def formats(self):
        # Advertise PNG in addition to whatever formats are already stored.
        formats = QtCore.QMimeData.formats(self)
        formats.append('image/png')
        return formats

    def retrieveData(self, mimeType, qvtype):
        # Give the owner a chance to encode the data now, then defer to the
        # base implementation, which returns whatever setData() stored.
        self.dataRequested.emit(mimeType)

        return QtCore.QMimeData.retrieveData(self, mimeType, qvtype)
class SourceWidget(QtGui.QWidget):
    """Widget showing an SVG drawing that can be dragged out as a PNG.

    The PNG is not rendered up front: it is produced in createData() only
    when the drop target requests 'image/png' (delayed encoding).
    """

    def __init__(self, parent=None):
        super(SourceWidget, self).__init__(parent)

        self.mimeData = None

        # Load the raw SVG bytes from the compiled Qt resource file.
        imageFile = QtCore.QFile(':/images/example.svg')
        imageFile.open(QtCore.QIODevice.ReadOnly)
        self.imageData = imageFile.readAll()
        imageFile.close()

        imageArea = QtGui.QScrollArea()

        self.imageLabel = QtSvg.QSvgWidget()
        self.imageLabel.renderer().load(self.imageData)
        imageArea.setWidget(self.imageLabel)

        instructTopLabel = QtGui.QLabel("This is an SVG drawing:")
        instructBottomLabel = QtGui.QLabel("Drag the icon to copy the drawing as a PNG file:")

        dragIcon = QtGui.QPushButton("Export")
        dragIcon.setIcon(QtGui.QIcon(':/images/drag.png'))
        dragIcon.pressed.connect(self.startDrag)

        layout = QtGui.QGridLayout()
        layout.addWidget(instructTopLabel, 0, 0, 1, 2)
        layout.addWidget(imageArea, 1, 0, 2, 2)
        layout.addWidget(instructBottomLabel, 3, 0)
        layout.addWidget(dragIcon, 3, 1)

        self.setLayout(layout)
        self.setWindowTitle("Delayed Encoding")

    def createData(self, mimeType):
        """Render the SVG into PNG bytes and store them on the mime data.

        Connected to MimeData.dataRequested, so this runs only when the drop
        target first asks for the 'image/png' format.
        """
        if mimeType != 'image/png':
            return

        # Paint the SVG widget's current contents into an in-memory image.
        image = QtGui.QImage(self.imageLabel.size(), QtGui.QImage.Format_RGB32)
        painter = QtGui.QPainter()
        painter.begin(image)
        self.imageLabel.renderer().render(painter)
        painter.end()

        # Serialize the image as PNG into a QByteArray via a QBuffer.
        data = QtCore.QByteArray()
        buffer = QtCore.QBuffer(data)
        buffer.open(QtCore.QIODevice.WriteOnly)
        image.save(buffer, 'PNG')
        buffer.close()

        self.mimeData.setData('image/png', data)

    def startDrag(self):
        """Begin a copy drag whose PNG payload is encoded on demand."""
        self.mimeData = MimeData()
        # DirectConnection: createData must run synchronously while the drop
        # target is blocking inside MimeData.retrieveData().
        self.mimeData.dataRequested.connect(self.createData, QtCore.Qt.DirectConnection)

        drag = QtGui.QDrag(self)
        drag.setMimeData(self.mimeData)
        drag.setPixmap(QtGui.QPixmap(':/images/drag.png'))

        drag.exec_(QtCore.Qt.CopyAction)
# Standard Qt entry point: create the application, show the source widget,
# and hand control to the Qt event loop.
if __name__ == '__main__':

    import sys

    app = QtGui.QApplication(sys.argv)
    window = SourceWidget()
    window.show()
    sys.exit(app.exec_())
| Khilo84/PyQt4 | examples/draganddrop/delayedencoding/delayedencoding.py | Python | gpl-2.0 | 4,777 | 0.007327 |
from amqpstorm.management import ManagementApi
from amqpstorm.message import Message
from amqpstorm.tests import HTTP_URL
from amqpstorm.tests import PASSWORD
from amqpstorm.tests import USERNAME
from amqpstorm.tests.utility import TestFunctionalFramework
from amqpstorm.tests.utility import setup
class ApiBasicFunctionalTests(TestFunctionalFramework):
    """Functional tests for the management API's 'basic' operations
    (publish/get) against a live RabbitMQ management endpoint.

    The @setup(queue=True) decorator provides a per-test `self.queue_name`
    (and its cleanup); `self.message` is a test payload from the framework.
    """

    @setup(queue=True)
    def test_api_basic_publish(self):
        api = ManagementApi(HTTP_URL, USERNAME, PASSWORD)
        api.queue.declare(self.queue_name)
        try:
            # {'routed': True} means the broker delivered the message to at
            # least one queue.
            self.assertEqual(api.basic.publish(self.message, self.queue_name),
                             {'routed': True})
        finally:
            api.queue.delete(self.queue_name)

    @setup(queue=True)
    def test_api_basic_get_message(self):
        api = ManagementApi(HTTP_URL, USERNAME, PASSWORD)
        api.queue.declare(self.queue_name)

        self.assertEqual(api.basic.publish(self.message, self.queue_name),
                         {'routed': True})

        # Default get() returns amqpstorm Message objects.
        result = api.basic.get(self.queue_name, requeue=False)
        self.assertIsInstance(result, list)
        self.assertIsInstance(result[0], Message)
        self.assertEqual(result[0].body, self.message)

        # Make sure the message wasn't re-queued.
        self.assertFalse(api.basic.get(self.queue_name, requeue=False))

    @setup(queue=True)
    def test_api_basic_get_message_requeue(self):
        api = ManagementApi(HTTP_URL, USERNAME, PASSWORD)
        api.queue.declare(self.queue_name)

        self.assertEqual(api.basic.publish(self.message, self.queue_name),
                         {'routed': True})

        # Fetch with requeue=True: the message must remain available.
        result = api.basic.get(self.queue_name, requeue=True)
        self.assertIsInstance(result, list)
        self.assertIsInstance(result[0], Message)
        self.assertEqual(result[0].body, self.message)

        # Make sure the message was re-queued.
        self.assertTrue(api.basic.get(self.queue_name, requeue=False))

    @setup(queue=True)
    def test_api_basic_get_message_to_dict(self):
        api = ManagementApi(HTTP_URL, USERNAME, PASSWORD)
        api.queue.declare(self.queue_name)

        self.assertEqual(api.basic.publish(self.message, self.queue_name),
                         {'routed': True})

        # to_dict=True returns the raw management-API payload dicts instead
        # of Message objects.
        result = api.basic.get(self.queue_name, requeue=False, to_dict=True)
        self.assertIsInstance(result, list)
        self.assertIsInstance(result[0], dict)
        self.assertEqual(result[0]['payload'], self.message)
| eandersson/amqp-storm | amqpstorm/tests/functional/management/basic_tests.py | Python | mit | 2,492 | 0 |
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for the `ShuffleAndRepeatFusion` optimization."""
from absl.testing import parameterized
from tensorflow.python.data.experimental.ops import testing
from tensorflow.python.data.kernel_tests import test_base
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.data.ops import options as options_lib
from tensorflow.python.framework import combinations
from tensorflow.python.framework import errors
from tensorflow.python.platform import test
class ShuffleAndRepeatFusionTest(test_base.DatasetTestBase,
                                 parameterized.TestCase):
  """Verifies that `shuffle(...).repeat(...)` is fused into ShuffleAndRepeat."""

  @combinations.generate(test_base.default_test_combinations())
  def testShuffleAndRepeatFusion(self):
    fused_op_name = "ShuffleAndRepeat"
    dataset = dataset_ops.Dataset.range(10).apply(
        testing.assert_next([fused_op_name])).shuffle(10).repeat(2)

    # Disable the default rewrites so only the fusion under test is applied.
    options = options_lib.Options()
    options.experimental_optimization.apply_default_optimizations = False
    options.experimental_optimization.shuffle_and_repeat_fusion = True
    dataset = dataset.with_options(options)

    get_next = self.getNext(dataset)
    for _ in range(2):
      epoch_values = [self.evaluate(get_next()) for _ in range(10)]
      # Each epoch yields a permutation of 0..9.
      self.assertAllEqual(list(range(10)), sorted(epoch_values))
    # Both epochs are exhausted; further pulls must raise OutOfRangeError.
    with self.assertRaises(errors.OutOfRangeError):
      self.evaluate(get_next())
    with self.assertRaises(errors.OutOfRangeError):
      self.evaluate(get_next())
# Run the test suite when the module is executed as a script.
if __name__ == "__main__":
  test.main()
| tensorflow/tensorflow | tensorflow/python/data/experimental/kernel_tests/optimization/shuffle_and_repeat_fusion_test.py | Python | apache-2.0 | 2,195 | 0.003645 |
# coding=utf-8
# Copyright 2022 The TensorFlow Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Gigaword summarization dataset."""
import os
import tensorflow as tf
import tensorflow_datasets.public_api as tfds
_CITATION = """
@article{graff2003english,
title={English gigaword},
author={Graff, David and Kong, Junbo and Chen, Ke and Maeda, Kazuaki},
journal={Linguistic Data Consortium, Philadelphia},
volume={4},
number={1},
pages={34},
year={2003}
}
@article{Rush_2015,
title={A Neural Attention Model for Abstractive Sentence Summarization},
url={http://dx.doi.org/10.18653/v1/D15-1044},
DOI={10.18653/v1/d15-1044},
journal={Proceedings of the 2015 Conference on Empirical Methods in Natural Language Processing},
publisher={Association for Computational Linguistics},
author={Rush, Alexander M. and Chopra, Sumit and Weston, Jason},
year={2015}
}
"""
_DESCRIPTION = """
Headline-generation on a corpus of article pairs from Gigaword consisting of
around 4 million articles. Use the 'org_data' provided by
https://github.com/microsoft/unilm/ which is identical to
https://github.com/harvardnlp/sent-summary but with better format.
There are two features:
- document: article.
- summary: headline.
"""
_URL = "https://drive.google.com/uc?export=download&id=1USoQ8lJgN8kAWnUnRrupMGrPMLlDVqlV"
_DOCUMENT = "document"
_SUMMARY = "summary"
class Gigaword(tfds.core.GeneratorBasedBuilder):
  """Gigaword summarization dataset."""

  # 1.0.0 contains a bug that uses validation data as training data.
  # 1.1.0 Update to the correct train, validation and test data.
  # 1.2.0 Replace <unk> with <UNK> in train/val to be consistent with test.
  VERSION = tfds.core.Version("1.2.0")

  def _info(self):
    """Returns the dataset metadata (features, supervised keys, citation)."""
    features = tfds.features.FeaturesDict({
        _DOCUMENT: tfds.features.Text(),
        _SUMMARY: tfds.features.Text()
    })
    return tfds.core.DatasetInfo(
        builder=self,
        description=_DESCRIPTION,
        features=features,
        supervised_keys=(_DOCUMENT, _SUMMARY),
        homepage="https://github.com/harvardnlp/sent-summary",
        citation=_CITATION,
    )

  def _split_generators(self, dl_manager):
    """Returns SplitGenerators for train, validation and test."""
    dl_path = dl_manager.download_and_extract(_URL)
    pattern = os.path.join(dl_path, "org_data", "%s.%s.txt")

    def _make_split(split_name, prefix, replace_unk):
      # Test data already uses plain "UNK", so it is not rewritten.
      return tfds.core.SplitGenerator(
          name=split_name,
          gen_kwargs={
              "src_path": pattern % (prefix, "src"),
              "tgt_path": pattern % (prefix, "tgt"),
              "replace_unk": replace_unk,
          },
      )

    return [
        _make_split(tfds.Split.TRAIN, "train", True),
        _make_split(tfds.Split.VALIDATION, "dev", True),
        _make_split(tfds.Split.TEST, "test", False),
    ]

  def _generate_examples(self, src_path=None, tgt_path=None, replace_unk=None):
    """Yields (index, example) pairs from parallel source/target files."""
    with tf.io.gfile.GFile(src_path) as f_d, tf.io.gfile.GFile(tgt_path) as f_s:
      for i, (doc_text, sum_text) in enumerate(zip(f_d, f_s)):
        document = doc_text.strip()
        summary = sum_text.strip()
        if replace_unk:
          document = document.replace("<unk>", "UNK")
          summary = summary.replace("<unk>", "UNK")
        yield i, {_DOCUMENT: document, _SUMMARY: summary}
| tensorflow/datasets | tensorflow_datasets/summarization/gigaword.py | Python | apache-2.0 | 4,196 | 0.003337 |
#!/usr/bin/env python
from ciscoconfparse import CiscoConfParse
print "We will use this program to parse a cisco config file"
filename = raw_input("Please enter the name of the file that needs to be parsed: ")
#print filename
input_file = CiscoConfParse(filename)
crypto_find = input_file.find_objects_w_child(parentspec=r"^crypto map CRYPTO", childspec=r"pfs group2")
#print crypto_find
for item in crypto_find:
print item.text
| networkpadwan/appliedpython | week1/parse2.py | Python | apache-2.0 | 435 | 0.009195 |
# Copyright 2017 Huawei Technologies Co.,LTD.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Cyborg base exception handling.
SHOULD include dedicated exception logging.
"""
from oslo_log import log
import six
from six.moves import http_client
from cyborg.common.i18n import _
from cyborg.conf import CONF
LOG = log.getLogger(__name__)
class CyborgException(Exception):
    """Base Cyborg Exception

    To correctly use this class, inherit from it and define
    a '_msg_fmt' property. That message will get printf'd
    with the keyword arguments provided to the constructor.

    If you need to access the message from an exception you should use
    six.text_type(exc)
    """
    _msg_fmt = _("An unknown exception occurred.")
    code = http_client.INTERNAL_SERVER_ERROR
    headers = {}
    safe = False

    def __init__(self, message=None, **kwargs):
        self.kwargs = kwargs

        # Expose the HTTP status code to the format string even when the
        # caller did not pass one explicitly.
        if 'code' not in self.kwargs:
            try:
                self.kwargs['code'] = self.code
            except AttributeError:
                pass

        if not message:
            try:
                message = self._msg_fmt % kwargs
            except Exception:
                # kwargs doesn't match a variable in self._msg_fmt;
                # log the issue and the kwargs.
                LOG.exception('Exception in string format operation')
                for name, value in kwargs.items():
                    LOG.error("%s: %s" % (name, value))

                if CONF.fatal_exception_format_errors:
                    raise
                else:
                    # At least get the core self._msg_fmt out if something
                    # happened.
                    message = self._msg_fmt

        super(CyborgException, self).__init__(message)

    def __str__(self):
        """Encode to utf-8 then wsme api can consume it as well."""
        if not six.PY3:
            # six.text_type is `unicode` on Python 2; the bare `unicode`
            # builtin used previously does not exist on Python 3.
            return six.text_type(self.args[0]).encode('utf-8')
        return self.args[0]

    def __unicode__(self):
        """Return a unicode representation of the exception message."""
        # Only invoked on Python 2, but use six.text_type so the method is
        # also importable/callable without error on Python 3.
        return six.text_type(self.args[0])
class AttachHandleAlreadyExists(CyborgException):
_msg_fmt = _("AttachHandle with uuid %(uuid)s already exists.")
class ControlpathIDAlreadyExists(CyborgException):
_msg_fmt = _("ControlpathID with uuid %(uuid)s already exists.")
class ConfigInvalid(CyborgException):
_msg_fmt = _("Invalid configuration file. %(error_msg)s")
class DeviceAlreadyExists(CyborgException):
_msg_fmt = _("Device with uuid %(uuid)s already exists.")
class DeviceProfileAlreadyExists(CyborgException):
_msg_fmt = _("DeviceProfile with uuid %(uuid)s already exists.")
class DeployableAlreadyExists(CyborgException):
_msg_fmt = _("Deployable with uuid %(uuid)s already exists.")
class ExtArqAlreadyExists(CyborgException):
_msg_fmt = _("ExtArq with uuid %(uuid)s already exists.")
class Invalid(CyborgException):
_msg_fmt = _("Invalid parameters.")
code = http_client.BAD_REQUEST
class InvalidIdentity(Invalid):
_msg_fmt = _("Expected a uuid/id but received %(identity)s.")
class InvalidUUID(Invalid):
_msg_fmt = _("Expected a uuid but received %(uuid)s.")
class InvalidJsonType(Invalid):
_msg_fmt = _("%(value)s is not JSON serializable.")
# Cannot be templated as the error syntax varies.
# msg needs to be constructed when raised.
class InvalidParameterValue(Invalid):
_msg_fmt = _("%(err)s")
class PatchError(Invalid):
_msg_fmt = _("Couldn't apply patch '%(patch)s'. Reason: %(reason)s")
class NotAuthorized(CyborgException):
_msg_fmt = _("Not authorized.")
code = http_client.FORBIDDEN
class HTTPForbidden(NotAuthorized):
_msg_fmt = _("Access was denied to the following resource: %(resource)s")
class NotFound(CyborgException):
_msg_fmt = _("Resource could not be found.")
code = http_client.NOT_FOUND
class ServiceNotFound(NotFound):
    # Use _msg_fmt (not msg_fmt): CyborgException.__init__ interpolates
    # self._msg_fmt, so a bare msg_fmt attribute was never formatted and the
    # inherited generic message would have been shown instead.
    _msg_fmt = _("Service %(service_id)s could not be found.")
class AttachHandleNotFound(NotFound):
_msg_fmt = _("AttachHandle %(uuid)s could not be found.")
class ControlpathIDNotFound(NotFound):
_msg_fmt = _("ControlpathID %(uuid)s could not be found.")
class ConfGroupForServiceTypeNotFound(ServiceNotFound):
    # _msg_fmt (not msg_fmt) so CyborgException.__init__ actually formats it.
    _msg_fmt = _("No conf group name could be found for service type "
                 "%(stype)s.")
class DeviceNotFound(NotFound):
_msg_fmt = _("Device %(uuid)s could not be found.")
class DeviceProfileNotFound(NotFound):
_msg_fmt = _("DeviceProfile %(uuid)s could not be found.")
class DeployableNotFound(NotFound):
_msg_fmt = _("Deployable %(uuid)s could not be found.")
class ExtArqNotFound(NotFound):
_msg_fmt = _("ExtArq %(uuid)s could not be found.")
class InvalidDeployType(CyborgException):
_msg_fmt = _("Deployable have an invalid type")
class Conflict(CyborgException):
_msg_fmt = _('Conflict.')
code = http_client.CONFLICT
class DuplicateDeviceName(Conflict):
_msg_fmt = _("A device with name %(name)s already exists.")
class DuplicateDeviceProfileName(Conflict):
_msg_fmt = _("A device_profile with name %(name)s already exists.")
class DuplicateDeployableName(Conflict):
_msg_fmt = _("A deployable with name %(name)s already exists.")
class PlacementEndpointNotFound(NotFound):
message = _("Placement API endpoint not found")
class PlacementResourceProviderNotFound(NotFound):
message = _("Placement resource provider not found %(resource_provider)s.")
class PlacementInventoryNotFound(NotFound):
message = _("Placement inventory not found for resource provider "
"%(resource_provider)s, resource class %(resource_class)s.")
class PlacementInventoryUpdateConflict(Conflict):
message = _("Placement inventory update conflict for resource provider "
"%(resource_provider)s, resource class %(resource_class)s.")
class ObjectActionError(CyborgException):
_msg_fmt = _('Object action %(action)s failed because: %(reason)s')
class AttributeNotFound(NotFound):
_msg_fmt = _("Attribute %(uuid)s could not be found.")
class AttributeInvalid(CyborgException):
_msg_fmt = _("Attribute is invalid")
class AttributeAlreadyExists(CyborgException):
_msg_fmt = _("Attribute with uuid %(uuid)s already exists.")
# An exception with this name is used on both sides of the placement/
# cyborg interaction.
class ResourceProviderInUse(CyborgException):
msg_fmt = _("Resource provider has allocations.")
class ResourceProviderRetrievalFailed(CyborgException):
msg_fmt = _("Failed to get resource provider with UUID %(uuid)s")
class ResourceProviderAggregateRetrievalFailed(CyborgException):
msg_fmt = _("Failed to get aggregates for resource provider with UUID"
" %(uuid)s")
class ResourceProviderTraitRetrievalFailed(CyborgException):
msg_fmt = _("Failed to get traits for resource provider with UUID"
" %(uuid)s")
class ResourceProviderCreationFailed(CyborgException):
msg_fmt = _("Failed to create resource provider %(name)s")
class ResourceProviderDeletionFailed(CyborgException):
msg_fmt = _("Failed to delete resource provider %(uuid)s")
class ResourceProviderUpdateFailed(CyborgException):
msg_fmt = _("Failed to update resource provider via URL %(url)s: "
"%(error)s")
class ResourceProviderNotFound(NotFound):
msg_fmt = _("No such resource provider %(name_or_uuid)s.")
class ResourceProviderSyncFailed(CyborgException):
msg_fmt = _("Failed to synchronize the placement service with resource "
"provider information supplied by the compute host.")
class PlacementAPIConnectFailure(CyborgException):
msg_fmt = _("Unable to communicate with the Placement API.")
class PlacementAPIConflict(CyborgException):
"""Any 409 error from placement APIs should use (a subclass of) this
exception.
"""
msg_fmt = _("A conflict was encountered attempting to invoke the "
"placement API at URL %(url)s: %(error)s")
class ResourceProviderUpdateConflict(PlacementAPIConflict):
"""A 409 caused by generation mismatch from attempting to update an
existing provider record or its associated data (aggregates, traits, etc.).
"""
msg_fmt = _("A conflict was encountered attempting to update resource "
"provider %(uuid)s (generation %(generation)d): %(error)s")
class InvalidResourceClass(Invalid):
msg_fmt = _("Resource class '%(resource_class)s' invalid.")
class InvalidResourceAmount(Invalid):
msg_fmt = _("Resource amounts must be integers. Received '%(amount)s'.")
class InvalidInventory(Invalid):
msg_fmt = _("Inventory for '%(resource_class)s' on "
"resource provider '%(resource_provider)s' invalid.")
# An exception with this name is used on both sides of the placement/
# cyborg interaction.
class InventoryInUse(InvalidInventory):
# NOTE(mriedem): This message cannot change without impacting the
# cyborg.services.client.report._RE_INV_IN_USE regex.
msg_fmt = _("Inventory for '%(resource_classes)s' on "
"resource provider '%(resource_provider)s' in use.")
class QuotaNotFound(NotFound):
    # _msg_fmt (not message): the base class formats self._msg_fmt, so the
    # previous `message` attribute was never used when raising.
    _msg_fmt = _("Quota could not be found")
class QuotaUsageNotFound(QuotaNotFound):
    # _msg_fmt (not message) so %(project_id)s is actually interpolated by
    # CyborgException.__init__.
    _msg_fmt = _("Quota usage for project %(project_id)s could not be found.")
class QuotaResourceUnknown(QuotaNotFound):
    # _msg_fmt (not message) so %(unknown)s is actually interpolated by
    # CyborgException.__init__.
    _msg_fmt = _("Unknown quota resources %(unknown)s.")
class InvalidReservationExpiration(Invalid):
message = _("Invalid reservation expiration %(expire)s.")
class GlanceConnectionFailed(CyborgException):
msg_fmt = _("Connection to glance host %(server)s failed: "
"%(reason)s")
class ImageUnacceptable(Invalid):
msg_fmt = _("Image %(image_id)s is unacceptable: %(reason)s")
class ImageNotAuthorized(CyborgException):
msg_fmt = _("Not authorized for image %(image_id)s.")
class ImageNotFound(NotFound):
msg_fmt = _("Image %(image_id)s could not be found.")
class ImageBadRequest(Invalid):
msg_fmt = _("Request of image %(image_id)s got BadRequest response: "
"%(response)s")
class InvalidDriver(Invalid):
_msg_fmt = _("Found an invalid driver: %(name)s")
| openstack/nomad | cyborg/common/exception.py | Python | apache-2.0 | 10,839 | 0 |
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Resource management library."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os as _os
import sys as _sys
from tensorflow.python.util import tf_inspect as _inspect
from tensorflow.python.util.tf_export import tf_export
# pylint: disable=g-import-not-at-top
try:
from rules_python.python.runfiles import runfiles
except ImportError:
runfiles = None
# pylint: enable=g-import-not-at-top
@tf_export(v1=['resource_loader.load_resource'])
def load_resource(path):
  """Load the resource at given path, where path is relative to tensorflow/.

  Args:
    path: a string resource path relative to tensorflow/.

  Returns:
    The contents of that resource, as bytes.

  Raises:
    IOError: If the path is not found, or the resource can't be opened.
  """
  resource_path = get_path_to_datafile(path)
  with open(resource_path, 'rb') as resource_file:
    return resource_file.read()
# pylint: disable=protected-access
@tf_export(v1=['resource_loader.get_data_files_path'])
def get_data_files_path():
  """Get a direct path to the data files colocated with the script.

  Returns:
    The directory where files specified in data attribute of py_test
    and py_binary are stored.
  """
  # _sys._getframe(1) is the *caller's* frame, so the result is relative to
  # the module that invoked this function, not to this file. Do not insert
  # intermediate wrappers here or the frame depth changes.
  return _os.path.dirname(_inspect.getfile(_sys._getframe(1)))
@tf_export(v1=['resource_loader.get_root_dir_with_all_resources'])
def get_root_dir_with_all_resources():
  """Get a root directory containing all the data attributes in the build rule.

  Returns:
    The path to the specified file present in the data attribute of py_test
    or py_binary. Falls back to returning the same as get_data_files_path if it
    fails to detect a bazel runfiles directory.
  """
  script_dir = get_data_files_path()
  # Create a history of the paths, because the data files are located relative
  # to the repository root directory, which is directly under runfiles
  # directory.
  directories = [script_dir]
  data_files_dir = ''
  # Walk upward from the script directory until a "*.runfiles" directory is
  # found (or the filesystem root is reached).
  while True:
    candidate_dir = directories[-1]
    current_directory = _os.path.basename(candidate_dir)
    if '.runfiles' in current_directory:
      # Our file should never be directly under runfiles.
      # If the history has only one item, it means we are directly inside the
      # runfiles directory, something is wrong, fall back to the default return
      # value, script directory.
      if len(directories) > 1:
        data_files_dir = directories[-2]
      break
    else:
      new_candidate_dir = _os.path.dirname(candidate_dir)
      # If we are at the root directory these two will be the same.
      if new_candidate_dir == candidate_dir:
        break
      else:
        directories.append(new_candidate_dir)
  return data_files_dir or script_dir
@tf_export(v1=['resource_loader.get_path_to_datafile'])
def get_path_to_datafile(path):
  """Get the path to the specified file in the data dependencies.

  The path is relative to tensorflow/

  Args:
    path: a string resource path relative to tensorflow/

  Returns:
    The path to the specified file present in the data attribute of py_test
    or py_binary.

  Raises:
    IOError: If the path is not found, or the resource can't be opened.
  """
  # First, try finding in the new path (bazel runfiles, when available).
  if runfiles:
    r = runfiles.Create()
    new_fpath = r.Rlocation(
        _os.path.abspath(_os.path.join('tensorflow', path)))
    if new_fpath is not None and _os.path.exists(new_fpath):
      return new_fpath

  # Then, the old style path, as people became dependent on this buggy call.
  # NOTE: _sys._getframe(1) resolves relative to the *caller's* module, so
  # wrapping this function would change its behavior.
  old_filepath = _os.path.join(
      _os.path.dirname(_inspect.getfile(_sys._getframe(1))), path)
  return old_filepath
@tf_export(v1=['resource_loader.readahead_file_path'])
def readahead_file_path(path, readahead='128M'):  # pylint: disable=unused-argument
  """Readahead files not implemented; simply returns given path.

  The `readahead` argument is accepted for API compatibility only.
  """
  return path
| karllessard/tensorflow | tensorflow/python/platform/resource_loader.py | Python | apache-2.0 | 4,522 | 0.00774 |
from mock import Mock, patch, MagicMock
from lxc4u.meta import *
def test_initialize_lxc_meta():
    """LXCMeta behaves like a dict, both when empty and with initial data."""
    empty_meta = LXCMeta()
    empty_meta['hello'] = 'hello'
    preloaded_meta = LXCMeta(initial=dict(hello=123))

    assert empty_meta['hello'] == 'hello'
    assert preloaded_meta['hello'] == 123
# patch decorators apply bottom-up, so the mock arguments arrive in reverse
# decorator order: os.path.exists, json.loads, then __builtin__.open.
@patch('__builtin__.open')
@patch('json.loads')
@patch('os.path.exists')
def test_meta_load_from_file(mock_exists, mock_loads, mock_open):
    """load_from_file returns an LXCMeta instance when the file exists."""
    # Setup Mocks
    mock_exists.return_value = True
    mock_loads.return_value = {}
    fake_path = 'path'
    # Run Test
    meta = LXCMeta.load_from_file(fake_path)
    # Assertions
    assert isinstance(meta, LXCMeta) == True
# patch decorators apply bottom-up; argument order mirrors that (see above
# pattern: exists, loads, open).
@patch('__builtin__.open')
@patch('json.loads')
@patch('os.path.exists')
def test_meta_load_from_file_with_no_file(mock_exists, mock_loads, mock_open):
    """When the metadata file is missing, no JSON parsing should happen."""
    mock_exists.return_value = False
    fake_path = 'path'
    # Run Test
    meta = LXCMeta.load_from_file(fake_path)
    assert mock_loads.called == False, "Mock json was called for some reason"
class TestLXCMeta(object):
    """Tests for LXCMeta conversion and binding behaviour."""
    def setup(self):
        # Fresh metadata and LXCMeta instance before each test.
        metadata = dict(a=1, b=2, c=3, d='delta')
        self.metadata = metadata
        self.meta = LXCMeta(initial=metadata)
    def test_as_dict(self):
        """as_dict() round-trips the initial metadata."""
        assert self.meta.as_dict() == self.metadata
    @patch('lxc4u.meta.BoundLXCMeta')
    def test_bind(self, mock_bound_meta_cls):
        """bind() delegates to BoundLXCMeta.bind_to_lxc."""
        mock_lxc = Mock()
        self.meta.bind(mock_lxc)
        mock_bound_meta_cls.bind_to_lxc.assert_called_with(mock_lxc, self.meta)
    @patch('lxc4u.meta.BoundLXCMeta')
    def test_bind_and_save(self, mock_bound_meta_cls):
        """bind_and_save() binds and immediately saves the bound meta."""
        self.meta.bind_and_save(None)
        mock_bound_meta_cls.bind_to_lxc.return_value.save.assert_called_with()
def test_initialize_bound_lxc_meta():
    """A bound meta exposes both inherited keys and newly-set keys."""
    initial_meta = dict(a=1, b=2, c=3)
    lxc_stub = Mock()

    bound = BoundLXCMeta.bind_to_lxc(lxc_stub, initial_meta)
    bound['hello'] = 'world'

    assert bound['a'] == 1
    assert bound['hello'] == 'world'
class TestBoundLXCMeta(object):
    """Tests for BoundLXCMeta persistence and delegation."""
    def setup(self):
        # Bind a mocked meta object to a mocked LXC before each test.
        mock_meta = MagicMock()
        mock_lxc = Mock()
        self.bound_meta = BoundLXCMeta.bind_to_lxc(mock_lxc, mock_meta)
        self.mock_meta = mock_meta
        self.mock_lxc = mock_lxc
    @patch('json.dumps', autospec=True)
    @patch('__builtin__.open', autospec=True)
    def test_save(self, mock_open, mock_dumps):
        """save() writes the JSON-dumped metadata to the LXC's path."""
        mock_file = mock_open.return_value
        self.bound_meta.save()
        mock_open.assert_called_with(self.mock_lxc.path.return_value, 'w')
        mock_dumps.assert_called_with(self.mock_meta.as_dict.return_value)
        mock_file.write.assert_called_with(mock_dumps.return_value)
        mock_file.close.assert_called_with()
    def test_as_dict(self):
        """as_dict() delegates to the wrapped meta object."""
        self.bound_meta.as_dict()
        self.mock_meta.as_dict.assert_called_with()
| ravenac95/lxc4u | tests/test_meta.py | Python | mit | 2,809 | 0.001068 |
import xbmc, xbmcaddon, xbmcgui, xbmcplugin,os,sys
import shutil
import urllib2,urllib
import re
import extract
import time
import downloader
import plugintools
import weblogin
import zipfile
import ntpath
ARTPATH = 'http://totalxbmc.tv/totalrevolution/art/' + os.sep
FANART = 'http://totalxbmc.tv/totalrevolution/art/fanart.jpg'
ADDON = xbmcaddon.Addon(id='plugin.program.community.builds')
AddonID = 'plugin.program.community.builds'
AddonTitle = "[COLOR=blue][B]T[/COLOR][COLOR=dodgerblue]R[/COLOR] [COLOR=white]Community Builds[/COLOR][/B]"
zip = ADDON.getSetting('zip')
localcopy = ADDON.getSetting('localcopy')
dialog = xbmcgui.Dialog()
dp = xbmcgui.DialogProgress()
HOME = xbmc.translatePath('special://home/')
USERDATA = xbmc.translatePath(os.path.join('special://home/userdata',''))
MEDIA = xbmc.translatePath(os.path.join('special://home/media',''))
AUTOEXEC = xbmc.translatePath(os.path.join(USERDATA,'autoexec.py'))
AUTOEXECBAK = xbmc.translatePath(os.path.join(USERDATA,'autoexec_bak.py'))
ADDON_DATA = xbmc.translatePath(os.path.join(USERDATA,'addon_data'))
PLAYLISTS = xbmc.translatePath(os.path.join(USERDATA,'playlists'))
PROFILES = xbmc.translatePath(os.path.join(USERDATA,'profiles'))
DATABASE = xbmc.translatePath(os.path.join(USERDATA,'Database'))
ADDONS = xbmc.translatePath(os.path.join('special://home','addons',''))
CBADDONPATH = xbmc.translatePath(os.path.join(ADDONS,AddonID,'default.py'))
GUISETTINGS = os.path.join(USERDATA,'guisettings.xml')
GUI = xbmc.translatePath(os.path.join(USERDATA,'guisettings.xml'))
GUIFIX = xbmc.translatePath(os.path.join(USERDATA,'guifix.xml'))
INSTALL = xbmc.translatePath(os.path.join(USERDATA,'install.xml'))
FAVS = xbmc.translatePath(os.path.join(USERDATA,'favourites.xml'))
SOURCE = xbmc.translatePath(os.path.join(USERDATA,'sources.xml'))
ADVANCED = xbmc.translatePath(os.path.join(USERDATA,'advancedsettings.xml'))
RSS = xbmc.translatePath(os.path.join(USERDATA,'RssFeeds.xml'))
KEYMAPS = xbmc.translatePath(os.path.join(USERDATA,'keymaps','keyboard.xml'))
USB = xbmc.translatePath(os.path.join(zip))
CBPATH = xbmc.translatePath(os.path.join(USB,'Community Builds',''))
cookiepath = xbmc.translatePath(os.path.join(ADDON_DATA,AddonID,'cookiejar'))
startuppath = xbmc.translatePath(os.path.join(ADDON_DATA,AddonID,'startup.xml'))
tempfile = xbmc.translatePath(os.path.join(ADDON_DATA,AddonID,'temp.xml'))
idfile = xbmc.translatePath(os.path.join(ADDON_DATA,AddonID,'id.xml'))
idfiletemp = xbmc.translatePath(os.path.join(ADDON_DATA,AddonID,'idtemp.xml'))
notifyart = xbmc.translatePath(os.path.join(ADDONS,AddonID,'resources/'))
skin = xbmc.getSkinDir()
EXCLUDES = ['plugin.program.community.builds']
username = ADDON.getSetting('username')
password = ADDON.getSetting('password')
login = ADDON.getSetting('login')
userdatafolder = xbmc.translatePath(os.path.join(ADDON_DATA,AddonID))
GUINEW = xbmc.translatePath(os.path.join(userdatafolder,'guinew.xml'))
guitemp = xbmc.translatePath(os.path.join(userdatafolder,'guitemp',''))
factory = xbmc.translatePath(os.path.join(HOME,'..','factory','_DO_NOT_DELETE.txt'))
#-----------------------------------------------------------------------------------------------------------------
#Simple shortcut to create a notification
def Notify(title,message,times,icon):
icon = notifyart+icon
print "icon: "+str(icon)
xbmc.executebuiltin("XBMC.Notification("+title+","+message+","+times+","+icon+")")
#-----------------------------------------------------------------------------------------------------------------
#Popup class - thanks to whoever codes the help popup in TVAddons Maintenance for this section. Unfortunately there doesn't appear to be any author details in that code so unable to credit by name.
class SPLASH(xbmcgui.WindowXMLDialog):
    # Splash popup window. close_time is stored but the visible timing is
    # driven by the skin XML animation -- presumably totalxbmc.xml; confirm.
    def __init__(self,*args,**kwargs): self.shut=kwargs['close_time']; xbmc.executebuiltin("Skin.Reset(AnimeWindowXMLDialogClose)"); xbmc.executebuiltin("Skin.SetBool(AnimeWindowXMLDialogClose)")
    def onFocus(self,controlID): pass
    def onClick(self,controlID):
        # Control id 12 acts as the close button.
        if controlID==12: xbmc.Player().stop(); self._close_dialog()
    def onAction(self,action):
        # Close on common navigation/back action ids or remote button codes.
        if action in [5,6,7,9,10,92,117] or action.getButtonCode() in [275,257,261]: xbmc.Player().stop(); self._close_dialog()
    def _close_dialog(self):
        # Reset the skin flag, give the close animation time to run, then close.
        xbmc.executebuiltin("Skin.Reset(AnimeWindowXMLDialogClose)"); time.sleep( .4 ); self.close()
#-----------------------------------------------------------------------------------------------------------------
#Set popup xml based on platform
def pop():
    """Display the splash dialog modally, then release it."""
    splash_dialog = SPLASH('totalxbmc.xml', ADDON.getAddonInfo('path'),
                           'DefaultSkin', close_time=34)
    splash_dialog.doModal()
    del splash_dialog
#-----------------------------------------------------------------------------------------------------------------
#Initial online check for new video
def VideoCheck():
    """Startup routine: play the latest promo video when the remote update
    stamp is newer than the local one, optionally log in to the forum, then
    show the main CATEGORIES listing.
    """
    print skin
    import yt
    unlocked = 'no'
    # Ensure the addon_data folder and the local state files exist.
    if not os.path.exists(userdatafolder):
        os.makedirs(userdatafolder)
    if not os.path.exists(startuppath):
        localfile = open(startuppath, mode='w+')
        localfile.write('date="01011001"\nversion="0.0"')
        localfile.close()
    if not os.path.exists(idfile):
        localfile = open(idfile, mode='w+')
        localfile.write('id="None"\nname="None"')
        localfile.close()
    # Fetch the remote update stamp and promo video id.
    BaseURL='http://totalxbmc.tv/totalrevolution/Community_Builds/update.txt'
    link = OPEN_URL(BaseURL).replace('\n','').replace('\r','')
    datecheckmatch = re.compile('date="(.+?)"').findall(link)
    videomatch = re.compile('video="https://www.youtube.com/watch\?v=(.+?)"').findall(link)
    # splashmatch = re.compile('splash="(.+?)"').findall(link)
    # splashmatch2 = re.compile('splash2="(.+?)"').findall(link)
    datecheck = datecheckmatch[0] if (len(datecheckmatch) > 0) else ''
    videocheck = videomatch[0] if (len(videomatch) > 0) else ''
    # splashcheck = splashmatch[0] if (len(splashmatch) > 0) else ''
    # splashcheck2 = splashmatch2[0] if (len(splashmatch2) > 0) else ''
    # Read the locally cached date/version stamps.
    localfile = open(startuppath, mode='r')
    content = file.read(localfile)
    file.close(localfile)
    localdatecheckmatch = re.compile('date="(.+?)"').findall(content)
    localdatecheck = localdatecheckmatch[0] if (len(localdatecheckmatch) > 0) else ''
    localversionmatch = re.compile('version="(.+?)"').findall(content)
    localversioncheck = localversionmatch[0] if (len(localversionmatch) > 0) else ''
    # Read the locally cached build id/name.
    localfile2 = open(idfile, mode='r')
    content2 = file.read(localfile2)
    file.close(localfile2)
    localidmatch = re.compile('id="(.+?)"').findall(content2)
    localidcheck = localidmatch[0] if (len(localidmatch) > 0) else 'None'
    localbuildmatch = re.compile('name="(.+?)"').findall(content2)
    localbuildcheck = localbuildmatch[0] if (len(localbuildmatch) > 0) else ''
    print "localbuildmatch: "+str(localbuildmatch)
    print "localbuildcheck: "+str(localbuildcheck)
    # if localidcheck == "None":
    # if os.path.exists(INSTALL):
    # os.remove(INSTALL)
    # A newer remote stamp means a new promo video: persist the stamp and
    # play the video once, blocking until playback finishes.
    if int(localdatecheck) < int(datecheck):
        replacefile = content.replace(localdatecheck,datecheck)
        writefile = open(startuppath, mode='w')
        writefile.write(str(replacefile))
        writefile.close()
        yt.PlayVideo(videocheck, forcePlayer=True)
        xbmc.sleep(500)
        while xbmc.Player().isPlaying():
            xbmc.sleep(500)
    else:
        pass
    # Optional forum login; on failure open the addon settings so the user
    # can fix their credentials. The splash is only shown when login is off.
    logged_in = weblogin.doLogin(cookiepath,username,password)
    if login == 'true':
        if logged_in == True:
            unlocked = 'yes'
            Notify('Login Successful', 'Welcome back '+username,'4000','tick.png')
        elif logged_in == False:
            dialog.ok('[COLOR=blue][B]T[/COLOR][COLOR=dodgerblue]R[/COLOR] [COLOR=white]Community Builds[/COLOR][/B]','There is an error with your login information, please check','your username and password, remember this is case','sensitive so use capital letters where needed.')
            ADDON.openSettings(sys.argv[0])
    else:
        pop()
    CATEGORIES(localbuildcheck,localversioncheck,localidcheck,unlocked)
#-----------------------------------------------------------------------------------------------------------------
#Function to create a text box
def TextBoxes(heading,anounce):
    """Open Kodi's built-in text viewer window (id 10147) showing `anounce`.

    `anounce` may be a readable file path (its contents are displayed) or a
    plain text string (displayed as-is).
    """
    class TextBox():
        # Window/control ids for Kodi's text viewer.
        WINDOW=10147
        CONTROL_LABEL=1
        CONTROL_TEXTBOX=5
        def __init__(self,*args,**kwargs):
            xbmc.executebuiltin("ActivateWindow(%d)" % (self.WINDOW, )) # activate the text viewer window
            self.win=xbmcgui.Window(self.WINDOW) # get window
            xbmc.sleep(500) # give window time to initialize
            self.setControls()
        def setControls(self):
            self.win.getControl(self.CONTROL_LABEL).setLabel(heading) # set heading
            # Treat `anounce` as a file path first; fall back to literal text.
            try: f=open(anounce); text=f.read()
            except: text=anounce
            self.win.getControl(self.CONTROL_TEXTBOX).setText(str(text))
            return
    TextBox()
#---------------------------------------------------------------------------------------------------
#Create a community (universal) backup - this renames paths to special:// and removes unwanted folders
def COMMUNITY_BACKUP():
    """Create a shareable ("community") build backup.

    Copies the Kodi home folder to a temp dir (excluding caches, logs and
    machine-specific files), rewrites absolute paths to special:// paths,
    zips the result plus a separate guisettings zip, then removes the temp
    copy.
    """
    CHECK_DOWNLOAD_PATH()
    path = xbmc.translatePath(os.path.join(USB,'tempbackup'))
    fullbackuppath = xbmc.translatePath(os.path.join(USB,'Community Builds','My Builds',''))
    if not os.path.exists(fullbackuppath):
        os.makedirs(fullbackuppath)
    if os.path.exists(path):
        shutil.rmtree(path)
    vq = _get_keyboard( heading="Enter a name for this backup" )
    # if blank or the user cancelled the keyboard, return
    if ( not vq ): return False, 0
    # we need to set the title to our query
    title = urllib.quote_plus(vq)
    # yesno() returning 1 means "Yes": include addon_data by NOT adding it
    # to the ignore patterns below.
    choice = xbmcgui.Dialog().yesno("VERY IMPORTANT", 'Do you want to include your addon_data folder?', 'This contains ALL addon settings including passwords.', 'We strongly recommend against this unless all data has been removed.', yeslabel='Yes',nolabel='No')
    if choice == 1:
        inc_data = ''
    elif choice == 0:
        inc_data = 'addon_data'
    shutil.copytree(HOME, path, symlinks=False, ignore=shutil.ignore_patterns(inc_data,"cache","system","xbmc.log","xbmc.old.log","kodi.log","kodi.old.log","Thumbnails","Textures13.db","peripheral_data","library","keymaps","plugin.program.community.builds","packages",".DS_Store",".setup_complete","XBMCHelper.conf")) #Create temp folder ready for zipping
    FIX_SPECIAL(path)
    backup_zip = xbmc.translatePath(os.path.join(fullbackuppath,title+'.zip'))
    ARCHIVE_FILE(path,backup_zip)
    dp.create("[COLOR=blue][B]T[/COLOR][COLOR=dodgerblue]R[/COLOR] [COLOR=white]Community Builds[/COLOR][/B]","Deleting temporary files",'', 'Please Wait')
    # Separate zip containing only guisettings.xml so it can be restored
    # independently of the main build archive.
    GUIname = xbmc.translatePath(os.path.join(fullbackuppath, title+'_guisettings.zip'))
    zf = zipfile.ZipFile(GUIname, mode='w')
    zf.write(xbmc.translatePath(os.path.join(path,'userdata','guisettings.xml')), 'guisettings.xml', zipfile.ZIP_DEFLATED) #Create guisettings.zip
    zf.close()
    shutil.rmtree(path)
    dp.close()
    dialog.ok("[COLOR=blue][B]T[/COLOR][COLOR=dodgerblue]R[/COLOR] Community Builds[/B]", 'You Are Now Backed Up. If you\'d like to share this build with', 'the community please post details on the forum at', '[COLOR=lime][B]www.totalxbmc.tv[/COLOR][/B]')
#---------------------------------------------------------------------------------------------------
#Convert physical paths to special paths
def FIX_SPECIAL(url):
    """Rewrite absolute filesystem paths inside every .xml file under *url*
    to Kodi ``special://`` protocol paths so the backup is portable.

    url -- root directory to walk (the staged backup folder).
    Relies on module globals: dp (progress dialog), USERDATA, ADDONS.
    """
    dp.create("[COLOR=blue][B]T[/COLOR][COLOR=dodgerblue]R[/COLOR] [COLOR=white]Community Builds[/COLOR][/B]","Renaming paths...",'', 'Please Wait')
    for root, dirs, files in os.walk(url):
        for xml_name in files:
            if not xml_name.endswith(".xml"):
                continue
            dp.update(0, "Fixing", xml_name, 'Please Wait')
            xml_path = os.path.join(root, xml_name)
            # Fix: the original leaked both file handles; use context managers.
            with open(xml_path) as handle:
                contents = handle.read()
            contents = contents.replace(USERDATA, 'special://profile/').replace(ADDONS, 'special://home/addons/')
            with open(xml_path, mode='w') as handle:
                handle.write(str(contents))
#---------------------------------------------------------------------------------------------------
#Backup the full XBMC system
def BACKUP():
    """Create a full personal backup of the Kodi home folder as a zip.

    Prompts for confirmation and a backup name, then archives HOME into
    ``<USB>/Full Backup/<name>.zip`` and tags it as a local build.
    """
    confirm = xbmcgui.Dialog().yesno("Are you sure you want this option?", 'This is a FULL Backup for personal use only. If you', 'want to create a build that works universally and can', 'be restored via the addon use the Community Build option.', yeslabel='Continue',nolabel='Cancel')
    if confirm == 0:
        return
    CHECK_DOWNLOAD_PATH()
    staging = xbmc.translatePath(os.path.join(USB, 'tempbackup'))
    destination_dir = xbmc.translatePath(os.path.join(USB, 'Full Backup', ''))
    if not os.path.exists(destination_dir):
        os.makedirs(destination_dir)
    if os.path.exists(staging):
        shutil.rmtree(staging)
    entered = _get_keyboard( heading="Enter a name for this backup" )
    # Bail out when the keyboard was cancelled or left blank.
    if not entered:
        return False, 0
    title = urllib.quote_plus(entered)
    source_dir = xbmc.translatePath(HOME)
    backup_zip = xbmc.translatePath(os.path.join(destination_dir, title + '.zip'))
    DeletePackages()
    # Tag this backup as a local build so the root menu can report it.
    marker = open(idfiletemp, mode='w+')
    marker.write('id="Local"\nname="' + title + '"')
    marker.close()
    ARCHIVE_FILE(source_dir, backup_zip)
    dialog.ok("Backup Complete", 'This is a FULL Backup for personal use only. It may', 'fail to restore via the addon, if you want a build that can', 'be restored via the addon use the Community Build option.')
#---------------------------------------------------------------------------------------------------
#Zip up tree
def ARCHIVE_FILE(sourcefile, destfile):
    """Zip the *sourcefile* directory tree into *destfile* with a progress dialog.

    sourcefile -- root directory to archive.
    destfile   -- path of the zip file to create.
    Skips files inside directories that contain 'temp' or the addon's own
    folder, and files whose timestamp predates 1980 (zip cannot store them).
    Relies on module global dp (progress dialog).
    """
    import time
    zipobj = zipfile.ZipFile(destfile, 'w', zipfile.ZIP_DEFLATED)
    rootlen = len(sourcefile)
    dp.create("[COLOR=blue][B]T[/COLOR][COLOR=dodgerblue]R[/COLOR] [COLOR=white]Community Builds[/COLOR][/B]","Archiving...",'', 'Please Wait')
    # First pass: count files so the progress bar has a denominator.
    total = 0
    for base, dirs, files in os.walk(sourcefile):
        total += len(files)
    done = 0
    try:
        for base, dirs, files in os.walk(sourcefile):
            for entry in files:
                done += 1
                progress = done / float(total) * 100
                dp.update(int(progress),"Backing Up",'[COLOR yellow]%s[/COLOR]' % entry, 'Please Wait')
                fn = os.path.join(base, entry)
                # Preserved quirk: this tests the current dir's SUBDIR names,
                # not the path itself — TODO confirm intended filter.
                if 'temp' in dirs or 'plugin.program.community.builds' in dirs:
                    continue
                # Bug fix: the original compared 'dd/mm/yyyy' strings, which
                # orders by day-of-month rather than by date.  Zip files
                # cannot store timestamps before 1980, so test the year.
                if time.gmtime(os.path.getmtime(fn)).tm_year >= 1980:
                    zipobj.write(fn, fn[rootlen:])
    finally:
        # Always release the archive and the dialog, even on error.
        zipobj.close()
        dp.close()
#---------------------------------------------------------------------------------------------------
#Read a zip file and extract the relevant data
def READ_ZIP(url):
import zipfile
z = zipfile.ZipFile(url, "r")
for filename in z.namelist():
if 'guisettings.xml' in filename:
a = z.read(filename)
r='<setting type="(.+?)" name="%s.(.+?)">(.+?)</setting>'% skin
match=re.compile(r).findall(a)
print match
for type,string,setting in match:
setting=setting.replace('"','') .replace('&','&')
xbmc.executebuiltin("Skin.Set%s(%s,%s)"%(type.title(),string,setting))
if 'favourites.xml' in filename:
a = z.read(filename)
f = open(FAVS, mode='w')
f.write(a)
f.close()
if 'sources.xml' in filename:
a = z.read(filename)
f = open(SOURCE, mode='w')
f.write(a)
f.close()
if 'advancedsettings.xml' in filename:
a = z.read(filename)
f = open(ADVANCED, mode='w')
f.write(a)
f.close()
if 'RssFeeds.xml' in filename:
a = z.read(filename)
f = open(RSS, mode='w')
f.write(a)
f.close()
if 'keyboard.xml' in filename:
a = z.read(filename)
f = open(KEYMAPS, mode='w')
f.write(a)
f.close()
#---------------------------------------------------------------------------------------------------
def FACTORY(localbuildcheck,localversioncheck,id,unlocked):
        """Placeholder for the factory-build update check.

        Currently a no-op: the update logic below is commented out.
        Parameters mirror CATEGORIES()/Check_For_Update() so call sites
        keep working once the feature is re-enabled.
        """
        pass
# Disabled prototype kept for reference (would show an update banner for
# the factory build, analogous to the community-build check):
# if localbuildcheck == factoryname:
# 	updatecheck = Check_For_Factory_Update(localbuildcheck,localversioncheck,id)
# 	if updatecheck == True:
# 		addDir('[COLOR=dodgerblue]'+localbuildcheck+':[/COLOR] [COLOR=lime]NEW VERSION AVAILABLE[/COLOR]',id,'showinfo','','','','')
# 	else:
# 		addDir('[COLOR=lime]Current Build Installed: [/COLOR][COLOR=dodgerblue]'+localbuildcheck+'[/COLOR]',id,'showinfo','','','','')
#---------------------------------------------------------------------------------------------------
#Function to populate the search based on the initial first filter
def COMMUNITY(url):
    """List community builds matching the first search filter.

    url -- query-string fragment (e.g. 'genre=movies') appended to the
    server search URL.  Adds one directory entry per matching build.
    """
    CHECK_DOWNLOAD_PATH()
    # Cleanup: username/password were read here but never used; removed.
    xbmc_version = xbmc.getInfoLabel("System.BuildVersion")
    version = float(xbmc_version[:4])
    # Builds are tagged per Kodi major version: <14 is Gotham, >=14 Helix.
    if version < 14:
        xbmcversion = 'gotham'
    else:
        xbmcversion = 'helix'
    # Empty 'adult' means adult builds are included in the results.
    if ADDON.getSetting('adult') == 'true':
        adult = ''
    else:
        adult = 'no'
    buildsURL = 'http://totalxbmc.tv/totalrevolution/Community_Builds/sortbyname.php?xbmc=%s&adult=%s&%s' % (xbmcversion, adult, url)
    link = OPEN_URL(buildsURL).replace('\n','').replace('\r','')
    match = re.compile('name="(.+?)" <br> id="(.+?)" <br> Thumbnail="(.+?)" <br> Fanart="(.+?)"', re.DOTALL).findall(link)
    for build_name, build_id, iconimage, fanart in match:
        addBuildDir(build_name, build_id, 'community_menu', iconimage, fanart, '', '', '', '')
#---------------------------------------------------------------------------------------------------
#Function to populate the search based on a second filter
def COMMUNITY2(url):
print "COM2 START URL::"+str(url)
link = OPEN_URL(url).replace('\n','').replace('\r','')
match=re.compile('name="(.+?)" <br> id="(.+?)" <br> Thumbnail="(.+?)" <br> Fanart="(.+?)"', re.DOTALL).findall(link)
addBuildDir('[COLOR=lime]Add another filter to the search[/COLOR]',url,'genres2','genres.png','','','','','')
for name,url,iconimage,fanart in match:
addBuildDir(name,url,'community_menu',iconimage,fanart,'','','','')
#---------------------------------------------------------------------------------------------------
#Function to populate the search based on the initial first filter
def COMMUNITY_SEARCH(url):
    """List community builds matching a manual search query.

    url -- query-string fragment (e.g. 'name=foo') built by SEARCH_BUILDS.
    """
    # Ensure the backup storage folder is configured before searching.
    if zip == '':
        dialog.ok('[COLOR=blue][B]T[/COLOR][COLOR=dodgerblue]R[/COLOR] [COLOR=white]Community Builds[/COLOR][/B]','You have not set your backup storage folder.\nPlease update the addon settings and try again.','','')
        ADDON.openSettings(sys.argv[0])
    # Cleanup: username/password were read here but never used; removed.
    xbmc_version = xbmc.getInfoLabel("System.BuildVersion")
    version = float(xbmc_version[:4])
    # Builds are tagged per Kodi major version: <14 is Gotham, >=14 Helix.
    if version < 14:
        xbmcversion = 'gotham'
    else:
        xbmcversion = 'helix'
    if ADDON.getSetting('adult') == 'true':
        adult = ''
    else:
        adult = 'no'
    buildsURL = 'http://totalxbmc.tv/totalrevolution/Community_Builds/sortbyname.php?xbmc=%s&adult=%s&%s&genre=' % (xbmcversion, adult, url)
    link = OPEN_URL(buildsURL).replace('\n','').replace('\r','')
    addBuildDir('[COLOR=lime]Add another filter to the search[/COLOR]',buildsURL,'genres2','genres.png','','','','','')
    match = re.compile('name="(.+?)" <br> id="(.+?)" <br> Thumbnail="(.+?)" <br> Fanart="(.+?)"', re.DOTALL).findall(link)
    for build_name, build_id, iconimage, fanart in match:
        addBuildDir(build_name, build_id, 'community_menu', iconimage, fanart, '', '', '', '')
#---------------------------------------------------------------------------------------------------
#Function to read the contents of a URL
def OPEN_URL(url):
    """Fetch *url* with a browser User-Agent and return the response body.

    url -- address to fetch.  Returns the raw response text.
    """
    req = urllib2.Request(url)
    # Some hosts reject the default Python UA; present a browser UA instead.
    req.add_header('User-Agent', 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3')
    response = urllib2.urlopen(req)
    try:
        # Fix: the original leaked the connection if read() raised.
        return response.read()
    finally:
        response.close()
#---------------------------------------------------------------------------------------------------
#Function to restore a community build
def RESTORE_COMMUNITY(name,url,description,skins,guisettingslink):
    """Download and install a community build (step 1 of 2).

    name            -- build title shown in dialogs.
    url             -- download URL of the build zip.
    description     -- short build label, used for the local zip filename.
    skins           -- skin the build was designed for (told to the user).
    guisettingslink -- URL passed on to CHECK_GUITEMP for step 2.

    Offers backup / merge-or-wipe / keep-databases choices, downloads and
    extracts the build over the Kodi home folder, merges skin settings
    from the old guisettings.xml, then walks the user into step 2.
    """
    import time

    def _first(pattern, text):
        # Return the first regex match in *text*, or '' when absent.
        found = re.compile(pattern).findall(text)
        return found[0] if found else ''

    CHECK_DOWNLOAD_PATH()
    # Recover from a previously interrupted run: guinew.xml is our working
    # copy of guisettings.xml.
    if os.path.exists(GUINEW):
        if os.path.exists(GUI):
            os.remove(GUINEW)
        else:
            os.rename(GUINEW, GUI)
    if os.path.exists(GUIFIX):
        os.remove(GUIFIX)
    if not os.path.exists(tempfile):
        # Create the marker file used for progress tracking.
        # Fix: close the handle instead of leaking it.
        open(tempfile, mode='w+').close()
    if os.path.exists(guitemp):
        os.removedirs(guitemp)
    # Rename guisettings.xml so Kodi cannot overwrite it while we work.
    try:
        os.rename(GUI, GUINEW)
    except:
        dialog.ok("NO GUISETTINGS!",'No guisettings.xml file has been found.', 'Please exit XBMC and try again','')
        return
    choice = xbmcgui.Dialog().yesno(name, 'We highly recommend backing up your existing build before', 'installing any community builds.', 'Would you like to perform a backup first?', nolabel='Backup',yeslabel='Install')
    if choice == 0:
        BACKUP()
    elif choice == 1:
        dialog.ok('Would you like to MERGE or WIPE?','You will now have the option to merge or wipe...','[COLOR=lime]1) MERGE[/COLOR] the new build with your existing setup (keeps your addons and settings).','[COLOR=red]2) WIPE[/COLOR] your existing install and install a fresh build.')
    choice2 = xbmcgui.Dialog().yesno(name, 'Would you like to merge with your existing build', 'or wipe your existing data and have a fresh', 'install with this new build?', nolabel='Merge',yeslabel='Wipe')
    if choice2 == 1:
        WipeInstall()
    # Bug fix: choice3 was only assigned inside the branch below, so the
    # later "if choice3 == 1" raised NameError whenever the question was
    # skipped (wipe install on a non-Confluence skin).  Default to 0
    # ("overwrite"), matching the skipped-question semantics.
    choice3 = 0
    if choice2 != 1 or (choice2 == 1 and skin == 'skin.confluence'):
        choice3 = xbmcgui.Dialog().yesno(name, 'Would you like to keep your existing database', 'files or overwrite? Overwriting will wipe any', 'existing library you may have scanned in.', nolabel='Overwrite',yeslabel='Keep Existing')
        if choice3 == 1:
            # Stash the current databases so they can be restored afterwards.
            tempdbpath = os.path.join(USB,'Database')
            if os.path.exists(tempdbpath):
                shutil.rmtree(tempdbpath)
            shutil.copytree(DATABASE, tempdbpath, symlinks=False, ignore=shutil.ignore_patterns("Textures13.db","Addons16.db","Addons15.db","saltscache.db-wal","saltscache.db-shm","saltscache.db","onechannelcache.db"))
            backup_zip = xbmc.translatePath(os.path.join(USB,'Database.zip'))
            ARCHIVE_FILE(DATABASE, backup_zip)
    dp.create("Community Builds","Downloading "+description +" build.",'', 'Please Wait')
    lib = os.path.join(CBPATH, description+'.zip')
    if not os.path.exists(CBPATH):
        os.makedirs(CBPATH)
    downloader.download(url, lib, dp)
    # Preserve this addon's default.py so extracting the build cannot
    # replace the code that is currently running.
    readfile = open(CBADDONPATH, mode='r')
    default_contents = readfile.read()
    readfile.close()
    READ_ZIP(lib)
    dp.create("[COLOR=blue][B]T[/COLOR][COLOR=dodgerblue]R[/COLOR] [COLOR=white]Community Builds[/COLOR][/B]","Checking ",'', 'Please Wait')
    HOME = xbmc.translatePath(os.path.join('special://','home'))
    dp.update(0,"", "Extracting Zip Please Wait")
    extract.all(lib, HOME, dp)
    time.sleep(1)
    # Record id/name/version of the newly installed build.
    localfile = open(tempfile, mode='r')
    content = localfile.read()
    localfile.close()
    tempcheck = _first('id="(.+?)"', content)
    namecheck = _first('name="(.+?)"', content)
    versioncheck = _first('version="(.+?)"', content)
    writefile = open(idfile, mode='w+')
    writefile.write('id="'+str(tempcheck)+'"\nname="'+namecheck+'"\nversion="'+versioncheck+'"')
    writefile.close()
    # Bump the version stamp in the startup file to the new build's version.
    localfile = open(startuppath, mode='r')
    content = localfile.read()
    localfile.close()
    localversioncheck = _first('version="(.+?)"', content)
    writefile = open(startuppath, mode='w')
    writefile.write(str(content.replace(localversioncheck, versioncheck)))
    writefile.close()
    os.remove(tempfile)
    if localcopy == False:
        os.remove(lib)
    # Restore this addon's own default.py saved above.
    cbdefaultpy = open(CBADDONPATH, mode='w+')
    cbdefaultpy.write(default_contents)
    cbdefaultpy.close()
    # Merge skin-specific settings: old guisettings is guinew.xml, the
    # build's freshly extracted guisettings.xml becomes guifix.xml.
    os.rename(GUI, GUIFIX)
    time.sleep(1)
    localfile = open(GUINEW, mode='r')
    content = localfile.read()
    localfile.close()
    skinorig = _first('<skinsettings>[\s\S]*?<\/skinsettings>', content)
    skindefaultorig = _first('<skin default[\s\S]*?<\/skin>', content)
    lookandfeel = _first('<lookandfeel>[\s\S]*?<\/lookandfeel>', content)
    localfile2 = open(GUIFIX, mode='r')
    content2 = localfile2.read()
    localfile2.close()
    skinsettingstext = _first('<skinsettings>[\s\S]*?<\/skinsettings>', content2)
    skindefaulttext = _first('<skin default[\s\S]*?<\/skin>', content2)
    lookandfeeltext = _first('<lookandfeel>[\s\S]*?<\/lookandfeel>', content2)
    replacefile = content.replace(skinorig, skinsettingstext).replace(lookandfeel, lookandfeeltext).replace(skindefaultorig, skindefaulttext)
    writefile = open(GUINEW, mode='w+')
    writefile.write(str(replacefile))
    writefile.close()
    if os.path.exists(GUI):
        os.remove(GUI)
    os.rename(GUINEW, GUI)
    os.remove(GUIFIX)
    if choice3 == 1:
        # Restore the stashed databases over the build's ones.
        extract.all(backup_zip, DATABASE, dp)
        shutil.rmtree(tempdbpath)
    # guitemp existing signals that step 2 is still pending.
    os.makedirs(guitemp)
    time.sleep(1)
    xbmc.executebuiltin('UnloadSkin()')
    time.sleep(1)
    xbmc.executebuiltin('ReloadSkin()')
    time.sleep(1)
    xbmc.executebuiltin("ActivateWindow(appearancesettings)")
    # NOTE(review): executebuiltin() returns None, so this loop never runs;
    # a condition check (xbmc.getCondVisibility) was probably intended.
    # Left unchanged to avoid altering the install flow.
    while xbmc.executebuiltin("Window.IsActive(appearancesettings)"):
        xbmc.sleep(500)
    try: xbmc.executebuiltin("LoadProfile(Master user)")
    except: pass
    dialog.ok('Step 1 complete','Change the skin to: [COLOR=lime]'+skins,'[/COLOR]Once done come back and choose install step 2 which will','re-install the guisettings.xml - this file contains all custom skin settings.')
    xbmc.executebuiltin("ActivateWindow(appearancesettings)")
    CHECK_GUITEMP(guisettingslink)
#---------------------------------------------------------------------------------------------------
#Check whether or not the guisettings fix has been done, loops on a timer.
def CHECK_GUITEMP(url):
    """Periodically remind the user to finish step 2 of a build install.

    url -- guisettings fix URL forwarded to GUI_MERGE.
    Polls every two minutes while the guitemp marker directory exists.
    Fix: the original recursed into itself on every "not yet" answer,
    growing the call stack without bound; use a loop instead.
    """
    while True:
        time.sleep(120)
        # Marker gone means step 2 was completed elsewhere; nothing to do.
        if not os.path.exists(guitemp):
            return
        choice = xbmcgui.Dialog().yesno('Run step 2 of install', 'You still haven\'t completed step 2 of the', 'install. Would you like to complete it now?', '', nolabel='No, not yet',yeslabel='Yes, complete setup')
        if choice == 1:
            try: xbmc.executebuiltin("PlayerControl(Stop)")
            except: pass
            xbmc.executebuiltin("ActivateWindow(appearancesettings)")
            GUI_MERGE(url)
            return
#---------------------------------------------------------------------------------------------------
#Function to restore a zip file
def CHECK_DOWNLOAD_PATH():
    """Validate the user's configured download folder (global 'zip' setting).

    Opens the addon settings when the folder is unset, missing, or not
    writable.  Writability is proven by creating and removing a test dir.
    """
    if zip == '':
        dialog.ok('[COLOR=blue][B]T[/COLOR][COLOR=dodgerblue]R[/COLOR] [COLOR=white]Community Builds[/COLOR][/B]','You have not set your ZIP Folder.\nPlease update the addon settings and try again.','','')
        ADDON.openSettings(sys.argv[0])
    probe = xbmc.translatePath(os.path.join(zip, 'testCBFolder'))
    if not os.path.exists(zip):
        dialog.ok('[COLOR=blue][B]T[/COLOR][COLOR=dodgerblue]R[/COLOR] [COLOR=white]Community Builds[/COLOR][/B]','The download location you have stored does not exist .\nPlease update the addon settings and try again.','','')
        ADDON.openSettings(sys.argv[0])
    if os.path.exists(zip):
        try:
            os.makedirs(probe)
            os.removedirs(probe)
        except:
            dialog.ok('[COLOR=red]CANNOT WRITE TO PATH[/COLOR]', 'Kodi cannot write to the path you\'ve chosen. Try choosing', 'a path with write permissions such as a USB stick.', '')
            ADDON.openSettings(sys.argv[0])
#---------------------------------------------------------------------------------------------------
#Function to restore a zip file
def RESTORE():
    """Restore a previously saved backup zip chosen by the user (step 1).

    Extracts the chosen zip over the Kodi home folder, marks the install
    as incomplete, and sends the user off to change skins for step 2.
    """
    import time
    CHECK_DOWNLOAD_PATH()
    chosen = xbmcgui.Dialog().browse(1, 'Select the backup file you want to restore', 'files', '.zip', False, False, USB)
    if not chosen:
        return
    backup_name = ntpath.basename(chosen)
    # Tag the pending restore as a local build.
    marker = open(idfiletemp, mode='w+')
    marker.write('id="Local"\nname="' + backup_name + '"')
    marker.close()
    READ_ZIP(chosen)
    dp.create("[COLOR=blue][B]T[/COLOR][COLOR=dodgerblue]R[/COLOR] [COLOR=white]Community Builds[/COLOR][/B]","Checking ",'', 'Please Wait')
    HOME = xbmc.translatePath(os.path.join('special://','home'))
    dp.update(0,"", "Extracting Zip Please Wait")
    extract.all(chosen, HOME, dp)
    # Flag the build as incomplete until step 2 finishes.
    flag = open(idfile, mode='w+')
    flag.write('id="Local"\nname="Incomplete"')
    flag.close()
    dialog.ok('[COLOR=blue][B]T[/COLOR][COLOR=dodgerblue]R[/COLOR] [COLOR=white]Community Builds[/COLOR][/B]','Step 1 complete. Now please change the skin to','the one this build was designed for. Once done come back','to this addon and click the link to complete the setup.')
    xbmc.executebuiltin("ActivateWindow(appearancesettings)")
#---------------------------------------------------------------------------------------------------
#Function to restore a zip file
def REMOVE_BUILD():
    """Let the user pick a stored backup zip and delete it after confirmation."""
    CHECK_DOWNLOAD_PATH()
    chosen = xbmcgui.Dialog().browse(1, 'Select the backup file you want to DELETE', 'files', '.zip', False, False, USB)
    if not chosen:
        return
    backup_name = ntpath.basename(chosen)
    confirm = xbmcgui.Dialog().yesno('Delete Backup File', 'This will completely remove '+backup_name, 'Are you sure you want to delete?', '', nolabel='No, Cancel',yeslabel='Yes, Delete')
    if confirm == 1:
        os.remove(chosen)
#---------------------------------------------------------------------------------------------------
#Kill Commands - these will make sure guisettings.xml sticks.
#ANDROID STILL NOT WORKING
def killxbmc():
choice = xbmcgui.Dialog().yesno('Force Close XBMC/Kodi', 'We will now attempt to force close Kodi, this is', 'to be used if having problems with guisettings.xml', 'sticking. Would you like to continue?', nolabel='No, Cancel',yeslabel='Yes, Close')
if choice == 0:
return
elif choice == 1:
pass
myplatform = platform()
print "Platform: " + str(myplatform)
if myplatform == 'osx': # OSX
print "############ try osx force close #################"
try: os.system('killall -9 XBMC')
except: pass
try: os.system('killall -9 Kodi')
except: pass
dialog.ok("[COLOR=red][B]WARNING !!![/COLOR][/B]", "If you\'re seeing this message it means the force close", "was unsuccessful. Please force close XBMC/Kodi [COLOR=lime]DO NOT[/COLOR] exit cleanly via the menu.",'')
elif myplatform == 'linux': #Linux
print "############ try linux force close #################"
try: os.system('killall XBMC')
except: pass
try: os.system('killall Kodi')
except: pass
try: os.system('killall -9 xbmc.bin')
except: pass
try: os.system('killall -9 kodi.bin')
except: pass
dialog.ok("[COLOR=red][B]WARNING !!![/COLOR][/B]", "If you\'re seeing this message it means the force close", "was unsuccessful. Please force close XBMC/Kodi [COLOR=lime]DO NOT[/COLOR] exit cleanly via the menu.",'')
elif myplatform == 'android': # Android
print "############ try android force close #################"
try: os.system('adb shell am force-stop org.xbmc.kodi')
except: pass
try: os.system('adb shell am force-stop org.kodi')
except: pass
try: os.system('adb shell am force-stop org.xbmc.xbmc')
except: pass
try: os.system('adb shell am force-stop org.xbmc')
except: pass
dialog.ok("[COLOR=red][B]WARNING !!![/COLOR][/B]", "Your system has been detected as Android, you ", "[COLOR=yellow][B]MUST[/COLOR][/B] force close XBMC/Kodi. [COLOR=lime]DO NOT[/COLOR] exit cleanly via the menu.","Pulling the power cable is the simplest method to force close.")
elif myplatform == 'windows': # Windows
print "############ try windows force close #################"
try:
os.system('@ECHO off')
os.system('tskill XBMC.exe')
except: pass
try:
os.system('@ECHO off')
os.system('tskill Kodi.exe')
except: pass
try:
os.system('@ECHO off')
os.system('TASKKILL /im Kodi.exe /f')
except: pass
try:
os.system('@ECHO off')
os.system('TASKKILL /im XBMC.exe /f')
except: pass
dialog.ok("[COLOR=red][B]WARNING !!![/COLOR][/B]", "If you\'re seeing this message it means the force close", "was unsuccessful. Please force close XBMC/Kodi [COLOR=lime]DO NOT[/COLOR] exit cleanly via the menu.","Use task manager and NOT ALT F4")
else: #ATV
print "############ try atv force close #################"
try: os.system('killall AppleTV')
except: pass
print "############ try raspbmc force close #################" #OSMC / Raspbmc
try: os.system('sudo initctl stop kodi')
except: pass
try: os.system('sudo initctl stop xbmc')
except: pass
dialog.ok("[COLOR=red][B]WARNING !!![/COLOR][/B]", "If you\'re seeing this message it means the force close", "was unsuccessful. Please force close XBMC/Kodi [COLOR=lime]DO NOT[/COLOR] exit via the menu.","Your platform could not be detected so just pull the power cable.")
#---------------------------------------------------------------------------------------------------
#Root menu of addon
def CATEGORIES(localbuildcheck,localversioncheck,id,unlocked):
    """Build the addon's root menu.

    localbuildcheck   -- name of the locally installed build ('' if none).
    localversioncheck -- its version string.
    id                -- server id of the installed build, 'Local', or 'None'.
    unlocked          -- 'yes' when the user is registered/logged in.
    """
    if os.path.exists(factory):
        # Bug fix: FACTORY() was called with no arguments although its
        # signature requires four, raising TypeError whenever the factory
        # marker file exists.  Forward the values we already have.
        FACTORY(localbuildcheck, localversioncheck, id, unlocked)
    if unlocked == 'yes':
        if id != 'None':
            if id != 'Local':
                # Server-tracked build: show an update banner if a newer
                # version is available.
                updatecheck = Check_For_Update(localbuildcheck, localversioncheck, id)
                if updatecheck == True:
                    addDir('[COLOR=dodgerblue]'+localbuildcheck+':[/COLOR] [COLOR=lime]NEW VERSION AVAILABLE[/COLOR]',id,'showinfo','','','','')
                else:
                    addDir('[COLOR=lime]Current Build Installed: [/COLOR][COLOR=dodgerblue]'+localbuildcheck+'[/COLOR]',id,'showinfo','','','','')
        else:
            # No server id recorded: either a restore is half-finished or a
            # purely local build is installed.
            if localbuildcheck == 'Incomplete':
                addDir('[COLOR=lime]Your last restore is not yet completed[/COLOR]','url',CHECK_LOCAL_INSTALL(),'','','','')
            else:
                addDir('[COLOR=lime]Current Build Installed: [/COLOR][COLOR=dodgerblue]Local Build ('+localbuildcheck+')[/COLOR]','','','','','','')
    else:
        addDir('[COLOR=lime]REGISTER FOR FREE TO UNLOCK FEATURES[/COLOR]','None','pop','','','','')
    addDir('[COLOR=orange]How To Use This Addon[/COLOR]','url','instructions','How_To.png','','','Instructions')
    addDir('Install Community Build','none','cb_root_menu','Community_Builds.png','','','Install Community Build')
    addDir('Backup My Content','url','backup_option','Backup.png','','','Back Up Your Data')
    addDir('Restore My Content','url','restore_option','Restore.png','','','Restore Your Data')
    addDir('Additional Tools','url','additional_tools','Additional_Tools.png','','','Restore Your Data')
#---------------------------------------------------------------------------------------------------
# Extra tools menu
def ADDITIONAL_TOOLS():
    """Populate the 'Additional Tools' submenu."""
    entries = (
        ('Delete Builds From Device', 'url', 'remove_build', 'Delete_Builds.png', 'Delete Build'),
        ('Wipe My Setup (Fresh Start)', 'url', 'wipe_xbmc', 'Fresh_Start.png', 'Wipe your special XBMC/Kodi directory which will revert back to a vanillla build.'),
        ('Convert Physical Paths To Special', HOME, 'fix_special', 'Special_Paths.png', 'Convert Physical Paths To Special'),
        ('Force Close Kodi', 'url', 'kill_xbmc', 'Kill_XBMC.png', 'Force close kodi, to be used as last resort'),
    )
    for label, target, mode, art, desc in entries:
        addDir(label, target, mode, art, '', '', desc)
#---------------------------------------------------------------------------------------------------
# Check local file version name and number against db
def SHOWINFO(url):
    """Fetch a build's details from the server and show them in a dialog.

    url -- server id of the build.
    """
    BaseURL = 'http://totalxbmc.tv/totalrevolution/Community_Builds/community_builds.php?id=%s' % (url)
    page = OPEN_URL(BaseURL).replace('\n','').replace('\r','')

    def _field(tag):
        # Return the first tag="..." value in the page, or '' when absent.
        found = re.compile(tag + '="(.+?)"').findall(page)
        return found[0] if found else ''

    build_name = _field('name')
    author = _field('author')
    version = _field('version')
    updated = _field('updated')
    dialog.ok(build_name, 'Author: '+author, 'Latest Version: '+version, 'Latest Update: '+updated)
    return
#---------------------------------------------------------------------------------------------------
# Check local file version name and number against db
def Check_For_Update(localbuildcheck,localversioncheck,id):
print "Local Version Check: "+localversioncheck
if localbuildcheck == factoryname: pass
BaseURL = 'http://totalxbmc.tv/totalrevolution/Community_Builds/buildupdate.php?id=%s' % (id)
link = OPEN_URL(BaseURL).replace('\n','').replace('\r','')
if id != 'None':
versioncheckmatch = re.compile('version="(.+?)"').findall(link)
versioncheck = versioncheckmatch[0] if (len(versioncheckmatch) > 0) else ''
if localversioncheck < versioncheck:
print "local build: "+str(localbuildcheck)
print "new version available"
return True
else:
return False
print "local build: "+str(localbuildcheck)
print "all good in the hood"
#---------------------------------------------------------------------------------------------------
#Build the root search menu for installing community builds
def CB_Root_Menu():
    """Root menu for the community-build browser.

    Registered users get the full search options; everyone gets the
    "show all builds" entry matching their Kodi version.
    """
    logged_in = weblogin.doLogin(cookiepath, username, password)
    build_version = xbmc.getInfoLabel("System.BuildVersion")
    major = float(build_version[:4])

    def _all_builds_entry():
        # Builds are split per Kodi generation: <14 Gotham, >=14 Helix.
        if major < 14:
            addDir('Show All Gotham Compatible Builds','genre=','grab_builds','TRCOMMUNITYGOTHAMBUILDS.png','','','')
        else:
            addDir('Show All Helix Compatible Builds','genre=','grab_builds','TRCOMMUNITYHELIXBUILDS.png','','','')

    if login == 'true':
        if logged_in == True:
            addDir('Manual Search','url','manual_search','Manual_Search.png','','','')
            addDir('Search By Genre','url','genres','Search_Genre.png','','','')
            addDir('Search By Country/Language','url','countries','Search_Country.png','','','')
            _all_builds_entry()
        elif logged_in == False:
            dialog.ok('[COLOR=blue][B]T[/COLOR][COLOR=dodgerblue]R[/COLOR] [COLOR=white]Community Builds[/COLOR][/B]','There is an error with your login information, please check','your username and password, remember this is case','sensitive so use capital letters where needed.')
            ADDON.openSettings(sys.argv[0])
    else:
        dialog.ok('[COLOR=blue][B]T[/COLOR][COLOR=dodgerblue]R[/COLOR] [COLOR=white]Community Builds[/COLOR][/B]','[COLOR=blue][B]Did you know you\'re missing out of some great features?[/B][/COLOR]','To unlock this content simply register on the forum for [COLOR=yellow]FREE[/COLOR]','at [COLOR=lime]www.totalxbmc.tv[/COLOR] and enter details in the addon settings.')
        _all_builds_entry()
#---------------------------------------------------------------------------------------------------
#Search in description
def MANUAL_SEARCH():
    """Populate the manual-search submenu.

    Each entry's url value selects the field SEARCH_BUILDS will query.
    """
    addDir('Search By Name','name','search_builds','Manual_Search.png','','','')
    addDir('Search By Uploader','author','search_builds','Search_Genre.png','','','')
    addDir('Search By Audio Addons Installed','audio','search_builds','Search_Addons.png','','','')
    addDir('Search By Picture Addons Installed','pics','search_builds','Search_Addons.png','','','')
    addDir('Search By Program Addons Installed','progs','search_builds','Search_Addons.png','','','')
    addDir('Search By Video Addons Installed','vids','search_builds','Search_Addons.png','','','')
    # Bug fix: this entry passed 'vids', so searching by skin actually
    # searched video addons; SEARCH_BUILDS expects 'skins' here.
    addDir('Search By Skins Installed','skins','search_builds','Search_Addons.png','','','')
#---------------------------------------------------------------------------------------------------
#Search in description
def SEARCH_BUILDS():
if url == 'name':
searchUrl = 'name='
elif url == 'author':
searchUrl = 'author='
elif url == 'audio':
searchUrl = 'audio='
elif url == 'pics':
searchUrl = 'pics='
elif url == 'progs':
searchUrl = 'progs='
elif url == 'vids':
searchUrl = 'vids='
elif url == 'skins':
searchUrl = 'skins='
vq = _get_keyboard( heading="Search for content" )
# if blank or the user cancelled the keyboard, return
if ( not vq ): return False, 0
# we need to set the title to our query
title = urllib.quote_plus(vq)
searchUrl += title
print "Searching URL: " + searchUrl
COMMUNITY_SEARCH(searchUrl)
#---------------------------------------------------------------------------------------------------
#Get keyboard
def _get_keyboard( default="", heading="", hidden=False ):
    """Show Kodi's on-screen keyboard and return the entered text.

    default -- text pre-filled in the keyboard.
    heading -- dialog title.
    hidden  -- mask input (for passwords).
    Returns the entered text as unicode, or *default* when cancelled.
    """
    kb = xbmc.Keyboard( default, heading, hidden )
    kb.doModal()
    if not kb.isConfirmed():
        return default
    return unicode( kb.getText(), "utf-8" )
#-----------------------------------------------------------------------------------------------------------------
#Build Genres Menu (First Filter)
def GENRES():
    """Populate the genre-filter menu; the XXX entry only when adult is enabled."""
    # (label, genre tag) pairs — the icon filename matches the tag.
    listings = (
        ('Anime', 'anime'),
        ('Audiobooks', 'audiobooks'),
        ('Comedy', 'comedy'),
        ('Comics', 'comics'),
        ('Documentary', 'documentary'),
        ('Downloads', 'downloads'),
        ('Food', 'food'),
        ('Gaming', 'gaming'),
        ('Health', 'health'),
        ('How To...', 'howto'),
        ('Kids', 'kids'),
        ('Live TV', 'livetv'),
        ('Movies', 'movies'),
        ('Music', 'music'),
        ('News', 'news'),
        ('Photos', 'photos'),
        ('Podcasts', 'podcasts'),
        ('Radio', 'radio'),
        ('Religion', 'religion'),
        ('Space', 'space'),
        ('Sports', 'sports'),
        ('Technology', 'tech'),
        ('Trailers', 'trailers'),
        ('TV Shows', 'tv'),
        ('Misc.', 'other'),
    )
    for label, tag in listings:
        addDir(label, 'genre=' + tag, 'grab_builds', tag + '.png', '', '', '')
    if ADDON.getSetting('adult') == 'true':
        addDir('XXX','genre=adult','grab_builds','adult.png','','','')
#---------------------------------------------------------------------------------------------------
#Build Countries Menu (First Filter)
def COUNTRIES():
    """List every country as a first-filter directory entry.

    Each entry opens the 'grab_builds' mode with 'genre=<slug>'; the icon
    filename is always '<slug>.png'.
    """
    countries = [
        ('African', 'african'), ('Arabic', 'arabic'), ('Asian', 'asian'),
        ('Australian', 'australian'), ('Austrian', 'austrian'),
        ('Belgian', 'belgian'), ('Brazilian', 'brazilian'),
        ('Canadian', 'canadian'), ('Columbian', 'columbian'),
        ('Czech', 'czech'), ('Danish', 'danish'), ('Dominican', 'dominican'),
        ('Dutch', 'dutch'), ('Egyptian', 'egyptian'), ('Filipino', 'filipino'),
        ('Finnish', 'finnish'), ('French', 'french'), ('German', 'german'),
        ('Greek', 'greek'), ('Hebrew', 'hebrew'), ('Hungarian', 'hungarian'),
        ('Icelandic', 'icelandic'), ('Indian', 'indian'), ('Irish', 'irish'),
        ('Italian', 'italian'), ('Japanese', 'japanese'), ('Korean', 'korean'),
        ('Lebanese', 'lebanese'), ('Mongolian', 'mongolian'),
        ('Nepali', 'nepali'), ('New Zealand', 'newzealand'),
        ('Norwegian', 'norwegian'), ('Pakistani', 'pakistani'),
        ('Polish', 'polish'), ('Portuguese', 'portuguese'),
        ('Romanian', 'romanian'), ('Russian', 'russian'),
        ('Singapore', 'singapore'), ('Spanish', 'spanish'),
        ('Swedish', 'swedish'), ('Swiss', 'swiss'), ('Syrian', 'syrian'),
        ('Tamil', 'tamil'), ('Thai', 'thai'), ('Turkish', 'turkish'),
        # BUGFIX: USA previously passed '&genre=usa' (stray '&'), unlike
        # every other entry here and in GENRES; normalized to 'genre=usa'.
        ('UK', 'uk'), ('USA', 'usa'), ('Vietnamese', 'vietnamese'),
    ]
    for label, slug in countries:
        addDir(label, 'genre=' + slug, 'grab_builds', slug + '.png', '', '', '')
#---------------------------------------------------------------------------------------------------
#Build Countries and Genre Menu (Second Filter)
def GENRES2(url):
    """Build the second-filter menu: all genres, then all countries.

    url -- the first filter's query string; every selectable entry appends
    ',<slug>' so 'grab_builds2' receives both filters. Section headers use
    the 'None' mode and are not selectable filters.
    """
    genres = [
        ('Anime', 'anime'), ('Audiobooks', 'audiobooks'), ('Comedy', 'comedy'),
        ('Comics', 'comics'), ('Documentary', 'documentary'),
        ('Downloads', 'downloads'), ('Food', 'food'), ('Gaming', 'gaming'),
        ('Health', 'health'), ('How To...', 'howto'), ('Kids', 'kids'),
        ('Live TV', 'livetv'), ('Movies', 'movies'), ('Music', 'music'),
        ('News', 'news'), ('Photos', 'photos'), ('Podcasts', 'podcasts'),
        ('Radio', 'radio'), ('Religion', 'religion'), ('Space', 'space'),
        ('Sports', 'sports'), ('Technology', 'tech'), ('Trailers', 'trailers'),
        ('TV Shows', 'tv'), ('Misc.', 'other'),
    ]
    countries = [
        ('African', 'african'), ('Arabic', 'arabic'), ('Asian', 'asian'),
        ('Australian', 'australian'), ('Austrian', 'austrian'),
        ('Belgian', 'belgian'), ('Brazilian', 'brazilian'),
        ('Canadian', 'canadian'), ('Columbian', 'columbian'),
        ('Czech', 'czech'), ('Danish', 'danish'), ('Dominican', 'dominican'),
        ('Dutch', 'dutch'), ('Egyptian', 'egyptian'), ('Filipino', 'filipino'),
        ('Finnish', 'finnish'), ('French', 'french'), ('German', 'german'),
        ('Greek', 'greek'), ('Hebrew', 'hebrew'), ('Hungarian', 'hungarian'),
        ('Icelandic', 'icelandic'), ('Indian', 'indian'), ('Irish', 'irish'),
        ('Italian', 'italian'), ('Japanese', 'japanese'), ('Korean', 'korean'),
        ('Lebanese', 'lebanese'), ('Mongolian', 'mongolian'),
        ('Nepali', 'nepali'), ('New Zealand', 'newzealand'),
        ('Norwegian', 'norwegian'), ('Pakistani', 'pakistani'),
        ('Polish', 'polish'), ('Portuguese', 'portuguese'),
        ('Romanian', 'romanian'), ('Russian', 'russian'),
        ('Singapore', 'singapore'), ('Spanish', 'spanish'),
        ('Swedish', 'swedish'), ('Swiss', 'swiss'), ('Syrian', 'syrian'),
        ('Tamil', 'tamil'), ('Thai', 'thai'), ('Turkish', 'turkish'),
        ('UK', 'uk'), ('USA', 'usa'), ('Vietnamese', 'vietnamese'),
    ]
    addDir('[COLOR=lime]GENRES[/COLOR]', 'None', 'None', 'genres.png', '', '', '')
    for label, slug in genres:
        addDir(label, url + ',' + slug, 'grab_builds2', slug + '.png', '', '', '')
    if ADDON.getSetting('adult') == 'true':
        # NOTE(review): unlike every other entry this passes 'genre2=adult'
        # rather than url + ',adult' - preserved as-is; confirm against the
        # grab_builds2 handler before changing.
        addDir('XXX', 'genre2=adult', 'grab_builds2', 'adult.png', '', '', '')
    addDir('[COLOR=lime]COUNTRIES[/COLOR]', 'None', 'None', 'countries.png', '', '', '')
    for label, slug in countries:
        addDir(label, url + ',' + slug, 'grab_builds2', slug + '.png', '', '', '')
#---------------------------------------------------------------------------------------------------
#Call the yt module for playing videos. Thanks to spoyser for this module.
def PLAYVIDEO(url):
    """Play a YouTube video (by id/url) via the bundled yt module."""
    from yt import PlayVideo
    PlayVideo(url)
#---------------------------------------------------------------------------------------------------
#Create How To (instructions) menu
def INSTRUCTIONS(url):
    """Build the 'How To' menu: text guides plus YouTube video guides.

    url -- unused; kept for the common addDir-mode call signature.
    """
    # (label, target, mode, icon) - text guides dispatch to instructions_N,
    # video guides pass a YouTube id to play_video.
    entries = [
        ('[COLOR=dodgerblue][TEXT GUIDE][/COLOR] What is Community Builds?', 'url', 'instructions_3', 'How_To.png'),
        ('[COLOR=dodgerblue][TEXT GUIDE][/COLOR] Creating a Community Build', 'url', 'instructions_1', 'How_To.png'),
        ('[COLOR=dodgerblue][TEXT GUIDE][/COLOR] Installing a Community Build', 'url', 'instructions_2', 'How_To.png'),
        ('[COLOR=lime][VIDEO GUIDE][/COLOR] IMPORTANT initial settings', "1vXniHsEMEg", 'play_video', 'howto.png'),
        ('[COLOR=lime][VIDEO GUIDE][/COLOR] Install a Community Build', "kLsVOapuM1A", 'play_video', 'howto.png'),
        ('[COLOR=lime][VIDEO GUIDE][/COLOR] Fixing a half installed build (guisettings.xml fix)', "X8QYLziFzQU", 'play_video', 'howto.png'),
        ('[COLOR=lime][VIDEO GUIDE][/COLOR] [COLOR=yellow](OLD METHOD)[/COLOR]Create a Community Build (part 1)', "3rMScZF2h_U", 'play_video', 'howto.png'),
        ('[COLOR=lime][VIDEO GUIDE][/COLOR] [COLOR=yellow](OLD METHOD)[/COLOR]Create a Community Build (part 2)', "C2IPhn0OSSw", 'play_video', 'howto.png'),
    ]
    for label, target, mode, icon in entries:
        addDir(label, target, mode, icon, '', '', '')
#---------------------------------------------------------------------------------------------------
#(Instructions) Create a community backup
def Instructions_1():
    """Show the 'Creating A Community Backup' text guide (new + old methods)."""
    # Single TextBoxes call; adjacent string literals are concatenated by the
    # parser into one long body. Do not edit: this is user-facing runtime text.
    TextBoxes('Creating A Community Backup',
    '[COLOR=yellow]NEW METHOD[/COLOR][CR][COLOR=blue][B]Step 1:[/COLOR] Remove any sensitive data[/B][CR]Make sure you\'ve removed any sensitive data such as passwords and usernames in your addon_data folder.'
    '[CR][CR][COLOR=blue][B]Step 2:[/COLOR] Backup your system[/B][CR]Choose the backup option from the main menu, in there you\'ll find the option to create a Community Build and this will create two zip files that you need to upload to a server.'
    '[CR][CR][COLOR=blue][B]Step 3:[/COLOR] Upload the zips[/B][CR]Upload the two zip files to a server that Kodi can access, it has to be a direct link and not somewhere that asks for captcha - Dropbox and archive.org are two good examples.'
    '[CR][CR][COLOR=blue][B]Step 4:[/COLOR] Submit build at TotalXBMC[/B]'
    '[CR]Create a thread on the Community Builds section of the forum at [COLOR=lime][B]www.totalxbmc.tv[/COLOR][/B].[CR]Full details can be found on there of the template you should use when posting.'
    '[CR][CR][COLOR=yellow]OLD METHOD[/COLOR][CR][COLOR=blue][B]Step 1: Backup your system[/B][/COLOR][CR]Choose the backup option from the main menu, you will be asked whether you would like to delete your addon_data folder. If you decide to choose this option [COLOR=yellow][B]make sure[/COLOR][/B] you already have a full backup of your system as it will completely wipe your addon settings (any stored settings such as passwords or any other changes you\'ve made to addons since they were first installed). If sharing a build with the community it\'s highly advised that you wipe your addon_data but if you\'ve made changes or installed extra data packages (e.g. skin artwork packs) then backup the whole build and then manually delete these on your PC and zip back up again (more on this later).'
    '[CR][CR][COLOR=blue][B]Step 2: Edit zip file on your PC[/B][/COLOR][CR]Copy your backup.zip file to your PC, extract it and delete all the addons and addon_data that isn\'t required.'
    '[CR][COLOR=blue]What to delete:[/COLOR][CR][COLOR=lime]/addons/packages[/COLOR] This folder contains zip files of EVERY addon you\'ve ever installed - it\'s not needed.'
    '[CR][COLOR=lime]/addons/<skin.xxx>[/COLOR] Delete any skins that aren\'t used, these can be very big files.'
    '[CR][COLOR=lime]/addons/<addon_id>[/COLOR] Delete any other addons that aren\'t used, it\'s easy to forget you\'ve got things installed that are no longer needed.'
    '[CR][COLOR=lime]/userdata/addon_data/<addon_id>[/COLOR] Delete any folders that don\'t contain important changes to addons. If you delete these the associated addons will just reset to their default values.'
    '[CR][COLOR=lime]/userdata/<all other folders>[/COLOR] Delete all other folders in here such as keymaps. If you\'ve setup profiles make sure you [COLOR=yellow][B]keep the profiles directory[/COLOR][/B].'
    '[CR][COLOR=lime]/userdata/Thumbnails/[/COLOR] Delete this folder, it contains all cached artwork. You can safely delete this but must also delete the file listed below.'
    '[CR][COLOR=lime]/userdata/Database/Textures13.db[/COLOR] Delete this and it will tell XBMC to regenerate your thumbnails - must do this if delting thumbnails folder.'
    '[CR][COLOR=lime]/xbmc.log (or Kodi.log)[/COLOR] Delete your log files, this includes any crashlog files you may have.'
    '[CR][CR][COLOR=blue][B]Step 3: Compress and upload[/B][/COLOR][CR]Use a program like 7zip to create a zip file of your remaining folders and upload to a file sharing site like dropbox.'
    '[CR][CR][COLOR=blue][B]Step 4: Submit build at TotalXBMC[/B][/COLOR]'
    '[CR]Create a thread on the Community Builds section of the forum at [COLOR=lime][B]www.totalxbmc.tv[/COLOR][/B].[CR]Full details can be found on there of the template you should use when posting.')
#---------------------------------------------------------------------------------------------------
#(Instructions) Install a community build
def Instructions_2():
    """Show the 'Installing a community build' text guide."""
    # User-facing runtime text; adjacent literals concatenate into one body.
    TextBoxes('Installing a community build', '[COLOR=blue][B]Step 1 (Optional): Backup your system[/B][/COLOR][CR]We highly recommend creating a backup of your system in case you don\'t like the build and want to revert back. Choose the backup option from the main menu, you will be asked whether you would like to delete your addon_data folder, select no unless you want to lose all your settings. If you ever need your backup it\'s stored in the location you\'ve selected in the addon settings.'
    '[CR][CR][COLOR=blue][B]Step 2: Browse the Community Builds[/B][/COLOR][CR]Find a community build you like the look of and make sure you read the description as it could contain unsuitable content or have specific install instructions. Once you\'ve found the build you want to install click on the install option and you\'ll have the option of a fresh install or a merge . The merge option will leave all your existing addons and userdata in place and just add the contents of the new build whereas the fresh (wipe) option will completely wipe your existing data and replace with content on the new build. Once you make your choice the download and extraction process will begin.'
    '[CR][CR][COLOR=blue][B]Step 3: [/COLOR][COLOR=red]VERY IMPORTANT[/COLOR][/B][CR]For the install to complete properly you MUST change the skin to the relevant skin used for that build. You will see a dialog box telling you which skin to switch to and then you\'ll be taken to the appearance settings where you can switch skins.'
    '[CR][CR][COLOR=blue][B]Step 4:[/B][/COLOR] Now go back to the Community Builds addon and in the same section wehre you clicked on step 1 of the install process you now need to select step 2 so it can install the guisettings.xml. This is extremely important, if you don\'t do this step then you\'ll end up with a real mish-mash hybrid install!'
    '[CR][CR][COLOR=blue][B]Step 5:[/B][/COLOR] You will now need to restart Kodi so the settings stick, just quit and it should all be fine. If for any reason the settings did not stick and it still doesn\'t look quite right just do step 2 of the install process again (guisettings.xml fix)')
#---------------------------------------------------------------------------------------------------
#(Instructions) What is a community build
def Instructions_3():
    """Show the 'What is a community build' text guide."""
    # User-facing runtime text; do not edit for style.
    TextBoxes('What is a community build', 'Community Builds are pre-configured builds of XBMC/Kodi based on different users setups. Have you ever watched youtube videos or seen screenshots of Kodi in action and thought "wow I wish I could do that"? Well now you can have a brilliant setup at the click of a button, completely pre-configured by users on the [COLOR=lime][B]www.totalxbmc.tv[/COLOR][/B] forum. If you\'d like to get involved yourself and share your build with the community it\'s very simple to do, just go to the forum where you\'ll find full details or you can follow the guide in this addon.')
#---------------------------------------------------------------------------------------------------
# This creates the final menu showing build details, video and install link
def COMMUNITY_MENU(url):
    """Build the detail menu for one community build.

    url -- the build id; the build's metadata is fetched from the totalxbmc
    server as simple key="value" pairs and parsed with regexes.

    Side effects: writes id/name/version to the global `tempfile` path
    (a project path variable that shadows the stdlib module name) so the
    installer can identify the selected build later.
    """
    BaseURL = 'http://totalxbmc.tv/totalrevolution/Community_Builds/community_builds.php?id=%s' % (url)
    link = OPEN_URL(BaseURL).replace('\n', '').replace('\r', '')

    def _grab(tag, default=''):
        # First tag="value" occurrence in the page, or the default.
        found = re.compile(tag + '="(.+?)"').findall(link)
        return found[0] if found else default

    name = _grab('name')
    author = _grab('author')
    version = _grab('version')
    description = _grab('description', 'No information available')
    updated = _grab('updated')
    defaultskin = _grab('defaultskin')
    skins = _grab('skins')
    videoaddons = _grab('videoaddons')
    audioaddons = _grab('audioaddons')
    programaddons = _grab('programaddons')
    pictureaddons = _grab('pictureaddons')
    sources = _grab('sources')
    adult = _grab('adult')
    guisettingslink = _grab('guisettings', 'None')
    downloadURL = _grab('DownloadURL', 'None')
    videopreview = _grab('videopreview', 'None')
    videoguide = _grab('videoguide', 'None')
    # Remember which build is being viewed for the install steps.
    localfile = open(tempfile, mode='w+')
    localfile.write('id="' + url + '"\nname="' + name + '"\nversion="' + version + '"')
    localfile.close()
    addDescDir('Full description', 'None', 'description', 'BUILDDETAILS.png', fanart, name, author, version, description, updated, skins, videoaddons, audioaddons, programaddons, pictureaddons, sources, adult)
    if videopreview != 'None':
        addDir('Watch Preview Video', videopreview, 'play_video', 'Video_Preview.png', fanart, '', '')
    if videoguide != 'None':
        addDir('Watch Video Guide', videoguide, 'play_video', 'Video_Guide.png', fanart, '', '')
    if downloadURL == 'None':
        # BUGFIX: closing tag was '[COLOR]' - must be '[/COLOR]' for Kodi
        # label markup to render correctly.
        addBuildDir('[COLOR=gold]Sorry this build is currently unavailable[/COLOR]', '', '', '', '', '', '', '', '')
    else:
        addBuildDir('[COLOR=lime]Install 1: Download ' + name + '[/COLOR]', downloadURL, 'restore_community', iconimage, fanart, '', name, defaultskin, guisettingslink)
    # Step 2 (guisettings fix) is offered whenever the server supplied a link,
    # regardless of whether the download itself is available.
    if guisettingslink != 'None':
        addDir('[COLOR=dodgerblue]Install 2: Apply guisettings.xml fix[/COLOR]', guisettingslink, 'guisettingsfix', 'FixMy_Build.png', fanart, '', '')
#---------------------------------------------------------------------------------------------------
#Option to download guisettings fix that merges with existing settings.
def GUISETTINGS_FIX(url):
    """Ask the user to confirm, then merge the build's guisettings.xml.

    url -- direct link to the build's guisettings zip; passed to GUI_MERGE.
    Uses the module-level `name` as the dialog heading.
    """
    CHECK_DOWNLOAD_PATH()
    confirmed = xbmcgui.Dialog().yesno(name, 'This will over-write your existing guisettings.xml.', 'Are you sure this is the build you have installed?', '', nolabel='No, Cancel', yeslabel='Yes, Fix')
    if not confirmed:
        return
    GUI_MERGE(url)
#---------------------------------------------------------------------------------------------------
#Function to download guisettings.xml and merge with existing.
def INSTALL_PART2(url):
    """Look up the guisettings.xml link for build id *url* and merge it."""
    page = OPEN_URL('http://totalxbmc.tv/totalrevolution/Community_Builds/guisettings.php?id=%s' % (url))
    page = page.replace('\n', '').replace('\r', '')
    found = re.findall('guisettings="(.+?)"', page)
    # Fall back to the sentinel 'None' when the server gave no link.
    GUI_MERGE(found[0] if found else 'None')
#---------------------------------------------------------------------------------------------------
#Function to download guisettings.xml and merge with existing.
def GUI_MERGE(url):
    """Download a build's guisettings.xml and splice its skin settings into the local one.

    url -- direct link to a zip containing guisettings.xml.
    Flow: rename the live guisettings.xml aside (GUINEW), extract the
    downloaded copy, pull its <skinsettings>, <skin default...> and
    <lookandfeel> sections into the original, then swap the merged file back
    in and reload the skin. Statement order matters: the renames below are
    how the two files are told apart.
    """
    if os.path.exists(GUIFIX):
        os.remove(GUIFIX)
    lib=os.path.join(USB, 'guifix.zip')
    dp.create("Community Builds","Downloading guisettings.xml",'', 'Please Wait')
    os.rename(GUI,GUINEW) #Rename guisettings.xml to guinew.xml so we can edit without XBMC interfering.
    downloader.download(url, lib, dp) #Download guisettings from the build
    READ_ZIP(lib)
    dp.create("[COLOR=blue][B]T[/COLOR][COLOR=dodgerblue]R[/COLOR] [COLOR=white]Community Builds[/COLOR][/B]","Checking ",'', 'Please Wait')
    dp.update(0,"", "Extracting Zip Please Wait")
    extract.all(lib,USERDATA,dp)
    # The zip drops a fresh guisettings.xml at GUI; park it at GUIFIX.
    os.rename(GUI,GUIFIX)
    time.sleep(1)
    localfile = open(GUINEW, mode='r') #Read the original skinsettings tags and store in memory ready to replace in guinew.xml
    # NOTE(review): file.read(fh) / file.close(fh) rely on the Python-2
    # `file` builtin (equivalent to fh.read()/fh.close()); Python 3 removed it.
    content = file.read(localfile)
    file.close(localfile)
    # Original (pre-merge) sections; empty string when a section is absent.
    skinsettingsorig = re.compile('<skinsettings>[\s\S]*?<\/skinsettings>').findall(content)
    skinorig = skinsettingsorig[0] if (len(skinsettingsorig) > 0) else ''
    skindefault = re.compile('<skin default[\s\S]*?<\/skin>').findall(content)
    skindefaultorig = skindefault[0] if (len(skindefault) > 0) else ''
    lookandfeelorig = re.compile('<lookandfeel>[\s\S]*?<\/lookandfeel>').findall(content)
    lookandfeel = lookandfeelorig[0] if (len(lookandfeelorig) > 0) else ''
    # Same three sections from the downloaded build's guisettings.xml.
    localfile2 = open(GUIFIX, mode='r')
    content2 = file.read(localfile2)
    file.close(localfile2)
    skinsettingscontent = re.compile('<skinsettings>[\s\S]*?<\/skinsettings>').findall(content2)
    skinsettingstext = skinsettingscontent[0] if (len(skinsettingscontent) > 0) else ''
    skindefaultcontent = re.compile('<skin default[\s\S]*?<\/skin>').findall(content2)
    skindefaulttext = skindefaultcontent[0] if (len(skindefaultcontent) > 0) else ''
    lookandfeelcontent = re.compile('<lookandfeel>[\s\S]*?<\/lookandfeel>').findall(content2)
    lookandfeeltext = lookandfeelcontent[0] if (len(lookandfeelcontent) > 0) else ''
    # Substitute the build's sections into the original document text.
    replacefile = content.replace(skinorig,skinsettingstext).replace(lookandfeel,lookandfeeltext).replace(skindefaultorig,skindefaulttext)
    writefile = open(GUINEW, mode='w+')
    writefile.write(str(replacefile))
    writefile.close()
    # Promote the merged file back to the live guisettings.xml path.
    if os.path.exists(GUI):
        os.remove(GUI)
    os.rename(GUINEW,GUI)
    os.remove(GUIFIX)
    if os.path.exists(guitemp):
        os.removedirs(guitemp)
    # if os.path.exists(INSTALL):
    # os.remove(INSTALL)
    # if os.path.exists(AUTOEXEC):
    # os.remove(AUTOEXEC)
    # if os.path.exists(AUTOEXECBAK):
    # os.rename(AUTOEXECBAK,AUTOEXEC)
    xbmc.executebuiltin('UnloadSkin')
    xbmc.executebuiltin("ReloadSkin")
    dialog.ok("guisettings.xml fix complete", 'Please restart Kodi. If the skin does\'t look', 'quite right on the next boot you may need to', 'force close Kodi.')
    # killxbmc()
#---------------------------------------------------------------------------------------------------
#Show full description of build
def DESCRIPTION(name,url,buildname,author,version,description,updated,skins,videoaddons,audioaddons,programaddons,pictureaddons,sources,adult):
    """Render the full build description in a TextBoxes dialog.

    All parameters are display strings parsed from the server listing;
    `name` and `url` are accepted for the common dispatcher signature but
    not used in the rendered text.
    """
    TextBoxes(buildname+' v.'+version, '[COLOR=yellow][B]Author: [/B][/COLOR]'+author+'[COLOR=yellow][B] Last Updated: [/B][/COLOR]'+updated+'[COLOR=yellow][B] Adult Content: [/B][/COLOR]'+adult+'[CR][CR][COLOR=yellow][B]Description:[CR][/B][/COLOR]'+description+
    '[CR][CR][COLOR=blue][B]Skins: [/B][/COLOR]'+skins+'[CR][CR][COLOR=blue][B]Video Addons: [/B][/COLOR]'+videoaddons+'[CR][CR][COLOR=blue][B]Audio Addons: [/B][/COLOR]'+audioaddons+
    '[CR][CR][COLOR=blue][B]Program Addons: [/B][/COLOR]'+programaddons+'[CR][CR][COLOR=blue][B]Picture Addons: [/B][/COLOR]'+pictureaddons+'[CR][CR][COLOR=blue][B]Sources: [/B][/COLOR]'+sources+
    '[CR][CR][COLOR=gold]Disclaimer: [/COLOR]These are community builds and they may overwrite some of your existing settings, '
    'things like system location and screen calibration will almost certainly have to be changed once the install has completed. TotalXBMC take no responsibility over what content '
    'is included in these builds, it\'s up to the individual who uploads the build to state what\'s included and then the users decision to decide whether or not that content is suitable for them.')
#---------------------------------------------------------------------------------------------------
#Create backup menu
def BACKUP_OPTION():
    """Build the backup menu.

    Per-file entries (favourites, sources, ...) only appear when the file
    exists. NOTE: 'resore_backup' (sic) is the exact mode string the
    dispatcher matches - do not "fix" the spelling here alone.
    """
    dialog.ok("[COLOR=red][B]VERY IMPORTANT![/COLOR][/B]", 'If you plan on creating a backup to share [COLOR=lime]ALWAYS[/COLOR] make', 'sure you\'ve deleted your addon_data folder as uninstalling', 'an addon does not remove personal data such as passwords.')
    # BUGFIX: label typo 'Commnity' -> 'Community' (dispatch is via the mode
    # string 'community_backup', so the label change is safe).
    addDir('[COLOR=lime]Create A Community Build[/COLOR]','url','community_backup','Backup.png','','','Back Up Your Full System')
    addDir('Full Backup','url','backup','Backup.png','','','Back Up Your Full System')
    addDir('Backup Just Your Addons','addons','restore_zip','Backup.png','','','Back Up Your Addons')
    addDir('Backup Just Your Addon UserData','addon_data','restore_zip','Backup.png','','','Back Up Your Addon Userdata')
    addDir('Backup Guisettings.xml',GUI,'resore_backup','Backup.png','','','Back Up Your guisettings.xml')
    if os.path.exists(FAVS):
        addDir('Backup Favourites.xml',FAVS,'resore_backup','Backup.png','','','Back Up Your favourites.xml')
    if os.path.exists(SOURCE):
        addDir('Backup Source.xml',SOURCE,'resore_backup','Backup.png','','','Back Up Your sources.xml')
    if os.path.exists(ADVANCED):
        addDir('Backup Advancedsettings.xml',ADVANCED,'resore_backup','Backup.png','','','Back Up Your advancedsettings.xml')
    if os.path.exists(KEYMAPS):
        # BUGFIX: label previously read 'Backup Advancedsettings.xml' for the
        # keymap entry; it backs up keyboard.xml.
        addDir('Backup Keyboard.xml',KEYMAPS,'resore_backup','Backup.png','','','Back Up Your keyboard.xml')
    if os.path.exists(RSS):
        addDir('Backup RssFeeds.xml',RSS,'resore_backup','Backup.png','','','Back Up Your RssFeeds.xml')
#---------------------------------------------------------------------------------------------------
#Check whether a local restore was left half-finished and offer to complete it
def CHECK_LOCAL_INSTALL():
    """Offer to finish a pending local restore.

    The id file carries name="Incomplete" while a restore is waiting for the
    user's skin switch; on confirmation the restore is finalized and Kodi
    closes (via FINISH_LOCAL_RESTORE).
    """
    # BUGFIX: previously used file.read(localfile)/file.close(localfile),
    # which relies on the Python-2 `file` builtin and leaked the handle on
    # error; `with` closes it deterministically with identical behaviour.
    with open(idfile, mode='r') as localfile:
        content = localfile.read()
    localbuildmatch = re.compile('name="(.+?)"').findall(content)
    localbuildcheck = localbuildmatch[0] if localbuildmatch else ''
    if localbuildcheck != "Incomplete":
        return
    choice = xbmcgui.Dialog().yesno("Finish Restore Process", 'If you\'re certain the correct skin has now been set click OK', 'to finish the install process, once complete XBMC/Kodi will', ' then close. Do you want to finish the install process?', yeslabel='Yes', nolabel='No')
    if choice == 1:
        FINISH_LOCAL_RESTORE()
#---------------------------------------------------------------------------------------------------
def FINISH_LOCAL_RESTORE():
    """Finalize a local restore: swap the temp id file into place, reload the skin, notify and quit Kodi."""
    os.remove(idfile)
    os.rename(idfiletemp, idfile)
    # Force the skin to fully reinitialize with the restored settings.
    for builtin in ('UnloadSkin', 'ReloadSkin'):
        xbmc.executebuiltin(builtin)
    dialog.ok("Local Restore Complete", 'XBMC/Kodi will now close.', '', '')
    xbmc.executebuiltin("Quit")
#---------------------------------------------------------------------------------------------------
#Create restore menu
def RESTORE_OPTION():
    """Build the restore menu from whatever backup files exist in the
    download location (USB). First finishes any half-done local restore.

    NOTE: 'resore_backup' (sic) is the exact mode string the dispatcher
    matches - do not "fix" the spelling here alone.
    """
    CHECK_LOCAL_INSTALL()
    if os.path.exists(os.path.join(USB,'backup.zip')):
        addDir('[COLOR=dodgerblue]FULL RESTORE[/COLOR]','url','restore','Restore.png','','','Back Up Your Full System')
    if os.path.exists(os.path.join(USB,'addons.zip')):
        addDir('Restore Your Addons','addons','restore_zip','Restore.png','','','Restore Your Addons')
    if os.path.exists(os.path.join(USB,'addon_data.zip')):
        addDir('Restore Your Addon UserData','addon_data','restore_zip','Restore.png','','','Restore Your Addon UserData')
    if os.path.exists(os.path.join(USB,'guisettings.xml')):
        addDir('Restore Guisettings.xml',GUI,'resore_backup','Restore.png','','','Restore Your guisettings.xml')
    if os.path.exists(os.path.join(USB,'favourites.xml')):
        addDir('Restore Favourites.xml',FAVS,'resore_backup','Restore.png','','','Restore Your favourites.xml')
    if os.path.exists(os.path.join(USB,'sources.xml')):
        addDir('Restore Source.xml',SOURCE,'resore_backup','Restore.png','','','Restore Your sources.xml')
    if os.path.exists(os.path.join(USB,'advancedsettings.xml')):
        addDir('Restore Advancedsettings.xml',ADVANCED,'resore_backup','Restore.png','','','Restore Your advancedsettings.xml')
    if os.path.exists(os.path.join(USB,'keyboard.xml')):
        # BUGFIX: label previously read 'Restore Advancedsettings.xml' for
        # the keymap entry; it restores keyboard.xml.
        addDir('Restore Keyboard.xml',KEYMAPS,'resore_backup','Restore.png','','','Restore Your keyboard.xml')
    if os.path.exists(os.path.join(USB,'RssFeeds.xml')):
        addDir('Restore RssFeeds.xml',RSS,'resore_backup','Restore.png','','','Restore Your RssFeeds.xml')
#---------------------------------------------------------------------------------------------------
#Function to restore a previously backed up zip, this includes full backup, addons or addon_data.zip
def RESTORE_ZIP_FILE(url):
    """Back up or restore the addons / addon_data zip, depending on context.

    url -- 'addons' or 'addon_data' (selects which tree and zip to use).
    The module-level `name` (set by the dispatcher from the menu label)
    decides the direction: labels containing 'Backup' create the zip,
    otherwise the zip is extracted over the target directory.
    """
    CHECK_DOWNLOAD_PATH()
    if 'addons' in url:
        ZIPFILE = xbmc.translatePath(os.path.join(USB,'addons.zip'))
        DIR = ADDONS
        # NOTE(review): to_backup/backup_zip are assigned but never used below.
        to_backup = ADDONS
        backup_zip = xbmc.translatePath(os.path.join(USB,'addons.zip'))
    else:
        ZIPFILE = xbmc.translatePath(os.path.join(USB,'addon_data.zip'))
        DIR = ADDON_DATA
    if 'Backup' in name:
        DeletePackages()
        import zipfile
        import sys
        dp.create("[COLOR=blue][B]T[/COLOR][COLOR=dodgerblue]R[/COLOR] [COLOR=white]Community Builds[/COLOR][/B]","Backing Up",'', 'Please Wait')
        zipobj = zipfile.ZipFile(ZIPFILE , 'w', zipfile.ZIP_DEFLATED)
        rootlen = len(DIR)
        for_progress = []
        ITEM =[]
        # First walk just counts files so the progress bar has a total.
        # NOTE(review): if DIR is empty, N_ITEM is 0 and the division below
        # raises ZeroDivisionError.
        for base, dirs, files in os.walk(DIR):
            for file in files:  # NOTE(review): loop var shadows the builtin `file`
                ITEM.append(file)
        N_ITEM =len(ITEM)
        for base, dirs, files in os.walk(DIR):
            for file in files:
                for_progress.append(file)
                progress = len(for_progress) / float(N_ITEM) * 100
                dp.update(int(progress),"Backing Up",'[COLOR yellow]%s[/COLOR]'%file, 'Please Wait')
                fn = os.path.join(base, file)
                # NOTE(review): these checks test the current directory's
                # *subdirectory list*, not the file's own path - verify this
                # actually skips temp / this addon's own files as intended.
                if not 'temp' in dirs:
                    if not 'plugin.program.community.builds' in dirs:
                        import time
                        FORCE= '01/01/1980'
                        # NOTE(review): lexical comparison of 'dd/mm/yyyy'
                        # strings, not a chronological one - any date string
                        # greater than '01/01/1980' alphabetically passes.
                        FILE_DATE=time.strftime('%d/%m/%Y', time.gmtime(os.path.getmtime(fn)))
                        if FILE_DATE > FORCE:
                            # Archive name is the path relative to DIR.
                            zipobj.write(fn, fn[rootlen:])
        zipobj.close()
        dp.close()
        dialog.ok("[COLOR=blue][B]T[/COLOR][COLOR=dodgerblue]R[/COLOR] [COLOR=white]Community Builds[/COLOR][/B]", "You Are Now Backed Up", '','')
    else:
        # Restore path: extract the zip over the target tree.
        dp.create("[COLOR=blue][B]T[/COLOR][COLOR=dodgerblue]R[/COLOR] [COLOR=white]Community Builds[/COLOR][/B]","Checking ",'', 'Please Wait')
        import time
        dp.update(0,"", "Extracting Zip Please Wait")
        extract.all(ZIPFILE,DIR,dp)
        time.sleep(1)
        xbmc.executebuiltin('UpdateLocalAddons ')
        xbmc.executebuiltin("UpdateAddonRepos")
        if 'Backup' in name:
            killxbmc()
            dialog.ok("Community Builds - Install Complete", 'To ensure the skin settings are set correctly XBMC will now', 'close. If XBMC doesn\'t close please force close (pull power', 'or force close in your OS - [COLOR=lime]DO NOT exit via XBMC menu[/COLOR])')
        else:
            dialog.ok("[COLOR=blue][B]T[/COLOR][COLOR=dodgerblue]R[/COLOR] [COLOR=white]Community Builds[/COLOR][/B]", "You Are Now Restored", '','')
#---------------------------------------------------------------------------------------------------
#Function to restore a backup xml file (guisettings, sources, RSS)
def RESTORE_BACKUP_XML(name,url,description):
    """Back up or restore a single XML config file (guisettings, sources,
    RSS feeds, keymaps, advancedsettings).

    name -- menu label; 'Backup' in it selects the backup direction.
    url -- path of the live file inside the XBMC profile.
    description -- e.g. 'Restore Your sources.xml'; the text after
                   'Your ' names the file on the USB backup path.
    Uses globals: USB, skin, dialog.
    """
    if 'Backup' in name:
        # Backup: copy the live file to USB/<filename>.
        TO_READ = open(url).read()
        TO_WRITE = os.path.join(USB,description.split('Your ')[1])
        f = open(TO_WRITE, mode='w')
        f.write(TO_READ)
        f.close()
    else:
        if 'guisettings.xml' in description:
            # guisettings.xml is rewritten by XBMC on exit, so instead of
            # copying the file we replay the saved skin settings through
            # the Skin.Set* builtins for the currently active skin.
            a = open(os.path.join(USB,description.split('Your ')[1])).read()
            r='<setting type="(.+?)" name="%s.(.+?)">(.+?)</setting>'% skin
            match=re.compile(r).findall(a)
            print match
            for type,string,setting in match:
                setting=setting.replace('"','') .replace('&amp;','&')
                xbmc.executebuiltin("Skin.Set%s(%s,%s)"%(type.title(),string,setting))
        else:
            # Plain restore: copy the file from USB over the live path.
            TO_WRITE = os.path.join(url)
            TO_READ = open(os.path.join(USB,description.split('Your ')[1])).read()
            f = open(TO_WRITE, mode='w')
            f.write(TO_READ)
            f.close()
    dialog.ok("[COLOR=blue][B]T[/COLOR][COLOR=dodgerblue]R[/COLOR] [COLOR=white]Community Builds[/COLOR][/B]", "", 'All Done !','')
#---------------------------------------------------------------------------------------------------
#Function to delete the packages folder
def DeletePackages():
    """Empty XBMC's addons/packages download cache.

    Walks the cache and, for every directory that contains at least one
    file, unlinks those files and removes its subdirectories recursively.
    """
    print('############################################################ DELETING PACKAGES ###############################################################')
    cache_dir = xbmc.translatePath(os.path.join('special://home/addons/packages', ''))
    for base, subdirs, filenames in os.walk(cache_dir):
        count = 0
        count += len(filenames)
        # Only touch this level when it actually holds files.
        if count > 0:
            for fname in filenames:
                os.unlink(os.path.join(base, fname))
            for dname in subdirs:
                shutil.rmtree(os.path.join(base, dname))
#---------------------------------------------------------------------------------------------------
#Function to delete the userdata/addon_data folder
def DeleteUserData():
    """Empty XBMC's userdata/addon_data folder.

    Walks the folder and, for every directory that contains at least one
    file, unlinks those files and removes its subdirectories recursively.
    """
    print('############################################################ DELETING USERDATA ###############################################################')
    data_dir = xbmc.translatePath(os.path.join('special://home/userdata/addon_data', ''))
    for base, subdirs, filenames in os.walk(data_dir):
        count = 0
        count += len(filenames)
        # Only touch this level when it actually holds files.
        if count > 0:
            for fname in filenames:
                os.unlink(os.path.join(base, fname))
            for dname in subdirs:
                shutil.rmtree(os.path.join(base, dname))
#---------------------------------------------------------------------------------------------------
#Function to do a full wipe. Thanks to kozz for working out how to add an exclude clause so community builds addon_data and addon isn't touched.
def WipeXBMC():
    """Factory-reset the XBMC install after user confirmation.

    Requires the default Confluence skin to be active first (wiping the
    active custom skin's files would break the UI).  Offers an optional
    backup, asks for a final confirmation, then deletes everything under
    HOME except the EXCLUDES directories, and finally sweeps empty
    folders before telling the user to restart.
    Uses globals: skin, dialog, dp, HOME, EXCLUDES.
    """
    if skin!= "skin.confluence":
        dialog.ok('[COLOR=blue][B]T[/COLOR][COLOR=dodgerblue]R[/COLOR] [COLOR=white]Community Builds[/COLOR][/B]','Please switch to the default Confluence skin','before performing a wipe.','')
        xbmc.executebuiltin("ActivateWindow(appearancesettings)")
        return
    else:
        choice = xbmcgui.Dialog().yesno("VERY IMPORTANT", 'This will completely wipe your install.', 'Would you like to create a backup before proceeding?', '', yeslabel='Yes',nolabel='No')
        if choice == 1:
            BACKUP()
        choice = xbmcgui.Dialog().yesno("ABSOLUTELY CERTAIN?!!!", 'Are you absolutely certain you want to wipe this install?', '', 'All addons and settings will be completely wiped!', yeslabel='Yes',nolabel='No')
        if choice == 0:
            return
        elif choice == 1:
            dp.create("[COLOR=blue][B]T[/COLOR][COLOR=dodgerblue]R[/COLOR] [COLOR=white]Community Builds[/COLOR][/B]","Wiping Install",'', 'Please Wait')
            try:
                for root, dirs, files in os.walk(HOME,topdown=True):
                    # Prune protected directories in place so os.walk never
                    # descends into them (requires topdown=True).
                    dirs[:] = [d for d in dirs if d not in EXCLUDES]
                    for name in files:
                        try:
                            os.remove(os.path.join(root,name))
                            # NOTE(review): rmdir on a FILE path always
                            # fails; harmless only because of the bare
                            # except below.
                            os.rmdir(os.path.join(root,name))
                        except: pass
                    for name in dirs:
                        try: os.rmdir(os.path.join(root,name)); os.rmdir(root)
                        except: pass
                # if not failed:
                    # print"community.builds.WipeXBMC All user files removed, you now have a clean install"
                    # dialog.ok('[COLOR=blue][B]T[/COLOR][COLOR=dodgerblue]R[/COLOR] [COLOR=white]Community Builds[/COLOR][/B]','Wipe Successful, please restart XBMC/Kodi for changes to take effect.','','')
                # else:
                    # print"community.builds.WipeXBMC User files partially removed"
                    # dialog.ok('[COLOR=blue][B]T[/COLOR][COLOR=dodgerblue]R[/COLOR] [COLOR=white]Community Builds[/COLOR][/B]','Wipe Successful, please restart XBMC/Kodi for changes to take effect.','','')
            except: pass
            # Each sweep only removes directories that are ALREADY empty,
            # so nested empties need several passes (7 here, arbitrarily).
            REMOVE_EMPTY_FOLDERS()
            REMOVE_EMPTY_FOLDERS()
            REMOVE_EMPTY_FOLDERS()
            REMOVE_EMPTY_FOLDERS()
            REMOVE_EMPTY_FOLDERS()
            REMOVE_EMPTY_FOLDERS()
            REMOVE_EMPTY_FOLDERS()
            dialog.ok('[COLOR=blue][B]T[/COLOR][COLOR=dodgerblue]R[/COLOR] [COLOR=white]Community Builds[/COLOR][/B]','Wipe Successful, please restart XBMC/Kodi for changes to take effect.','','')
#---------------------------------------------------------------------------------------------------
#Function to do remove all empty folders after delete
def REMOVE_EMPTY_FOLDERS():
    """Delete leaf directories under HOME that hold no files and no
    subdirectories.  One call removes only the bottom layer of empties;
    callers invoke it repeatedly to collapse nested empty trees.
    """
    print("########### Start Removing Empty Folders #########")
    removed = 0
    occupied = 0
    for path, childdirs, childfiles in os.walk(HOME):
        if not childdirs and not childfiles:
            # Completely empty -> safe to rmdir.
            removed += 1
            os.rmdir(path)
            print("successfully removed: " + path)
        elif childdirs and childfiles:
            # Directory in active use; just count it.
            occupied += 1
#---------------------------------------------------------------------------------------------------
#Function to do a full wipe - this is called when doing a fresh CB install.
#Thanks to kozz for working out how to add an exclude clause so community builds addon_data and addon isn't touched.
def WipeInstall():
    """Wipe the install prior to a fresh Community Builds installation.

    Same as WipeXBMC() but without the backup offer or the final
    "restart" dialog; the wipe root is derived from this addon's own
    path (two levels up) rather than the HOME global.
    Uses globals: skin, dialog, dp, AddonID, EXCLUDES, plugintools.
    """
    if skin!= "skin.confluence":
        dialog.ok('[COLOR=blue][B]T[/COLOR][COLOR=dodgerblue]R[/COLOR] [COLOR=white]Community Builds[/COLOR][/B]','Please switch to the default Confluence skin','before performing a wipe.','')
        xbmc.executebuiltin("ActivateWindow(appearancesettings)")
    else:
        choice = xbmcgui.Dialog().yesno("ABSOLUTELY CERTAIN?!!!", 'Are you absolutely certain you want to wipe this install?', '', 'All addons and settings will be completely wiped!', yeslabel='Yes',nolabel='No')
        if choice == 0:
            return
        elif choice == 1:
            dp.create("[COLOR=blue][B]T[/COLOR][COLOR=dodgerblue]R[/COLOR] [COLOR=white]Community Builds[/COLOR][/B]","Wiping Install",'', 'Please Wait')
            # Resolve the wipe root: <this addon>/../.. == the XBMC home.
            addonPath=xbmcaddon.Addon(id=AddonID).getAddonInfo('path'); addonPath=xbmc.translatePath(addonPath);
            xbmcPath=os.path.join(addonPath,"..",".."); xbmcPath=os.path.abspath(xbmcPath); plugintools.log("community.builds.WipeXBMC xbmcPath="+xbmcPath); failed=False
            try:
                for root, dirs, files in os.walk(xbmcPath,topdown=True):
                    # Prune protected directories in place so os.walk never
                    # descends into them (requires topdown=True).
                    dirs[:] = [d for d in dirs if d not in EXCLUDES]
                    for name in files:
                        try: os.remove(os.path.join(root,name))
                        except: pass
                    for name in dirs:
                        try: os.rmdir(os.path.join(root,name))
                        except: pass
            except: pass
            # Each sweep only removes directories that are ALREADY empty,
            # so nested empties need several passes (7 here, arbitrarily).
            REMOVE_EMPTY_FOLDERS()
            REMOVE_EMPTY_FOLDERS()
            REMOVE_EMPTY_FOLDERS()
            REMOVE_EMPTY_FOLDERS()
            REMOVE_EMPTY_FOLDERS()
            REMOVE_EMPTY_FOLDERS()
            REMOVE_EMPTY_FOLDERS()
#---------------------------------------------------------------------------------------------------
#Get params and clean up into string or integer
def get_params():
    """Decode the plugin query string (sys.argv[2]) into a dict.

    Returns a dict of key/value pairs from '?k=v&k2=v2', or the initial
    empty list when the query string is shorter than two characters.
    Pairs without exactly one '=' are silently skipped.
    """
    param = []
    querystring = sys.argv[2]
    if len(querystring) >= 2:
        # Drop every '?' and split the remainder into k=v pairs.
        cleaned = querystring.replace('?', '')
        param = {}
        for pair in cleaned.split('&'):
            kv = pair.split('=')
            if len(kv) == 2:
                param[kv[0]] = kv[1]
    return param
#---------------------------------------------------------------------------------------------------
#Main addDirectory function - xbmcplugin.addDirectoryItem()
def addDirectoryItem(handle, url, listitem, isFolder):
    """Thin pass-through wrapper around xbmcplugin.addDirectoryItem()."""
    xbmcplugin.addDirectoryItem(handle, url, listitem, isFolder)
#---------------------------------------------------------------------------------------------------
#Add a standard directory and grab fanart and iconimage from artpath defined in global variables
def addDir(name,url,mode,iconimage = '',fanart = '',video = '',description = ''):
    """Add one directory row to the current plugin listing.

    iconimage is resolved against the global ARTPATH (or falls back to
    the default folder icon).  All arguments are round-tripped through
    the item's query string so get_params() can recover them on the
    next invocation.  Menu-type modes are marked as folders; action
    modes as playable/executable leaf items.
    """
    if len(iconimage) > 0:
        iconimage = ARTPATH + iconimage
    else:
        iconimage = 'DefaultFolder.png'
    # Serialise every argument into the callback URL.
    u=sys.argv[0]+"?url="+urllib.quote_plus(url)+"&mode="+str(mode)+"&name="+urllib.quote_plus(name)+"&fanart="+urllib.quote_plus(fanart)+"&video="+urllib.quote_plus(video)+"&description="+urllib.quote_plus(description)
    ok=True
    liz=xbmcgui.ListItem(name, iconImage="DefaultFolder.png", thumbnailImage=iconimage)
    liz.setInfo( type="Video", infoLabels={ "Title": name, "Plot": description } )
    liz.setProperty( "Fanart_Image", fanart )
    liz.setProperty( "Build.Video", video )
    # Modes listed here open sub-menus, so flag them as folders.
    if (mode==None) or (mode=='additional_tools') or (mode=='search_builds') or (mode=='manual_search') or (mode=='genres2') or (mode=='restore_option') or (mode=='backup_option') or (mode=='cb_root_menu') or (mode=='genres') or (mode=='grab_builds') or (mode=='grab_builds2') or (mode=='community_menu') or (mode=='instructions') or (mode=='countries')or (url==None) or (len(url)<1):
        ok=xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]),url=u,listitem=liz,isFolder=True)
    else:
        ok=xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]),url=u,listitem=liz,isFolder=False)
    return ok
#---------------------------------------------------------------------------------------------------
#Add a standard directory for the builds. Essentially the same as above but grabs unique artwork from previous call
def addBuildDir(name,url,mode,iconimage,fanart,video,description,skins,guisettingslink):
    """Add one build row to the listing.

    Like addDir() but iconimage is used as-is (unique per-build artwork
    from the previous call) and the extra skins/guisettingslink fields
    are round-tripped through the query string as well.
    """
    u=sys.argv[0]+"?url="+urllib.quote_plus(url)+"&mode="+str(mode)+"&name="+urllib.quote_plus(name)+"&iconimage="+urllib.quote_plus(iconimage)+"&fanart="+urllib.quote_plus(fanart)+"&video="+urllib.quote_plus(video)+"&description="+urllib.quote_plus(description)+"&skins="+urllib.quote_plus(skins)+"&guisettingslink="+urllib.quote_plus(guisettingslink)
    ok=True
    liz=xbmcgui.ListItem(name, iconImage="DefaultFolder.png", thumbnailImage=iconimage)
    liz.setInfo( type="Video", infoLabels={ "Title": name, "Plot": description } )
    liz.setProperty( "Fanart_Image", fanart )
    liz.setProperty( "Build.Video", video )
    # Modes listed here open sub-menus, so flag them as folders.
    if (mode==None) or (mode=='genres2') or (mode=='restore_option') or (mode=='backup_option') or (mode=='cb_root_menu') or (mode=='genres') or (mode=='grab_builds') or (mode=='grab_builds2') or (mode=='community_menu') or (mode=='instructions') or (mode=='countries')or (url==None) or (len(url)<1):
        ok=xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]),url=u,listitem=liz,isFolder=True)
    else:
        ok=xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]),url=u,listitem=liz,isFolder=False)
    return ok
#---------------------------------------------------------------------------------------------------
#Add a directory for the description, this requires multiple string to be called from previous menu
def addDescDir(name,url,mode,iconimage,fanart,buildname,author,version,description,updated,skins,videoaddons,audioaddons,programaddons,pictureaddons,sources,adult):
    """Add a non-folder 'description' row carrying the full build
    metadata (author, version, addon lists, sources, adult flag) in its
    query string for the DESCRIPTION() handler to decode.
    """
    iconimage = ARTPATH + iconimage
    u=sys.argv[0]+"?url="+urllib.quote_plus(url)+"&mode="+str(mode)+"&name="+urllib.quote_plus(name)+"&iconimage="+urllib.quote_plus(iconimage)+"&fanart="+urllib.quote_plus(fanart)+"&author="+urllib.quote_plus(author)+"&description="+urllib.quote_plus(description)+"&version="+urllib.quote_plus(version)+"&buildname="+urllib.quote_plus(buildname)+"&updated="+urllib.quote_plus(updated)+"&skins="+urllib.quote_plus(skins)+"&videoaddons="+urllib.quote_plus(videoaddons)+"&audioaddons="+urllib.quote_plus(audioaddons)+"&buildname="+urllib.quote_plus(buildname)+"&programaddons="+urllib.quote_plus(programaddons)+"&pictureaddons="+urllib.quote_plus(pictureaddons)+"&sources="+urllib.quote_plus(sources)+"&adult="+urllib.quote_plus(adult)
    ok=True
    liz=xbmcgui.ListItem(name, iconImage="DefaultFolder.png", thumbnailImage=iconimage)
    liz.setInfo( type="Video", infoLabels={ "Title": name, "Plot": description } )
    liz.setProperty( "Fanart_Image", fanart )
    # NOTE(review): `video` is NOT a parameter of this function -- this
    # reads the module-level `video` decoded from the current query
    # string; confirm that is intended.
    liz.setProperty( "Build.Video", video )
    ok=xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]),url=u,listitem=liz,isFolder=False)
    return ok
#---------------------------------------------------------------------------------------------------
#Function to return the platform XBMC is currently running on.
#Could possibly do away with this and use xbmc.getInfoLabel("System.BuildVersion") in the killxbmc function
def platform():
    """Return a short identifier for the platform XBMC is running on
    ('android', 'linux', 'windows', 'osx', 'atv2' or 'ios'), or None if
    no known platform condition matches.  Checks are ordered, so the
    first matching condition wins (android before generic linux).
    """
    checks = (
        ('system.platform.android', 'android'),
        ('system.platform.linux', 'linux'),
        ('system.platform.windows', 'windows'),
        ('system.platform.osx', 'osx'),
        ('system.platform.atv2', 'atv2'),
        ('system.platform.ios', 'ios'),
    )
    for condition, label in checks:
        if xbmc.getCondVisibility(condition):
            return label
#---------------------------------------------------------------------------------------------------
# Addon starts here
# Decode the plugin:// query string into module-level variables, then
# dispatch on `mode` below.  Every parameter is optional: a missing key
# leaves the initial value in place.
params=get_params()
url=None
name=None
buildname=None
updated=None
author=None
version=None
mode=None
iconimage=None
description=None
video=None
link=None
skins=None
videoaddons=None
audioaddons=None
programaddons=None
# NOTE(review): duplicate initialiser (audioaddons is set twice above),
# while fanart, pictureaddons, adult and guisettingslink are never
# pre-initialised -- they raise NameError later if absent from the
# query string AND referenced by the selected handler.
audioaddons=None
sources=None
try:
    url=urllib.unquote_plus(params["url"])
except:
    pass
try:
    guisettingslink=urllib.unquote_plus(params["guisettingslink"])
except:
    pass
try:
    name=urllib.unquote_plus(params["name"])
except:
    pass
try:
    iconimage=urllib.unquote_plus(params["iconimage"])
except:
    pass
try:
    fanart=urllib.unquote_plus(params["fanart"])
except:
    pass
try:
    mode=str(params["mode"])
except:
    pass
try:
    link=urllib.unquote_plus(params["link"])
except:
    pass
try:
    skins=urllib.unquote_plus(params["skins"])
except:
    pass
try:
    videoaddons=urllib.unquote_plus(params["videoaddons"])
except:
    pass
try:
    audioaddons=urllib.unquote_plus(params["audioaddons"])
except:
    pass
try:
    programaddons=urllib.unquote_plus(params["programaddons"])
except:
    pass
try:
    pictureaddons=urllib.unquote_plus(params["pictureaddons"])
except:
    pass
try:
    sources=urllib.unquote_plus(params["sources"])
except:
    pass
try:
    adult=urllib.unquote_plus(params["adult"])
except:
    pass
try:
    buildname=urllib.unquote_plus(params["buildname"])
except:
    pass
try:
    updated=urllib.unquote_plus(params["updated"])
except:
    pass
try:
    version=urllib.unquote_plus(params["version"])
except:
    pass
try:
    author=urllib.unquote_plus(params["author"])
except:
    pass
try:
    description=urllib.unquote_plus(params["description"])
except:
    pass
try:
    video=urllib.unquote_plus(params["video"])
except:
    pass
# Route the decoded `mode` value to its handler.  With no mode/url (the
# plugin's root invocation) VideoCheck() builds the main menu.
if mode==None or url==None or len(url)<1:
    VideoCheck()
elif mode=='backup_option':
    BACKUP_OPTION()
elif mode=='restore':
    print "############ RESTORE #################"
    RESTORE()
elif mode=='additional_tools':
    print "############ ADDITIONAL TOOLS #################"
    ADDITIONAL_TOOLS()
elif mode=='community_backup':
    print "############ COMMUNITY BACKUP #################"
    COMMUNITY_BACKUP()
elif mode=='backup':
    print "############ BACKUP #################"
    BACKUP()
elif mode=='restore_backup':
    print "############ RESTORE_BACKUP_XML #################"
    RESTORE_BACKUP_XML(name,url,description)
elif mode=='restore_option':
    print "############ RESTORE_OPTION #################"
    RESTORE_OPTION()
elif mode=='restore_zip':
    print "############ RESTORE_ZIP_FILE #################"
    RESTORE_ZIP_FILE(url)
elif mode=='restore_community':
    print "############ RESTORE_COMMUNITY BUILD #################"
    RESTORE_COMMUNITY(name,url,description,skins,guisettingslink)
elif mode=='grab_builds':
    print "############ CALL COMMUNITY SECTION #################"
    COMMUNITY(url)
elif mode=='grab_builds2':
    print "############ CALL COMMUNITY SECTION #################"
    COMMUNITY2(url)
elif mode=='wipe_xbmc':
    print "############ WIPE XBMC #################"
    WipeXBMC()
elif mode=='description':
    print "############ BUILD DESCRIPTION #################"
    DESCRIPTION(name,url,buildname,author,version,description,updated,skins,videoaddons,audioaddons,programaddons,pictureaddons,sources,adult)
elif mode=='community_menu':
    print "############ BUILD COMMUNITY LIST #################"
    COMMUNITY_MENU(url)
elif mode=='play_video':
    print "############ PLAY VIDEO #################"
    PLAYVIDEO(url)
elif mode=='instructions':
    print "############ INSTRUCTIONS MENU #################"
    INSTRUCTIONS(url)
elif mode=='instructions_1':
    print "############ SHOW INSTRUCTIONS 1 #################"
    Instructions_1()
elif mode=='instructions_2':
    print "############ SHOW INSTRUCTIONS 2 #################"
    Instructions_2()
elif mode=='instructions_3':
    print "############ SHOW INSTRUCTIONS 3 #################"
    Instructions_3()
elif mode=='instructions_4':
    print "############ SHOW INSTRUCTIONS 4 #################"
    Instructions_4()
elif mode=='instructions_5':
    print "############ SHOW INSTRUCTIONS 5 #################"
    Instructions_5()
elif mode=='instructions_6':
    print "############ SHOW INSTRUCTIONS 6 #################"
    Instructions_6()
elif mode=='cb_root_menu':
    print "############ Community Builds Menu #################"
    CB_Root_Menu()
elif mode=='genres':
    print "############ Build GENRE1 Menu #################"
    GENRES()
elif mode=='countries':
    print "############ Build COUNTRIES Menu #################"
    COUNTRIES()
elif mode=='genres2':
    print "############ Build GENRE2 Menu #################"
    GENRES2(url)
elif mode=='search_builds':
    print "############ MANUAL SEARCH BUILDS #################"
    SEARCH_BUILDS()
elif mode=='manual_search':
    print "############ MANUAL SEARCH BUILDS #################"
    MANUAL_SEARCH()
elif mode=='community_search':
    print "############ MANUAL SEARCH BUILDS #################"
    COMMUNITY_SEARCH()
elif mode=='guisettingsfix':
    print "############ GUISETTINGS FIX #################"
    GUISETTINGS_FIX(url)
elif mode=='showinfo':
    print "############ SHOW BASIC BUILD INFO #################"
    SHOWINFO(url)
elif mode=='remove_build':
    print "############ SHOW BASIC BUILD INFO #################"
    REMOVE_BUILD()
elif mode=='kill_xbmc':
    print "############ ATTEMPT TO KILL XBMC/KODI #################"
    killxbmc()
elif mode=='fix_special':
    print "############ FIX SPECIAL PATHS #################"
    FIX_SPECIAL(url)
# Always close the listing so XBMC renders whatever was added above.
xbmcplugin.endOfDirectory(int(sys.argv[1]))
#!/usr/bin/python3
#################################### BY MIKICAT ###############################################
###############################A WORLDEV AFFILIATE#############################################
###IF YOU DETECT BUGS, PLEASE OPEN AN ISSUE OR REPORT THEM TO http://mikicatantivirus.weebly.com/contact.html ##
import os
import platform
import time
import webbrowser
from tkinter import *
# Auto dir setup.
# Detect the host OS once; every scan branch below keys off these flags.
if platform.system() == "Windows":
    linux = False
    windows = True
elif platform.system() == "Linux":
    linux = True
    windows = False
else:
    print("Mikicat Antivirus is not compatible with your operative system.")
    os._exit(1)
home = os.path.expanduser('~')
# extfiles: suspicious filenames found by the current scan pass.
# files: every filename seen in the scanned directory (bookkeeping only).
extfiles = []
files = []
# The four scan locations, in the order they are scanned:
# app-config dir, system dir, startup dir, downloads dir.
# NOTE(review): the Windows paths rely on unescaped backslashes ("\A",
# "\W", "\D"...) surviving as literal characters; safer as raw strings
# or os.path.join.
directory = ("{0}\AppData\Local".format(home) if windows else "{0}/.config".format(home))
directory2 = ("C:\Windows\system32" if windows else "/sbin")
directory3 = ("{0}\AppData\Roaming\Microsoft\Windows\Start Menu\Programs\Startup".format(home) if windows else "/etc/init.d")
directory4 = ("{0}\Downloads".format(home) if windows else "{0}/Downloads".format(home))
# Extensions
# File extensions treated as suspicious.  ext4-ext7 and ext10 carry
# per-directory caveats (see their inline comments).
ext = '.bat'
ext2 = '.Gen'
ext3 = '.gen'
ext4 = '.vbs' # Do not scan it in system32
ext5 = '.inf' # Do not scan it in system32
ext6 = '.vbe' # Do not scan it in system32
ext7 = '.vb' # Do not scan it in system32
ext8 = '.gzquar'
ext9 = '.vexe'
ext10 = '.sys' # Important: Only detect this if found in Downloads. If it is in any other detection of any other part of the code, please delete the "or file.endswith(ext10)". If it is put in the system32, it will delete essential system files.
ext11 = '.aru'
ext12 = '.smtmp'
ext13 = '.ctbl'
ext14 = '.dxz'
ext15 = '.cih'
ext16 = '.kcd'
ext17 = '.sop'
ext18 = '.tsa'
ext19 = '.xir'
ext20 = '.fnr'
ext21 = '.dom'
ext22 = '.hlw'
ext23 = '.lik'
ext24 = '.s7p'
ext25 = '.rhk'
ext26 = '.dlb'
ext27 = '.bll'
ext28 = '.dyz'
ext29 = '.fag'
ext30 = '.xtbl'
ext31 = '.fjl'
ext32 = '.cryptolocker'
ext33 = '.mjz'
ext34 = '.osa'
ext35 = '.bxz'
ext36 = '.mfu'
ext37 = '.ezt'
ext38 = '.dyv'
ext39 = '.iws'
ext40 = '.xdu'
ext41 = '.dllx'
ext42 = '.uzy'
ext43 = '.ska'
ext44 = '.mjg'
ext45 = '.txs'
ext46 = '.upa'
ext47 = '.bls'
ext48 = '.cc'
ext49 = '.lkh'
ext50 = '.tko'
ext51 = '.tti'
ext52 = '.dli'
ext53 = '.ceo'
ext54 = '.rna'
ext55 = '.delf'
ext56 = '.spam'
ext57 = '.cxq'
ext58 = '.vzr'
ext59 = '.bmw'
ext60 = '.atm'
ext61 = '.fuj'
ext62 = '.ce0'
ext63 = '.lok'
ext64 = '.ssy'
ext65 = '.hts'
ext66 = '.hsq'
ext67 = '.qit'
ext68 = '.pid'
ext69 = '.aepl'
ext70 = '.xnt'
ext71 = '.aut'
ext72 = '.dx'
ext73 = '.zvz'
ext74 = '.bqf'
ext75 = '.iva'
ext76 = '.pr'
ext77 = '.let'
ext78 = '.cyw'
ext79 = '.bup'
ext80 = '.bps'
ext81 = '.epub.exe'
# Extensions
# Text shown by the idea() window; read once at import time.
ideatxt = open("assets/idea.txt").read()
def idea():
    """Open a window showing the contents of assets/idea.txt (ideatxt)."""
    print('opening')
    win = Tk()
    win.title("Idea --> Content from idea.txt")
    win.resizable(0, 0)
    Label(win, text=ideatxt).grid(row=1, sticky=W)
    Button(win, text="Quit", command=win.destroy).grid(row=2, column=2, sticky=W)
def done4():
    """Report that the Downloads scan (the final one) found nothing."""
    win = Tk()
    win.title("Mikicat's Antivirus™: Finished")
    win.resizable(0, 0)
    Label(win, text="DONE: No virus found in %s." % directory4).grid(row=1, sticky=W)
    Label(win, text="\n").grid(row=2, sticky=W)
    Label(win, text="Thanks for using Miquel's Antivirus!").grid(row=3, sticky=W)
    Button(win, text="Quit", command=win.destroy).grid(row=4, column=2, sticky=W)
    Button(win, text="Idea", command=idea).grid(row=4, sticky=W)
    print("4")
def finish():
    """Show the final thank-you window with Quit and Idea buttons."""
    win = Tk()
    Label(win, text="Thanks for using Miquel's Antivirus!").grid(row=3, sticky=W)
    Button(win, text="Quit", command=win.destroy).grid(row=4, column=2, sticky=W)
    Button(win, text="Idea", command=idea).grid(row=4, sticky=W)
def yes4():
    """Delete the flagged files from the Downloads folder and confirm."""
    sep = "/" if linux else "\\"
    for fname in extfiles:
        os.remove(directory4 + sep + fname)
    # Reset the shared scan bookkeeping lists in place.
    del files[:]
    del extfiles[:]
    win = Tk()
    win.title("Done")
    win.resizable(0, 0)
    Label(win, text="Done").grid(row=1, sticky=W)
    Button(win, text="Finish", command=finish).grid(row=2, sticky=W)
    print("Done")
def detection4():
    """Scan the Downloads folder (directory4) for suspicious extensions.

    Repopulates the module-level `extfiles` (matches) and `files` (all
    entries) lists; on matches opens a warning window offering deletion
    via yes4(), otherwise reports a clean result via done4().
    """
    del files[:]
    del extfiles[:]
    # str.endswith() accepts a tuple of suffixes, so one membership test
    # replaces the original 17-line chain of `or` clauses.
    # NOTE: .vbs/.inf/.vbe/.vb (ext4-ext7) and .sys (ext10) ARE scanned
    # here on purpose -- this is the user's Downloads folder, not system32.
    suspect_exts = (
        ext, ext2, ext3, ext4, ext5, ext6, ext7, ext8, ext9, ext10,
        ext11, ext12, ext13, ext14, ext15, ext16, ext17, ext18, ext19, ext20,
        ext21, ext22, ext23, ext24, ext25, ext26, ext27, ext28, ext29, ext30,
        ext31, ext32, ext33, ext34, ext35, ext36, ext37, ext38, ext39, ext40,
        ext41, ext42, ext43, ext44, ext45, ext46, ext47, ext48, ext49, ext50,
        ext51, ext52, ext53, ext54, ext55, ext56, ext57, ext58, ext59, ext60,
        ext61, ext62, ext63, ext64, ext65, ext66, ext67, ext68, ext69, ext70,
        ext71, ext72, ext73, ext74, ext75, ext76, ext77, ext78, ext79, ext80,
        ext81,
    )
    # Single pass replaces the original duplicate os.listdir() loops.
    for file in os.listdir(directory4):
        if file.endswith(suspect_exts):
            extfiles.append(file)
        files.append(file)
    if extfiles:
        tk = Tk()
        tk.title("WARNING")
        tk.resizable(0, 0)
        Label(tk, text="WARNING: POSSIBLE VIRUS DETECTED").grid(row=1, sticky=W)
        Label(tk, text="Possible virus: %s" % extfiles).grid(row=2, sticky=W)
        Button(tk, text="Delete", command=yes4).grid(row=8, column=2, sticky=W)
        Button(tk, text="Cancel", command=tk.destroy).grid(row=8, sticky=W)
    else:
        done4()
def done3():
    """Report a clean startup-folder scan and offer the Downloads scan."""
    win = Tk()
    win.title("Mikicat's Antivirus™: Done")
    win.resizable(0, 0)
    Label(win, text="DONE: No virus found in %s" % directory3).grid(row=1, sticky=W)
    Button(win, text="Continue", command=detection4).grid(row=3, column=2, sticky=W)
    Button(win, text="Quit", command=win.destroy).grid(row=3, sticky=W)
    print("3")
def yes3():
    """Delete flagged files from the startup folder, then offer the
    next scan stage (Downloads)."""
    sep = "/" if linux else "\\"
    for fname in extfiles:
        os.remove(directory3 + sep + fname)
    # Reset the shared scan bookkeeping lists in place.
    del files[:]
    del extfiles[:]
    win = Tk()
    win.title("Done")
    win.resizable(0, 0)
    Label(win, text="Done").grid(row=1, sticky=W)
    Button(win, text="Continue", command=detection4).grid(row=2, column=2, sticky=W)
    Button(win, text="Quit", command=win.destroy).grid(row=2, sticky=W)
    print("Done")
def detection3():
    """Scan the startup folder (directory3) for suspicious extensions.

    Repopulates the module-level `extfiles` (matches) and `files` (all
    entries) lists; on matches opens a warning window offering deletion
    via yes3(), otherwise reports a clean result via done3().
    """
    del files[:]
    del extfiles[:]
    # str.endswith() accepts a tuple of suffixes, so one membership test
    # replaces the original chain of `or` clauses.  ext4-ext7 and ext10
    # are deliberately omitted here (see their definitions).
    suspect_exts = (
        ext, ext2, ext3, ext8, ext9,
        ext11, ext12, ext13, ext14, ext15, ext16, ext17, ext18, ext19, ext20,
        ext21, ext22, ext23, ext24, ext25, ext26, ext27, ext28, ext29, ext30,
        ext31, ext32, ext33, ext34, ext35, ext36, ext37, ext38, ext39, ext40,
        ext41, ext42, ext43, ext44, ext45, ext46, ext47, ext48, ext49, ext50,
        ext51, ext52, ext53, ext54, ext55, ext56, ext57, ext58, ext59, ext60,
        ext61, ext62, ext63, ext64, ext65, ext66, ext67, ext68, ext69, ext70,
        ext71, ext72, ext73, ext74, ext75, ext76, ext77, ext78, ext79, ext80,
        ext81,
    )
    # Single pass replaces the original duplicate os.listdir() loops.
    for file in os.listdir(directory3):
        if file.endswith(suspect_exts):
            extfiles.append(file)
        files.append(file)
    if extfiles:
        tk = Tk()
        tk.title("WARNING")
        tk.resizable(0, 0)
        Label(tk, text="WARNING: POSSIBLE VIRUS DETECTED").grid(row=1, sticky=W)
        Label(tk, text="Possible virus: %s" % extfiles).grid(row=2, sticky=W)
        Button(tk, text="Delete", command=yes3).grid(row=8, column=2, sticky=W)
        Button(tk, text="Cancel", command=tk.destroy).grid(row=8, sticky=W)
    else:
        done3()
def done2():
    """Report a clean system-folder scan and offer the startup scan."""
    win = Tk()
    win.title("Mikicat's Antivirus™: Done")
    win.resizable(0, 0)
    Label(win, text="DONE: No virus found in %s" % directory2).grid(row=1, sticky=W)
    Button(win, text="Continue", command=detection3).grid(row=3, column=2, sticky=W)
    Button(win, text="Quit", command=win.destroy).grid(row=3, sticky=W)
    print("2")
def yes2():
    """Delete flagged files from the system folder, then offer the next
    scan stage (startup folder)."""
    sep = "/" if linux else "\\"
    for fname in extfiles:
        os.remove(directory2 + sep + fname)
    # Reset the shared scan bookkeeping lists in place.
    del files[:]
    del extfiles[:]
    win = Tk()
    win.title("Done")
    win.resizable(0, 0)
    Label(win, text="Done").grid(row=1, sticky=W)
    Button(win, text="Continue", command=detection3).grid(row=2, column=2, sticky=W)
    Button(win, text="Quit", command=win.destroy).grid(row=2, sticky=W)
    print("Done")
def detection2():
    """Scan the system folder (directory2: system32 or /sbin) for
    suspicious extensions.

    Repopulates the module-level `extfiles` (matches) and `files` (all
    entries) lists; on matches opens a warning window offering deletion
    via yes2(), otherwise reports a clean result via done2().
    """
    del files[:]
    del extfiles[:]
    # str.endswith() accepts a tuple of suffixes, so one membership test
    # replaces the original chain of `or` clauses.  ext4-ext7 and ext10
    # (.sys) MUST stay omitted here: deleting .sys from system32 would
    # remove essential system files.
    suspect_exts = (
        ext, ext2, ext3, ext8, ext9,
        ext11, ext12, ext13, ext14, ext15, ext16, ext17, ext18, ext19, ext20,
        ext21, ext22, ext23, ext24, ext25, ext26, ext27, ext28, ext29, ext30,
        ext31, ext32, ext33, ext34, ext35, ext36, ext37, ext38, ext39, ext40,
        ext41, ext42, ext43, ext44, ext45, ext46, ext47, ext48, ext49, ext50,
        ext51, ext52, ext53, ext54, ext55, ext56, ext57, ext58, ext59, ext60,
        ext61, ext62, ext63, ext64, ext65, ext66, ext67, ext68, ext69, ext70,
        ext71, ext72, ext73, ext74, ext75, ext76, ext77, ext78, ext79, ext80,
        ext81,
    )
    # Single pass replaces the original duplicate os.listdir() loops.
    for file in os.listdir(directory2):
        if file.endswith(suspect_exts):
            extfiles.append(file)
        files.append(file)
    if extfiles:
        tk = Tk()
        tk.title("WARNING")
        tk.resizable(0, 0)
        Label(tk, text="WARNING: POSSIBLE VIRUS DETECTED").grid(row=1, sticky=W)
        Label(tk, text="Possible virus: %s" % extfiles).grid(row=2, sticky=W)
        Button(tk, text="Delete", command=yes2).grid(row=8, column=2, sticky=W)
        Button(tk, text="Cancel", command=tk.destroy).grid(row=8, sticky=W)
    else:
        done2()
def done1():
    """Show the all-clear dialog after the first directory scan found nothing."""
    window = Tk()
    window.resizable(0, 0)
    window.title("Mikicat's Antivirus™: Done")
    message = "DONE: No virus found in %s" % directory
    Label(window, text=message).grid(row=1, sticky=W)
    Button(window, text="Continue", command=detection2).grid(row=3, column=2, sticky=W)
    Button(window, text="Quit", command=window.destroy).grid(row=3, sticky=W)
    print("1")
def yes1():
    """Delete every file flagged by detection1, then show a completion dialog.

    Operates on the module-level ``extfiles``/``files`` lists and the
    scanned ``directory``; both lists are cleared afterwards.
    """
    for item in extfiles:
        # os.path.join uses the correct separator for the running platform,
        # replacing the manual `"/" if linux else "\\"` splice which broke
        # whenever the `linux` flag disagreed with the actual OS.
        os.remove(os.path.join(directory, item))
    del files[:]
    del extfiles[:]
    root = Tk()
    root.resizable(0, 0)
    root.title("Done")
    Label(root, text="Done").grid(row=1, sticky=W)
    Button(root, text="Continue", command=detection2).grid(row=2, column=2, sticky=W)
    Button(root, text="Quit", command=root.destroy).grid(row=2, sticky=W)
    print("Done")
def detection1():
    """Scan `directory` for files with suspicious extensions.

    Flagged names are collected in the module-level ``extfiles`` list and a
    warning dialog offers to delete them (via ``yes1``); if nothing is
    flagged the all-clear dialog ``done1`` is shown.
    """
    del files[:]
    del extfiles[:]
    # str.endswith accepts a tuple of suffixes, which replaces the original
    # 80-clause `or` chain.
    # NOTE(review): ext10 was absent from the original chain and is
    # deliberately not listed here -- confirm whether that was intentional.
    suspicious_exts = (
        ext, ext2, ext3, ext4, ext5, ext6, ext7, ext8, ext9,
        ext11, ext12, ext13, ext14, ext15, ext16, ext17, ext18, ext19,
        ext20, ext21, ext22, ext23, ext24, ext25, ext26, ext27, ext28,
        ext29, ext30, ext31, ext32, ext33, ext34, ext35, ext36, ext37,
        ext38, ext39, ext40, ext41, ext42, ext43, ext44, ext45, ext46,
        ext47, ext48, ext49, ext50, ext51, ext52, ext53, ext54, ext55,
        ext56, ext57, ext58, ext59, ext60, ext61, ext62, ext63, ext64,
        ext65, ext66, ext67, ext68, ext69, ext70, ext71, ext72, ext73,
        ext74, ext75, ext76, ext77, ext78, ext79, ext80, ext81)
    # Single directory scan instead of the original two listdir passes.
    for entry in os.listdir(directory):
        if entry.endswith(suspicious_exts):
            extfiles.append(entry)
        files.append(entry)
    if extfiles:
        tk = Tk()
        tk.title("WARNING")
        tk.resizable(0, 0)
        Label(tk, text="WARNING: POSSIBLE VIRUS DETECTED").grid(row=1, sticky=W)
        Label(tk, text="Possible virus: %s" % extfiles).grid(row=2, sticky=W)
        Button(tk, text="Delete", command=yes1).grid(row=8, column=2, sticky=W)
        Button(tk, text="Cancel", command=tk.destroy).grid(row=8, sticky=W)
    else:
        done1()
lic = """This program is free software: You can redistribute it and/or modify it under the terms of the General Public License version 3
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW.
EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM “AS IS” WITHOUT WARRANTY OF ANY KIND,
EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU.
SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION."""
imp = open("assets/IMPORTANT.txt").read()
helptxt = open("assets/help.txt").read()
def gpl():
    """Open the full GPLv3 license text in the default web browser."""
    license_url = "http://www.gnu.org/licenses/gpl-3.0.en.html"
    webbrowser.open(license_url)
def licinfo():
    """Pop up a window showing the license summary with a GPLv3 link."""
    print('opening')
    window = Tk()
    window.resizable(0, 0)
    window.title("License")
    Label(window, text=lic).grid(row=1, sticky=W)
    Button(window, text="Return", command=window.destroy).grid(row=2, column=1, sticky=W)
    Button(window, text="GPLv3", command=gpl).grid(row=2, column=0, sticky=W)
def important():
    """Show the contents of assets/IMPORTANT.txt in a pop-up window."""
    print('opening')
    window = Tk()
    window.resizable(0, 0)
    window.title("IMPORTANT --> Content from IMPORTANT.txt")
    Label(window, text=imp).grid(row=1, sticky=W)
    Button(window, text="Return", command=window.destroy).grid(row=2, column=2, sticky=W)
    # NOTE(review): the original pauses briefly after building the window;
    # kept as-is to preserve behavior.
    time.sleep(0.5)
def infohelp():
    """Display the help text loaded from assets/help.txt."""
    window = Tk()
    window.resizable(0, 0)
    window.title("Help --> Content from help.txt")
    Label(window, text=helptxt).grid(row=1, sticky=W)
    Button(window, text="Return", command=window.destroy).grid(row=2, sticky=W)
def main():
    """Build and populate the application's main window."""
    master = Tk()
    master.title("Mikicat's Antivirus™")
    master.resizable(0, 0)
    Button(master, text="Help", command=infohelp).grid(row=0, column=5, sticky=W)
    # Banner image; requires antivirus2.gif next to the script.
    photo = PhotoImage(file="antivirus2.gif")
    label = Label(image=photo)
    # Keep a reference on the widget so the image is not garbage-collected.
    label.image = photo
    label.grid(row=1)
    Label(master, text="Today is %s !" % time.asctime()).grid(row=2, sticky=W)
    # Blank labels used as vertical spacers.
    Label(master, text="\n").grid(row=3, sticky=W)
    Label(master, text="\n").grid(row=5, sticky=W)
    Label(master, text="By Mikicat || A Worldev project").grid(row=6, sticky=W)
    Label(master, text="\n").grid(row=7, sticky=W)
    Button(master, text="Start", underline=0, command=detection1).grid(row=8, column=2, sticky=W)
    Button(master, text="Quit", underline=0, command=master.destroy).grid(row=8, sticky=W)
    Button(master, text="License", underline=0, command=licinfo).grid(row=8, column=3, sticky=W)
    Button(master, text="Important", underline=0, command=important).grid(row=8, column=4, sticky=W)
    # NOTE(review): `idea` is not defined in this part of the file -- confirm
    # it exists elsewhere before shipping.
    Button(master, text="Idea", underline=0, command=idea).grid(row=8, column=5, sticky=W)
    print("Starting tkinter cleanly")
# Build the UI, then hand control to Tk's event loop.
main()
mainloop()
| Worldev/Mikicat-Antivirus | antivirus.py | Python | gpl-3.0 | 20,145 | 0.0076 |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""Liberty
Revision ID: liberty
Revises: None
Create Date: 2015-11-13 00:00:00.000000
"""
# revision identifiers, used by Alembic.
revision = 'liberty'  # this migration's identifier
down_revision = None  # first revision in the chain (per the docstring: "Revises: None")
def upgrade():
    """A no-op migration for marking the Liberty release."""
    # Intentionally empty: this revision only stamps the 'liberty' release.
    pass
| nuagenetworks/nuage-openstack-neutron | nuage_neutron/db/migration/alembic_migrations/versions/liberty_release.py | Python | apache-2.0 | 838 | 0 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import models
| sysadminmatmoz/odoo-clearcorp | purchase_prediction/__init__.py | Python | agpl-3.0 | 1,003 | 0.000997 |
# -*- coding: utf-8 -*-
from django.apps import AppConfig
class AuthorizationConfig(AppConfig):
    """Django app configuration for the library-space authorization app."""
    # Explicit label distinct from the default ('authorization') derived
    # from `name`, avoiding clashes with similarly named apps.
    label = 'userspace_library_authorizations'
    name = 'apps.userspace.library.authorization'
| erudit/zenon | eruditorg/apps/userspace/library/authorization/apps.py | Python | gpl-3.0 | 196 | 0 |
# coding: utf-8
from __future__ import absolute_import
import flask
import auth
import config
import model
import util
from main import app
# OAuth 1.0a endpoints and credentials for Twitter sign-in; consumer
# key/secret come from the app's config datastore.
twitter_config = dict(
    access_token_url='https://api.twitter.com/oauth/access_token',
    api_base_url='https://api.twitter.com/1.1/',
    authorize_url='https://api.twitter.com/oauth/authenticate',
    client_id=config.CONFIG_DB.twitter_consumer_key,
    client_secret=config.CONFIG_DB.twitter_consumer_secret,
    request_token_url='https://api.twitter.com/oauth/request_token',
    signature_method='HMAC-SHA1',
    # OAuth1 requires the request token to be persisted between the
    # redirect legs; these callbacks handle storage/retrieval.
    save_request_token=auth.save_oauth1_request_token,
    fetch_request_token=auth.fetch_oauth1_request_token,
)

# OAuth client instance used by the routes below.
twitter = auth.create_oauth_app(twitter_config, 'twitter')
@app.route('/api/auth/callback/twitter/')
def twitter_authorized():
    """Handle Twitter's OAuth callback and sign the user in."""
    token = twitter.authorize_access_token()
    if token is not None:
        profile = twitter.get('account/verify_credentials.json').json()
        return auth.signin_user_db(retrieve_user_from_twitter(profile))
    # The user rejected the authorization request.
    flask.flash('You denied the request to sign in.')
    return flask.redirect(util.get_next_url())
@app.route('/signin/twitter/')
def signin_twitter():
    # Kick off the OAuth flow by redirecting to Twitter's authorize URL.
    return auth.signin_oauth(twitter)
def retrieve_user_from_twitter(response):
    """Return the User matching this Twitter profile, creating one if needed."""
    auth_id = 'twitter_%s' % response['id_str']
    existing = model.User.get_by('auth_ids', auth_id)
    if existing:
        return existing
    # Fall back to the handle when the profile has no display name.
    display_name = response['name'] or response['screen_name']
    return auth.create_user_db(
        auth_id=auth_id,
        name=display_name,
        username=response['screen_name'],
    )
| mdxs/gae-init | main/auth/twitter.py | Python | mit | 1,532 | 0.00718 |
# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2016 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import io
import os
from unittest import mock
from xml.etree import ElementTree
import fixtures
from testtools.matchers import HasLength
import snapcraft
from snapcraft import tests
from snapcraft.plugins import maven
class MavenPluginTestCase(tests.TestCase):
    """Tests for the maven plugin: schema, builds and proxy settings.

    The five proxy tests previously copy-pasted 30-line expected-XML
    blocks and run() side-effect closures; those are factored into the
    private helpers below.  assertSettingsEqual's debug prints were
    removed (assertEqual's diff already shows mismatches).
    """

    def setUp(self):
        super().setUp()

        class Options:
            maven_options = []
            maven_targets = ['']

        self.options = Options()
        self.project_options = snapcraft.ProjectOptions()

        patcher = mock.patch('snapcraft.repo.Ubuntu')
        self.ubuntu_mock = patcher.start()
        self.addCleanup(patcher.stop)

    # -- helpers ----------------------------------------------------------

    def _set_env(self, env_vars):
        """Pin (or clear, for None values) the given environment variables."""
        for name, value in env_vars:
            self.useFixture(fixtures.EnvironmentVariable(name, value))

    def _make_plugin(self):
        """Create a maven plugin instance wired to the test options."""
        return maven.MavenPlugin('test-part', self.options,
                                 self.project_options)

    @staticmethod
    def _fake_artifacts(plugin, artifacts):
        """Return a run() side effect that fakes maven writing artifacts.

        ``artifacts`` is a list of path tuples relative to the build dir,
        e.g. ``('target', 'dummy.jar')``.
        """
        def side_effect(command):
            for parts in artifacts:
                target_dir = os.path.join(plugin.builddir, *parts[:-1])
                os.makedirs(target_dir)
                open(os.path.join(target_dir, parts[-1]), 'w').close()
        return side_effect

    def _run_build(self, plugin, run_mock, artifacts):
        """Drive plugin.build() with faked maven output."""
        run_mock.side_effect = self._fake_artifacts(plugin, artifacts)
        os.makedirs(plugin.sourcedir)
        plugin.build()

    @staticmethod
    def _expected_settings(proxies):
        """Render the settings.xml content expected for the proxy dicts.

        Whitespace is irrelevant here because assertSettingsEqual
        canonicalizes both sides before comparing.
        """
        entries = []
        for proxy in proxies:
            credentials = ''
            if 'username' in proxy:
                credentials = (
                    '<username>{username}</username>'
                    '<password>{password}</password>').format(**proxy)
            entries.append(
                '<proxy>'
                '<id>{id}</id>'
                '<active>true</active>'
                '<protocol>{protocol}</protocol>'
                '<host>{host}</host>'
                '<port>{port}</port>'
                '{credentials}'
                '<nonProxyHosts>{non_proxy_hosts}</nonProxyHosts>'
                '</proxy>'.format(credentials=credentials, **proxy))
        return (
            '<settings xmlns="http://maven.apache.org/SETTINGS/1.0.0"'
            ' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"'
            ' xsi:schemaLocation="http://maven.apache.org/SETTINGS/1.0.0'
            ' http://maven.apache.org/xsd/settings-1.0.0.xsd">'
            '<interactiveMode>false</interactiveMode>'
            '<proxies>' + ''.join(entries) + '</proxies>'
            '</settings>\n')

    def _assert_settings(self, settings_path, proxies):
        """Check that settings.xml exists and matches the expected proxies."""
        self.assertTrue(
            os.path.exists(settings_path),
            'expected {!r} to exist'.format(settings_path))
        with open(settings_path) as f:
            settings_contents = f.read()
        self.assertSettingsEqual(
            self._expected_settings(proxies), settings_contents)

    @staticmethod
    def _canonicalize_settings(settings):
        """Strip ignorable whitespace so XML compares structurally."""
        with io.StringIO(settings) as f:
            tree = ElementTree.parse(f)
        for element in tree.iter():
            if element.text is not None and element.text.isspace():
                element.text = None
            if element.tail is not None and element.tail.isspace():
                element.tail = None
        with io.StringIO() as f:
            tree.write(
                f, encoding='unicode',
                default_namespace='http://maven.apache.org/SETTINGS/1.0.0')
            return f.getvalue() + '\n'

    def assertSettingsEqual(self, expected, observed):
        self.assertEqual(
            self._canonicalize_settings(expected),
            self._canonicalize_settings(observed))

    # -- tests ------------------------------------------------------------

    def test_get_build_properties(self):
        expected_build_properties = ['maven-options', 'maven-targets']
        resulting_build_properties = maven.MavenPlugin.get_build_properties()
        self.assertThat(resulting_build_properties,
                        HasLength(len(expected_build_properties)))

        for prop in expected_build_properties:
            self.assertIn(prop, resulting_build_properties)

    def test_schema(self):
        schema = maven.MavenPlugin.schema()
        properties = schema['properties']
        # Both list properties share the same shape: a unique, non-empty
        # array.
        for name in ('maven-options', 'maven-targets'):
            self.assertTrue(
                name in properties,
                'Expected "{}" to be included in properties'.format(name))
            prop = properties[name]
            self.assertEqual(
                prop.get('type'), 'array',
                'Expected "{}" "type" to be "array"'.format(name))
            self.assertEqual(
                prop.get('minitems'), 1,
                'Expected "{}" "minitems" to be 1'.format(name))
            self.assertTrue(
                prop.get('uniqueItems'),
                'Expected "{}" "uniqueItems" to be "True"'.format(name))

    @mock.patch.object(maven.MavenPlugin, 'run')
    def test_build(self, run_mock):
        self._set_env((('http_proxy', None), ('https_proxy', None)))
        plugin = self._make_plugin()
        self._run_build(plugin, run_mock, [('target', 'dummy.jar')])
        run_mock.assert_has_calls([mock.call(['mvn', 'package'])])

    @mock.patch.object(maven.MavenPlugin, 'run')
    def test_build_fail(self, run_mock):
        # run() never produces an artifact, so build() must raise.
        self._set_env((('http_proxy', None), ('https_proxy', None)))
        plugin = self._make_plugin()
        os.makedirs(plugin.sourcedir)

        self.assertRaises(RuntimeError, plugin.build)
        run_mock.assert_has_calls([mock.call(['mvn', 'package'])])

    @mock.patch.object(maven.MavenPlugin, 'run')
    def test_build_war(self, run_mock):
        self._set_env((('http_proxy', None), ('https_proxy', None)))
        plugin = self._make_plugin()
        self._run_build(plugin, run_mock, [('target', 'dummy.war')])
        run_mock.assert_has_calls([mock.call(['mvn', 'package'])])

    @mock.patch.object(maven.MavenPlugin, 'run')
    def test_build_with_targets(self, run_mock):
        self._set_env((('http_proxy', None), ('https_proxy', None)))
        self.options.maven_targets = ['child1', 'child2']
        plugin = self._make_plugin()
        self._run_build(plugin, run_mock,
                        [('child1', 'target', 'child1.jar'),
                         ('child2', 'target', 'child2.jar')])
        run_mock.assert_has_calls([mock.call(['mvn', 'package'])])

    @mock.patch.object(maven.MavenPlugin, 'run')
    def test_build_with_http_proxy(self, run_mock):
        self._set_env((('http_proxy', 'http://localhost:3132'),
                       ('https_proxy', None),
                       ('no_proxy', None)))
        plugin = self._make_plugin()
        settings_path = os.path.join(plugin.partdir, 'm2', 'settings.xml')
        self._run_build(plugin, run_mock, [('target', 'dummy.jar')])
        run_mock.assert_has_calls([
            mock.call(['mvn', 'package', '-s', settings_path]),
        ])
        self._assert_settings(settings_path, [
            dict(id='http_proxy', protocol='http', host='localhost',
                 port='3132', non_proxy_hosts='localhost'),
        ])

    @mock.patch.object(maven.MavenPlugin, 'run')
    def test_build_with_http_proxy_and_no_proxy(self, run_mock):
        self._set_env((('http_proxy', 'http://localhost:3132'),
                       ('https_proxy', None),
                       ('no_proxy', 'internal')))
        plugin = self._make_plugin()
        settings_path = os.path.join(plugin.partdir, 'm2', 'settings.xml')
        self._run_build(plugin, run_mock, [('target', 'dummy.jar')])
        run_mock.assert_has_calls([
            mock.call(['mvn', 'package', '-s', settings_path]),
        ])
        self._assert_settings(settings_path, [
            dict(id='http_proxy', protocol='http', host='localhost',
                 port='3132', non_proxy_hosts='internal'),
        ])

    @mock.patch.object(maven.MavenPlugin, 'run')
    def test_build_with_http_proxy_and_no_proxies(self, run_mock):
        # Comma-separated no_proxy entries become a '|'-separated
        # nonProxyHosts value.
        self._set_env((('http_proxy', 'http://localhost:3132'),
                       ('https_proxy', None),
                       ('no_proxy', 'internal, pseudo-dmz')))
        plugin = self._make_plugin()
        settings_path = os.path.join(plugin.partdir, 'm2', 'settings.xml')
        self._run_build(plugin, run_mock, [('target', 'dummy.jar')])
        run_mock.assert_has_calls([
            mock.call(['mvn', 'package', '-s', settings_path]),
        ])
        self._assert_settings(settings_path, [
            dict(id='http_proxy', protocol='http', host='localhost',
                 port='3132', non_proxy_hosts='internal|pseudo-dmz'),
        ])

    @mock.patch.object(maven.MavenPlugin, 'run')
    def test_build_with_http_and_https_proxy(self, run_mock):
        self._set_env((('http_proxy', 'http://localhost:3132'),
                       ('https_proxy', 'http://localhost:3133'),
                       ('no_proxy', None)))
        plugin = self._make_plugin()
        settings_path = os.path.join(plugin.partdir, 'm2', 'settings.xml')
        self._run_build(plugin, run_mock, [('target', 'dummy.jar')])
        run_mock.assert_has_calls([
            mock.call(['mvn', 'package', '-s', settings_path]),
        ])
        self._assert_settings(settings_path, [
            dict(id='http_proxy', protocol='http', host='localhost',
                 port='3132', non_proxy_hosts='localhost'),
            dict(id='https_proxy', protocol='https', host='localhost',
                 port='3133', non_proxy_hosts='localhost'),
        ])

    @mock.patch.object(maven.MavenPlugin, 'run')
    def test_build_with_authenticated_proxies(self, run_mock):
        self._set_env((('http_proxy', 'http://user1:pass1@localhost:3132'),
                       ('https_proxy', 'http://user2:pass2@localhost:3133'),
                       ('no_proxy', None)))
        plugin = self._make_plugin()
        settings_path = os.path.join(plugin.partdir, 'm2', 'settings.xml')
        self._run_build(plugin, run_mock, [('target', 'dummy.jar')])
        run_mock.assert_has_calls([
            mock.call(['mvn', 'package', '-s', settings_path]),
        ])
        self._assert_settings(settings_path, [
            dict(id='http_proxy', protocol='http', host='localhost',
                 port='3132', username='user1', password='pass1',
                 non_proxy_hosts='localhost'),
            dict(id='https_proxy', protocol='https', host='localhost',
                 port='3133', username='user2', password='pass2',
                 non_proxy_hosts='localhost'),
        ])
| jonathon-love/snapcraft | snapcraft/tests/test_plugin_maven.py | Python | gpl-3.0 | 19,419 | 0.000412 |
"""
Used as entry point for mayatest from commandline
"""
if __name__ == "__main__":
from mayatest.cli import main
main()
| arubertoson/mayatest | mayatest/__main__.py | Python | mit | 131 | 0 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Add Poster.posts_per_page (default 20)."""

    dependencies = [
        ('ISS', '0021_poster_auto_subscribe'),
    ]

    operations = [
        migrations.AddField(
            model_name='poster',
            name='posts_per_page',
            field=models.PositiveSmallIntegerField(default=20),
        ),
    ]
| RyanJenkins/ISS | ISS/migrations/0022_poster_posts_per_page.py | Python | gpl-3.0 | 420 | 0 |
# plot_profile_data.py ---
#
# Filename: plot_profile_data.py
# Description:
# Author: Subhasis Ray
# Maintainer:
# Created: Sat Sep 6 11:19:21 2014 (+0530)
# Version:
# Last-Updated:
# By:
# Update #: 0
# URL:
# Keywords:
# Compatibility:
#
#
# Commentary:
#
#
#
#
# Change log:
#
#
#
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street, Fifth
# Floor, Boston, MA 02110-1301, USA.
#
#
# Code:
import sys
from collections import namedtuple, defaultdict
import csv
import numpy as np
import matplotlib as mpl
from matplotlib import pyplot as plt
from matplotlib.backends.backend_pdf import PdfPages
# Plot styling: 12 pt Arial and a figure size chosen for a journal column
# (dimensions converted from cm to inches via 2.54).
sizeOfFont = 12
fontProperties = {'family':'sans-serif','sans-serif':['Arial'],
                  'weight' : 'normal', 'size' : sizeOfFont}
ticks_font = mpl.font_manager.FontProperties(family='Arial', style='normal',
                                             size=sizeOfFont, weight='normal',
                                             stretch='normal')
mpl.rc('font',**fontProperties)
mpl.rc('figure', figsize=(17.35/(2.54*2), 23.35/2.54/3))
# mpl.rc('text', usetex=True)
# mpl.rc('text.latex', preamble=r'\usepackage{cmbright}')
# mpl.rcParams['text.latex.preamble']=[r"\usepackage{amsmath}"]

# All columns present in the processed benchmark CSV (see
# plot_profile_data's docstring for their meanings).
FIELDNAMES = ['dialect',
              'compression',
              'increment',
              'mincol',
              'maxcol',
              'sampling',
              'sources',
              'variables',
              'event_data',
              'event_ds',
              'nonuniform_data',
              'nonuniform_ds',
              'uniform_data',
              'uniform_ds',
              'write_data']

# Columns that identify a benchmark condition (one bar group per key).
KEY_FIELDS = ['dialect',
              'compression',
              'increment',
              # 'mincol',
              # 'maxcol',
              # 'sampling',
              # 'sources',
              # 'variables',
              ]

# Timing columns to plot; one figure is produced per field.
DATA_FIELDS = ['event_data',
               'event_ds',
               'nonuniform_data',
               'nonuniform_ds',
               'uniform_data',
               'uniform_ds',
               'write_data']

KeyTuple = namedtuple('BenchmarkCond', KEY_FIELDS)

# Bar color and x-position for each nsdf dialect.
COLORS = {'vlen': 'SteelBlue',
          'oned': 'DarkRed',
          'nan': 'Orange'}

POS = {'oned': 1,
       'vlen': 2,
       'nan': 3}
def _read_profile_csv(filename):
    """Load the processed benchmark CSV, grouping samples by condition.

    Returns {KeyTuple: {data_field: [float, ...]}}.
    """
    data = defaultdict(dict)
    with open(filename, 'rb') as datafile:
        for row in csv.DictReader(datafile):
            key = KeyTuple(**{field: row[field] for field in KEY_FIELDS})
            for field in DATA_FIELDS:
                data[key].setdefault(field, []).append(float(row[field]))
    return data


def _tick_step(upper):
    """Pick a y-tick spacing appropriate for an axis ending at `upper`."""
    if upper < 0.1:
        return 0.05
    elif upper < 1:
        return 0.5
    elif upper < 10:
        return 2
    elif upper < 50:
        return 10
    elif upper < 100:
        return 30
    elif upper < 200:
        return 50
    return 100


def _make_axes(fig):
    """Create the 2x2 grid of shared axes used by each timing figure.

    Rows: fixed (top) vs incremental (bottom) writing.
    Columns: uncompressed (left) vs compressed (right).
    """
    axes_list = []
    ax = None
    for ii in range(4):
        ax = fig.add_subplot(2, 2, ii + 1, sharex=ax, sharey=ax)
        ax.get_xaxis().set_visible(False)
        axes_list.append(ax)
        if ii // 2 == 0:
            ax.set_title(r'Compressed' if ii % 2 else r'Uncompressed',
                         fontsize=12)
        if ii % 2 == 0:
            ax.set_ylabel('{}\nTime (s)'.format(
                'Fixed' if ii // 2 == 0 else 'Incremental'))
        else:
            ax.get_yaxis().set_visible(False)
        plt.setp(ax, frame_on=False)
    return axes_list


def plot_profile_data(filename):
    """Plot the processed profiling information for different dialects.

    The CSV has one row per benchmark run with these columns:

    dialect: dialect of nsdf.
    compression: compression level; 0 is none, 6 is medium.
    increment: columns written per step for incremental writing;
        0 means fixed dataset.
    mincol/maxcol: column-count bounds for generated event and
        nonuniform data.
    sampling: kind of sampling ('all' means all three kinds are written
        in a single run).
    sources: number of data sources per variable (rows per dataset).
    variables: number of variables per sampling type.

    The remaining columns are cumulative write times: event_data,
    event_ds, nonuniform_data, nonuniform_ds, uniform_data, uniform_ds
    and write_data (total, close to the sum of the others).

    One figure is produced and saved as `<field>.svg` for each timing
    column, then all figures are shown.
    """
    data = _read_profile_csv(filename)
    for field in DATA_FIELDS:
        fig = plt.figure(field)
        axes_list = _make_axes(fig)
        for key in data:
            samples = data[key][field]
            # Choose the subplot: column by compression, row by
            # incremental writing; x-position and color by dialect.
            col = 0 if key.compression == '0' else 1
            row = 0 if key.increment == '0' else 1
            ax = axes_list[row * 2 + col]
            ax.bar([POS[key.dialect]], np.mean(samples),
                   yerr=np.std(samples), color=COLORS[key.dialect],
                   ecolor='b', alpha=0.7, label=key.dialect)
        for ax in axes_list:
            start, end = ax.get_ylim()
            step = _tick_step(end)
            ax.yaxis.set_ticks(np.arange(0, end + step / 2, step))
        fig.tight_layout()
        fig.savefig('{}.svg'.format(field))
    plt.show()
if __name__ == '__main__':
    # Usage: python plot_profile_data.py <processed-profile.csv>
    filename = sys.argv[1]
    print 'Reading', filename
    plot_profile_data(filename)
#
# plot_profile_data.py ends here
| nsdf/nsdf | benchmark/plot_profile_data.py | Python | gpl-3.0 | 6,981 | 0.008308 |
"""
WSGI config for tele_giphy project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/howto/deployment/wsgi/
"""
# Standard Library
import os
# Django
from django.core.wsgi import get_wsgi_application
# Point Django at the project settings before building the WSGI app.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "tele_giphy.settings")
application = get_wsgi_application()  # WSGI entry point imported by servers
| JessicaNgo/TeleGiphy | tele_giphy/tele_giphy/wsgi.py | Python | mit | 426 | 0 |
import boto
import os
import re
import urlparse
from boto import s3
from boto.s3 import connection
from wal_e import log_help
from wal_e.exception import UserException
logger = log_help.WalELogger(__name__)

# Fallback map of S3 region name to endpoint host.
_S3_REGIONS = {
    # A map like this is actually defined in boto.s3 in newer versions of boto
    # but we reproduce it here for the folks (notably, Ubuntu 12.04) on older
    # versions.
    'ap-northeast-1': 's3-ap-northeast-1.amazonaws.com',
    'ap-southeast-1': 's3-ap-southeast-1.amazonaws.com',
    'ap-southeast-2': 's3-ap-southeast-2.amazonaws.com',
    'eu-west-1': 's3-eu-west-1.amazonaws.com',
    'sa-east-1': 's3-sa-east-1.amazonaws.com',
    'us-standard': 's3.amazonaws.com',
    'us-west-1': 's3-us-west-1.amazonaws.com',
    'us-west-2': 's3-us-west-2.amazonaws.com',
}

try:
    # Override the hard-coded region map with boto's mappings if
    # available.
    from boto.s3 import regions
    _S3_REGIONS.update(dict((r.name, r.endpoint) for r in regions()))
except ImportError:
    # Older boto has no boto.s3.regions; keep the static map above.
    pass
def _is_ipv4_like(s):
"""Find if a string superficially looks like an IPv4 address.
AWS documentation plays it fast and loose with this; in other
regions, it seems like even non-valid IPv4 addresses (in
particular, ones that possess decimal numbers out of range for
IPv4) are rejected.
"""
parts = s.split('.')
if len(parts) != 4:
return False
for part in parts:
try:
int(part)
except ValueError:
return False
return True
def _is_mostly_subdomain_compatible(bucket_name):
"""Returns True if SubdomainCallingFormat can be used...mostly
This checks to make sure that putting aside certificate validation
issues that a bucket_name is able to use the
SubdomainCallingFormat.
"""
return (bucket_name.lower() == bucket_name and
len(bucket_name) >= 3 and
len(bucket_name) <= 63 and
'_' not in bucket_name and
'..' not in bucket_name and
'-.' not in bucket_name and
'.-' not in bucket_name and
not bucket_name.startswith('-') and
not bucket_name.endswith('-') and
not bucket_name.startswith('.') and
not bucket_name.endswith('.') and
not _is_ipv4_like(bucket_name))
def _connect_secureish(*args, **kwargs):
    """Connect using the safest available options.

    This turns on encryption (works in all supported boto versions)
    and certificate validation (in the subset of supported boto
    versions that can handle certificate validation, namely, those
    after 2.6.0).

    Versions below 2.6 don't support the validate_certs option to
    S3Connection, and enable it via configuration option just seems to
    cause an error.
    """
    kwargs['is_secure'] = True
    boto_version = tuple(int(part) for part in boto.__version__.split('.'))
    if boto_version >= (2, 6, 0):
        kwargs['validate_certs'] = True
    return connection.S3Connection(*args, **kwargs)
def _s3connection_opts_from_uri(impl):
    """Parse a WALE_S3_ENDPOINT URI into S3Connection keyword arguments.

    Raises UserException for malformed or unsupported URI components.
    """
    # 'impl' should look like:
    #
    # <protocol>+<calling_format>://[user:pass]@<host>[:port]
    #
    # A concrete example:
    #
    # https+virtualhost://user:pass@localhost:1235
    o = urlparse.urlparse(impl, allow_fragments=False)

    # NOTE(review): urlparse yields '' (never None) for a missing scheme, so
    # this branch is always taken; a missing/invalid scheme then fails the
    # regexp below and raises, which also guarantees 'opts' is always bound
    # before it is used further down.
    if o.scheme is not None:
        proto_match = re.match(
            r'(?P<protocol>http|https)\+'
            r'(?P<format>virtualhost|path|subdomain)', o.scheme)

        if proto_match is None:
            raise UserException(
                msg='WALE_S3_ENDPOINT URI scheme is invalid',
                detail='The scheme defined is ' + repr(o.scheme),
                hint='An example of a valid scheme is https+virtualhost.')

        opts = {}

        if proto_match.group('protocol') == 'http':
            opts['is_secure'] = False
        else:
            # Constrained by prior regexp.  (The bare comparison below is a
            # no-op statement documenting the invariant, not an assertion.)
            proto_match.group('protocol') == 'https'
            opts['is_secure'] = True

        f = proto_match.group('format')
        if f == 'virtualhost':
            opts['calling_format'] = connection.VHostCallingFormat()
        elif f == 'path':
            opts['calling_format'] = connection.OrdinaryCallingFormat()
        elif f == 'subdomain':
            opts['calling_format'] = connection.SubdomainCallingFormat()
        else:
            # Constrained by prior regexp.
            assert False

    # Embedded credentials are rejected: AWS keys come from elsewhere.
    if o.username is not None or o.password is not None:
        raise UserException(
            msg='WALE_S3_ENDPOINT does not support username or password')

    if o.hostname is not None:
        opts['host'] = o.hostname

    if o.port is not None:
        opts['port'] = o.port

    if o.path:
        raise UserException(
            msg='WALE_S3_ENDPOINT does not support a URI path',
            detail='Path is {0!r}'.format(o.path))

    if o.query:
        raise UserException(
            msg='WALE_S3_ENDPOINT does not support query parameters')

    return opts
class CallingInfo(object):
    """Encapsulate information used to produce a S3Connection."""

    def __init__(self, bucket_name=None, calling_format=None, region=None,
                 ordinary_endpoint=None):
        # calling_format holds a *class* (instantiated per-connection in
        # connect()); region/ordinary_endpoint stay None until detected for
        # OrdinaryCallingFormat buckets.
        self.bucket_name = bucket_name
        self.calling_format = calling_format
        self.region = region
        self.ordinary_endpoint = ordinary_endpoint

    def __repr__(self):
        # Format from the attribute dict so the repr always tracks state.
        return ('CallingInfo({bucket_name}, {calling_format!r}, {region!r}, '
                '{ordinary_endpoint!r})'.format(**self.__dict__))

    def __str__(self):
        return repr(self)

    def connect(self, creds):
        """Return a boto S3Connection set up with great care.

        This includes TLS settings, calling format selection, and
        region detection.

        The credentials are applied by the caller because in many
        cases (instance-profile IAM) it is possible for those
        credentials to fluctuate rapidly.  By comparison, region
        fluctuations of a bucket name are not nearly so likely versus
        the gains of not looking up a bucket's region over and over.
        """
        def _conn_help(*args, **kwargs):
            # Common kwargs shared by every connection attempt below.
            return _connect_secureish(
                *args,
                provider=creds,
                calling_format=self.calling_format(),
                **kwargs)

        # If WALE_S3_ENDPOINT is set, do not attempt to guess
        # the right calling conventions and instead honor the explicit
        # settings within WALE_S3_ENDPOINT.
        impl = os.getenv('WALE_S3_ENDPOINT')
        if impl:
            return connection.S3Connection(**_s3connection_opts_from_uri(impl))

        # Check if subdomain format compatible; no need to go through
        # any region detection mumbo-jumbo of any kind.
        if self.calling_format is connection.SubdomainCallingFormat:
            return _conn_help()

        # Check if OrdinaryCallingFormat compatible, but also see if
        # the endpoint has already been set, in which case only
        # setting the host= flag is necessary.
        assert self.calling_format is connection.OrdinaryCallingFormat
        if self.ordinary_endpoint is not None:
            return _conn_help(host=self.ordinary_endpoint)

        # By this point, this is an OrdinaryCallingFormat bucket that
        # has never had its region detected in this CallingInfo
        # instance.  So, detect its region (this can happen without
        # knowing the right regional endpoint) and store it to speed
        # future calls.
        assert self.calling_format is connection.OrdinaryCallingFormat
        assert self.region is None
        assert self.ordinary_endpoint is None

        conn = _conn_help()
        bucket = s3.bucket.Bucket(connection=conn,
                                  name=self.bucket_name)

        # NOTE: Python 2 except syntax below ("except X, e").
        try:
            loc = bucket.get_location()
        except boto.exception.S3ResponseError, e:
            if e.status == 403:
                # A 403 can be caused by IAM keys that do not permit
                # GetBucketLocation.  To not change behavior for
                # environments that do not have GetBucketLocation
                # allowed, fall back to the default endpoint,
                # preserving behavior for those using us-standard.
                logger.warning(msg='cannot detect location of bucket',
                               detail=('The specified bucket name was: ' +
                                       repr(self.bucket_name)),
                               hint=('Permit the GetLocation permission for '
                                     'the provided AWS credentials. '
                                     'Or, use a bucket name that follows the '
                                     'preferred bucket naming guidelines '
                                     'and has no dots in it.'))
                self.region = 'us-standard'
                self.ordinary_endpoint = _S3_REGIONS[self.region]
            else:
                raise
        else:
            # An empty, successful get location returns an empty
            # string to mean S3-Classic/US-Standard.
            if loc == '':
                loc = 'us-standard'
            self.region = loc
            self.ordinary_endpoint = _S3_REGIONS[loc]

        # Region/endpoint information completed: connect.
        assert self.ordinary_endpoint is not None
        return _conn_help(host=self.ordinary_endpoint)
def from_store_name(bucket_name):
    """Construct a CallingInfo value from a bucket name.

    This is useful to encapsulate the ugliness of setting up S3
    connections, especially with regions and TLS certificates are
    involved.
    """
    # (Flattened from a nested if/else that ended in an unreachable
    # "assert False"; every branch returns, so the dead code is gone.)
    if not _is_mostly_subdomain_compatible(bucket_name):
        # Bucket name cannot be used as a DNS label at all: use path-style
        # addressing against the classic us-standard endpoint.
        return CallingInfo(
            bucket_name=bucket_name,
            region='us-standard',
            calling_format=connection.OrdinaryCallingFormat,
            ordinary_endpoint=_S3_REGIONS['us-standard'])

    if '.' in bucket_name:
        # The bucket_name might have been DNS compatible, but once
        # dots are involved TLS certificate validations will
        # certainly fail even if that's the case.
        return CallingInfo(
            bucket_name=bucket_name,
            calling_format=connection.OrdinaryCallingFormat,
            region=None,
            ordinary_endpoint=None)

    # If the bucket follows naming rules and has no dots in
    # the name, SubdomainCallingFormat can be used, with TLS,
    # world-wide, and WAL-E can be region-oblivious.
    return CallingInfo(
        bucket_name=bucket_name,
        calling_format=connection.SubdomainCallingFormat,
        region=None,
        ordinary_endpoint=None)
| x86Labs/wal-e | wal_e/blobstore/s3/calling_format.py | Python | bsd-3-clause | 10,819 | 0 |
#!/usr/bin/env python
import os.path
from os import path
from datetime import date
import sys
import com
import release
import dev
import proj
upstream_org_url = "https://github.com/lvgl/"  # base URL for all upstream clones
workdir = "./release_tmp"  # scratch directory, recreated by clone_repos()
proj_list = [ "lv_sim_eclipse_sdl", "lv_sim_emscripten"]  # simulator projects handled by proj.make()
def upstream(repo):
    """Return the HTTPS clone URL of *repo* under the upstream organization."""
    return "{}{}.git".format(upstream_org_url, repo)
def clone(repo):
    """Clone *repo* with submodules and sync its master branch and tags."""
    com.cmd("git clone --recurse-submodules " + upstream(repo))
    os.chdir("./" + repo)
    for git_cmd in ("git checkout master",
                    "git remote update origin --prune",
                    "git pull origin --tags"):
        com.cmd(git_cmd)
    os.chdir("..")
def clone_repos():
    """Recreate the scratch work directory and clone every involved repo."""
    com.cmd("rm -fr " + workdir)
    com.cmd("mkdir " + workdir)
    os.chdir(workdir)
    # Core repositories first, in the same order as before.
    for repo in ("lvgl", "lv_examples", "lv_drivers", "docs", "blog"):
        clone(repo)
    # Then the simulator projects.
    for p in proj_list:
        clone(p)
def cleanup():
    """Step out of the scratch directory and delete it."""
    os.chdir("../")
    com.cmd("rm -fr " + workdir)
if __name__ == '__main__':
    prepare_type = ['major', 'minor', 'bugfix']
    # Release type is currently hard-wired to 'minor'; the CLI parsing is
    # kept below, commented out, exactly as it was disabled upstream.
    dev_prepare = 'minor'

    # if(len(sys.argv) != 2):
    #     print("Missing argument. Usage ./release.py bugfix | minor | major")
    #     print("Use minor by default")
    # else:
    #     dev_prepare = sys.argv[1]

    if dev_prepare not in prepare_type:
        print("Invalid argument. Usage ./release.py bugfix | minor | major")
        exit(1)

    # os.chdir(workdir)
    clone_repos()
    release.make()
    for p in proj_list:
        proj.make(p, True)
    dev.make(dev_prepare)
    # cleanup()
| littlevgl/lvgl | scripts/release/main.py | Python | mit | 1,494 | 0.005355 |
#
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test fhir_types functionality."""
from absl.testing import absltest
from proto.google.fhir.proto.r4 import fhirproto_extensions_pb2
from proto.google.fhir.proto.r4 import uscore_pb2
from proto.google.fhir.proto.r4.core import datatypes_pb2
from proto.google.fhir.proto.r4.core.resources import patient_pb2
from google.fhir.utils import fhir_types
class FhirTypesTest(absltest.TestCase):
  """Tests functionality provided by the fhir_types module."""

  # --- Code predicates ---

  def testIsCode_withCode_returnsTrue(self):
    """Tests that is_code returns True when given a Code."""
    self.assertTrue(fhir_types.is_code(datatypes_pb2.Code()))

  def testIsCode_withProfileOfCode_returnsFalse(self):
    """Tests that is_code returns False when given a profile of Code."""
    self.assertFalse(fhir_types.is_code(datatypes_pb2.Address.UseCode()))

  def testIsProfileOfCode_withProfileOfCode_returnsTrue(self):
    """Tests that is_profile_of_code returns True for a profile of Code."""
    self.assertTrue(
        fhir_types.is_profile_of_code(datatypes_pb2.Address.UseCode()))

  def testIsProfileOfCode_withCode_returnsFalse(self):
    """Tests that is_profile_of_code returns False for a base Code."""
    self.assertFalse(fhir_types.is_profile_of_code(datatypes_pb2.Code()))

  def testIsTypeOrProfileOfCode_withProfileOfCode_returnsTrue(self):
    """Tests that is_type_or_profile_of_code returns True for a profile."""
    self.assertTrue(
        fhir_types.is_type_or_profile_of_code(datatypes_pb2.Address.UseCode()))

  def testIsTypeOrProfileOfCode_withCode_returnsTrue(self):
    """Tests that is_type_or_profile_of_code returns True for a base Code."""
    self.assertTrue(fhir_types.is_type_or_profile_of_code(datatypes_pb2.Code()))

  def testIsTypeOrProfileOfCode_withNonCode_returnsFalse(self):
    """Tests that is_type_or_profile_of_code returns False for a non-Code."""
    self.assertFalse(
        fhir_types.is_type_or_profile_of_code(patient_pb2.Patient()))

  # --- Coding predicates ---

  def testIsCoding_withCoding_returnsTrue(self):
    """Tests that is_coding returns True when given a Coding instance."""
    self.assertTrue(fhir_types.is_coding(datatypes_pb2.Coding()))

  def testIsCoding_withProfileOfCoding_returnsFalse(self):
    """Tests that is_coding returns False when given a profile."""
    self.assertFalse(fhir_types.is_coding(datatypes_pb2.CodingWithFixedCode()))

  # NOTE(review): the next two test names say "withCoding" but the first one
  # actually passes a *profile* of Coding (CodingWithFixedCode); consider
  # renaming for clarity.
  def testIsProfileOfCoding_withCoding_returnsTrue(self):
    """Tests that is_profile_of_coding returns True for a profile."""
    self.assertTrue(
        fhir_types.is_profile_of_coding(datatypes_pb2.CodingWithFixedCode()))

  def testIsProfileOfCoding_withCoding_returnsFalse(self):
    """Tests that is_profile_of_coding returns False for a base Coding type."""
    self.assertFalse(fhir_types.is_profile_of_coding(datatypes_pb2.Coding()))

  def testIsTypeOrProfileOfCoding_withCoding_returnsTrue(self):
    """Tests that is_type_or_profile_of_coding returns True for profile."""
    self.assertTrue(
        fhir_types.is_type_or_profile_of_coding(
            datatypes_pb2.CodingWithFixedCode()))

  def testIsTypeOrProfileOfCoding_withNonCoding_returnsFalse(self):
    """Tests that is_type_or_profile_of_coding returns False for non-Coding."""
    self.assertFalse(
        fhir_types.is_type_or_profile_of_coding(patient_pb2.Patient()))

  # --- Period / DateTime predicates ---

  def testIsPeriod_withPeriod_returnsTrue(self):
    """Tests that is_period returns True when given a Period instance."""
    self.assertTrue(fhir_types.is_period(datatypes_pb2.Period()))

  def testIsPeriod_withCoding_returnsFalse(self):
    """Tests that is_period returns False when given a profile of Coding."""
    self.assertFalse(fhir_types.is_period(datatypes_pb2.Coding()))

  def testIsDateTime_withDateTime_returnsTrue(self):
    """Tests that is_date_time returns True when given a DateTime instance."""
    self.assertTrue(fhir_types.is_date_time(datatypes_pb2.DateTime()))

  def testIsDateTime_withCoding_returnsFalse(self):
    """Tests that is_date_time returns False when given a profile of Coding."""
    self.assertFalse(fhir_types.is_date_time(datatypes_pb2.Coding()))

  # --- Extension predicates ---

  def testIsExtension_withExtension_returnsTrue(self):
    """Tests that is_extension returns True when given an Extension."""
    self.assertTrue(fhir_types.is_extension(datatypes_pb2.Extension()))

  def testIsExtension_withDateTime_returnsFalse(self):
    """Tests that is_extension returns False when given a DateTime."""
    self.assertFalse(fhir_types.is_extension(datatypes_pb2.DateTime()))

  def testIsProfileOfExtension_withBase64BinarySeparatorStride_returnsTrue(
      self):
    """Tests that is_profile_of_extension returns True for valid profile."""
    self.assertTrue(
        fhir_types.is_profile_of_extension(
            fhirproto_extensions_pb2.Base64BinarySeparatorStride()))

  def testIsTypeOrProfileOfExtension_withExtension_returnsTrue(self):
    """Tests that is_type_or_profile_of_extension returns True for Extension."""
    self.assertTrue(
        fhir_types.is_type_or_profile_of_extension(datatypes_pb2.Extension()))

  def testIsTypeOrProfileOfExtension_withExtensionProfile_returnsTrue(self):
    """Tests that is_type_or_profile_of_extension returns True for profile."""
    self.assertTrue(
        fhir_types.is_type_or_profile_of_extension(
            fhirproto_extensions_pb2.Base64BinarySeparatorStride()))

  # NOTE(review): "Extensions" in the name below is a typo for "Extension".
  def testIsTypeOrProfileOfExtensions_withDateTime_returnsFalse(self):
    """Tests that is_type_or_profile_of_extension returns False for DateTime."""
    self.assertFalse(
        fhir_types.is_type_or_profile_of_extension(datatypes_pb2.DateTime()))

  # --- Patient predicates ---

  def testIsTypeOrProfileOfPatient_withPatient_returnsTrue(self):
    """Tests that IsTypeOfProfileOfPatient returns True for a Patient type."""
    self.assertTrue(
        fhir_types.is_type_or_profile_of_patient(patient_pb2.Patient()))

  def testIsTypeOrProfileOfPatient_withCoding_returnsFalse(self):
    """Tests that IsTypeOfProfileOfPatient returns False for a Coding type."""
    self.assertFalse(
        fhir_types.is_type_or_profile_of_patient(datatypes_pb2.Coding()))

  def testIsTypeOrProfileOfPatient_withPatientProfile_returnsTrue(self):
    """Tests that IsTypeOfProfileOfPatient returns True for Patient profile."""
    self.assertTrue(
        fhir_types.is_type_or_profile_of_patient(
            uscore_pb2.USCorePatientProfile()))
if __name__ == '__main__':
  # Delegate to absltest's CLI test runner.
  absltest.main()
| google/fhir | py/google/fhir/utils/fhir_types_test.py | Python | apache-2.0 | 6,987 | 0.00458 |
from argparse import ArgumentParser
from typing import Any, List
from zerver.lib.actions import do_create_multiuse_invite_link, ensure_stream
from zerver.lib.management import ZulipBaseCommand
from zerver.models import PreregistrationUser, Stream
class Command(ZulipBaseCommand):
    """Management command: generate a multiuse invitation link for a realm."""

    help = "Generates invite link that can be used for inviting multiple users"

    def add_arguments(self, parser: ArgumentParser) -> None:
        """Register CLI options: realm selector, stream list, and referrer."""
        # NOTE(review): the second argument presumably marks the realm option
        # as required -- confirm against ZulipBaseCommand.add_realm_args.
        self.add_realm_args(parser, True)
        parser.add_argument(
            '-s', '--streams',
            dest='streams',
            type=str,
            help='A comma-separated list of stream names.')
        parser.add_argument(
            '--referred-by',
            dest='referred_by',
            type=str,
            help='Email of referrer',
            required=True,
        )

    def handle(self, *args: Any, **options: Any) -> None:
        """Resolve realm, streams and referrer, then print the invite link."""
        realm = self.get_realm(options)
        assert realm is not None # Should be ensured by parser
        streams: List[Stream] = []
        if options["streams"]:
            # Split on commas and strip whitespace around each stream name.
            stream_names = {stream.strip() for stream in options["streams"].split(",")}
            # NOTE(review): stream_names is already a set, so the extra set()
            # call below is redundant.
            for stream_name in set(stream_names):
                stream = ensure_stream(realm, stream_name, acting_user=None)
                streams.append(stream)
        referred_by = self.get_user(options['referred_by'], realm)
        invite_as = PreregistrationUser.INVITE_AS['MEMBER']
        invite_link = do_create_multiuse_invite_link(referred_by, invite_as, streams)
        # NOTE(review): the user-facing string below is ungrammatical ("as
        # many number of people"); left unchanged here to avoid altering
        # runtime output in a documentation-only pass.
        print(f"You can use {invite_link} to invite as many number of people to the organization.")
| brainwane/zulip | zerver/management/commands/generate_multiuse_invite_link.py | Python | apache-2.0 | 1,624 | 0.001847 |
#!/usr/bin/env python
###
#
###
# -*- coding: utf-8 -*-
import os,socket
import threading
import time
from datetime import date
import xmlrpclib as xmlrpc
class SchedulerClient():
	"""N4D plugin: materializes scheduled-task definitions as cron files under /etc/cron.d."""
	def __init__(self):
		self.cron_dir='/etc/cron.d'
		self.task_prefix='remote-' #Temp workaround->Must be declared on a n4d var
		# NOTE(review): cron_dir is assigned twice with the same value; the
		# assignment below is redundant.
		self.cron_dir='/etc/cron.d'
		self.count=0
		self.dbg=0
		self.holidays_shell="/usr/bin/check_holidays.py"
		self.pidfile="/tmp/taskscheduler.pid"
	def startup(self,options):
		# Called by n4d at startup: watch for task changes in a daemon thread
		# so the main service is never blocked.
		t=threading.Thread(target=self._main_thread)
		t.daemon=True
		t.start()
	def _debug(self,msg):
		# Print debug output only when self.dbg is enabled.
		if self.dbg:
			print("%s"%msg)
	def _main_thread(self):
		# Re-generate cron files whenever the SCHEDULED_TASKS counter changes,
		# polling up to `tries` times (one second apart) before giving up.
		objects["VariablesManager"].register_trigger("SCHEDULED_TASKS","SchedulerClient",self.process_tasks)
		tries=10
		for x in range (0,tries):
			self.scheduler_var=objects["VariablesManager"].get_variable("SCHEDULED_TASKS")
			if self.scheduler_var!=self.count:
				self.count=self.scheduler_var
				self.process_tasks()
				break
			else:
				time.sleep(1)
	def process_tasks(self,data=None):
		# Fetch task definitions from the n4d server ('remote') and/or the
		# local n4d ('local'), rewrite the matching /etc/cron.d files, then
		# signal the GUI to refresh.
		self._debug("Scheduling tasks")
		today=date.today()
		prefixes={'remote':True,'local':False}
		tasks={}
		try:
			socket.gethostbyname('server')
		except:
			# No classroom server resolvable: only local tasks apply.
			prefixes={'local':False}
		for prefix,sw_remote in prefixes.iteritems():
			if prefix=='remote':
				n4d=xmlrpc.ServerProxy("https://server:9779")
				tasks=n4d.get_remote_tasks("","SchedulerServer")['data'].copy()
			else:
				n4d=xmlrpc.ServerProxy("https://localhost:9779")
				tasks=n4d.get_local_tasks("","SchedulerServer")['data'].copy()
			#Delete files
			for f in os.listdir(self.cron_dir):
				if f.startswith(prefix):
					os.remove(self.cron_dir+'/'+f)
			#Create the cron files
			for name in tasks.keys():
				task_names={}
				self._debug("Processing task: %s"%name)
				for serial in tasks[name].keys():
					self._debug("Item %s"%serial)
					sw_pass=False
					if 'autoremove' in tasks[name][serial]:
						# Skip (and thereby drop) one-shot tasks whose month or
						# day has already passed this year.
						if (tasks[name][serial]['mon'].isdigit()):
							mon=int(tasks[name][serial]['mon'])
							if mon<today.month:
								sw_pass=True
						if sw_pass==False:
							if (tasks[name][serial]['dom'].isdigit()):
								dom=int(tasks[name][serial]['dom'])
								if dom<today.day:
									sw_pass=True
						if sw_pass:
							continue
					self._debug("Scheduling %s"%name)
					fname=name.replace(' ','_')
					task_names[fname]=tasks[name][serial].copy()
					self._write_crontab_for_task(task_names,prefix)
		#Launch refresh signal to gui
		if os.path.isfile(self.pidfile):
			with open(self.pidfile,'r') as p_file:
				pid=p_file.read()
			try:
				# NOTE(review): `signal` is never imported in this module, so
				# this os.kill raises NameError, which the bare except silently
				# swallows -- the GUI refresh never actually happens.
				os.kill(int(pid),signal.SIGUSR1)
			except:
				pass
	#def process_tasks
	def _write_crontab_for_task(self,ftask,prefix):
		# Translate task dicts into cron lines and append/write them into
		# per-task files named <prefix><task-name> in self.cron_dir.
		cron_array=[]
		for task_name,task_data in ftask.iteritems():
			self._debug("Writing data %s: %s"%(task_name,task_data))
			fname=self.cron_dir+'/'+prefix+task_name.replace(' ','_')
			m=task_data['m']
			h=task_data['h']
			dom=task_data['dom']
			mon=task_data['mon']
			# Normalize step expressions: "0/n" (minutes/hours) and "1/n"
			# (days/months) become cron's "*/n".
			if '/' in m:
				m=m.replace('0/','*/')
			if '/' in h:
				h=h.replace('0/','*/')
			if '/' in dom:
				dom=dom.replace('1/','*/')
			if '/' in mon:
				mon=mon.replace('1/','*/')
			cron_task=("%s %s %s %s %s root %s"%(m,h,dom,mon,task_data['dow'],u""+task_data['cmd']))
			if 'holidays' in task_data.keys():
				if task_data['holidays']:
					# Gate the command behind the holidays check helper.
					cron_task=("%s %s %s %s %s root %s && %s"%(m,h,dom,mon,task_data['dow'],self.holidays_shell,u""+task_data['cmd']))
			cron_array.append(cron_task)
			if task_data:
				if os.path.isfile(fname):
					mode='a'
				else:
					mode='w'
				# NOTE(review): cron_array accumulates across loop iterations
				# while the file is rewritten inside the loop, so earlier
				# entries are written again for each subsequent task.
				with open(fname,mode) as data:
					if mode=='w':
						data.write('#Scheduler tasks\n')
						data.write('SHELL=/bin/bash\n')
						data.write('PATH=/usr/local/sbin:/usr/local/bin:/sbin:/bin:/usr/sbin:/usr/bin\n')
						data.write('DISPLAY=:0\n')
						data.write('XAUTHORITY=/var/run/lightdm/root/:0\n')
						if 'https_proxy' in os.environ.keys():
							https_proxy=os.environ['https_proxy']
							data.write('https_proxy=%s\n'%https_proxy)
						if 'http_proxy' in os.environ.keys():
							http_proxy=os.environ['http_proxy']
							data.write('http_proxy=%s\n'%http_proxy)
					for cron_line in cron_array:
						data.write(cron_line.encode('utf8')+"\n")
	#def _write_crontab_for_task
| juanma1980/lliurex-scheduler | client-scheduler.install/usr/share/n4d/python-plugins/SchedulerClient.py | Python | gpl-3.0 | 4,262 | 0.056546 |
from base64 import b64encode
from suds.wsse import UsernameToken, Token
try:
from hashlib import sha1, md5
except:
from sha import new as sha1
class UsernameDigestToken(UsernameToken):
    """
    Represents a basic I{UsernameToken} WS-Security token with password digest
    @ivar username: A username.
    @type username: str
    @ivar password: A password.
    @type password: str
    @ivar nonce: A set of bytes to prevent replay attacks.
    @type nonce: str
    @ivar created: The token created.
    @type created: L{datetime}
    @doc: http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-username-token-profile-1.0.pdf
    """

    def __init__(self, username=None, password=None):
        UsernameToken.__init__(self, username, password)
        # The digest profile always needs Created and Nonce elements.
        self.setcreated()
        self.setnonce()

    def setnonce(self, text=None):
        """
        Set I{nonce} which is an arbitrary set of bytes to prevent
        replay attacks.
        @param text: The nonce text value.
            Generated when I{None}.
        @type text: str
        @override: Nonce save binary string to build digest password
        """
        if text is None:
            s = []
            s.append(self.username)
            s.append(self.password)
            s.append(Token.sysdate())
            m = md5()
            # NOTE: Python 2 str hashing; on Python 3 this join would need
            # encoding to bytes before update().
            m.update(':'.join(s))
            # Keep the *raw* digest bytes: the password digest in xml() is
            # computed over the binary nonce, while the XML element carries
            # the base64-encoded form.
            self.raw_nonce = m.digest()
            self.nonce = b64encode(self.raw_nonce)
        else:
            self.nonce = text

    def xml(self):
        # Build the standard UsernameToken element, then rewrite its
        # Password/Nonce children for the digest profile.
        usernametoken = UsernameToken.xml(self)
        password = usernametoken.getChild('Password')
        nonce = usernametoken.getChild('Nonce')
        created = usernametoken.getChild('Created')
        password.set('Type', 'http://docs.oasis-open.org/wss/2004/01/'
                     'oasis-200401-wss-username-token-profile-1.0'
                     '#PasswordDigest')
        # PasswordDigest = Base64( SHA-1( nonce + created + password ) ),
        # per the OASIS UsernameToken profile linked in the class docstring.
        # The update order below is therefore significant.
        s = sha1()
        s.update(self.raw_nonce)
        s.update(created.getText())
        s.update(password.getText())
        password.setText(b64encode(s.digest()))
        nonce.set('EncodingType', 'http://docs.oasis-open.org/wss/2004'
                  '/01/oasis-200401-wss-soap-message-security-1.0#Base64Binary')
        return usernametoken
| ricardosiri68/patchcap | cam/wsse.py | Python | gpl-2.0 | 2,243 | 0.001783 |
import os, logging
from xmlrpc.server import SimpleXMLRPCServer
from xmlrpc.server import SimpleXMLRPCRequestHandler
from master.server_handler import ServerHandler
from xmlrpc.client import Binary
HOST = "0.0.0.0"
PORT = int(os.getenv('PORT', 5555))
ENDPOINT = 'RPC2'
logging.basicConfig(level=logging.INFO)
class RequestHandler(SimpleXMLRPCRequestHandler):
    """XML-RPC request handler that routes HTTP access logs to `logging`."""

    # Uncomment to restrict the served XML-RPC paths:
    # rpc_paths = ('RPC2',)

    def log_message(self, format, *args):
        """Redirect BaseHTTPRequestHandler's per-request log lines to logging.

        The base class interpolates *args* into *format* with '%'.  The
        previous code dropped *args*, so messages like '"%s" %s %s' were
        logged with the placeholders unexpanded; forward them so logging
        performs the same %-style interpolation (lazily).
        """
        logging.debug(format, *args)
def start():
    """Create the XML-RPC server, register the handler object and serve forever.

    allow_none lets exposed methods return None; use_builtin_types maps
    <base64>/<dateTime> payloads to bytes/datetime instances.
    """
    server = SimpleXMLRPCServer((HOST, PORT), requestHandler=RequestHandler,
                                allow_none=True, use_builtin_types=True)
    server.register_instance(ServerHandler())
    logging.info("Server is listening on " + HOST + ":" + str(PORT) + "/" + ENDPOINT)
    server.serve_forever()
| medo/Pandas-Farm | master/server.py | Python | mit | 761 | 0.00657 |
import urllib
import lxml.html
# Fetch the Amazon.in search results page for the keyword "iphone".
connection = urllib.urlopen('http://www.amazon.in/s/ref=nb_sb_noss?url=search-alias%3Daps&field-keywords=iphone')
dom = lxml.html.fromstring(connection.read())
# The XPath selects the 'data-asin' attribute of the first result item
# (li#result_0), i.e. the product's ASIN identifier.
for link in dom.xpath('//li[@id="result_0"]/@data-asin'):
    print link
| smartyrad/Python-scripts-for-web-scraping | beauty.py | Python | gpl-3.0 | 314 | 0.012739 |
# How long to wait on EACH commit() checkpoint: for ipaddr, ports, etc.
# This is a per-checkpoint timeout, not the total commit() timeout.
SYNC_TIMEOUT = 5
class DeprecationException(Exception):
    """Raised to signal use of a deprecated interface."""
class CommitException(Exception):
    """Raised when a commit() transaction cannot be completed."""
class CreateException(Exception):
    """Raised when an object cannot be created."""
| tomislacker/python-iproute2 | pyroute2/ipdb/common.py | Python | apache-2.0 | 272 | 0 |
######################################
# Example: ftDigiCam.py
# Digital Camera with live
# video stream to TXT display
# and autofocus functionality
# (c) 2016 by Torsten Stuehn
# version 0.8 from 2016-02-12
######################################
# Python2/3 'print' compatibility
from __future__ import print_function
import ftrobopy
import ftrobopytools
from os import system
import time
# Connect to the TXT controller on localhost (this script runs on the TXT).
txt = ftrobopy.ftrobopy(host ='127.0.0.1',
                        port = 65000,
                        update_interval = 0.01,
                        keep_connection_interval = 1.0)

run_ave_contrast = 8 # window size of the running contrast average
hist_minlength = 10 # minimum history length before focus decisions are taken
hist_maxlength = 20 # maximum number of history entries kept
fname_prefix = 'PICT' # prefix of saved snapshots (PICT0000.JPG, PICT0001.JPG, ...)
displayLiveStream = 1 # live video on TXT display, 0=no 1=yes

# definition of TXT outputs and inputs
FocusMotor = txt.motor(1) # the focus motor is connected to M1
Switch = txt.input(1) # the switch is connected to I1
if displayLiveStream:
    # Make a backup copy of the TXT display contents so it can be restored
    # once the live video stream is shut down (see the finally: block).
    with open('/dev/fb0', 'rb') as f:
        framebuffer_backup = f.read()

try:
    # initialize camera (/dev/video0)
    fps = 15      # frames per second
    width = 320   # width of camera image
    height = 240  # height of camera image
    videv = ftrobopytools.camInit(fps, width, height, 0, 0)

    if displayLiveStream:
        # initialize Standard Display Library (SDL) for access to the TXT display
        ftrobopytools.sdlInit()
        # reset text/cmd console (compromized by SDL)
        system('reset')

    # Autofocus bookkeeping: a short window of raw contrast samples plus
    # longer histories of the averaged contrast and motor counter positions.
    contrast = [0]
    hist_contrast = [0]
    hist_counter = [0]
    contrast_counter_shift = 3
    ave_contrast = 0

    # Region of interest for the contrast measurement.
    xtopleft = 10      # width / 2 - width / 4
    ytopleft = 10      # height / 2 - height / 8
    xbottomright = 310 # width / 2 + width / 8
    ybottomright = 120 # height / 2 + height / 8

    state = 0  # autofocus state machine: 0=idle, 1=start, 2/3=scan, 4=shoot
    dir = -1   # focus motor direction

    for i in range(1000):
        contr = ftrobopytools.measureContrast(videv,
                                              width, height,
                                              xtopleft, ytopleft,
                                              xbottomright, ybottomright,
                                              displayLiveStream)
        if contr:
            # Maintain a running average over the last few contrast samples.
            contrast.append(contr)
            if len(contrast) > run_ave_contrast:
                contrast = contrast[1:]
            ave_contrast = sum(contrast)/len(contrast)
            motor_counter = FocusMotor.getCurrentDistance()
            # contrast_variation == 0 means the window is completely flat.
            contrast_variation = 0
            for sample in contrast:
                if sample != ave_contrast:
                    contrast_variation = 1
            hist_contrast.append(ave_contrast)
            if len(hist_contrast) > hist_maxlength:
                hist_contrast = hist_contrast[1:]
            hist_counter.append(motor_counter)
            if len(hist_counter) > hist_maxlength:
                hist_counter = hist_counter[1:]

        #if state == 2 or state == 3 or state == 4:
        if True:
            print(hist_contrast)
            #print(hist_counter)

        if state == 0:
            # Idle: wait for the shutter switch.
            if Switch.state() != 0:
                # dir = -dir
                state = 1
        if state == 1:
            print("state 1: start focus motor")
            # start increasing focus
            FocusMotor.setDistance(3000)
            FocusMotor.setSpeed(512*dir)
            hist_contrast = [0]
            hist_counter = [0]
            state = 2
        if state == 2:
            # Scan until contrast starts dropping (or the window is flat).
            if len(hist_contrast) > hist_minlength and ( hist_contrast[-1] < hist_contrast[-2] or contrast_variation == 0 ):
                print("state 2: contrast_variation",contrast_variation)
                hist_contrast = [0]
                hist_counter = [0]
                FocusMotor.stop()
                # start decreasing focus
                FocusMotor.setDistance(3000)
                FocusMotor.setSpeed(-512*dir)
                state = 3
        if state == 3:
            if len(hist_contrast) > hist_minlength and ( hist_contrast[-1] < hist_contrast[-2] or contrast_variation == 0 ):
                print("state 3: contrast_variation",contrast_variation)
                FocusMotor.stop()
                # increase focus to maximum contrast
                idx = hist_contrast.index(max(hist_contrast))
                bestfocus_counter = hist_counter[idx]
                #FocusMotor.setDistance(hist_counter[-(1+contrast_counter_shift)] - bestfocus_counter)
                FocusMotor.setDistance(300)
                FocusMotor.setSpeed(512*dir)
                state = 4
        if state == 4:
            if FocusMotor.finished():
                # save jpeg in high resolution (1280x720)
                print("taking snapshot at high resolution ...")
                # close (low resolution) camera device
                ftrobopytools.camClose(videv, 0)
                # open (high resolution) camera device
                high_fps = 5      # 5 is the lowest possible framerate of the TXT camera
                high_width = 1280 # 1280 is the maximum horizontal resolution of the TXT camera
                high_height = 720 # 720 is the maximum vertical resolution of the TXT camera
                videv = ftrobopytools.camInit(high_fps, high_width, high_height, 0, 0)
                # get high resolution snapshot as jpg image
                jpg = ftrobopytools.getJPGImage(videv)
                # close (high resolution) camera device
                ftrobopytools.camClose(videv, 0)
                # restore resolution for liveStreaming
                videv = ftrobopytools.camInit(fps, width, height, 0, 0)
                # save jpeg to file and increment picture count index
                try:
                    with open(fname_prefix+'IDX','r') as f:
                        pict_number = int(f.read())
                except:
                    pict_number = 0
                with open(fname_prefix + '%04i' % pict_number +'.JPG', 'wb') as f:
                    f.write(jpg)
                with open(fname_prefix+'IDX','w') as f:
                    f.write(str(pict_number + 1))
                # ready for the next picture
                hist_contrast = [0]
                hist_counter = [0]
                state = 0

except Exception as error:
    # BUGFIX: the original clause was "except ftrobopytools as error:", i.e.
    # it named a *module* as the exception class, which itself raises
    # TypeError as soon as any exception occurs.  Catch Exception so the
    # error is actually reported and the cleanup in finally: still runs.
    print(error)
finally:
    # close camera device
    ftrobopytools.camClose(videv, 0)
    if displayLiveStream:
        # close Standard Display Library
        ftrobopytools.sdlClose()
        # restore TXT display
        with open('/dev/fb0', 'wb') as f:
            f.write(framebuffer_backup)
| rkunze/ft-robo-snap | ftrobopy/examples/ftDigiCam.py | Python | agpl-3.0 | 6,070 | 0.020428 |
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2004 Martin Hawlisch
# Copyright (C) 2005-2008 Donald N. Allingham
# Copyright (C) 2008 Brian G. Matherly
# Copyright (C) 2010 Jakim Friant
# Copyright (C) 2011 Michiel D. Nauta
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
"Export Persons to vCard (RFC 2426)."
#-------------------------------------------------------------------------
#
# Standard Python Modules
#
#-------------------------------------------------------------------------
import sys
from textwrap import TextWrapper
#------------------------------------------------------------------------
#
# Set up logging
#
#------------------------------------------------------------------------
import logging
import collections
log = logging.getLogger(".ExportVCard")
#-------------------------------------------------------------------------
#
# Gramps modules
#
#-------------------------------------------------------------------------
from gramps.gen.const import GRAMPS_LOCALE as glocale
_ = glocale.translation.gettext
from gramps.gen.const import PROGRAM_NAME
from gramps.version import VERSION
from gramps.gen.lib import Date, Person
from gramps.gen.lib.urltype import UrlType
from gramps.gui.plug.export import WriterOptionBox
from gramps.gen.lib.eventtype import EventType
from gramps.gen.display.name import displayer as _nd
from gramps.gen.plug.utils import OpenFileOrStdout
#-------------------------------------------------------------------------
#
# Support Functions
#
#-------------------------------------------------------------------------
def exportData(database, filename, user, option_box=None):
    """Function called by Gramps to export data on persons in VCard format.

    :param database: the Gramps database to export from.
    :param filename: path of the output file ('-' writes to stdout).
    :param user: user-interaction object; errors are reported through it.
    :param option_box: optional export-filter options.
    :returns: True on success, False on failure (never raises).
    """
    cardw = VCardWriter(database, filename, option_box, user)
    try:
        cardw.export_data()
    except EnvironmentError as msg:
        user.notify_error(_("Could not create %s") % filename, str(msg))
        return False
    except Exception:
        # Export shouldn't bring Gramps down, so report and bail out.
        # Was a bare ``except:``, which also swallowed SystemExit and
        # KeyboardInterrupt; ``except Exception`` keeps the best-effort
        # behavior without hiding interpreter-exit signals.
        user.notify_error(_("Could not create %s") % filename)
        return False
    return True
#-------------------------------------------------------------------------
#
# VCardWriter class
#
#-------------------------------------------------------------------------
class VCardWriter:
    """Create a file with data in VCard format (RFC 2426)."""

    # Maximum content line length.  The spec allows 75 characters but it
    # is unclear whether that includes the trailing \r\n, so stay below.
    LINELENGTH = 73
    ESCAPE_CHAR = '\\'
    # Order is important: the escape character itself must be escaped
    # first, otherwise the backslashes added for ',' and ';' would be
    # escaped a second time.
    TOBE_ESCAPED = ['\\', ',', ';']
    LINE_CONTINUATION = [' ', '\t']

    @staticmethod
    def esc(data):
        """Escape the special chars of the VCard protocol.

        :param data: a string, or a list/tuple of strings.
        :returns: the escaped value, in the same container type as *data*.
        :raises TypeError: if *data* is of an unsupported type.
        """
        if isinstance(data, str):
            for char in VCardWriter.TOBE_ESCAPED:
                data = data.replace(char, VCardWriter.ESCAPE_CHAR + char)
            return data
        elif isinstance(data, list):
            return list(map(VCardWriter.esc, data))
        elif isinstance(data, tuple):
            return tuple(map(VCardWriter.esc, data))
        else:
            raise TypeError("VCard escaping is not implemented for "
                            "data type %s." % str(type(data)))

    def __init__(self, database, filename, option_box=None, user=None):
        self.db = database
        self.filename = filename
        self.user = user
        self.filehandle = None
        self.option_box = option_box
        # Report progress only when the user object provides a callable
        # callback.  Note: the previous isinstance(..., collections.Callable)
        # check breaks on Python 3.10+, where that deprecated alias was
        # removed; the callable() builtin is the portable form.
        if callable(self.user.callback):
            self.update = self.update_real
        else:
            self.update = self.update_empty
        if option_box:
            self.option_box.parse_options()
            self.db = option_box.get_filtered_database(self.db)
        # Wrapper used to fold long property lines; per RFC 2426 a folded
        # continuation line starts with a single whitespace character.
        self.txtwrp = TextWrapper(width=self.LINELENGTH,
                                  expand_tabs=False,
                                  replace_whitespace=False,
                                  drop_whitespace=False,
                                  subsequent_indent=self.LINE_CONTINUATION[0])
        self.count = 0
        self.total = 0

    def update_empty(self):
        """Progress can't be reported."""
        pass

    def update_real(self):
        """Report progress."""
        self.count += 1
        newval = int(100 * self.count / self.total)
        if newval != self.oldval:
            self.user.callback(newval)
            self.oldval = newval

    def writeln(self, text):
        """
        Write a property of the VCard to file, folding long lines.

        Can't cope with nested VCards, section 2.4.2 of RFC 2426.
        """
        # wrap() already returns a list of lines; join them with CRLF as
        # required by the VCard line-folding rules.
        self.filehandle.write('%s\r\n' % '\r\n'.join(self.txtwrp.wrap(text)))

    def export_data(self):
        """Open the file and loop over everyone to write their VCards."""
        with OpenFileOrStdout(self.filename, encoding='utf-8',
                              errors='strict', newline='') as self.filehandle:
            if self.filehandle:
                self.count = 0
                self.oldval = 0
                self.total = self.db.get_number_of_people()
                # sorted() accepts any iterable; no intermediate list needed.
                for key in sorted(self.db.iter_person_handles()):
                    self.write_person(key)
                    self.update()
        return True

    def write_person(self, person_handle):
        """Create a VCard for the specified person."""
        person = self.db.get_person_from_handle(person_handle)
        if person:
            self.write_header()
            prname = person.get_primary_name()
            self.write_formatted_name(prname)
            self.write_name(prname)
            self.write_sortstring(prname)
            self.write_nicknames(person, prname)
            self.write_gender(person)
            self.write_birthdate(person)
            self.write_addresses(person)
            self.write_urls(person)
            self.write_occupation(person)
            self.write_footer()

    def write_header(self):
        """Write the opening lines of a VCard."""
        self.writeln("BEGIN:VCARD")
        self.writeln("VERSION:3.0")
        self.writeln("PRODID:-//Gramps//NONSGML %s %s//EN" %
                     (PROGRAM_NAME, VERSION))

    def write_footer(self):
        """Write the closing lines of a VCard."""
        self.writeln("END:VCARD")
        self.writeln("")

    def write_formatted_name(self, prname):
        """Write the compulsory FN property of VCard."""
        regular_name = prname.get_regular_name().strip()
        title = prname.get_title()
        if title:
            regular_name = "%s %s" % (title, regular_name)
        self.writeln("FN:%s" % self.esc(regular_name))

    def write_name(self, prname):
        """Write the compulsory N property of a VCard."""
        family_name = ''
        given_name = ''
        additional_names = ''
        hon_prefix = ''
        suffix = ''
        # Move the primary surname to the front of the list if it is not
        # already there.  Assumes surname_list is non-empty for a primary
        # name — TODO confirm this invariant holds for all databases.
        primary_surname = prname.get_primary_surname()
        surname_list = prname.get_surname_list()
        if not surname_list[0].get_primary():
            surname_list.remove(primary_surname)
            surname_list.insert(0, primary_surname)
        family_name = ','.join(self.esc([("%s %s %s" % (surname.get_prefix(),
                               surname.get_surname(), surname.get_connector())).strip()
                               for surname in surname_list]))
        call_name = prname.get_call_name()
        if call_name:
            # The call name becomes the given name; remaining first names
            # become additional names.
            given_name = self.esc(call_name)
            additional_name_list = prname.get_first_name().split()
            if call_name in additional_name_list:
                additional_name_list.remove(call_name)
            additional_names = ','.join(self.esc(additional_name_list))
        else:
            name_list = prname.get_first_name().split()
            if len(name_list) > 0:
                given_name = self.esc(name_list[0])
                if len(name_list) > 1:
                    additional_names = ','.join(self.esc(name_list[1:]))
        # Alternate names are ignored because names just don't add up:
        # if one name is Jean and an alternate is Paul then you can't
        # conclude the Jean Paul is also an alternate name of that person.
        # Assume all titles/suffixes that apply are present in primary name.
        hon_prefix = ','.join(self.esc(prname.get_title().split()))
        suffix = ','.join(self.esc(prname.get_suffix().split()))
        self.writeln("N:%s;%s;%s;%s;%s" % (family_name, given_name,
                     additional_names, hon_prefix, suffix))

    def write_sortstring(self, prname):
        """Write the SORT-STRING property of a VCard."""
        # TODO only add sort-string if needed
        self.writeln("SORT-STRING:%s" % self.esc(_nd.sort_string(prname)))

    def write_nicknames(self, person, prname):
        """Write the NICKNAME property of a VCard."""
        nicknames = [x.get_nick_name() for x in person.get_alternate_names()
                     if x.get_nick_name()]
        if prname.get_nick_name():
            nicknames.insert(0, prname.get_nick_name())
        if len(nicknames) > 0:
            self.writeln("NICKNAME:%s" % (','.join(self.esc(nicknames))))

    def write_gender(self, person):
        """Write the X-GENDER property of a VCard (X- dropped in 4.0, we're at 3.0)."""
        gender = person.get_gender()
        gender_value = ''
        if gender == Person.MALE:
            gender_value = 'Male'
        elif gender == Person.FEMALE:
            gender_value = 'Female'
        # Lazy %-args: the message is only formatted if INFO is enabled.
        log.info("gender: %s -> %s", gender, gender_value)
        if gender_value:
            self.writeln("X-GENDER:%s" % (gender_value))

    def write_birthdate(self, person):
        """Write the BDAY property of a VCard."""
        birth_ref = person.get_birth_ref()
        if birth_ref:
            birth = self.db.get_event_from_handle(birth_ref.ref)
            if birth:
                b_date = birth.get_date_object()
                mod = b_date.get_modifier()
                # Only exact single dates can be expressed as a BDAY value;
                # text-only dates, spans and ranges are skipped.
                if (mod != Date.MOD_TEXTONLY and
                        not b_date.is_empty() and
                        not mod == Date.MOD_SPAN and
                        not mod == Date.MOD_RANGE):
                    (day, month, year, slash) = b_date.get_start_date()
                    if day > 0 and month > 0 and year > 0:
                        self.writeln("BDAY:%s-%02d-%02d" % (year, month, day))

    def write_addresses(self, person):
        """Write ADR and TEL properties of a VCard."""
        address_list = person.get_address_list()
        for address in address_list:
            # Gramps has no post-office-box or extended-address fields,
            # so the first two ADR components are always empty.
            postbox = ""
            ext = ""
            street = address.get_street()
            city = address.get_city()
            state = address.get_state()
            zipcode = address.get_postal_code()
            country = address.get_country()
            if street or city or state or zipcode or country:
                self.writeln("ADR:%s;%s;%s;%s;%s;%s;%s" % self.esc(
                    (postbox, ext, street, city, state, zipcode, country)))
            phone = address.get_phone()
            if phone:
                self.writeln("TEL:%s" % phone)

    def write_urls(self, person):
        """Write URL and EMAIL properties of a VCard."""
        url_list = person.get_url_list()
        for url in url_list:
            href = url.get_path()
            if href:
                if url.get_type() == UrlType(UrlType.EMAIL):
                    # EMAIL values must not carry the mailto: scheme.
                    if href.startswith('mailto:'):
                        href = href[len('mailto:'):]
                    self.writeln("EMAIL:%s" % self.esc(href))
                else:
                    self.writeln("URL:%s" % self.esc(href))

    def write_occupation(self, person):
        """
        Write ROLE property of a VCard.

        Use the most recent occupation event.
        """
        event_refs = person.get_primary_event_ref_list()
        events = [event for event in
                  [self.db.get_event_from_handle(ref.ref) for ref in event_refs]
                  if event.get_type() == EventType(EventType.OCCUPATION)]
        if len(events) > 0:
            events.sort(key=lambda x: x.get_date_object())
            occupation = events[-1].get_description()
            if occupation:
                self.writeln("ROLE:%s" % occupation)
| beernarrd/gramps | gramps/plugins/export/exportvcard.py | Python | gpl-2.0 | 12,904 | 0.00279 |
# Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_middleware import request_id
import webob
from tacker import auth
from tacker.tests import base
class TackerKeystoneContextTestCase(base.BaseTestCase):
    """Tests for the TackerKeystoneContext WSGI middleware.

    The middleware reads keystone auth headers from the incoming request
    and stores the resulting context in ``req.environ['tacker.context']``.
    All assertions use the OpenStack ``assertEqual(expected, observed)``
    argument order (previously inconsistent across the tests).
    """

    def setUp(self):
        super(TackerKeystoneContextTestCase, self).setUp()

        @webob.dec.wsgify
        def fake_app(req):
            # Capture the context the middleware built so individual
            # tests can inspect it after get_response() returns.
            self.context = req.environ['tacker.context']
            return webob.Response()

        self.context = None
        self.middleware = auth.TackerKeystoneContext(fake_app)
        self.request = webob.Request.blank('/')
        self.request.headers['X_AUTH_TOKEN'] = 'testauthtoken'

    def test_no_user_id(self):
        """A request without a user id must be rejected with 401."""
        self.request.headers['X_PROJECT_ID'] = 'testtenantid'
        response = self.request.get_response(self.middleware)
        self.assertEqual('401 Unauthorized', response.status)

    def test_with_user_id(self):
        """The user id header populates both user_id and user."""
        self.request.headers['X_PROJECT_ID'] = 'testtenantid'
        self.request.headers['X_USER_ID'] = 'testuserid'
        response = self.request.get_response(self.middleware)
        self.assertEqual('200 OK', response.status)
        self.assertEqual('testuserid', self.context.user_id)
        self.assertEqual('testuserid', self.context.user)

    def test_with_tenant_id(self):
        """The project id header populates both tenant_id and tenant."""
        self.request.headers['X_PROJECT_ID'] = 'testtenantid'
        self.request.headers['X_USER_ID'] = 'test_user_id'
        response = self.request.get_response(self.middleware)
        self.assertEqual('200 OK', response.status)
        self.assertEqual('testtenantid', self.context.tenant_id)
        self.assertEqual('testtenantid', self.context.tenant)

    def test_roles_no_admin(self):
        """Roles are parsed from a comma-separated header, whitespace trimmed."""
        self.request.headers['X_PROJECT_ID'] = 'testtenantid'
        self.request.headers['X_USER_ID'] = 'testuserid'
        self.request.headers['X_ROLES'] = 'role1, role2 , role3,role4,role5'
        response = self.request.get_response(self.middleware)
        self.assertEqual('200 OK', response.status)
        self.assertEqual(['role1', 'role2', 'role3', 'role4', 'role5'],
                         self.context.roles)
        self.assertFalse(self.context.is_admin)

    def test_roles_with_admin(self):
        """An admin role in the header flags the context as admin."""
        self.request.headers['X_PROJECT_ID'] = 'testtenantid'
        self.request.headers['X_USER_ID'] = 'testuserid'
        self.request.headers['X_ROLES'] = ('role1, role2 , role3,role4,role5,'
                                           'AdMiN')
        response = self.request.get_response(self.middleware)
        self.assertEqual('200 OK', response.status)
        self.assertEqual(['role1', 'role2', 'role3', 'role4', 'role5',
                          'AdMiN'], self.context.roles)
        self.assertTrue(self.context.is_admin)

    def test_with_user_tenant_name(self):
        """User and project name headers populate the context names."""
        self.request.headers['X_PROJECT_ID'] = 'testtenantid'
        self.request.headers['X_USER_ID'] = 'testuserid'
        self.request.headers['X_PROJECT_NAME'] = 'testtenantname'
        self.request.headers['X_USER_NAME'] = 'testusername'
        response = self.request.get_response(self.middleware)
        self.assertEqual('200 OK', response.status)
        self.assertEqual('testuserid', self.context.user_id)
        self.assertEqual('testusername', self.context.user_name)
        self.assertEqual('testtenantid', self.context.tenant_id)
        self.assertEqual('testtenantname', self.context.tenant_name)

    def test_request_id_extracted_from_env(self):
        """The oslo request id from the environ ends up on the context."""
        req_id = 'dummy-request-id'
        self.request.headers['X_PROJECT_ID'] = 'testtenantid'
        self.request.headers['X_USER_ID'] = 'testuserid'
        self.request.environ[request_id.ENV_REQUEST_ID] = req_id
        self.request.get_response(self.middleware)
        self.assertEqual(req_id, self.context.request_id)
| priya-pp/Tacker | tacker/tests/unit/test_auth.py | Python | apache-2.0 | 4,418 | 0 |
#!/usr/bin/env python
# -*- coding: ascii -*-
from __future__ import absolute_import
from __future__ import print_function
import sys
import base64
# use a base64 image as default/test image
TEST_IMAGE = """iVBORw0KGgoAAAANSUhEUgAAA8AAAALQCAYAAABfdxm0AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz
AAASdAAAEnQB3mYfeAAAIABJREFUeJzs3XdUVFf3P/73OfTeEWwgNsTesZcnElusT2JDij1qTDQa
NfGJIaaoMbboJ2qMiIomlhRjiy1iNAajYoktgrEkoKh0lDbs3x/+5n4ZZ4YmzCDs11qshfeUu8+Z
OyN77r3nCiIiMMYYY4wxxhhjlZw0dgCMMcYYY4wxxpghcALMGGOMMcYYY6xK4ASYMcYYY4wxxliV
wAkwY4wxxhhjjLEqgRNgxhhjjDHGGGNVAifAjDHGGGOMMcaqBE6AGWOMMcYYY4xVCZwAM8YYY4wx
xhirEjgBZowxxhhjjDFWJXACzBhjjDHGGGOsSuAEmDHGGGOMMcZYlcAJMGOMMcYYY4yxKoETYMYY
Y4wxxhhjVQInwIwxxhhjjDHGqgROgBljjDHGGGOMVQmcADPGGGOMMcYYqxI4AWaMsSoiKioKUkr4
+PiUab8RERGQUqJnz55Gj+v27duQUsLExOS5+2LG4+3tDSkljh8/buxQivS8xz9jjDHD4gSYMcYq
uNDQUEgpNX7Mzc3h6uqK+vXrY/Dgwfj0009x69YtY4eqJSIiAmFhYbh48aKxQ6nwjh49ilGjRqFu
3bqwtraGra0t6tWrh+7du2Pu3Ln4+eefkZeXZ+wwDUIIASGEQffZvXt3rfeZiYkJnJyc0L59eyxY
sADJyckGi+eDDz5AWFgY0tLSDLZPxhirCkyNHQBjjLHiMTc3h7OzMwCAiJCWlobk5GTcvHkTP/74
I+bNm4f//ve/+L//+z+4uLhotbe2toavry9q1qxZpnE5ODjA19cXXl5eWmUbN27E8ePHUadOHTRr
1kxn+7KMy8zMDL6+vgZPnp5Hfn4+xo8fj/DwcCVuU1NT2NjY4M6dO/j7779x/PhxLF68GDExMXrn
kT0fddJtaWkJBwcHAIBKpcLDhw9x5swZ/PHHH1i3bh2OHj2K+vXrl3s8H374IYQQCA0Nhb29fbnv
jzHGqgo+A8wYYy+Ijh07Ij4+HvHx8UhISEBmZiaSk5Oxf/9+DB8+HFJK7NixAy1atEB8fLxW+7Zt
2+LKlSs4ePBgmcY1aNAgXLlyBeHh4aVqX5ZxVa9eHVeuXMHly5efuy9DWbRokZL8Tp48GVevXkV2
djYePHiAJ0+e4I8//kBYWBi8vb2NHWqVMGzYMOV9dv/+faSmpmLZsmWwtLREfHw8hg8fbuwQGWOM
PQdOgBlj7AVmb2+PgIAAbN26FXv37lX+SP/vf/9r7NBYMa1atQpCCEydOhVffPEFGjRooJSZmJig
VatW+N///oe4uDj4+fkZMdKqydbWFtOmTcO7774LIsL58+cRHR1t7LAYY4yVEifAjDFWSQQEBGDJ
kiUgIkRHR2Pv3r0a5cVZbCoiIgL+/v6wtbWFi4sLevbsqfSjb2EiXYsAqbdFRUWBiBASEqJxb2XB
GPTF9WwbfT9jxoxR2hS2CJa6vw8//BD5+flYvnw5mjdvDhsbG7i4uOCVV17B2bNnC53jkydPol+/
fnBxcYGtrS1atGiBFStWaIzxww8/LLSPgh4+fIiEhAQAQL9+/Yqsb2qqfedSTEwM5syZgy5dusDL
ywuWlpZwdXVFjx498PXXXyM/P19nX2FhYRrzt3r1arRs2RJ2dnaoXr06QkJC8O+//yr1Y2NjERwc
jFq1asHKygpNmzbF+vXrdfb97Gv6008/oUePHnB2doadnR06duyIbdu2FTlefXJzc7Fq1Sp07doV
Li4usLS0hLe3N8aOHYtr166Vut/CjBgxQvm9qOPkWd999x169+4Nd3d3WFpaolatWggMDERMTIxW
XfVxJIQAESnvO13HO2OMsZLje4AZY6wSGT9+PBYsWIDExERs3bq1WElVwbZff/01hBCQUsLCwgLH
jx9HVFQUli1bVqKFiaysrODh4YGkpCTk5ubC3t4eVlZWSrm7u3uRfTg6OsLDw0Nv+cOHD6FSqYoV
D/D/7vHMy8tD3759cfDgQZibm8PCwgIpKSnYu3cvjh49iqNHj6J9+/Za7Tdt2oQxY8aAiJT4rl69
ihkzZuD48eOwt7d/rnuPCyabJREQEICkpCQAT++ntrGxQXJysvLa/fDDD/jxxx8hpfZ33up4R4wY
gW+//RYWFhYwMzPD/fv3sWnTJpw4cQLR0dG4ceMG+vbti9TUVDg4OCA3NxdXrlzBhAkTkJqairff
fltvfCtWrMD06dMhpYSDgwOysrIQHR2N33//HadOncLKlStLNN579+6hd+/euHjxonKs2tjY4O7d
uwgPD8e2bdsQGRmJwYMHl6jfotSoUUP5vbgLU6m/GNm8eTOEEDAxMYGdnR3i4+OxdetWfPPNN1i1
ahUmTZqktFEf9/fu3YMQAq6urhpf6Dg6OpbdoBhjrCoixhhjFVpISAgJIahHjx7Fqj9y5EgSQlCt
WrU0th87doyEEFSnTh2tNhs2bCAhBEkpad68eZSWlkZERA8ePKDx48eTubk52djYkJSSoqKiNNpu
3LhRb3zdu3cnKSVFRETojbewuPTZt28fmZiYkJSStm/frmy/deuWMo5nqefRycmJXF1daefOnZSb
m0tERJcuXaJmzZqRlJLat2+v1fbatWtkYWFBUkp65ZVX6M6dO0RElJWVRatWrSIzMzNycnIiKSWF
hYUVexxERN7e3sr4L126VKK2RESjRo2i7du30/3795Vtjx8/psjISKpevTpJKWnJkiVa7T744AMS
QpCjoyPZ29vTtm3blPk4ceIEeXp6kpSSJk2aRN7e3jRw4EC6desWERGlp6fT5MmTSQhB1tbWlJSU
pNG3+jW1sbEhc3NzCg0NpcTERCIiSklJoVmzZimv07Zt23TOia5jLTc3l9q2bUtSSgoICKDo6GjK
y8sjIqJ79+7RjBkzSAhBtra2dPPmzRLNo/pYDQ0N1Vl+/fp1JeYvv/xS2V7Y8f/pp5+SEIJMTEzo
k08+oYyMDCIiio+Pp2HDhpEQgkxNTenXX3/Vaqvel/pYY4wxVjY4AWaMsQqupAmw+o9uKaWSHBAV
nmjWqVNHSXZ06devn9KnsRPga9eukYODA0kpae7cuRplxUmApZT022+/aZWfPXtWKb97965GWVBQ
EAkhqHnz5kqSWNBnn32mtC1pAhweHk5SShJCkBCCWrduTW+++SZt2bKFYmNjS9TXs06cOEFCCPLx
8dEqUyfAUkravHmzVvnmzZuVmHx9fUmlUmmU5+fnU/369XW2V7+mUkrq3bu3ztjUr0eDBg20yvQl
wF999RUJIah79+5a8ahNmjSJpJT0xhtv6CzXp6gE+O2331bGdO7cOWW7vuM/IyNDOU7fe+89rf5U
KhV16dKFpJTUrVs3rXL1vm7fvl2icTDGGCsc3wPMGGOVjPpRSQCUS2MLc+7cOeUZwrNmzdJZZ/bs
2WUS2/NKS0vDgAEDkJ6ejr59++KTTz4pcR9dunRBhw4dtLa3atVKeRTTn3/+qWwnIvz4448QQuCt
t97SeR/ulClTYGNjU+JYgKf3fH799ddwd3eHEAIxMTFYuXIlRo8ejfr168PHxweffPIJHj9+XOK+
O3XqBEdHR9y6dQv37t3TWadmzZoIDAzU2v7SSy8BeHqZ9MyZM7UuoRZCoEePHgA05+tZc+fO1bn9
vffeA/D03uLiPic6IiICQghMmzZN5yXdADBq1CgQEQ4dOlSsPguTn5+P2NhYzJ07FytWrIAQAh07
dkTLli2LbHvo0CGkpaXB3Nxc5/tKSon//e9/ICL8+uuvSExMfO54GWOMFY0TYMYYq2To/79HtbjU
C/F4eHjoXSDL398fZmZmzx3b8yAiDB8+HDdu3ICvry+2bt1a4j6EEGjbtq3ecvV9nsnJycq2mzdv
Kvd8durUSWc7KysrtG7dusTxqIWEhOD27dvYsWMHJk2ahFatWsHCwgJCCNy+fRvz5s1D27Zt8eDB
A53td+zYgcGDB8PLywvW1tYaiyalpKQAgM5HYwHQu7J0wfu0mzRporNOtWrVQEQa81WQmZkZOnbs
qLOsXr168PT0BPD0S5iiqFQq/PHHHwCACRMmwNPTU+fPkCFDAAB3794tss9nERE2btyozJ2pqSka
NGiARYsWIT8/H76+vsVevEs9pubNmyvPFX5W165dlft7izMHjDHGnh8vgsUYY5VMwWSk4NlgfR4+
fAgASjKii5mZGVxcXHD//v3nD7CUZs+ejQMHDsDZ2Rm7d++GnZ1dqfoprJ2lpSWAp6sMq6nnByh8
jqpXr16qeNQsLCwwZMgQJYHLysrCkSNHsHDhQvz222+4du0aJk2ahF27diltVCoVXn31Vfzwww/K
glYWFhZwc3NTEqvExEQQETIzM3XuV9+YCp5h1VdHvY+C81WQq6urzjPmajVq1MC9e/f0JvYFJSUl
IScnB0KIIq9sEEIgKyuryD51sbKyUhJWKSXs7OxQv3599O/fH0FBQcoxUhT1mAounvUsCwsLuLq6
IjExsVhzwBhj7PlxAswYY5WM+nLSmjVr6nwc0Ito69atWLJkCUxNTfHNN9+gbt26xg6p3FlaWqJf
v37o168fXn75ZRw6dAg//vgjkpOT4eTkBABYt24dfvjhB9jY2GDRokUYNGiQViJeu3Zt/PvvvyW+
MqCiKfg4p/Pnz6Np06Zlvg8hBIYNG4YNGzaUWZ+lTcQZY4yVD74EmjHGKpHc3FwcPXoUQgh06dKl
WG1cXV0BQHkerb5+Hz16VCYxltTZs2cxfvx4CCGwePFi5d5UQ1HPD1D4HBVW9rxCQ0MBPL1ENzY2
Vtm+c+dOCCHw/vvvY/LkyVrJb35+vsYZbEN7+PAh8vLy9JarL8t2c3Mrsi8XFxflC53bt2+XTYDl
SD2mO3fu6K2TnZ2tvK+KMweMMcaeHyfAjDFWiaxbt05ZTGfUqFHFaqNe0OfevXv4+++/ddaJjo7W
e5lrYdSX0Zb27GNiYiIGDRqErKwsBAUF4a233ipVP8/Dx8cH9vb2AIATJ07orJOVlYWzZ8+WWwwF
F9gyNzdXfv/nn38AAC1atNDZ7sSJE0Y9A5mbm4tTp07pLIuLi1MS4FatWhXZl6mpKdq0aQMA2L9/
f9kFWU7UY7px44beL0eioqKULwienQP1Je0v+pl7xhiraDgBZoyxSuLnn3/GO++8o6xU26dPn2K1
a9myJby8vAAAS5Ys0Vln0aJFpYpJnTiqF2IqidzcXAwZMgTx8fFo37491q5dW6oYnpcQAgMHDgQR
YcWKFVCpVFp1Vq9ejYyMjBL3nZubi+PHjxdZLzIyEsDT+1MbNmyobFffq3rp0iWtNiqVCvPmzStx
TGXt008/1bldvYJ3gwYN0KxZs2L1FRISoixUpWvMBZXmmCtLAQEBsLe3R25uLj777DOt8vz8fCxY
sADA08WwCi46Bjzfe4cxxph+nAAzxtgLLC0tDQcPHsSIESPQr18/ZGVloXbt2tixY0ex+xBCKI9j
WbNmDd5//32kp6cDeHoJ64QJE3Do0CFYW1uXOL7GjRuDiPDdd98pKykX1xtvvIHffvsN1atXx3ff
fadx5tPQ5s6dC3Nzc1y6dAlDhgxRLmvNzs7G6tWrMXfuXOW+3JLIyclB9+7d0bFjR3z55Ze4ceOG
UpaXl4ezZ8/i1Vdfxfbt2yGEwPjx4zUWYerVqxeICAsWLMDu3buV+2SvXbuG/v3748yZM6V+PFNZ
sLa2xpEjRzB27FhlkafU1FTMnj0b4eHhEELggw8+KHZ/Y8eOhb+/P548eYIePXpg/fr1yrEKPL0M
PSIiAl27dsXKlSvLejglYm1tjXfffRdEhJUrV+KTTz5RFiKLj4/H8OHDcfLkSZiYmOCjjz7Sat+4
cWMAwKZNmzTuf2aMMfZ8OAFmjLEXxMmTJ5VHvXh4eMDGxgaOjo7o3bs3tm/fDgAYNmwYzp49Cw8P
jxL1PWbMGIwZMwYA8NFHH8HZ2RkuLi6oVq0avv76ayxZskS5F9bCwqLY/Y4ePRrm5uY4ceIEXF1d
UbNmTdSpU6dY9yf//PPPAJ6uat2yZUu9j72ZPn16seMp7eWkvr6+WLNmDYQQ2LNnD7y9veHi4gJ7
e3tMmzYNAwcOxCuvvAKgZPOjftROdHQ0pkyZgoYNGyorA1tYWKBt27bYtWsXhBAYMmSI1pn4mTNn
ol69ekhLS8OgQYNgZWUFR0dH+Pn54ciRI1izZo3GPcyG5ubmhs8++wzh4eHw8PCAi4sLXFxc8Nln
n0EIgalTp2LYsGHF7s/U1BS7d+9G586dkZycjAkTJsDJyQmurq6wtbVFjRo1EBoaipMnTyqXEJdE
WV9uPHPmTAQHB4OIMG/ePDg6OsLFxQW1atXCzp07YWJiglWrVul8vNa4ceNARFi2bBlsbW3h7e2N
OnXq4J133inTGBljrKrhBJgxxl4AQgjk5eUhMTERiYmJSEpKgpWVFerWrYuBAwfi448/xs2bN7F1
69ZCH30khNCbGKxfvx4bNmxAu3btlLOMPXr0wJ49ezB16lTlDK6jo2Ox+23YsCEOHz6M3r17w9HR
Effv38edO3e0nkmrr70QAk+ePFHGretH15llfWMsTVKkFhISguPHjytjycnJgZ+fH5YtW4YdO3Yg
NTUVgO750cfKygoJCQnYsGEDgoOD0axZM1hbWyMtLQ22trbw9fVFUFAQDhw4gB07dmidBXdycsLv
v/+O119/HbVq1YIQAtbW1hgyZAiOHz+OoKCgQsdd2PFQsE5R5YXVmTZtGn766Sd0794dRAQrKyt0
7NgRkZGRWLFiRYn36+rqiqioKERGRqJfv35wd3dHRkYGpJRo1KgRgoODsX37dsyZM6fQuEszlpK2
k1IiPDwcO3fuxMsvvwwnJydkZmaievXqGDVqFE6fPo2JEyfq7DMkJATr169H+/btYWZmhn/++Qd3
7twx2mJ0jDFWWQji1RUYY4wV4ebNm6hXrx4sLCyQnp5e6LNdqyovLy/8888/+OWXX9C1a1djh2NU
UVFR6NGjB7y9vXHz5k1jh8MYY4wp+AwwY4yxIqkvve3WrRsnvzps27YNd+/ehb29Pdq3b2/scBhj
jDGmByfAjDHGADy9D3jXrl1ISkpStt26dQuTJ0/GV199BSEE3n77bSNGaFyffvopVq1ahX/++Ue5
VzQlJQUrVqzAuHHjIITAlClTSnQPMGOMMcYMi7/GZ4wxBgA4dOgQNm7cCODpc2ellMoKu+qVonv1
6mXECI3rypUriIyMxLRp02Bubg4bGxukpKSAiCCEQK9evfD+++8bO0zGGGOMFYITYMYYYwCePgP4
xx9/RExMDO7fv4/Hjx+jRo0a6NixIyZPnoxu3boZO0SjmjJlChwcHHDixAkkJCQgJSUFLi4uaNas
GUaPHo3Ro0dDSr6wSq20C0oxxhhj5YkXwWKMMcYYY4wxViXwV9WMMcYYY4wxxqoEToAZY4wxxhhj
jFUJnAAzxhhjjDHGGKsSOAFmjDEdOnfuDCkltm7dqrM8Ojoa/fr1g6urK0xMTCClxCeffGLgKCsv
Dw8PSClx+vRpY4dS5rKzsyGlhImJCRITEw3engH+/v6QUmL79u3GDuWFsWvXLkgp0a5dO62yJk2a
QEqJffv2aWw/e/YspJRwd3c3VJiMMVYkToAZYwYTGhoKKSWaNGlS7DarV6+GlBLW1tZIS0srx+g0
FbaC7fXr19GzZ08cOHAAqampcHNzg4eHB2xtbcs1JnVSXvDHxMQEjo6OaN26NebMmYP4+PhyjcFQ
9M1/XFwcwsLCsHr1aiNEVf7Onj2LsLAwREZGGmX/6uS6uD85OTlGifN5lXSFanXCXNIfKyurchxF
xcErfjPGXiT8GCTGmMEEBwcjIiICV69exblz59CqVasi22zevBlCCAwaNAj29vYGiPIpLy8vPHr0
CA4ODlpla9euxZMnT9CzZ0/88MMP5Z74qqn/yLSyslLmQqVS4dGjRzh//jxiYmKwdu1a7N+/H/7+
/gaJydBiY2MRFhYGX19fTJkyxdjhlIqUEg0bNoSUEqammv8NnzlzBmFhYejduzdGjRplpAifHmsO
Dg6wtLQstE5VSXrUX3I9KyMjA5mZmTAxMYGrq6tWeVVJgPWxsrKCr68vnJ2djR0KY4wpOAFmjBlM
9+7d4eXlhTt37mDTpk1FJsB//fUXTp8+DSEEgoKCDBTlU1u2bNFbdvnyZQgh8Nprrxks+S1o5MiR
WLdunfLv7OxsfP/993jjjTeQlJSEkSNH4q+//tJKrljFYGZmhqtXrxo7jCKtWbMGr732mrHDqBB+
+uknndvnzp2LRYsWoV69erhy5YqBo6r4/Pz8eF4YYxUOXwLNGDOo0aNHg4jwzTffID8/v9C6ERER
AJ7eD/ryyy8bIrxiefLkCQAYJfnVxcLCAsOHD8fSpUtBRLh9+zaOHz9u7LDKRWV/dH1lHx9jjDFm
bJwAM8YMSn0m98GDB9i/f3+hdSMjIyGEQGBgoM5LLTMyMvDRRx+hbdu2cHR0hLW1NRo2bIi33npL
772wBRe3SklJwaxZs+Dr6wtra2u4ubnprKdWs2ZNSClx4sQJEBECAwOVe/0aNGiAmzdvKvfl/vXX
X3rHlZ6eDmtra0gpcezYsULnoCQCAgKU3ws765KTk4MVK1agc+fOcHZ2hqWlJXx8fDBhwgTExsbq
bbdr1y706dMH1apVg7m5OVxdXdGoUSMEBgZi165dGnV//vlnSCnh5+ent7+1a9dCSom+ffsWa3we
Hh7o27cvhBC4du2a1v2WBRc0ysnJwdKlS9GhQwc4OTnBwsICnp6eaNmyJd5880388ccfxdonAHTq
1AlSSoSHh2uVffzxx8r+dZ0lnD59OqSUmDx5srJN1yJW6m3qegcOHNAan74Fwf7++2+EhoaiZs2a
sLS0RL169TBnzhxkZmYWe4zPS6VSoX379pBS6v2yKjExEe7u7pBSYs6cOTrr/Pnnnxg/fjzq168P
GxsbODs7o3nz5pgxYwYuXLigt9933nkHTZs2hZ2dHWxtbdGsWTPMnz8fqampZTbG52VrawspJa5c
uYKbN29i7Nix8Pb2hoWFhcZ7V+3gwYMYMmQIqlevDgsLC7i5uaFv3756z0Y/u0jVwYMH0atXLzg7
O8PW1hbt2rVTvlTU58GDB5gyZQpq164NKysr1KlTB9OmTcOjR49KNebCFsHq378/pJT4v//7P+Tm
5uKTTz6Bn58frK2t4e7ujv/+979FXimRkpKC999/Hy1btoS9vT1sbGzg5+eH2bNn48GDB6WKmTFW
BRBjjBlYp06dSEpJr732mt46v/zyCwkhSEpJf/75p1b5n3/+SbVq1VLqmJubk52dHUkpSQhBLi4u
FB0drdWuc+fOJKWkzz//nLy9vUlKSdbW1uTg4EDu7u5a9SIjI5VtrVu3Jk9PT7KwsCApJTk5OZGn
pyd5enpSp06diIioZ8+eJKWkOXPm6B3bmjVrSAhB9erVK9Z8PRvT+PHjdZYnJCQo87F8+XKdde7e
vUuNGzdW6pmZmZG9vb0ybzY2NrRnzx6tdjNmzFDaSCnJwcGBrK2tlX/XqVNHo/6BAwdICEGNGjXS
Ox71PPTp00erzMPDg6SUGq9h8+bNycXFhYQQZGZmpsy9p6cnVa9enXbv3k1ERDk5OdShQwclXhMT
E3J2diYzMzMl3tDQUL1xPevdd98lKSWFhIRolQUEBCj7mTlzplZ5q1atSEpJ3377rbItKytLaXP/
/n0iIsrOziZPT09ydHQkIQRZWlpqjS8mJkar/U8//UROTk4kpSRHR0dljEII6tKlC+Xn5xd7nM/2
XTDm4vjrr7/IxsaGpJS0cuVKrfL+/fuTEIJatmxJubm5WuWfffYZmZiYKK+RnZ2dMjYppc7j5OjR
o+Tg4KDUsbKyUo5LIQT5+PjQzZs3tdr5+/uXaozPmjNnTpHHuZqtrS1JKWnjxo3Ke87e3p5sbGwo
ICBAqadSqWj8+PEa7zdHR0fldyEETZ06Vav/nTt3khCC2rZtS0uXLtU49gu2/fjjj3XGFxsbS9Wr
V1f2a2trq7yeXl5etGrVKqX/ZzVp0oSklLR3716N7WfOnCEhBLm5uWm16d+/P0kpafHixdShQwfl
9VPPkxCCHB0d6cqVKzrj/eOPP8jd3V2J19LSUolXCEGenp506dKlQl8TxljVxAkwY8zg1q1bR0II
srKyotTUVJ11QkJCSAhBrVu31ipLTk6mWrVqkZSSRo0aRX/++afyh/7Nmzdp5MiRJISgGjVqUHp6
ukbbzp07kxCC7OzsqE6dOnT48GGlLC4uTqPeswlwccq2bt2q7Ftf8tG+fXuSUtJHH32ks1yfohLg
jRs3Kn8MqpPBgrKzs6l58+YkpaS+ffvSmTNnKC8vj4iI4uPj6Y033iAhBDk4ONDdu3eVdtevX1f+
gP7www8pKSlJKXvw4AHt2LGDJk2apLGv8kiAi9uv+viyt7en7du3U05ODhER5efn0+3bt+mLL76g
zz//XG/7Z6n3+WySn5eXR7a2tkoC9mxikJqaqiR0CQkJynZdCbBaYXOiq72TkxP17duXrl+/TkRP
X+N169aRubk5SSkpPDy82ON8tu/SJIfq+K2trenq1avK9rVr1yrv+cuXL2u127Rpk7LfwMBA+uuv
v5SyR48e0ebNm7W+VIqNjSU7OzsyMTGh6dOn099//62UXbx4kV566SW9nyHGTIDt7OyoU6dOdOHC
BaWs4GfPvHnzlOR927ZtlJGRQUREjx8/pvDwcHJxcSEpJa1fv16jf3UC7ODgQGZmZvT+++9TcnIy
ET2dw+DgYBJCkIWFBcXHx2u0ValU1Lp1axJCkJeXFx09elQpO3jwINWsWVP5cqYsE2AhBDk5OVHN
mjVp7969pFKpiIjo9OnTVLduXZJSUr9+/bTaJiQkkJubm/J5qD7+iZ5+Xg0aNEj5klH9/meMMTVO
gBljBpfvRX22AAAgAElEQVSSkkJWVlYkpaSvvvpKq/zx48fKGZIvvvhCq3z27NkkhKDg4GC9++jV
qxdJKWnFihUa29UJsKWlpcYfTc8qbQKcnZ2t/IG6b98+rfIrV66QEIJMTU01kszi0JcAZ2VlUWRk
JDk7O5MQgjw8PCg7O1ur/RdffEFCCHr55Zf1JuchISEkpaRZs2Yp29TJSatWrYodqzET4DFjxpCU
kmbMmFHseAuTnp5OpqamJKWkO3fuKNt///13EkLQ0KFDqVGjRmRmZqbxhcuePXtICEENGjTQ6K8s
E+A2bdooX2IUpD6DqCt5KEzBvh0dHcnDw0Pvz+uvv66zD3Vi06pVK8rNzaXY2Fgl+Vu2bJnOfVar
Vo2klDRu3Lhixzp06FCSUtKCBQv0jqVRo0Y6EzNjJsC1atXS+mJO7Z9//iEzMzNycnLSSOgL2rt3
r84vZNQJsJSS3nvvPa12eXl5VLt2bZJS0pdffqlR9v333ytXVly8eFGr7dmzZ5Uvc8o6AbawsNB5
lvfnn39WPivVXwKoTZw4kaSU9Oabb2q1I3qa0Ktf44iICJ11GGNVF98DzBgzOAcHBwwcOBBEhE2b
NmmVf//990hPT4epqSlGjBihVb5p0yYIIfD222/r3cfIkSNBRDh06JBWmRAC/fv3R4MGDZ5vIDqY
m5sjMDAQRIQNGzZolau3vfTSS6hZs2aJ+yciREZGwtPTE56ennB3d4e1tTUCAwORkpICGxsbREZG
wtzcXKttREQEhBB488039T6+ZtSoUVrzpn7kUnJyMrKzs0scs6HZ29uDiJCQkFAm/dna2qJly5YA
gKioKGV7VFQUhBDo3r07unbtCpVKhRMnTmiUA0C3bt3KJA5dZs6cCRMTE63tgwYNAvD0ntrSSktL
Q2Jiot4ffffXfv3113Bzc8P58+cxd+5cBAUFITMzEz179sRbb72lVf/AgQNITEyEubk5Fi5cWOzY
fvzxR5iammLatGk661hYWGDIkCEAoPNzwFgmTpyodwG9LVu2IC8vD4MHD4a3t7fOOn379oWDgwNu
376NuLg4rXITExPMnDlT5/Z+/fqBiLSOi507dwIA+vTpg6ZNm2q1bdWqFXr37l0ui7T1798fjRo1
0toeEBAAS0tL5Ofna9wLrFKplLUZ9P0fIKXEsGHD9P4fwBir2vgZGYwxowgJCcG3336LkydP4tat
Wxp/7KkT3L59+8LFxUWj3a1bt3Dv3j0IIXQuHKOWk5MDALh7967O8g4dOjz/IPQYN24cVq5ciZ9+
+glJSUnKMzBVKhW2bNkCIQTGjh1b6v6zsrKQlZWlsU0IAR8fHxw+fBheXl5abbKzs3H+/HkAT5/H
rCtpUscIaM5bp06d4ODggFu3bqFTp054/fXX0atXL9SuXbvUYyhPffv2xYoVK/DNN98gOzsbQUFB
6NatGxwdHUvdZ7du3XDmzBlERUUhMDAQAJQFzLp16wYXFxesW7cOUVFR6N27N4D/lyCXZwLcpk0b
ndtr1KgB4OmXFqX1zTff4NVXXy1xO3d3d3z11VcYNGgQPv/8cwCAo6MjNm7cqLP+77//DuDpWJ59
v+tz+vRp5Vht2LCh3npPnjwBEen9HDCGwj57Tp06BQDYsWMH9u3bp7deRkYGgKfv07p162qU1a1b
V++xru+4OHfuXJHHardu3QqNqTSEEGjbtq3e8ho1auDmzZsa8V66dAkZGRkQQigLfumi/oysSK89
Y6xi4DPAjDGj6NWrFzw9PQEAmzdvVrbfu3cPR44cAQCdz/4teFavsLNTKSkpEEIojyx6VsEVn8ta
kyZN0K5dO+Tm5iIyMlLZvnfvXty/fx/Ozs4YOHBgqfpWJ88qlQoqlQoZGRk4efIkunXrhri4OEyc
OFFJDApKTExUtj969EjvvD169Ehr3lxdXREREQEHBwfExMRg/Pjx8Pb2Rs2aNTFmzBicPHmyVGMp
L7169cK8efNgamqKH374AYMHD4aLiwuaNGmC2bNn4++//y5xn+rEQH1WNz8/H7/99hucnZ3RtGlT
rfKMjAycO3cOANC1a9eyGJZOdnZ2OrdbWloCAHJzc0vd9/Oc7RswYAAGDx4M4Okxu3TpUiX5etb9
+/cBoERfqKg/B/Lz8wv9HEhPTy/0c8AYCvvsUY8rMzOz0HHl5+dDCIHHjx9r9aHvmAD0HxfqFZOr
V6+ut62+1+95lTTe4v4fkJaWVuFee8ZYxcAJMGPMKKSUyqXCBRPgzZs3Q6VSwdnZGa+88opWu4LP
Ds7IyFASQX0/169f17l/fWdAy8r48eNBRBqPztmwYQOEEBg1ahTMzMzKZD9WVlbw9/fHvn374Ofn
h0OHDmH+/Pla9QrO27Vr14qct2cfoTNgwADcunULa9aswauvvgpPT08kJCRg48aN6NKli85LW43p
ww8/xPXr1/Hxxx8jICAAtra2uHr1Kj777DM0atRI45FJxaF+LFZcXBwSEhIQExODtLQ0dOnSBcDT
xKFu3bo4e/YsHj9+jJMnT0KlUsHLywu1atUqjyFWaLdv38aRI0eUS+0LXhpeFtTHc7Vq1Yo8llUq
Ffbu3Vum+38ehX32qBPbBQsWFDmmvLy8Yj9CrDJRv/ampqbFeu2jo6ONHDFjrKLhBJgxZjTBwcEA
gLi4OOUySPUlwiNGjICpqfZdGtWqVVN+v337tmECLYXhw4fD1tYWFy5cwIULF/DgwQPl8sHQ0NAy
35+lpSWWLVsGIsLnn3+uNTdubm5KMlLaeXNwcMD48ePx7bff4t9//8XFixeVsXzxxRf45ZdflLrq
1+7ZS7ULKu9ntNapUwdz5szBgQMHkJycjMOHD6NTp07IycnBxIkTS7R/R0dH5d7IqKgo5Uxv9+7d
lTrdunVDXl4efvvtN4Pc/1tRERGCg4ORnp6ORo0aQQiB8PBw7N69W2d99Xu6JMelus3Dhw8LPcZe
NOpx3blzx6D7VZ+V1vf89KLKDEk9R3l5ebh3756Ro2GMvYg4AWaMGY2fnx9at24N4Ol9v+fPn8el
S5cA6L78GQDq1asHV1dXAMD+/fsNE2gp2NjYYNiwYQCenvndvHkz8vLy0LJlSzRv3rxc9tmrVy+0
a9cOOTk5CAsL0yiztrZGs2bNAJTdvDVu3Bhff/21zgWi1PcgFvZH8x9//FHifUr59L+tkl6eK6VE
jx49sHv3bkgpkZaWhpiYmBL1ob6UWZ0AP3vPZNeuXUFEOHbsWKnv/y3t+CqSJUuW4Pjx47Czs8Oe
PXvwzjvvgIgwYcIE5VLbgvz9/QEAZ86cwcOHD4u1j/bt20MIAZVKVakWOerQoQOICAcPHjToflu1
agUiwvHjx/XWKfj+NqbmzZvDysoKQMX+P4AxVnFxAswYM6rg4GAQEbZv346vvvoKAODr66t3cZ+C
bRYvXqzcP6hPWlpamcZbEurLoLdu3Yr169c/9+JXxTFr1ixlpehnzyKFhISAiLBu3Tpcu3at0H5S
UlKU34u6j1R9n17BFaIbNWoEU1NT5Obm6vwj9cqVK9i9e7fe1aj1Ua9IXTC+ZxUWr7m5uZJklnRF
627duoGIcPToUfz6669wdHTU+DJDnezu27cPZ86c0dhWXMUZX0V28eJF/O9//4MQAsuXL4e3tzfC
wsLQokULPHjwAOPGjdNq07t3b7i7uyM3Nxdz5swp1n6cnJwwYMAAEBHmzZtX6H2eeXl5Ou+VrYgC
AwNhamqK27dvY9myZYXWLctjRL3Y2b59+3D58mWt8vPnz+PAgQMlfr+WBzMzM2WV/wULFhQ6D/n5
+UhPTzdgdIyxFwEnwIwxoxoxYgTMzMyQnJyMtWvXQgiBkJCQQtu8++678Pb2xv379+Hv749du3Zp
XAb5999/Y/Xq1WjWrBn27NlTziPQr127dmjatCmSkpJw7do1WFhYYOTIkeW6zyFDhqBu3brIy8vT
eqTMpEmT0KpVK2RmZqJr167YuHGjsposAPz7778IDw9Hx44dsW7dOmX78uXL0a9fP3z77bdITExU
tqekpOCDDz5QVq59+eWXlTJra2v07dsXRISpU6cq9+Hl5+dj//796N27d6GL3+jj6+sLExMTJCYm
6l2RdtiwYZgwYQKOHDmicS9zXFwcRo8ejby8PNja2qJjx44l2rf6DHBsbCxSU1OV+3/VvLy8ULt2
bVy4cAG5ubmoUaMG6tSpU6J9NG7cGABw4cKFEp+hNracnBwEBgYiNzcXgwYNUt7HZmZm2LJlC8zN
zbFnzx6sX79eo52FhQUWL16sPDosMDAQN27cUMofPXqENWvWaD3aZ8mSJXBwcMClS5fQqVMnHD58
WGMBuKtXr2LhwoWoX7/+cz0OypC8vLzw7rvvgogwc+ZMzJgxQ+PS8IyMDBw6dAhBQUFlev/vK6+8
gtatWyMvLw/9+vXTuJ3h0KFDGDBgAOzs7CrMlQkLFiyAh4cHbt26hQ4dOmDPnj3Kyv/A0/fosmXL
4Ofnh19//dWIkTLGKqRyf9IwY4wVYfDgwSSEICEEmZqa0r///ltkm9jYWGrUqBFJKZV2rq6uZGlp
qfQlpaStW7dqtOvcuTNJKSkyMrLQ/gurV9w+iIhWrFihxDJy5Mgi6xcVkxCCxo8fX2i9L7/8koQQ
ZGlpSfHx8Rpl9+7dI39/f2XeTExMyMXFhaytrTXmbfHixUqbhQsXKmVCCLKzsyNHR0eN+tOnT9eK
4/r16+Ts7Kzsy8bGhqysrEgIQf7+/rR06VISQlCfPn202np4eJCUkqKjo7XKhg0bpvTp5ORE3t7e
5O3tTXv27CEiot69eyvlUkpycnIiGxsbJV5zc3PauXNnseb8WX5+fkq/y5Yt0yoPCgpS9hMYGKiz
j6ysLKWP+/fva5W3b99eid/V1VUZ3/nz54vVnojo2rVrJIQgKyurEo1P3bcQghwdHcnDw6PQn5iY
GKXtjBkzSAhBnp6e9PDhQ62+1a+3nZ0dxcXFaZUvXLiQTExMlLE/e5zpOk5OnTpFnp6eShtzc3Ny
dXUlc3NzjePz9OnTGu3U74Fvv/22RPPzrDlz5pAQgho1alRkXVtbW5JS0uXLlwutl5+fT7NmzVLG
JIQge3t7cnR01Diu27dvr9Fu586dJISgtm3b6u17yZIlJISgV199VassLi6OatasqezD1tZWed/U
qVOHVq9erbf/Jk2akJSS9u7dq7H9zJkzJIQgNzc3rTb9+/cnKSWtXr1ab7z6+iUiunTpEtWpU0eJ
18zMjFxdXcnCwkLjtd+3b5/e/hljVROfAWaMGV1wcDCEEBBC4D//+U+hj+JQq1u3Ls6fP49Vq1ah
R48ecHJyQlpaGiwsLNCiRQtMmjQJ+/fvx4gRIwwwAv2GDBmi/F4Wi1+p56kwoaGhcHNzQ05ODhYv
XqxRVq1aNfz222+IiIhAnz594ObmhvT0dJiYmKBx48YIDQ3Frl27MH36dI3+1q1bh9deew1+fn4w
NTXFkydPUKNGDQwZMgT79u3D0qVLteJo0KABfv/9d7z22mtwc3NDfn4+vLy8EBYWhqioKFhbWxc6
Hn3bw8PDMWvWLDRs2BBZWVm4c+cO7t69q5ztXbp0KRYuXIiXX34ZPj4+yM3NRX5+PurXr48JEybg
/PnzGDp0aKFzqE+3bt2UmHVd3qwul1IW+fgjfePbs2cPJk6ciDp16iA9PV0Z37OXbBfnctTSXLKq
Hl9aWlqhj5l58OCBctYtKioKy5cvh5QS69ev1/k83+nTp6NHjx7IzMxEUFCQ1tnE2bNn48yZMwgK
CoK3tzdyc3NhamqK5s2bY+bMmVrHMvD0/mH1at8dOnSAra0tUlNTYWtri3bt2mH69Ok4ceJEoc+a
fV7FeU8WrFucOosXL0Z0dDRCQ0Ph4+ODvLw8ZGVloVatWujfvz9Wrlyp8+qW4sSir46Pjw9iYmLw
+uuvo2bNmsjLy4ObmxumTZuGM2fOoFq1aqV6v5ZkfkrSb5MmTXD58mUsXboUXbt2hYODA1JTU2Ft
bY3WrVtj6tSpOHLkCPr06VPqfTPGKidBz/4PxAxmy5YtykI/69evx5gxY5Sy27dvF3rp3PDhw7F1
69Zyj5Ex9nwiIiIQGhoKLy+vUj1/ljHGGGOMlR3tZ4wwg7h79y7eeOMN2NnZadyD96wWLVpg0KBB
WtubNGlSnuExxsrImjVrIITQufgPY4wxxhgzLE6AjSQ0NBSurq4YMmQIlixZordeixYt8P777xsw
MsZYWVm7di2io6NhbW2NCRMmGDscxhhjjLEqjxNgI1ixYgWOHTuGY8eO4ciRI8YOhzFWhu7cuYOu
XbsiIyMDSUlJEEJg7ty5cHNzM3ZojDHGGGNVHifABnb16lXMnTsXb731Fjp37lxkAhwfH49169bh
0aNHcHFxQYcOHdC0aVMDRcsYK6nc3FzcvXsXJiYmqFu3LiZOnKj1+BbGGGOMMWYcnAAbkEqlwujR
o+Ht7Y2PP/64WG0OHTqEQ4cOKf8mInTv3h0RERGoVatWeYXKGCulunXrajyLlDHGGGOMVRz8GCQD
CgsLw4ULF7Bx40ZYWFgUWtfa2hrvv/8+zp49i+TkZCQnJyMqKgo9e/bEsWPH8NJLL+HJkycGipwx
xhhjjDHGXnx8BthAoqOj8emnn2LmzJlo165dkfXd3NzwwQcfaGzr3Lkzfv75Z3Tu3BmnT5/G+vXr
8cYbb+jtw8PDA5mZmahdu/bzhs8YY4wxxhh7Tnfu3IGNjQ3u3btn7FCqLJMPns2yWJlTqVR46aWX
4O7ujm+//RYmJiZK2bFjx3D8+HEMGDAALVu2LLIvKSWEENi9ezdsbGzw2muv6a0bFhaGzMxMPHjw
ACkpKUhOTkZSUpLGT0ZGBhwcHDTa5eTkKM8rtbKy0ihLSkrCv//+C1tbW41xAE/f0KmpqVW+v8TE
RCQkJFTY+Cpjf0lJSXjw4EGFja8y9peUlIQnT55U2PgqY39JSUkwMTGpsPFVxv6SkpJgZWVVYeOr
jP2p57yixlcZ+1PPeUWN70XtT92H+m+UxMREpKSkIDs7G0+ePOGnvBgRnwE2gIyMDNy4cQNCCJ2X
PqufETpu3Di89dZbWLp0aaH9qVeTzczMLLRe7dq1cf36deTl5ZU+eFZinp6eyM7ONnYYVYqnpyfS
09ONHUaV4unpqXzRwwzD09MTN27cMHYYVYqnpycuX75s7DCqFJ5zw+M5N6zGjRsjNjbW2GFUaZwA
G4CFhQXGjRuns+zcuXOIiYlBly5d0LBhQ3To0KHI/k6dOgUA8PHxKbIuL8ZjeElJScYOocrhOTc8
nnPD4zk3PJ5zw+M5Nzyec1bVcAJsAJaWlli3bp3OsrCwMMTExCA4OBhjxoxRtsfExKBFixYQQmjU
P3LkCJYvXw4hBAIDA4vct5mZ2fMFz0qMV+c2PJ5zw+M5Nzyec8PjOTc8nnPD4zlnVQ0nwBUAEWlt
mzFjBm7cuIGOHTuiZs2aAICLFy/i6NGjEELgo48+gr+/f5F9P5tAs/Jnbm5u7BCqHJ5zw+M5Nzye
c8PjOTc8nnPD4zlnVQ0nwBWAriQ1KCgI33//Pc6cOYMDBw4gNzcX1apVw/DhwzFlyhR06tTJCJEy
xhhjjDHG2IuLE2Ajmz9/PubPn6+1PTQ0FKGhoUaIiDHGGGOMMcYqJ2nsAFj5cnZ2NnYIVc6UKVOM
HUKVw3NueDznhsdzbng854bHc254POeGx3+fGxcnwJVcRkaGsUOocg4cOGDsEKocnnPD4zk3PJ5z
w+M5Nzyec8PjOTc8ToCNixNgxhhjjDHGGGNVgiBdSxCzSqFx48YAwA83Z4wxxhhjrALgv8+NjxfB
YooBAwYgLi7O2GEwxhgrJ3Xr1sXu3buNHQZjjDFmNHwJNFPExcUhNjbW2GEwxhgrB7GxsfwlJ2OM
sSqPzwAzDfXq1eNLMhhjrBJSX3bHGGOMVWV8BriSy8nJMXYIjDHG2AuJr4oyPJ5zw+M5Z1UNJ8CV
3J07d4wdAmOMMfZCGjBggLFDqHJ4zg2P55xVNZwAV3L8nDHGGGOsdKZMmWLsEKocnnPD4zlnVQ0n
wJUcJ8CMMcZY6XBiYHg854bHc86qGk6AGWOMMcYYY4xVCZwAM8YYY4wxxhirEjgBZowxxhhjjDFW
JXACXMklJSUZOwTGGGPshbR69Wpjh1Dl8JwbHs85q2o4Aa7kOAFmjDHGSocTA8PjOTc8nnNW1XAC
XMnVrl3b2CGwF4hKpcKBAwcwbdo0tG3bFo6OjrCwsED16tUxcOBA/Pjjj8YOscTu37+PN998E/Xq
1YOVlRU8PDwwYMAAHD16tNR93r17FytWrMCAAQPg5eUFS0tL2Nvbo0WLFpg7dy7u3btXhiOo2M6e
PYvhw4ejRo0asLKygpeXF8aPH4+4uDij9F2RXpu4uDh4eHhASgl7e3ucO3eu2G1//fVXvPPOO+jY
sSNcXFxgbm6OatWqISAgABERESCicoycqe3evdvYIVQ5POeGx3POqhxilZafnx/5+fmVW31W+Ywb
N46EECSlJCklWVhYkIODg/JvIQS9+uqrlJeXZ+xQi+XChQvk4uKijMnR0ZFMTU2V8SxcuLDEfd69
e5eEEBrz5OjoSGZmZsocOTs70y+//FL2A6pgNm7cqIzbxMSEnJyclDmwtbV9rjkoTd8V6bVJSEgg
Hx8fjfdOtWrVKDY2tsi2H3/8scYYzMzMlPGr++ratSulp6eXKCb+jGeMMePjz2Lj4zPAjDFFXl4e
atSogfnz5yMmJgZZWVlISUnBv//+iylTpkAIgV27duG9994r0/2GhIRASolNmzaVWZ9ZWVkYMGAA
kpOT0bp1a1y+fBnJyclITk7G22+/DSLCe++9h8OHD5eoX5VKBSklXnnlFezcuRNJSUlITk7G48eP
sW/fPvj4+CA5ORmDBw9GYmJimY3neXh7e0NKiTt37pRZn5cuXcKECROgUqkQGBiI+/fvIykpCbdu
3UJAQAAyMzMxdOhQPHr0yGB9V5TXJj09HX369MHff/8NT09P/Pzzz/D19UViYiICAgJw//79Qtvn
5ubCxcUFM2bMwKlTp5CVlYWkpCQ8evQI8+fPh6mpKU6cOIFx48aV2xgYY4yxSsvYGTgrP3wGmJXU
H3/8QTk5OXrLQ0NDlTNwWVlZZbbfkJAQklJSREREmfW5bNkyEkKQvb09JSQkaJUPHjyYhBDUpk2b
EvWbmppKFy9e1Ft+7do1srKyIiklffjhhyWOuzx4e3uTlJJu375dZn0OHDiQhBDUvn17ys/P1yjL
yMig2rVrk5SSZs6cabC+K8Jrk52dTT169CAhBHl5eVFcXBwREd2/f5+aN29OQghq0aIFpaWl6e3j
4sWLhZ7dDQsLU84Q37lzp9ix8Wc8Y4wZH38WGx+fAWbsOd2+fRtSSkgpceXKFb31MjMzYWtrCyll
ic86GkqbNm1gZmamtzwkJAQA8PjxY1y9etVAUZXO1q1bIYTAqFGj4OHhoVU+a9YsAMC5c+dw48aN
Yvdrb2+Ppk2b6i1v2LAh/P39ATy9h7WggwcPwsTEBCYmJnqPgU8//RRSSjg5OZXpGduylJqaiv37
90MIgRkzZkAIoVFuY2ODSZMmgYiwbds2g/Vt7NeGiDBy5EgcO3YMdevWxfHjx+Hj4wMAcHd3xy+/
/ILWrVvj4sWLGDBgAHJycnTup2nTprC1tdU7DvX7UNc4GGOMMVY4ToAruYr6B3Rl4uXlhV69ekEI
gfDwcL31vvnmGzx+/Bje3t546aWXDBhh2XFxcVF+V6lURoykcBkZGUpiEBAQoLOOv78/HBwcAABH
jhwp0/27uLiAiLTmKCAgAFOnTgURITQ0FCkpKRrlMTEx+OCDDyCEwMqVKyvsInYnTpxAbm4uAKBX
r14667z88ssAgISEBFy7dq1C9A2U72szefJkfPfdd/D19cXx48e16jg5OeHo0aPw9/fH8ePHMWLE
COTn55cofvUY1Cry+7AyeOWVV4wdQpXDc254POesquEEmLEyMG7cOBARtmzZovcP2vDwcAghEBwc
bODoys6xY8cAAGZmZmjQoIFxgynE1atXlVVyGzdurLOOEAINGzYEgELP3JeUSqXCyZMnIYRAkyZN
tMoXLVoEX19fxMfHY9KkScr27OxsBAYGIi8vD0OHDsXo0aPLLKaypp4vDw8PODk56azj5+enVd/Y
fZfnaxMWFoZ169ahadOmOHbsGDw9PXXGYGdnh0OHDqFbt2744YcfMHny5GLHr6Z+HwLQOQ7GGGOM
6ccJcCVnyDNIQlTMH0MYNGgQXF1dkZiYiL1792qV37hxA7/99huEEBqXLxaHeoGo0vyU5aJSmZmZ
WLRoEYQQGDp0KOzs7Mqs77KWkJCg/F69enW99apXrw4i0qj/vFatWoV79+5BSqnzyw5LS0tERkbC
1NQUO3bsQGRkJABg9uzZuHr1Kjw9PbF27doyi6c8qOersLm1tLSEo6OjRn1j912er838+fOhUqlw
4cIFuLu7FxqHtbU1jh49CpVKhTVr1hQ7fuDpZdbz588HAHTo0EH5EoeVj59++snYIVQ5POeGx3PO
qhpTYwfAWGVgZmaGoKAgLF26FBs2bNC6nGjDhg0AgB49esDLy6tEfTs6Ouq8h7UoQghYWVmVuJ0+
EydOxD///AMHBwd8+umnZdZvecjMzFR+L2wOrK2tATy9ZLosXLx4Ee+++y6EEHjjjTfg6+urs17L
li0RFhaGd999F1OnTkVOTg6++OILCCGwYcMGvWc+Kwr1/BZ1fFlbWyM1NbVE81tefVeW12bevHk4
d+4czMzMsGLFCqPGwhhjjL2IOAFmrIyMGzcOS5cuxb59+/DgwQO4ubkBAPLz87F582YIITB27NgS
97t8+XIsX768rMMtkYULF2Lr1q2QUmL9+vWlurLgn3/+QZs2bbQWNQKeLnwEANOmTcPs2bO1ymvV
qn5B6LQAACAASURBVIXTp0+XPHADSkhIwKBBg5CVlYU2bdpg4cKFhdafPXs29u3bh5MnT2Ls2LEQ
QmDy5Ml671kuyvbt2zFt2jSd8/vgwQMATxc5MzEx0SofPnw4li1bVqr9vgiM/dqUlW3btmHhwoUQ
QmDhwoVo3bq1UeNhjDHGXkScADNWRnx9fdGxY0ecOnUKW7ZswfTp0wEABw4cQHx8PBwdHTF48GAj
R1lya9euVc6cLV26FEOHDi1VPyqVSknEdCEipKenIz09XatMfaa2uGxsbJTfnzx5ovHvgh4/fgwA
ha64WxzJyckICAjArVu30LBhQ+zZswfm5uaFthFCYP369fD19YUQAt7e3vjss89KHcOTJ0+KnF99
z+RNS0sr0b7U8/nkyZNC65Vmfsu674rw2pSFvXv3KrdPvPnmm8rnC2OMMcZKhu8BruT0PWaDlY/x
48eDiDRWg96wYQOEEBg5ciQsLCyMGF3Jbd68GVOmTIEQAmFhYZg2bVqp+/Ly8oJKpdL5ExQUBCEE
Nm7cqLM8Li6uRPsqeP9ofHy83nrx8fEQQuhdsKg40tLSEBAQgMuXL8Pb2xuHDx9Wzv4XRX1pvPo+
5JKOs6Dg4GC981u7dm0IIXDr1i2d5V9//XWJ9qWe38LmNisrS1lJuSTzW5Z9V5TX5nkdOXIEr776
KvLy8jBmzBgsXbrUaLFUNbGxscYOocrhOTc8nnNW1XACXMkZ8jFIRBXzx5Bee+012Nvb4/Llyzh7
9iwePXqEPXv2AABCQ0NL1eebb74JT0/PUv3s2LGj1GPZsWMHxowZAyLCzJkzMW/evFL3VVxURi+Y
+swdAFy+fFnvvq5fvw5Ac1Xhknj8+DH69OmDs2fPonr16jh8+DBq1KhRrLa//vorlixZAiEEmjZt
iqysLGWl4fJSVvOrnq979+4hOTlZZ52CqzOXZH7Lqu8X7bXR58SJExg4cCCys7MxbNgwrFu3zuAx
VGUDBgwwdghVDs+54fGcs6qGE+BKztnZ2dghVClWVlYYMWIEgKdnkCIjI5GTk4MmTZqU+n69tLQ0
JCYmlvjnwYMHRV5Gqs9PP/2EwMBA5Ofn4/XXX8eiRYtK1Y+x2Nraok2bNgCAQ4cO6awTHR2t3Hv8
n//8p8T7yMrKQv/+/XHq1Cm4ubnh8OHD8PHxKVbb9PR0BAUFgYgwduxYHD16FO7u7rh48aJBvmh4
Xp07d4aZmRkA4PDhwzrrHDx4EMDTM7r6Fpwqr74ry2tz+vRp9O/fH0+ePMGAAQOUtQSY4UyZMsXY
IVQ5POeGx3POqhxilZafnx/5+fmVW32m29mzZ0kIQc7OztSkSROSUtLKlSuNHVaxHTx4kCwtLUlK
SWPGjDHIPkNCQkhKSREREWXW5/Lly0kIQQ4ODnTv3j2t8iFDhpAQgtq1a1fivnNycqh3794khCAX
Fxc6f/58idoHBweTEILq1atHGRkZRES0e/duEkKQqakp/frrryWOqTDe3t4kpaTbt2+XWZ+DBg0i
IQT5+/trlWVmZpKXlxdJKemdd94xaN8v2mujz/nz58nZ2ZmklNS7d2/Kycl57j75M54xxoyPP4uN
jxPgSowTYONp2bIlSSlJCEGWlpb06NEjY4dULCdOnCAbGxuSUtLIkSMNtt/ySICfPHlC3t7eJISg
1q1b05UrV4iIKD09nWbNmkVCCJJS0uHDh7Xa3rp1i4QQJITQikmlUtHQoUOV5Do6OrpEce3atUtJ
pk6dOqVRNm7cOBJCkI+PD6Wnp5dwxP8fe/cep1O5/3/8fY/DMMphHEppcpjS755E2jqgwpYOzKAc
UowOKE3aUm0pSqLSQbRN7RAdJIUpw5acSYMOO7Sb/bXdVEMRNSUTZhjr98e9Z3bc9zCYudY963o9
H4957O2+1qz1mXe3sT73Wuu6ilYaDfCGDRucihUrOlFRUU5ycrLz008/OY7jON99953ToUOHwga0
4PU/ev311wvzD1fTye67LP63CWfTpk3OGWec4URFRTlt27Z1Dhw4UCL75Xc8ALiP38Xu4xZooBT0
69dPjuPI5/MpKSmpzNyKPmLECO3bt0+O42jx4sWl9nyxCZUqVdLcuXNVq1Ytffnll0pISFD16tVV
vXp1Pf/884qKitIzzzxzzNufw91u+sknnygtLU1ScJK5zp07F5nRpZdeesT37ty5UwMGDJDP59PQ
oUN1+eWXHzE+fvx4NWzYUN98840GDRpUAimUnosuukiTJ09W+fLlNX36dNWpU0c1atRQ/fr1tXjx
Yp122mmaM2eOatasaWzfXvlvM3bsWO3atUuO42jDhg2qX79+kT8HE2IBAHBiWAYJKAU33nij7r33
XkknP/mVGwqadklFLpkjBRvDk32+2KSLLrpI//rXv/T0009r/vz5+v7771WrVi1ddtlluv/++9Wm
TZsiv9fn84VtgA8fPlz4em5urnbt2lXkPo5evunOO+/UL7/8oosvvlgjR44M2b5KlSp68803dfXV
V+vNN99U586d1aVLl+L9sMdRGs+OJicn68ILL9Rzzz2nVatWKTs7W3FxcerQoYMefvjhYz53W1S+
p7Lvsvrf5mh//DkKZrsuSk5OTqnUAACAV/kcx/Q8uTAlISFB2dnZ2rFjR7G3l4qeNRfF9/bbb6tP
nz6qV6+evvvuOyauAeA6fsefuNTUVCYIMozMzSNzs/hd7D5ugfa47Oxst0uw0t///nf5fD7deeed
NL8AUEalpqa6XYJ1yNw8ModtaIA9Li4uzu0SrPPaa6/pk08+UXR0tO666y63ywEAnKT09HS3S7AO
mZtH5rANzwB7XMWKFd0uwQrff/+9WrVqpZycHGVnZxdOpHPmmWe6XRoA4CTFx8e7XYJ1yNw8Modt
aICBEnDo0CFt27ZNUVFRatiwoQYMGKC//vWvbpcFAAAA4A9ogIEScO655yo/P9/tMgAAAAAcA88A
AwAAAACsQAPscVlZWW6XAABAmZSYmOh2CdYhc/PIHLahAQYAAAAAWIEG2ONYBgkAgJMzb948t0uw
DpmbR+awDQ0wAAAAAMAKNMAAAAAAACvQAAMAAAAArMA6wB6Xl5d3QtsHAgElJCSUUjUAALcEAgHF
x8e7XUaZQmbmkbl5ZA7bcAXY405kGaRGjRrxC7AEBAIBt0uwDpmbR+bmnWrm8fHxatSoUQlVY4ek
pCS3S7AOmZtH5rANV4BdNH36dCUnJ0uSpkyZojvuuCNkm4yMDI0ePVrr1q3T/v37dd555+mOO+7Q
oEGDFBV1/M8vYmNji11Penp68YtHkVJTU5WSkuJ2GVYhc/PI3DwyN4+8zSNz88gctvE5juO4XYSN
tm3bposuukiHDx9WTk6OJk+eHNIAz507V926dVPlypXVs2dPxcbGat68efq///s/de/eXe++++4x
j1FwK/PXX39daj8HAAAA4GW5udIvv0hnnnnq++L83H3cAu2S22+/XbVq1dLdd98ddnzv3r3q37+/
ypcvr5UrV2ry5MkaO3as1q9fryuuuEKzZ8/We++9Z7hqAAAAwB6OIw0cKP3pT9I//+l2NSgJNMAu
mDBhglasWKFp06YpJiYm7DazZs3STz/9pF69euniiy8ufL1ixYoaPXq0HMfRK6+8YqpkAAAAwDov
vCBNmyZ9/73UurU0Z47bFeFU0QAb9u9//1vDhg3T4MGD1bp16yK3W758uXw+n6699tqQsauuukox
MTHKyMjQwYMHS7NcAAAAwErz5kl//ev//rx/v9Stm/T00+7VhFNHA2xQfn6++vTpo/r162vMmDHH
3HbTpk2SpPPPPz9krFy5cmrQoIEOHTqkrVu3HnM/2dnZJ18wTkpqaqrbJViHzM0jc/PI3DwyN4/M
zSPz8L76SrrlluAt0EerXdt8PSg5NMAGPfHEE9qwYYNef/11RUdHH3PbPXv2SJKqVasWdrzg9V9/
/fWY+6EBNo9/SMwjc/PI3DwyN4/MzSNz88g81K5dUmKilJMTOjZ4sNSvn/maUHJogA1Zt26dnn76
aT344IO69NJLjR03Li7O2LEQxHJS5pG5eWRuHpmbR+bmkbl5ZH6k3Fypa1fpu+9Cx66/XnruOfM1
oWTRABuQn5+v5ORkNW7cWKNGjTpirKhVqAqu8BZcCT5awevVq1c/5rF//PFHRUdHKz4+Xn6//4iv
Jk2aKDExMeR7AoGA/H5/2E8EU1NT5ff7FQgEQsYSExPZ339Fcn1e3F98fHxE1+fF/cXHx0d0fV7c
X3x8fETX58X9xcfHR3R9XtxfQeaRWp8X91eQeaTWZ3J/jiMNGCBlZKRK8kv63/78fumdd6SuXYu3
v4Ka/H6/6tatW3g+HggEuEPTZawDbMCePXtUo0YN+Xy+sA3vH18fPHiwxo0bpz59+mjGjBmaMWOG
evbsecT2+fn5qlatmg4ePKicnBxVqFAh7HFZZwwAAAAonrFjpYcfDn29Zk3p00+lhg1P/Ricn7uv
vNsF2CA6Olr9inhY4J///Ke+/PJLXXnllWrcuLGuuOIKSVK7du309ttva+HChSEN8MqVK7Vv3z61
adOmyOYXAAAAQPHMnSsNGxb6eoUKUlpayTS/iAxcAXbZE088oVGjRmny5Mm64447Cl/fu3evGjVq
pL1792r16tW65JJLJEm5ublq27at1q1bp5kzZ6p79+5F7ptPmAAAAIBj27BBatVK+v330LHXXpP+
cIp+yjg/dx/PAEeAcJ9BnH766Zo8ebLy8/PVpk0b9e/fX0OHDlWzZs20bt06de/e/ZjNb4GsrKzS
KBnHUNRzwSg9ZG4emZtH5uaRuXlkbp7tme/cGZzxOVzz+8ADJdv8IjLQAEcAn88X9vXOnTtr5cqV
uvrqq5WWlqaJEyeqYsWKevHFF/XOO+8YrhIAAADwjgMHgjM+b9sWOtaxY/CZYHgPt0B7GLdYAAAA
AKEcR+rTR3r77dCxCy+UPvlEqlq15I/L+bn7uAIMAAAAwCrPPBO++a1VS5o3r3SaX0QGGmAAAAAA
1khLkx55JPT1ChWk99+X6tc3XhIMogEGAAAAYIUvvwze+hzOpElS69Zm64F5NMAel5eX53YJ1gkE
Am6XYB0yN4/MzSNz88jcPDI3z6bMd+yQkpKkfftCx/76V+m224yXBBfQAHscyyCZl5SU5HYJ1iFz
88jcPDI3j8zNI3PzbMl8//7gjM/bt4eOJSVJTz1lvia4gwbY42JjY90uwTopKSlul2AdMjePzM0j
c/PI3DwyN8+GzB1HuvNOad260LEmTaTp06Vy5czXBXewDJKHMc06AAAAbDd6tDRiROjrdepIn34q
nXuuuVo4P3cfV4ABAAAAeNLs2eGb34oVgzM+m2x+ERlogAEAAAB4zhdfSMnJ4cemTJFatjRbDyID
DTAAAAAAT/nhB6lz5+DkV0d7+OGil0KC99EAe1x2drbbJVgnNTXV7RKsQ+bmkbl5ZG4emZtH5uZ5
MfP9+6UuXaTvvw8d69JFGjPGfE2IHEyC5WEJCQkKBALKzc11uxSr+P1+ZWZmul2GVcjcPDI3j8zN
I3PzyNw8r2V++LDUo4c0Z07oWNOm0urV0mmnma+rAJNguY8rwB4XFxfndgnWSU9Pd7sE65C5eWRu
HpmbR+bmkbl5Xsv80UfDN79nnCGlp7vb/CIycAXYw/iECQAAALaYOjW43u/RoqOlFSukyy83XlII
zs/dxxVgAAAAAGXa0qXSXXeFH5s2LTKaX0QGGmAAAAAAZda//y3ddJN06FDo2KhRUq9e5mtC5KIB
BgAAAFAm7doldewo7dkTOpacLA0fbr4mRDYaYI/LyspyuwTrJCYmul2CdcjcPDI3j8zNI3PzyNy8
spz5gQPBZY2++SZ07KqrpEmTJJ/PfF2IbDTAAAAAAMqUw4el226T1qwJHTvvPCktLTj5FXA0ZoH2
MGaZAwAAgBeNGCGNHh36emystHZtsAmORJyfu48rwAAAAADKjDfeCN/8Vqggvf9+5Da/iAw0wAAA
AADKhBUrpP79w49NnRp89hc4FhpgAAAAABFv0ybpxhulgwdDxx57TOrd23xNKHtogD0uLy/P7RKs
EwgE3C7BOmRuHpmbR+bmkbl5ZG5eWcn8p5+Cyx398kvo2C23SCNHGi8JZRQNsMexDJJ5SUlJbpdg
HTI3j8zNI3PzyNw8MjevLGSemyt17Spt2RI61qqV9NprLHeE4qMB9rjY2Fi3S7BOSkqK2yVYh8zN
I3PzyNw8MjePzM2L9MwdR+rXT1q9OnSsYUPpgw+kSpXM14Wyi2WQPIxp1gEAAFCWPfFE+Nubq1cP
rgF8wQXGSzolnJ+7jyvAAAAAACLO22+Hb37Ll5fS0spe84vIQAMMAAAAIKJ8/LF0xx3hxyZPltq2
NVsPvIMGGAAAAEDECASCk16FW8zkkUek224zXhI8hAbY47Kzs90uwTqpqalul2AdMjePzM0jc/PI
3DwyNy/SMs/ODi539PPPoWM9e0pPPmm+JngLk2B5WEJCggKBgHJzc90uxSp+v1+ZmZlul2EVMjeP
zM0jc/PI3DwyNy+SMs/Lk669VlqxInTs8sulZcukypWNl1WimATLfVwB9ri4uDi3S7BOenq62yVY
h8zNI3PzyNw8MjePzM2LlMwdRxowIHzzW7++NHdu2W9+ERm4AuxhfMIEAACAsqCo5Y6qVZMyMiS/
33hJpYLzc/dxBRgAAACAa15/vejljmbP9k7zi8hAAwwAAADAFYsXS/37hx975RWpfXuz9cD7aIAB
AAAAGLdhg3TTTdKhQ6Fjjzwi9etnviZ4Hw2wx2VlZbldgnUSExPdLsE6ZG4emZtH5uaRuXlkbp5b
mW/fHlzuaO/e0LFbbpFGjzZfE+xAAwwAAADAmD17pBtukL7/PnSsTRtp6lTJ5zNeFizBLNAexixz
AAAAiCR5ecErv0uWhI75/dLq1VKNGubrMoXzc/dxBRgAAABAqStY6zdc83vmmdKCBd5ufhEZaIAB
AAAAlLonnpDeeCP09SpVpH/8Qzr3XPM1wT40wAAAAABK1bRpwQb4aOXKSbNmSc2bm68JdqIB9ri8
vDy3S7BOIBBwuwTrkLl5ZG4emZtH5uaRuXkmMl+0KHjrczgvvyxdf32plwAUogH2OJZBMi8pKcnt
EqxD5uaRuXlkbh6Zm0fm5pV25hs2SN26Fb3Wb1GNMVBaaIA9LjY21u0SrJOSkuJ2CdYhc/PI3Dwy
N4/MzSNz80oz82Ot9Xvrraz1C3ewDJKHMc06AAAA3LBnj9S6tfSvf4WOtW0rLVwoVaxovi63cX7u
Pq4AAwAAACgxeXnSTTeFb379fiktzc7mF5GBBhgAAABAiXAcqX9/aenS0LG6daUPP5SqVzdfF1CA
BtiQoUOHqn379oqLi1NMTIxq1qyp5s2ba9SoUcrOzj5i2++++05RUVFFft1yyy0u/RQAAABA0UaO
lN58M/T1grV+4+KMlwQcgQbYkPHjx2vfvn3q0KGDBg8erN69e6tChQoaOXKkmjZtqu+//z7ke5o1
a6aRI0eGfHXr1q3Yxz26uUbpS01NdbsE65C5eWRuHpmbR+bmkbl5JZn51KnSqFGhrxes9XvxxSV2
KOCkMQmWIXl5eaoY5mGH4cOH66mnntI999yjiRMnSgpeAW7QoIFuu+02TZ069aSPmZCQoEAgoNzc
3JPeB06c3+9XZmam22VYhczNI3PzyNw8MjePzM0rqcw/+ig443N+fujYpEnB26LBJFiRgCvAhoRr
fiWpR48ekqTNmzeXynHjuM/EuPT0dLdLsA6Zm0fm5pG5eWRuHpmbVxKZr18fXOs3XPP76KM0v4gs
5d0uwHYFv3SaNm0aMvbDDz9o0qRJ+vnnn1WzZk1dccUVatKkyQntv6jGG6UnPj7e7RKsQ+bmkbl5
ZG4emZtH5uadaubbtgWv/ObkhI717i09+eQp7R4ocTTAhj3//PP6/ffftWfPHn3++edavXq1mjVr
pqFDh4Zsu3jxYi1evLjwz47jqE2bNnrjjTd0zjnnmCwbAAAAOMIvv0jXXSf98EPoWNu20muvST6f
+bqAY6EBNuyFF17Qrl27Cv98/fXX6/XXX1fNmjULX4uJidFjjz2mLl26qGHDhpKkjRs3auTIkVq2
bJnat2+v9evXq3LlysbrBwAAAPbvl5KSpHCPD7PWLyIZzwAbtmPHDuXn52vnzp1KS0vTli1b1KxZ
M61fv75wm9q1a2vkyJFq1qyZqlatqqpVq6p169b66KOPdNlllykQCGjKlCku/hQAAACwVX6+dMst
0urVoWOs9YtIRwPsktq1a6tz585atGiRfv75ZyUnJx/3e8qVK6d+/frJcRytWrWqWMfZtGmToqOj
FR8fL7/ff8RXkyZNlJiYGPI9gUBAfr8/7LT4qamp8vv9CgQCIWOJiYnsT1K7du0iuj4v7q9gX5Fa
nxf3l5iYGNH1eXF/iYmJEV2fF/dXsI9Irc+L+yvYLlLr8+L+Cv5/cffnONK990offFC4h/9+Saef
Li1YEFzrN1J/XlP7K9iH3+9X3bp1C8/HA4EAy5S6jGWQIkDz5s21YcMG7d69W7GxscfcNj09XV26
dNF1112nBQsWHHPbhIQEZWVlae/evSVZLo4jMTFR8+bNc7sMq5C5eWRuHpmbR+bmkbl5J5r56NHS
iBGhr1eoELzy++c/l2BxHsQySO7jCnAE+OG/MweUK1fuuNuuWbNGkgqfDT4elkEyj3+4zSNz88jc
PDI3j8zNI3PzTiTz114L3/xK0ptv0vyibKABNmDz5s367bffQl53HEePPvqodu3apVatWqlatWqS
pC+//FLhLswvXbpU48ePl8/nU+/evUu9bgAAAECS5s+X7ror/Ni4cdLNN5utBzhZzAJtwIIFCzRs
2DC1bt1aDRo0UM2aNfXjjz9q5cqV2rp1q8466yxNmjSpcPshQ4Zo8+bNatmyperVqycpOAv0smXL
5PP5NHr0aF1++eVu/TgAAACwyNq1Uo8ewcmvjvbgg9L995uvCThZNMAGtG/fXlu2bNHq1au1fv16
/frrr6pSpYrOP/989e3bV4MGDVL1P0yVl5ycrPfff1+ff/65Fi5cqIMHD+qMM87QzTffrJSUFLVq
1crFnwYAAAC22LRJ6tQpuOzR0W69VRo71nxNwKlgEiwPS0hIUF5enjZv3ux2KVYJBAKKj493uwyr
kLl5ZG4emZtH5uaRuXnHyvyHH6SWLaXvvgsda99e+sc/WOv3RDEJlvt4BtjjsrKy3C7BOklJSW6X
YB0yN4/MzSNz88jcPDI3r6jM9+yRbrghfPPbvLmUlkbzi7KJBtjjjresEkpeSkqK2yVYh8zNI3Pz
yNw8MjePzM0Ll3lurnTjjdKGDaHbN2gQvPJ7+ukGigNKAbdAexi3WAAAAOBEHD4s3XKL9O67oWO1
akkZGdJ555mvyys4P3cfV4ABAAAAyHGkBx4I3/zGxASv/NL8oqyjAQYAAACgF16Qxo8Pfb1cOWn2
bOnSS83XBJQ0GmAAAADActOnSw89FH7stdek6683Ww9QWmiAPS47O9vtEqyTmprqdgnWIXPzyNw8
MjePzM0jc/NSU1O1aJF0++3hx596Surb12xNQGliEiwPS0hIUCAQUG5urtulWMXv9yszM9PtMqxC
5uaRuXlkbh6Zm0fm5jVs6Nfu3ZnKyQkdu/de6aWXJJ/PfF1exSRY7uMKsMfFxcW5XYJ10tPT3S7B
OmRuHpmbR+bmkbl5ZG7Wli3Snj3pYZvfbt2CzwPT/MJruALsYXzCBAAAgHB27ZJatgw2wUe76irp
o4+kSpXM1+V1nJ+7jyvAAAAAgEX27pU6dgzf/F54oTR3Ls0vvIsGGAAAALBEbq7Utav0+eehY+ec
Iy1cKFWvbr4uwBQaYAAAAMAC+flS797S0qWhYzVqBJvfs882XxdgEg2wx2VlZbldgnUSExPdLsE6
ZG4emZtH5uaRuXlkXnocR7rnHmn27KNHElWpkjRvnuT3u1EZYBYNMAAAAOBxI0ZIkyaFH3vvPalV
K7P1AG5hFmgPY5Y5AAAAjB8v3X9/+LE33pCSk83WYzPOz93HFWAAAADAo956q+jm94UXaH5hHxpg
AAAAwIP+8Q/p9tvDjw0bJg0ZYrYeIBLQAAMAAAAes3q11K1bcObno/XvL40ZY74mIBLQAHtcXl6e
2yVYJxAIuF2CdcjcPDI3j8zNI3PzyLxkbNwodeokHTgQOnbjjdIrr0g+X/DPZA7b0AB7HMsgmZeU
lOR2CdYhc/PI3DwyN4/MzSPzU7d1q3TttdKePaFj7dpJM2ZI5cr97zUyh21ogD0uNjbW7RKsk5KS
4nYJ1iFz88jcPDI3j8zNI/NTs3OndM01wf892p/+JH3wgRQdfeTrZA7bsAyShzHNOgAAgB1+/VW6
+urg7c9Ha9xY+vhjqXZt83XhSJyfu48rwAAAAEAZtm+flJgYvvmtV09atIjmFyhAAwwAAACUUQcP
Sj17Bmd9PlpsbLD5jYszXxcQqWiAAQAAgDLo8GGpXz9p/vzQsSpVpAULpP/3/8zXBUQyGmCPy87O
drsE66SmprpdgnXI3DwyN4/MzSNz88i8+BxHevBB6c03Q8cqVJDef1+67LLj74fMYRsaYI+jATaP
f0jMI3PzyNw8MjePzM0j8+J75hnpxRdDX/f5pOnTg7NBFweZwzY0wB4Xx0MfxqWnp7tdgnXI3Dwy
N4/MzSNz88i8eCZPlh55JPxYaqrUo0fx90XmsA3LIHkY06wDAAB4y5w5wQb38OHQsVGjpBEjzNeE
4uP83H1cAQYAAADKgKVLpVtuCd/83nefNHy4+ZqAsoYGGAAAAIhwa9dKnTtLeXmhY7feGnwe2Ocz
XxdQ1tAAAwAAABFswwbp+uul338PHbv+emnaNCmKs3qgWPir4nFZWVlul2CdxMREt0uwDpmb1guz
IQAAIABJREFUR+bmkbl5ZG4emYf6z3+kDh2kX38NHWvZUpo9O7js0ckic9iGBhgAAACIQN99J7Vv
L+3aFTp20UXS/PlSTIz5uoCyjFmgPYxZ5gAAAMqmnTulK6+UAoHQsfPPl1atks44w3xdODWcn7uP
K8AAAABABMnODt72HK75jYuTliyh+QVOFg0wAAAAECH27g1ObPXVV6FjZ5wRbH7POcd8XYBX0AAD
AAAAEWD//uBSR59+GjpWo4a0eLF03nnm6wK8hAbY4/LCLRaHUhUId78SShWZm0fm5pG5eWRuns2Z
Hzwo9eghLV8eOnbaadKHH0pNmpT8cW3OHHaiAfY4lkEyLykpye0SrEPm5pG5eWRuHpmbZ2vm+flS
cnJwVuejVaokzZsnXXZZ6Rzb1sxhLxpgj4uNjXW7BOukpKS4XYJ1yNw8MjePzM0jc/NszNxxpLvv
lmbODB0rXz64zm+bNqV3fBszh91YBsnDmGYdAAAgcjmO9OCD0rhxoWM+n/TOO1LPnubrQunh/Nx9
XAEGAAAAXPDkk+GbX0maNInmFygNNMAAAACAYePHS48/Hn5s3DipXz+z9QC2oAEGAAAADJo6Vbr/
/vBjI0cWPQbg1NEAe1x2drbbJVgnNTXV7RKsQ+bmkbl5ZG4emZtnQ+bvvSf17x9+7P77pcceM1uP
DZkDf8QkWB6WkJCgQCCg3Nxct0uxit/vV2ZmpttlWIXMzSNz88jcPDI3z+uZL1ggde4sHToUOtav
X/C5X5/PbE1ezzzSMAmW+7gC7HFxcXFul2Cd9PR0t0uwDpmbR+bmkbl5ZG6elzNfuVK66abwzW/P
ntLf/26++ZW8nTkQDg2wIUOHDlX79u0VFxenmJgY1axZU82bN9eoUaOKvE05IyNDN9xwg2rWrKmY
mBg1bdpUEyZM0OHDh4t93IoVK5bUj4Biio+Pd7sE65C5eWRuHpmbR+bmeTXzzz6TOnWSDhwIHevY
UXrzTalcOfN1Sd7NHCgKt0AbEh0drUsuuUR+v1916tTR77//rrVr1+qzzz7T2WefrbVr1+rss88u
3H7u3Lnq1q2bKleurJ49eyo2Nlbz5s3T//3f/6l79+569913j3tMbrEAAABw11dfSW3aSOGud7Rp
E7wtunJl01XBLZyfu48G2JC8vLywV2OHDx+up556Svfcc48mTpwoSdq7d68aNWqkvXv3KiMjQxdf
fHHhPtq2bau1a9fqnXfeUY8ePY55TP6CAQAAuOff/5auvlravTt07NJLpSVLpNNPN18X3MP5ufu4
BdqQom5FLmhiN2/eXPjarFmz9NNPP6lXr16FzW/BPkaPHi3HcfTKK6+UbsEAAAA4aYGA9Oc/h29+
L7xQ+vBDml/ADTTALiuYeKBp06aFry1fvlw+n0/XXnttyPZXXXWVYmJilJGRoYMHDx53/1lZWSVX
LIolMTHR7RKsQ+bmkbl5ZG4emZvnlcy//VZq107asSN0LD5eWrxYio01XlZYXskcKK7ybhdgm+ef
f16///679uzZo88//1yrV69Ws2bNNHTo0MJtNm3aJEk6//zzQ76/XLlyatCggTIzM7V161Y1btzY
WO0AAAA4tu3bg83vtm2hY+eeKy1dKp15pvm6AATxDLBhdevW1a5duwr/fN111+n1119X7dq1C19r
3LixAoGANm/erIYNG4bso3Xr1lqzZo0yMjJ02WWXFXksnjEAAAAwZ8eO4DO/f3iyrdDZZ0urVklh
Tu1gEc7P3cct0Ibt2LFD+fn52rlzp9LS0rRlyxY1a9ZM69evd7s0AAAAnKTdu6X27cM3v2ecIS1b
RvMLRAIaYJfUrl1bnTt31qJFi/Tzzz8rOTm5cKxatWqSpD179oT93oLXq1evftzjbNu2TdHR0YqP
j5ff7z/iq0mTJmGf+wgEAvL7/UpNTQ0ZS01Nld/vVyAQCBlLTExkf+yP/bE/9sf+2B/7s25/2dnS
NddImZmSlPjfr6BatYK3PUdFeefnZX/H31/BPvx+v+rWrVt4Ph4IBJQdbk0sGMMt0BGgefPm2rBh
g3bv3q3Y2Fj16dNHM2bM0IwZM9SzZ88jts3Pz1e1atV08OBB5eTkqEKFCkXul1ssAAAASteePcEr
v59/HjpWo4a0fLn0h7lOYTnOz93HFeAI8MMPP0gKTnAlSe3atZPjOFq4cGHItitXrtS+ffvUqlWr
Yza/BfLy8kq2WBxXuE8LUbrI3DwyN4/MzSNz88pa5jk50g03hG9+q1aVPvoo8pvfspY5cKpogA3Y
vHmzfvvtt5DXHcfRo48+ql27dqlVq1aFtz5369ZNtWrV0syZM/XFF18Ubp+bm6vhw4fL5/Np4MCB
xTo2yyCZl5SU5HYJ1iFz88jcPDI3j8zNK0uZ79snJSZKGRmhY1WqBNf5bdHCfF0nqixlDpQElkEy
YMGCBRo2bJhat26tBg0aqGbNmvrxxx+1cuVKbd26VWeddZYmTZpUuP3pp5+uyZMnq3v37mrTpo1u
vvlmxcbGKj09Xf/5z3/UvXt3de/evVjHjo2UReYskpKS4nYJ1iFz88jcPDI3j8zNKyuZHzggde0q
rVgROla5svSPf0gtWxov66SUlcyBksIzwAZ8/fXXevXVV7V69Wpt375dv/76q6pUqaLzzz9fnTp1
0qBBg8JOaLVmzRqNGTNGa9as0YEDBxQfH68777xTgwYNks/nO+5xecYAAACgZOXlSTfdJM2fHzoW
HS3NmxecEAsIh/Nz99EAexh/wQAAAErOoUNSz55SWlroWIUKwdc7dTJfF8oOzs/dxzPAAAAAwHHk
50vJyeGb33LlpJkzaX6BsoAGGAAAADiGw4elfv2kd94JHYuKkqZPl2680XxdAE4cDbDHsdC2eeEW
VEfpInPzyNw8MjePzM2LxMwdR7rnHun118OPT50q3Xyz0ZJKVCRmDpQmGmCPowE2j39IzCNz88jc
PDI3j8zNi7TMHUe6/37p1VfDj7/6qtS3r9maSlqkZQ6UNhpgj4uLi3O7BOukp6e7XYJ1yNw8MjeP
zM0jc/MiKXPHkR5+WJowIfz4hAnSgAFmayoNkZQ5YAKzQHsYs8wBAACcnMcfl0aNCj/27LPSQw+Z
rQfewPm5+7gCDAAAAPzBE08U3fyOGkXzC5RlNMAAAADAf40aJY0cGX7skUek4cONlgOghNEAAwAA
AJKefDJ463M4Q4ZIo0dLPp/ZmgCULBpgj8vKynK7BOskJia6XYJ1yNw8MjePzM0jc/PczHzMGOmx
x8KPDRokPf+8N5tf3uewDQ0wAAAArPbUU0Xf2nzvvcEZn73Y/AI2YhZoD2OWOQAAgGN7+ungs73h
pKRIf/sbzS9KDufn7uMKMAAAAKz0zDNFN7/33EPzC3gRDTAAAACsM3asNGxY+LGBA6WJE2l+AS+i
AQYAAIBVnn1Wevjh8GN33UXzC3gZDbDH5eXluV2CdQKBgNslWIfMzSNz88jcPDI3z0Tmzz8vDR0a
fmzAAOnll6Uoi86QeZ/DNhb99bYTyyCZl5SU5HYJ1iFz88jcPDI3j8zNK+3MX3hBeuih8GP9+0uv
vGJX8yvxPod9LPsrbp/Y2Fi3S7BOSkqK2yVYh8zNI3PzyNw8MjevNDMfN0568MHwY/36SX//u33N
r8T7HPZhGSQPY5p1AAAA6cUXpSFDwo/deac0aZKdzS/M4/zcffxVBwAAgGcdq/m94w6aX8A2/HUH
AACAJ02YUHTze/vt0uTJNL+AbfgrDwAAAM956SVp8ODwY7fdJk2ZQvML2Ii/9h6XnZ3tdgnWSU1N
dbsE65C5eWRuHpmbR+bmlVTmf/ub9Je/hB/r25fm9494n8M2TILlYQkJCQoEAsrNzXW7FKv4/X5l
Zma6XYZVyNw8MjePzM0jc/NKIvOJE6VBg8KP9ekjTZsmlSt3SofwFN7nZjEJlvv47Mvj4uLi3C7B
Ounp6W6XYB0yN4/MzSNz88jcvFPNnOb3xPE+h224AuxhfMIEAABsMW6c9MAD4cd695Zef53mF+7j
/Nx9XAEGAABAmfbMM0U3v7fcQvML4H9ogAEAAFBmjRolDRsWfqxXL+mNN2h+AfwPDTAAAADKHMeR
RoyQHn88/HifPtJbb0nly5utC0BkowH2uKysLLdLsE5iYqLbJViHzM0jc/PI3DwyN6+4mTuO9PDD
0ujR4cfvvJMJr4qL9zlsw2diAAAAKDMcRxoyRBo/Pvz43XdLqams8wsgPGaB9jBmmQMAAF5y+HBw
maOXXw4/ft99wcbY5zNbF1BcnJ+7j8/GAAAAEPEOHw5e3S2q+X3gAZpfAMdHAwwAAICIlp8ffK53
8uTw48OGSc89R/ML4Ph4BhgAAAAR69AhqW9facaM8OOPPx78ovkFUBxcAfa4vLw8t0uwTiAQcLsE
65C5eWRuHpmbR+bmHZ35wYPSrbcW3fyOHi2NHEnzeyp4n8M2NMAexzJI5iUlJbldgnXI3DwyN4/M
zSNz8/6YeV6e1LOn9N574bd99lnp0UcNFeZhvM9hG26B9rjY2Fi3S7BOSkqK2yVYh8zNI3PzyNw8
MjevIPPcXKlbN2n+/PDbjR8v/eUvBgvzMN7nsA3LIHkY06wDAICyZv9+6cYbpYULw4+npkr33GO2
JqCkcH7uPq4AAwAAICLs2yclJUlLl4aO+XzSpElSv37m6wLgHTTAAAAAcF1OjtSpk7RyZeiYzydN
mxacDRoATgUNMAAAAFz122/SDTdIn3wSOhYVJb31lnTLLebrAuA9zALtcdnZ2W6XYJ3U1FS3S7AO
mZtH5uaRuXlkbsYvv0gdOhQ0v0dmXq6cNHMmzW9p4n0O29AAexwNsHn8Q2IemZtH5uaRuXlkXvp2
7ZLatpXWrSt45X+ZV6ggzZolde/uSmnW4H0O23ALtMfFxcW5XYJ10tPT3S7BOmRuHpmbR+bmkXnp
2r5dat9e2rTpj68GM69YUZozJ/hMMEoX73PYhmWQPIxp1gEAQCTasiXY/H77behYdLT0wQfSddcZ
LwsodZyfu48rwAAAADAmMzPY/O7YETpWpYqUni61a2e+LgB2oAEGAACAEf/8p3TttdJPP4WOVa8u
ffihdPnl5usCYA8mwQIAAECpy8gIXtkN1/zWri0tX07zC6D00QAbkJ2drSlTpujGG2/Ueeedp5iY
GFWvXl1XXnmlpk6dqqMfw/7uu+8UFRVV5NctJ7AWQFZWVkn/ODiOxMREt0uwDpmbR+bmkbl5ZF5y
li6VrrlG2rMndOzss6VVq6RmzcjcDWQO23ALtAGzZs3SwIEDddZZZ6lt27aKi4vTjz/+qLS0NPXr
108LFy7Ue++9F/J9zZo1U5cuXUJev/DCC02UDQAAcMrmzQsuZZSbGzrWoEGwOW7QwHxdAOzELNAG
rFixQr///rs6dux4xOu7du1SixYttH37ds2ePVtdu3aVFLwC3KBBA912222aOnXqSR+XWeYAAICb
Zs6U+vSRDh0KHbvgAmnJkuAVYMAWnJ+7j1ugDWjTpk1I8ytJderU0d133y3HcbRixQrzhQEAAJSS
116TbrklfPPbrFnwtmeaXwCmcQv0H+zdu1c5OTmqW7eusWNWqFBBklS+fOh/ih9++EGTJk3Szz//
rJo1a+qKK65QkyZNjNUGAABwMiZMkAYPDj92xRXSggXBWZ8BwDTrrwDv27dPQ4cOVb169VS9enWd
c845hWOffvqpkpKStH79+lI5dn5+vt544w35fD5dF2a198WLF2vgwIEaPny4Bg4cqKZNm6pdu3ba
tm1bqdQDAABwKhxHGjOm6Oa3XTtp0SKaXwDusboB3rt3r1q2bKnnnntOsbGxaty48REzMickJGjZ
smWaMWNGqRx/6NCh+vrrr9WxY0ddc801ha/HxMToscce0xdffKFffvlFv/zyi1auXKl27dppxYoV
at++vfbv31+sY+Tl5ZVK7ShaIBBwuwTrkLl5ZG4emZtH5ifGcaRhw6Thw8OPd+ok/eMf0mmnFb0P
MjePzGEbqxvg0aNHa+PGjZoyZYo2btyoHj16HDFepUoVXX311Vq6dGmJH/ull17SuHHj5Pf79eab
bx4xVrt2bY0cOVLNmjVT1apVVbVqVbVu3VofffSRLrvsMgUCAU2ZMqVYx2EZJPOSkpLcLsE6ZG4e
mZtH5uaRefEdPizde680dmz48Z49pbQ0qVKlY++HzM0jc9jG6gZ4zpw56tChg+644w5Jks/nC9mm
fv362r59e4ked+LEiRo8eLAuvPBCLVu2TNWLeR9QuXLl1K9fPzmOo1WrVhXre3w+n6KjoxUfHy+/
33/EV5MmTcKu/RYIBOT3+5WamhoylpqaKr/fH/bTwsTERPYnqUePHhFdnxf3l5KSEtH1eXF/KSkp
EV2fF/eXkpIS0fV5cX8pKSkRXV+k7O/QIally1S9/LJfUuj+zjknUTk5ifrvtCfH3F9B5pH883pt
fwWZR2p9ZXV/Bfvw+/2qW7du4fl4IBBQdnZ2yPfDHKuXQapUqZL+8pe/aOx/P6584oknNGrUKOXn
5xduM3ToUE2YMEEHDhwokWOOHz9eQ4YM0UUXXaQlS5aoVq1aJ/T96enp6tKli6677jotWLDgmNsy
zToAAChNeXnBmZ7nzAk/ft990osvSlFWX3IB/ofzc/dZ/evotNNO0+7du4+5zTfffHPCTWpRxo4d
qyFDhqh58+Zavnz5Se13zZo1kqSGDRuWSE0AAAAnY/9+qUuXopvfRx+Vxo+n+QUQWaz+ldSiRQvN
nz9fOTk5Ycd37typDz/8UC1btjzlYz355JMaNmyYWrRooSVLlqhGjRpFbvvll18q3IX5pUuXavz4
8fL5fOrdu/cp1wQAAHAy9uyRrrtO+vDD8ONPPy2NHi2FeboMAFxl9TrA9913nzp27KhOnTpp8uTJ
R4xt3rxZ/fr10/79+3Xfffed0nHeeOMNPf744ypfvrxatWqlCRMmhGxTv3599e3bV5I0ZMgQbd68
WS1btlS9evUkSRs3btSyZcvk8/k0evRoXX755adUEwAAwMn48cdg81vUKpETJ0r/fawUACKPY7kR
I0Y4Pp/PiYqKcipXruxERUU5Z5xxhhMVFeX4fD7nqaeeOuVjjBw50omKijrmV9u2bQu3nzp1qpOY
mOg0aNDAOf30051KlSo55557rtOrVy9n9erVxT6u3+93zjzzzFOuHydm4sSJbpdgHTI3j8zNI3Pz
yDzU1q2OEx/vOMFFj478iopynNdfP7X9k7l5ZG6W3+93/H6/22VYzepJsAosXrxYL730ktauXavs
7GxVrVpVl19+uYYMGXLE+rxlTUJCggKBgHJzc90uxSp+v1+ZmZlul2EVMjePzM0jc/PI/Ej/+pfU
oYO0Y0foWIUK0ttvS927n9oxyNw8MjeLSbDcRwPsYQkJCcrLy9PmzZvdLsUqgUBA8fHxbpdhFTI3
j8zNI3PzyPx/1qyROnaUfvkldCwmRnr//WBzfKrI3DwyN4sG2H00wB7GXzAAAHCqFi6UbrpJ2rcv
dKxGDWnBAompSYDi4fzcfVbPAg0AAICizZwpJSaGb37POkv6+GOaXwBli9WzQFeoUEG+YszP7/P5
eI4WAABY5eWXpXvvDU5xdbTzzpMWLZLq1zdeFgCcEqsb4MsuuyxsA/zrr78WTh7VpEkTVa1a1YXq
AAAAzHMc6cknpccfDz9+8cXB26Lr1DFbFwCUBKtvgV69erU+/vjjkK+vvvpKP/74o5KTk5Wfn695
8+a5XepJy8rKcrsE6yQmJrpdgnXI3DwyN4/MzbMx88OHpb/8pejm9+qrpeXLS6/5tTFzt5E5bGN1
A3wsVatW1WuvvSafz6dHH33U7XIAAABK1cGDUnKy9Le/hR9PSgpe+a1WzWxdAFCSmAX6OO677z7N
nj1bP/zwg9ulnDBmmQMAAMWxb19wDd8FC8KP9+0rTZkilbf64Tng1HF+7j6uAB9HXl6efgm36B0A
AIAH/PprcA3foprfBx6Qpk6l+QXgDfwqO4bNmzdr1qxZatSokdulAAAAlLgdO6TrrpM2bgw//vTT
0tChUjEWzQCAMsHqBnjAgAFhXz906JC2bdumVatW6dChQxo7dqzhygAAAErXli3BK79bt4aORUVJ
f/+71L+/+boAoDRZ3QBPmTLlmOPx8fF66KGH1K9fP0MVlby8vDy3S7BOIBBQfHy822VYhczNI3Pz
yNw8L2e+caN07bXSzp2hYxUrSm+/LXXrZr4uL2ceqcgctrF6EqwtW7aEfT0qKko1atRQ9erVDVdU
shISEgrXM4Y5fr9fmZmZbpdhFTI3j8zNI3PzvJr56tVSp07Snj2hY1WqSB98ILVvb74uybuZRzIy
N4tJsNxn9RVgG57tjY2NdbsE66SkpLhdgnXI3DwyN4/MzfNi5unp0s03S/v3h47VrBmcCOvSS83X
VcCLmUc6ModtrL4C7HV8wgQAAApMnizdfbd0+HDoWL160qJF0v/7f+brAmzC+bn7rLoCnJGRcdLf
27JlyxKsBAAAwAzHkZ54IvgVTuPGweY3Ls5sXQDgBqsa4NatW8t3kvP45+fnl3A1AAAApevQIeme
e4JXf8O55BLpww+l2rXN1gUAbrGqAX7kkUdOugEGAAAoS/btCz7vO29e+PEOHaTZs6XTTzdbFwC4
yaoGePTo0W6XYFx2drbbJVgnNTWVCSUMI3PzyNw8MjevLGf+009SYqK0dm348T59pClTgkseRZKy
nHlZReawTZTbBaB00QCbl5qa6nYJ1iFz88jcPDI3r6xm/u23UuvWRTe/Q4dKb7wRec2vVHYzL8vI
HLax6gqwjeKY0cK49PR0t0uwDpmbR+bmkbl5ZTHz9eul66+Xdu4MHfP5pAkTpEGDzNdVXGUx87KO
zGEb65dBchxHH3zwgT766CN9//33ys3NDdnG5/Ppo48+cqG6U8M06wAA2GPZMqlLF2nv3tCxihWl
6dOl7t3N1wXgfzg/d5/VV4Dz8vLUqVMnLV26VI7jyOfz6Y+fBxT8mYmzAABAJJs5U0pOlg4eDB2r
Vk2aO1e6+mrzdQFApLH6GeBnn31WS5Ys0cMPP6ydO3fKcRw99thjysrK0ptvvqmzzz5bN998s/bv
3+92qQAAAGGNGyf16hW++T37bOnjj2l+AaCA1Q3wu+++q4svvlhjxoxRnTp1JElRUVGqV6+eevfu
reXLl2vevHlMDgAAACLO4cPSAw8Ev8Lx+6U1a6QmTczWBQCRzOoGeOvWrWrVqlXhn30+nw7+4ePT
Ro0aqWPHjpo6daob5ZWIrKwst0uwTmJiotslWIfMzSNz88jcvEjOPDdX6t07ePU3nNatg1d+zznH
bF2nKpIz9yoyh22sboDLly+vmJiYwj+fdtpp2r179xHb1K9fX1u3bjVdGgAAQFi//SbdcIP0zjvh
x7t2lRYtkmJjzdYFAGWB1bNA+/1+NW/eXNOnT5cktWjRQgcPHtT69esLt7n++uv11Vdfafv27W6V
edKYZQ4AAG/ZsSO4zNGGDeHH77lHeuklqVw5s3UBKB7Oz91n9RXgVq1aae0fVonv3LmzNm7cqLvu
uksfffSRhg0bpkWLFqlNmzbuFQkAACBp0ybpiiuKbn7HjJEmTqT5BYBjsXoZpF69eunbb7/Vt99+
q/r16+v+++/XBx98oMmTJ2vKlClyHEcNGjTQM88843apAADAYmvWSJ06SdnZoWPlyklTpki33Wa8
LAAoc6y+BTqcgwcPKi0tTYFAQPXr11fnzp112mmnuV3WSeEWCwAAyr45c4ITXh04EDoWEyPNnh28
LRpA5OP83H3W3QJ96NChY45XqFBBPXv21KOPPqpbb721zDa/BfLy8twuwTqBQMDtEqxD5uaRuXlk
bp7bmTuO9MILUvfu4Zvf2rWlFSu81fy6nbmNyBy2sa4Brlevnh5++GFt2bLF7VKMYBkk85KSktwu
wTpkbh6Zm0fm5rmZ+aFD0r33Sg8+GGyEj9awoZSRIbVoYb620sT73Dwyh22sa4B3796t5557Tuef
f77at2+vd99994i1f70mljUQjEtJSXG7BOuQuXlkbh6Zm+dW5jk5Upcu0ssvhx+/5JJg8xsfb7Yu
E3ifm0fmsI11zwBv375dr732mqZNm6asrCz5fD7VrFlTffv2Vb9+/dS4cWO3SywxPGMAAEDZ8sMP
wcmuvvwy/HhSkjRjhlSlitm6AJQMzs/dZ90V4Hr16unxxx/XN998owULFqhr167as2ePXnjhBfn9
frVp00YzZszg2VkAAGDUV19Jl19edPM7aJCUlkbzCwCnwroGuIDP59N1112n2bNna/v27Ro7dqzi
4+O1atUq9enTR2eddZbuv/9+ZWZmul0qAADwuCVLpNatpW3bQsd8PunFF6WXXmKNXwA4VdbdAn08
q1at0uTJk5WWlqYD/51ysWXLlvr4449druzEcYsFAACRb+pU6a67ghNfHa1yZentt6WuXc3XBaDk
cX7uPmuvABflqquu0ltvvaWtW7eqdevWchxHGRkZbpd10rKzs90uwTqpqalul2AdMjePzM0jc/NK
O3PHkUaMkO68M3zzW6dOcJkjm5pf3ufmkTlsQwN8lK+//lqDBw9WQkKCVq9eLUk677zzXK7q5NEA
m8c/JOaRuXlkbh6Zm1eamefmSn36SKNHhx+/4AJp7Vrp0ktLrYSIxPvcPDKHbcq7XUAk2Ldvn2bO
nKnJkyfr008/leM4io6OVs+ePTVgwAC1adPG7RJPWlxcnNslWCc9Pd3tEqxD5uaRuXlkbl5pZZ6d
Hbyqu2pV+PGrr5bef1+qUaNUDh/ReJ+bR+awjdXPAH/22WeaMmWKZs6cqZycHDmOowsuuED9+/dX
3759y/waujxjAABAZNm6VbrhBmnTpvDjvXtLU6ZI0dFm6wJgBufn7rPuCvCePXv01lvMh9xUAAAg
AElEQVRvacqUKfrqq6/kOI4qVaqkW2+9VQMGDFDr1q3dLhEAAHjQunVSYqK0e3f48REjpCeeCM76
DAAoHdY1wGeddZYOHDggx3GUkJCg/v37Kzk5WdWrV3e7NAAA4FFpadKtt0r/XWDiCOXLS5MmSbff
br4uALCNdQ2wJCUnJ2vAgAG64oor3C4FAAB4mONI48dLDzwQ/P9Hq1o12Bz/+c/mawMAG1k3C/SO
HTs0bdo0a5rfrKwst0uwTmJiotslWIfMzSNz88jcvFPNPD9fuu8+aciQ8M1vXJz0ySc0v3/E+9w8
ModtrLsCXLVqVbdLAAAAHvfbb9LNN0sffhh+vHlzaf58qW5ds3UBgO2sngXa65hlDgAA8775JjjZ
VVH//HbsKM2cKZ12mtm6ALiP83P3WXcLNAAAQGn55BPp0kuLbn5TUqQPPqD5BQC30AAbkJ2drSlT
pujGG2/Ueeedp5iYGFWvXl1XXnmlpk6dqqIuwmdkZOiGG25QzZo1FRMTo6ZNm2rChAk6fPiw4Z8A
AAAcz1tvSe3aST/9FDrm80kvvCD97W/BWZ8BAO6w7ldwTk6OTjP8seusWbM0cOBAnXXWWWrbtq3i
4uL0448/Ki0tTf369dPChQv13nvvHfE9c+fOVbdu3VS5cmX17NlTsbGxmjdvnu6//35lZGTo3Xff
NfozAACA8A4floYPl55+Ovx4lSrSjBlSUpLZugAAoax7BrhRo0aaPn260VmgV6xYod9//10dO3Y8
4vVdu3apRYsW2r59u2bPnq2uXbtKkvbu3atGjRpp7969ysjI0MUXXyxJysvLU9u2bbV27Vq98847
6tGjxzGPm5CQoLy8PG3evLl0fjCEFQgEFB8f73YZViFz88jcPDI3rziZ//671KeP9P774cfPOUea
N09q2rQUCvQg3ufmkblZPAPsPutugc7KytJVV12lxx57TPn5+UaO2aZNm5DmV5Lq1Kmju+++W47j
aMWKFYWvz5o1Sz/99JN69epV2PxKUsWKFTV69Gg5jqNXXnmlWMdmGSTzkviI3zgyN4/MzSNz846X
+fbt0pVXFt38Xn659OmnNL8ngve5eWQO21jXAGdkZKhhw4YaM2aMWrZsqUAg4Go9FSpUkCSV/8MD
QcuXL5fP59O1114bsv1VV12lmJgYZWRk6ODBg8fdf2xsbMkVi2JJSUlxuwTrkLl5ZG4emZt3rMw/
+yw42dWXX4Yf79VLWr5cOvPMUirOo3ifm0fmsI11t0BL0v79+zVkyBC9+uqrqlKlil544QUNGDDA
eB35+flq1qyZMjMztXDhQl1zzTWSpEsvvVRffPGFPv/88yOuABdo0qSJMjMzlZmZqcaNGxe5f26x
AACg5M2aJSUnSwcOhB9/4glpxIjgxFcA8Eecn7vPuivAklS5cmW98sormj9/vqpUqaKBAweqc+fO
2rRpk7KyssJ+lYahQ4fq66+/VseOHQubX0nas2ePJKlatWphv6/g9V9//bVU6gIAAKEcR3rySalH
j/DNb6VK0rvvSo89RvMLAJHKulmg/+iGG27Q119/reTkZM2fP1/z588Pu53P59OhQ4dK9NgvvfSS
xo0bJ7/frzfffLNE9w0AAErWgQPSnXcGZ3MO58wzpblzg7dFAwAil5VXgP9o48aN2rhxoxzH0Rln
nKG4uLiQr3POOadEjzlx4kQNHjxYF154oZYtW6bq1asfMV5whbfgSvDRCl4/+vvC2bZtm6KjoxUf
Hy+/33/EV5MmTZSYmBjyPYFAQH6/X6mpqSFjqamp8vv9YZ+dTkxMZH/sj/2xP/bH/jy3vw4dElW3
bmKY5jcgya+zz04tfCbYCz8v+2N/7O/U91ewD7/fr7p16xaejwcCAWVnZ4d8PwxyLJWXl+c88MAD
Trly5Zzo6Gjn2WefdQ4fPlzqx33xxRcdn8/nNG3a1Nm9e3fYbXr37u1ERUU5M2fODBk7dOiQU6VK
FadixYpOXl7eMY/l9/udM888s0TqRvFNnDjR7RKsQ+bmkbl5ZG7exIkTnQ0bHCcuznGCN0CHfnXp
4jg5OW5X6h28z80jc7P8fr/j9/vdLsNqVl4B/vrrr9WiRQuNGzdOF1xwgdatW6eHHnpIvlJ+YGfs
2LEaMmSImjdvruXLl6tWrVpht2vXrp0cx9HChQtDxlauXKl9+/apVatWhTNIHwufMJkX7pNElC4y
N4/MzSNz88aOTVWrVlJRU4E8/LA0Z45UpYrZuryM97l5ZA7bWNcAjx8/Xi1atNBXX32lQYMG6Ysv
vlBTAwv0Pfnkkxo2bJhatGihJUuWqEaNGkVu261bN9WqVUszZ87UF198Ufh6bm6uhg8fLp/Pp4ED
BxbruHFxcadcO05Menq62yVYh8zNI3PzyNwcx5FeeEHati1dOTmh4xUqSK+/Lj39tBRl3ZlU6eJ9
bh6ZwzbWLYMUFRWlunXratq0aerQoYORY77xxhu6/fbbVb58ed17771hZ3euX7+++vbtW/jnuXPn
qnv37oqOjtbNN9+s2NhYpaen6z//+Y+6d++umTNnHve4TLMOAMCJyc2VUlKk114LP16rlvT++1Lr
1mbrAuANnJ+7z7pZoLt27arJkycrNjbW2DG//fZb+Xw+5efna8KECWG3ufrqq49ogDt37qyVK1dq
zJgxSktL04EDBxQfH68XX3xRgwYNMlU6AADW2LlTuvFGac2a8ON+vzRvntSwodm6AAAlx7orwDbh
EyYAAIrns8+krl2l778PP37dddLMmVKYm7gAoNg4P3cfT64AAACrTZ8uXXll0c3vffcFr/zS/AJA
2UcD7HFZRU1diVITbm04lC4yN4/MzSPzknfokPTgg1KfPsFnf4/m8yXq73+XJkyQylv30Jg7eJ+b
R+awDb/OAQCAdX75Rbr5ZmnRovDjtWtL550n3XWX2boAAKWLZ4A9jGcMAAAIlZkpde4sBQLhx5s3
D870zEqCAEoa5+fu4xZoAABgjfR06bLLim5+e/WSPv6Y5hcAvIoGGAAAeN7hw9KTTwav/ObkhI77
fNLYsdLbb0sxMebrAwCYwTPAAADA03JypNtuk+bMCT9erZo0Y4Z0ww1GywIAuIArwB6Xl5fndgnW
CRR1Xx1KDZmbR+bmkfnJ+eYbqWXLopvfxo2lTz8N3/ySuXlkbh6ZwzY0wB7HMkjmJSUluV2Cdcjc
PDI3j8xP3LJlUosW0ldfhR/v2FFat046//zw42RuHpmbR+awDQ2wx8XGxrpdgnVSUlLcLsE6ZG4e
mZtH5sXnONLf/iZ16CD9/HP4bR55RJo7N3j7c1HI3DwyN4/MYRuWQfIwplkHANgmN1e65x5p6tTw
45UrS9OmST17mq0LACTOzyMBk2ABAABP2LFDuvFGae3a8ONxccGrvs2ama0LABA5uAUaAACUeevW
SX/6U9HN71VXSZ9/TvMLALajAQYAAGWW40ivvhpscH/4Ifw2KSnSkiVS7dpmawMARB4aYI/Lzs52
uwTrpKamul2CdcjcPDI3j8xD7d8v3XmndPfdUrhV/ypUkCZNkiZODP7/E0Xm5pG5eWQO2zAJlocl
JCQoEAgoNzfX7VKs4vf7lZmZ6XYZViFz88jcPDI/0rffSjfdJP3zn+HH69SR0tKkVq1O/hhkbh6Z
m0fmZjEJlvu4AuxxcXFxbpdgnfT0dLdLsA6Zm0fm5pH5/yxaJF1ySdHN75/+FHze91SaX4nM3UDm
5pE5bMMVYA/jEyYAgJccPiw9/bQ0YkTw2d9w+veXXnpJqlTJbG0AUBycn7uPZZAAAEDE27NHSk6W
irpYFR0tpaYGnwkGAKAoNMAAACCi/etfwfV9N28OPx4XJ82ZE7z1GQCAY+EZYAAAELHefVe67LKi
m9/27aUvvqD5BQAUDw2wx2VlZbldgnUSExPdLsE6ZG4emZtnW+YHD0pD/n97dx4eRZW2f/zuZg9b
CJsohgARNBEEF0ZkRwRHTERGFJVFEBWMOILD4MoPFLdRx42orIooLrhGB0WGVVl0QMVBEGgQAiIy
ENkhCUn9/qgXENOdhaRPdVd9P9eVa167Tlc/ua96tZ+cqnNGSn37SocOBR9z773SZ59JdeqEpwav
ZR4JyNw8MofXcAs0AACIKDt2SNddJy1eHPx49erS9OnS1VebrQsAEP1YBdrFWGUOABBtli6V+vSR
tm8Pfjwpyd7ft3lzs3UBQFng+7nzuAUaAAA4zrLsVZw7dw7d/PbpI331Fc0vAODU0QADAABHHTpk
b3F0xx32s79/VK6c9NRT9oJY1aqZrw8A4B48AwwAAByzaZO9xdGqVcGP16tnN76dOxstCwDgUswA
u1xOTo7TJXhOIBBwugTPIXPzyNw8N2aekSFdcEHo5vfii+0tjpxqft2YeaQjc/PIHF5DA+xybINk
XmpqqtMleA6Zm0fm5rkp89xcadQo6aqrpD17go8ZNkxauFBq2NBoaSdxU+bRgszNI3N4DbdAu1xc
XJzTJXhOWlqa0yV4DpmbR+bmuSXzrVvtvX2XLg1+vHJl6eWXpYEDzdYVjFsyjyZkbh6Zw2vYBsnF
WGYdABBJPvtM6tdP2r07+PGEBHuLo9atjZYFAMbw/dx53AINAADC6uhR6YEHpD//OXTze+WV9vO+
NL8AgHDiFmgAABA2v/wi3XCD/TxvMOXKSY89Jt19t+Tnz/IAgDCjAQYAAGExf77d/P76a/DjZ5wh
vfWW1L692boAAN7F31pdLisry+kSPCc9Pd3pEjyHzM0jc/OiKfO8POmhh6Ru3UI3v927S99+G9nN
bzRl7hZkbh6Zw2tYBMvFkpOTFQgElJ2d7XQpnpKUlKQ1a9Y4XYankLl5ZG5etGS+c6e90NXcucGP
+/3SuHHSffdF/i3P0ZK5m5C5eWRuFotgOY9boF0uPj7e6RI8JyMjw+kSPIfMzSNz86Ih8y++sLc4
2r49+PH69aWZM6WuXc3WdaqiIXO3IXPzyBxewwywi/EXJgCACfn50pNPSvffb9/+HEznztKbb0qn
nWa0NACIKHw/d16E33wEAAAi2e7dUmqqdM89oZvfBx6wb4mm+QUAOI1boAEAwCn56ivp2mulzMzg
x2vXll5/Xbr8crN1AQAQCjPAAACgRCxLevZZqUOH0M1vu3bSd9/R/AIAIgsNsMtlhvpmgrBJSUlx
ugTPIXPzyNy8SMk8K0vq3VsaMULKzQ0+ZtQoacECqWFDs7WVtUjJ3EvI3Dwyh9dwCzQAACiWL76Q
brhB2rYt+PHYWGn6dPuZYAAAIhGrQLsYq8wBAMpCXp40frz00EP2is/BXHSR9M47UkKC0dIAIKrw
/dx5zAADAICQtm6V+vWTFi8OPWb4cHsbpEqVzNUFAMCpoAEGAABBffSRNHiw/dxvMLGx0pQp0l/+
YrYuAABOFYtgAQCAkxw5It1xh9SrV+jm99gqzzS/AIBoQgPscjk5OU6X4DmBQMDpEjyHzM0jc/NM
Zb52rdSmjZSeHvy4zyc9+KC0cKHUqJGRkhzDdW4emZtH5vAaGmCXYxsk81JZ/tQ4MjePzM0Ld+aW
Zd/OfMEF0n//G3zM6adL8+fbi2GV98BDVFzn5pG5eWQOr6EBNuS9997TnXfeqY4dO6pmzZry+/0a
MGBA0LFbtmyR3+8P+XPDDTcU+3Pj4uLK6ldAMaWlpTldgueQuXlkbl44M9+zR+rbV7rlFunw4eBj
UlKkVaukzp3DVkbE4To3j8zNI3N4DdsgGdK6dWt9//33qlatmho2bKgff/xRN954o1577bUCY7ds
2aLGjRurVatW6tWrV4Hj5557rnr37l3kZ7LMOgCgKMuXS9dfL23eHPx4xYrSU0/ZzwT7fEZLAwDX
4fu58zxwA1NkePbZZ9WwYUM1bdpUixYtUpcuXYp8T6tWrTRmzBgD1QEAvCY/X/rHP6QHHrD3+Q2m
eXPprbekVq3M1gYAQLjQABvSqVMnp0sAAECS9MsvUv/+0rx5occMHiw9/7xUtaq5ugAACDca4Ai2
fft2TZo0Sbt371bt2rXVtm1btWjRwumyAABR7NNPpYEDpf/9L/jx6tWliRPt26IBAHAbFsGKYHPn
ztWwYcP0wAMPaNiwYTrvvPPUtWtXbd26tdjnyAq1gSPCJj3U3iEIGzI3j8zNK23m2dnSyJHSFVeE
bn7btLH39qX5tXGdm0fm5pE5vIYGOALFxMRozJgxWrlypX777Tf99ttvWrRokbp27aqFCxeqW7du
Ohxqmc4/oAE2j/+QmEfm5pG5eaXJfPVqu7l95pnQY0aPlr78UmrS5JQ/xnW4zs0jc/PIHF5DAxyB
6tatq7Fjx6pVq1aqUaOGatSoofbt22vOnDn605/+pEAgoClTphTrXPHx8WGuFn+UkZHhdAmeQ+bm
kbl5p5J5fr707LPShRdK338ffEz9+tLnn0uPPy5VqFDKIl2G69w8MjePzOE1NMBRpFy5choyZIgs
y9LixYuL9Z5ff/1VlSpVUmJiopKSkk76adGihVJSUgq8JxAIKCkpKehfBNPT05WUlKRAIFDgWEpK
Cuf7P5FcnxvPl5iYGNH1ufF8iYmJEV2fG8+XmJhYovP9/LPUo4c0YoSUnZ0iqWB97dsHVKNGktav
j7zfNxLOl5iYGNH1ufF8xzKP1PrceL5jmUdqfdF6vmPnSEpKUoMGDY5/Hw8EAtyh6TD2AXbAsW2Q
+vXrF3Qf4MJkZGSoV69euvzyyzV79uxCx7LPGAB406xZ0m23Sb/9Fvx4hQrSo4/azwT7+VM4ABjD
93PnsQp0lFm2bJkkqQkPaQEA/mDfPmn4cKmwv62ec470xhtS69bm6gIAIFLwd98I9O233yrYxPy8
efP07LPPyufzqV+/fg5UBgCIVF98IZ13XuHN7/Dh0sqVNL8AAO+iATbko48+0qBBgzRo0CA9/vjj
kqSlS5cef23UqFHHx44cOVJnnnmmrr32Wo0cOVIjR45Ut27ddNlllyknJ0fjx4/XxRdfXKzPzczM
DMvvg9BCPReM8CFz88jcvFCZ5+RI990ndeokbd4c/L0NGkiffSY9/7xUpUr4anQbrnPzyNw8MofX
cAu0Id99991Jz/v6fD799NNP+umnnyRJCQkJevLJJyVJAwYM0AcffKAVK1bos88+U25ururXr6++
ffsqLS1N7dq1c+R3AABElh9/lG68Ufrmm9BjeveWJk2Satc2VxcAAJGKRbBcjIfsAcCdLEt68UVp
1Cgp1Lbw1arZM7433ST5fEbLAwCEwPdz5zEDDABAFNmxQxo8WPr009BjLrlEmjFDYr1EAABOxjPA
AABEiQ8/lFq0CN38li8vjR8vLVpE8wsAQDDMAAMAEOEOHJDuukuaOjX0mGbNpNdfly66yFxdAABE
G2aAXS4nJ8fpEjwnEAg4XYLnkLl5ZG7OsmVSq1bS1KmhMx861F4Ii+a3bHGdm0fm5pE5vIYG2OXY
Bsm81NRUp0vwHDI3j8zD78gR6Z57pPbtpY0bJalg5vXqSZ98Ir30klS1qvESXY/r3DwyN4/M4TXc
Au1ycXFxTpfgOWlpaU6X4Dlkbh6Zh9eKFdLAgdKaNb9/9eTMU1KkKVPsJhjhwXVuHpmbR+bwGrZB
cjGWWQeA6JKTIz38sPTYY1JeXvAxMTHSs89KQ4awvREARBu+nzuPGWAAACLAqlX2rO+qVaHHtGlj
L3R11lnm6gIAwE14BhgAAAfl5tqzvhdeGLr5rVBBeuQRackSml8AAEqDGWAAABzyww/2rO/KlaHH
tG4tTZ9u7/8LAABKhxlgl8vKynK6BM9JT093ugTPIXPzyLx08vKkJ56Qzj8/dPNbvrz0//6f9NVX
dvNL5uaRuXlkbh6Zw2tYBMvFkpOTFQgElJ2d7XQpnpKUlKQ1Jy/dijAjc/PI/NStWyfddJO0fHno
Meeea8/6nn/+idfI3DwyN4/MzSNzs1gEy3nMALtcfHy80yV4TkZGhtMleA6Zm0fmJZefLz3zjNSq
Vejm1++X7r3X3gbp982vROZOIHPzyNw8MofXMAPsYvyFCQAiw8aN0qBB0hdfhB5z9tn2rG+bNubq
AgCYxfdz5zEDDABAmOTnSy++KLVsGbr59fmku++WvvmG5hcAgHBjFWgAAMJg82bp5pul+fNDj0lM
lF59VWrXzlRVAAB4GzPAAACUofx86eWX7VnfwprfO++UvvuO5hcAAJNogF0uMzPT6RI8JyUlxekS
PIfMzSPz4Navl7p0kYYNk/bvDz4mIUFasEB67jmpatXin5vMzSNz88jcPDKH19AAAwBQSrm50uOP
27O+ixeHHjd0qPT991LnzsZKAwAAv8Mq0C7GKnMAEH7ffms/6/vtt6HHNGwoTZsmXXaZuboAAJGH
7+fOYwYYAIBTcPiwvWfvRRcV3vzefLO0ejXNLwAAkYBVoAEAKKHFi6UhQ6QNG0KPadJEmjRJuvRS
c3UBAIDCMQMMAEAx7dtnL3DVqVPo5tfvt/f1/e9/aX4BAIg0NMAul5OT43QJnhMIBJwuwXPI3Dwv
Zv6vf0nJyfYWR6G0aCEtXy499ZQUE1O2n+/FzJ1G5uaRuXlkDq+hAXY5tkEyLzU11ekSPIfMzfNS
5v/7n3TDDdKVV0rbtgUfU6GC9NBD0ooV9jPB4eClzCMFmZtH5uaRObyGZ4BdLi4uzukSPCctLc3p
EjyHzM3zQuaWJc2cKf31r9Lu3aHHtW0rTZkiJSWFtx4vZB5pyNw8MjePzOE1bIPkYiyzDgCnZutW
e8/e2bNDj6laVXr0USktTSpXzlxtAIDoxfdz5zEDDADA/8nPt5/xHT1aOnAg9Lju3aWJE6WEBGOl
AQCAMkADDACApB9+kG67TVqyJPSYWrWkZ56RBgyQfD5ztQEAgLLBIlgAAE87dEi6916pVavCm98+
faS1a6WBA2l+AQCIVjTALpeVleV0CZ6Tnp7udAmeQ+bmuSXz2bPtrY0ef1w6ejT4mAYNpA8+kN55
R6pf32x9v+eWzKMJmZtH5uaRObyGBtjlaIDN4z8k5pG5edGe+c8/2zO6PXtKmzeHHnfLLdKaNVKv
XsZKCynaM49GZG4emZtH5vAangF2ufj4eKdL8JyMjAynS/AcMjcvWjPPy5PS06UHHpD27w897qyz
7MWwunY1V1tRojXzaEbm5pG5eWQOr2EbJBdjmXUAOGHFCntro5UrQ4+pWNF+Hviee6TKlc3VBgDw
Br6fO48ZYACAq+3dKz34oD3zm58felzXrtKLL0rNm5urDQAAmEUDDABwJcuS3n1X+utfpV9+CT2u
bl3pn/+UbryR1Z0BAHA7FsECALjOpk32AlfXXlt483vrrdK6dVK/fjS/AAB4AQ2wy2VmZjpdguek
pKQ4XYLnkLl5kZp5To702GP21kaffhp6XIsW9p6/EydKtWqZq680IjVzNyNz88jcPDKH13ALNADA
Fb74wl7kas2a0GNiYqSxY6W77pIqVDBWGgAAiBCsAu1irDIHwAt27ZJGj5amTSt8XEqK9MILUqNG
ZuoCAOCP+H7uPG6BBgBEpbw8e9XmZs0Kb34bNpQ++ED66COaXwAAvI5boAEAUefLL6U77pBWrQo9
xu+3V4AeN06qXt1cbQAAIHLRAAMAosb27dLf/y698Ubh49q0kV5+WWrd2kxdAAAgOnALtMvl5OQ4
XYLnBAIBp0vwHDI3z3TmOTnSk09KzZsX3vzWrGnfFr10qfuaX65z88jcPDI3j8zhNTTALsc2SOal
pqY6XYLnkLl5JjP//HOpZUt75vfAgdDjBg6UfvxRGjZMKlfOWHnGcJ2bR+bmkbl5ZA6v4RZol4uL
i3O6BM9JS0tzugTPIXPzTGS+ebM0cqS9gFVhzj9fmjBBats27CU5iuvcPDI3j8zNI3N4DdsguRjL
rAOIRocPS//4h/T449KRI6HH1a4tPfqodPPN7pzxBQC4D9/PnccMMAAgIliW9OGH9qzv5s2hx/n9
0tCh0sMPS9zkAgAASoIGGADguHXrpDvvtJ/3LUy7dvbtzq1amakLAAC4C4tgGfLee+/pzjvvVMeO
HVWzZk35/X4NGDCg0PcsXbpUV1xxhWrXrq2YmBidd955eu6555Sfn2+oagAIr/377cWtWrQovPk9
7TRpxgzpiy9ofgEAwKmjATZk/PjxSk9P16pVq9SwYUP5fL5Cx3/00Ufq1KmTvvzyS/Xu3VvDhw9X
bm6uRowYoeuvv77Yn5uVlVXa0lFC6enpTpfgOWRuXmkztyx7O6Pmze3tjXJzg48rX17629/sGeJ+
/aQi/tXpalzn5pG5eWRuHpnDa1gEy5BFixapYcOGatq0qRYtWqQuXbqoX79+eu211wqM3b9/v5o2
bar9+/dr6dKlav1/m1nm5OSoS5cuWr58ud58801de+21hX5mcnKyAoGAsrOzw/I7IbikpCStWbPG
6TI8hczNK03mS5faz/l+9VXh4y67THr+eenss0/pY1yH69w8MjePzM0jc7NYBMt5zAAb0qlTJzVt
2rRYY2fNmqVdu3bp+uuvP978SlLFihU1fvx4WZall156qVjnio+PP6V6ceoyMjKcLsFzyNy8U8l8
82bpuuvs53gLa34bNZLef1+aM4fm9/e4zs0jc/PI3Dwyh9ewCFYEWrBggXw+n3r06FHgWMeOHRUT
E6OlS5cqNzdXFSpUKPRcFStWDFeZCCExMdHpEjyHzM0rSeb79kmPPSY984xU2A0plStLo0fbzwTH
xJRBkS7DdW4emZtH5uaRObyGGeAItG7dOklSs2bNChwrV66cGjdurKNHj2rTpk2mSwOAYjt6VJo4
UUpMtPf0Laz57dVLWrNGGjuW5hcAAIQPM8ARaO/evZKkmjVrBj1+7PU9e/YYqwkASmLuXPs539Wr
Cx/XooX09NP2874AAADhxgwwAKDMrF0r9ewpde9eePNbr540aZL07bc0vwAAwE/olowAACAASURB
VBwa4Ah0bIb32EzwHx17PTY2tshzrVu3TpUqVVJiYqKSkpJO+mnRooVSUlIKvCcQCCgpKSnosvjp
6elKSkpSIBAocCwlJYXzSeratWtE1+fG8x07V6TW58bzpaSknHS+XbukO+6wZ3Rnz5aklP/7OVnF
igHVrp2kv/89XbfcIpUrFx2/byScLyUlJaLrc+P5jp0jUutz4/mOjYvU+tx4vmP/d6TWF63nO3aO
pKQkNWjQ4Pj38UAgwDalDmMbJAcUtQ1S//79NXPmTM2cOVPXXXfdScfy8vJUs2ZN5ebm6sCBA4Uu
gpWcnKzMzEzt37+/zH8HhJaSkqKPP/7Y6TI8hczNO5Z5drY0YYL08MNSiL/ZHde3r/0scKNGZmp0
G65z88jcPDI3j8zNYhsk5zEDHIG6du0qy7L02WefFTi2aNEiHTp0SO3atStyBWiJbZCcwH9EzCNz
8zIyPtb770vJydLf/lZ483vxxfbev2++SfNbGlzn5pG5eWRuHpnDa2iAI9A111yjOnXq6K233tLK
lSuPv56dna0HHnhAPp9Pw4YNc7BCAF62cqXUubP0l79IGzeGHhcfbze9S5dKbdsaKw8AACAkVoE2
5KOPPtKHH34oSdqxY4ckaenSpRo0aJAkqU6dOnryySclSdWrV9fkyZPVp08fde7cWX379lVcXJwy
MjK0fv169enTR3369HHmFwHgWZs3Sw8+KL3+euHjqlWT7rtPuusuqUoVI6UBAAAUCw2wId99991J
z/v6fD799NNP+umnnyRJCQkJxxtgSbrqqqu0aNEiPfLII3r//fd15MgRJSYm6plnntHw4cON1w/A
u/73P+mRR6SXXpJyckKP8/ulm2+2nweuX99cfQAAAMXFIlgulpycrJycHG3YsMHpUjwlEAgoMTHR
6TI8hczD48AB6ZlnpCeflAqupReQdCLzSy+V/vlPqWVLkxV6C9e5eWRuHpmbR+ZmsQiW83gG2OUy
MzOdLsFzUlNTnS7Bc8i8bOXkSOnpUmKiNGZMsOZXkuzMmzeXPvlEmjuX5jfcuM7NI3PzyNw8MofX
cAu0y8XFxTldguekpaU5XYLnkHnZyM+X3nlHeuCBwhe3kqSqVdP02GPS0KFSMRakRxngOjePzM0j
c/PIHF7DLdAuxi0WAIpr7lzpnnukb74pfFzVqtLdd9s/NWqYqQ0AALfg+7nzmAEGAA9bscJufOfN
K3xc+fLSbbfZq0CzwBUAAIhWNMAA4EGBgH2r89tvFz22b197ZWfWSAEAANGOBhgAPGTHDruZnTRJ
Onq08LHdu0uPPSadf76Z2gAAAMKNVaBdLisry+kSPCc9Pd3pEjyHzIu2b599+3LTptKLLxbe/F5w
gfTvf0tz5oRufsncPDI3j8zNI3PzyBxewyJYLpacnKxAIKDs7GynS/GUpKQkrVmzxukyPIXMQzt4
0G54n3hC2r278LGJidIjj0jXXCP5i/jzKJmbR+bmkbl5ZG4emZvFIljO4xZol4uPj3e6BM/JyMhw
ugTPIfOCDh+WJk60b2HeubPwsfXrS2PHSjffXPwtjcjcPDI3j8zNI3PzyBxewwywi/EXJsB7srOl
yZOlRx+Vfvml8LHVq0ujR0t33WVvbwQAAMKL7+fOYwYYAFwgJ0d65RVp/Hhp27bCx1asKKWlSffd
J9WpY6Y+AACASEADDABRLDdXmjHDXtl58+bCx/r9Uv/+0rhxUqNGRsoDAACIKDTAABCF8vKkmTPt
ZnbjxsLH+nzS9ddL/+//Sc2amakPAAAgErENkstlZmY6XYLnpKSkOF2C53gp8/x86a23pORkacCA
opvfPn2k1aulN94o2+bXS5lHCjI3j8zNI3PzyBxewwwwAESB/Hzp/fft1ZqLs25Gr1727HDLlmEv
DQAAIGqwCrSLscocEP0sS8rIsG9fXrWq6PFXXmk3yRdcEPbSAABACfH93HnMAANABLIsafZsu/Fd
ubLo8T162DO+f/pT+GsDAACIVjTAABBB8vLsW50ffVT67ruix3ftaje+7duHvzYAAIBoRwMMABEg
N1d680278V23rujx7dvbWx917hz20gAAAFyDVaBdLicnx+kSPCcQCDhdgudEc+ZHjkgvv2yv0Dxw
YNHN78UXS3PnSosXO9v8RnPm0YrMzSNz88jcPDKH19AAuxzbIJmXmprqdAmeE42ZHzwoPfOM1KSJ
NGyYtHlz4eMvvNB+JnjpUqlbN3tvXydFY+bRjszNI3PzyNw8MofXcAu0y8XFxTldguekpaU5XYLn
RFPme/dKEyZIzz4r7dpV9PiLL5buv1/q2dP5pvf3oilztyBz88jcPDI3j8zhNWyD5GIssw5Ejl27
7KZ3wgS7CS5K165249ulS2Q1vgAA4NTx/dx5zAADQBht3y499ZQ0caJ06FDR43v2tBvftm3DXxsA
AIDX0AADQBhs3iw98YQ0bZpU1Fp0Pp90zTXSffdJrVoZKQ8AAMCTaIABoAz98IP05JPS66/be/oW
plw56cYbpXvvlc4+20x9AAAAXsYq0C6XlZXldAmek56e7nQJnuN05pYlLVhg37587rnS9OmFN78V
K0q33SZt2GCPjcbm1+nMvYjMzSNz88jcPDKH17AIloslJycrEAgoOzvb6VI8JSkpSWvWrHG6DE9x
KvOjR6V337Wf8V25sujxVapIQ4dKd98tnXFG+OsLJ65z88jcPDI3j8zNI3OzWATLedwC7XLx8fFO
l+A5GRkZTpfgOaYz379fmjrVXtV5y5aix9eoId1xh3TXXVLduuGvzwSuc/PI3DwyN4/MzSNzeA0z
wC7GX5iAsrV9u/TCC9LLL0t79hQ9vnZtu+m94w4pNjb89QEAgMjG93PnMQMMAEX44Qfp6aftha1y
c4se37ixNGKENHiwVLVq+OsDAABA8dAAA0AQxxa2euop6dNPi/eeiy6SRo2Srr5aKs+/XQEAACIO
X9EA4HeOHpVmzbIb32++Kd57UlKkv/1N6tDB3tMXAAAAkYltkFwuMzPT6RI8JyUlxekSPKcsMt+/
317UqmlT6YYbim5+K1WSbrlFWrtWysiQOnb0VvPLdW4emZtH5uaRuXlkDq9hBhiApwUC0oQJ0iuv
SPv2FT0+Lk66/XZ7Yav69cNfHwAAAMoOq0C7GKvMAcHl50uff26v6Dx7dvHe06SJNHKkdNNNLGwF
AABODd/PnccMMADP2LdPmj7dnvFdv75472nT5sTCVuXKhbc+AAAAhBcNMADXW7fObnpffVU6cKB4
70lNtRvfdu289WwvAACAm9EAA3Cl/Hx7+6IXXpDmzCnee6pUkfr3t291bt48vPUBAADAPFaBdrmc
nBynS/CcQCDgdAme8/vM9+yRnnlGatZMuvLK4jW/jRvb2x79/LM0cSLNb3FwnZtH5uaRuXlkbh6Z
w2togF2ObZDMS01NdboEz0lNTdWaNfbqzA0b2jO4GzcW/b5u3aSPPpI2bJDuvluqVSv8tboF17l5
ZG4emZtH5uaRObyGW6BdLi4uzukSPCctLc3pEjwjL0/617+kvLw0/d+iikWqWlUaONDexuicc8Jb
n5txnZtH5uaRuXlkbh6Zw2vYBsnFWGYdbrVtmzR1qv2zdWvx3tO0qd30Dhok1awZ3voAAACC4fu5
85gBBhAVjh6VPvtMmjTJnvXNzy/e+y6/XBo+3P5fPw99AAAAeBoNMICItnXridnebduK957q1aWb
bpLS0ljQCgAAACfQAAOIOEePSrNn27O9n35a/Nne5s3t25wHDJBq1AhvjQAAAIg+3BDocllZWU6X
4Dnp6elOlxC1MjOlMWOkRo2kq64q3q3Ofr907rnp+uwzac0auwGm+Q0/rnPzyNw8MjePzM0jc3gN
DbDL0QCbx39ISuboUXsrop49pYQE6eGHpe3bi35ffLz00EN205yXl64ePXjG1ySuc/PI3DwyN4/M
zSNzeA23QLtcfHy80yV4TkZGhtMlRIXNm+3neqdNK17DK0nlyklXXindeqvUo4f9zxKZO4HMzSNz
88jcPDI3j8zhNWyDFMESEhKUmZkZ9Nhpp52m7UV0DSyzjkhz+LA92/vqq9Lnn0vF/bdPfLx0yy32
FkZnnBHWEgEAAMKG7+fOYwY4gvl8PsXGxmrEiBH6498pqlWr5lBVQMlYlrR0qTR9uvTOO9LevcV7
X7lyUkqKdNtt0mWXnZjtBQAAAE4VDXCEi42N1YMPPuh0GUCJbdkizZghvfaatGFD8d/XqNGJ2d7T
Tw9ffQAAAPAeGmAAZebAAem99+zZ3gULiv++cuXsVZ9vvVXq1o3ZXgAAAIQHa6ZGuOzsbL3xxht6
7LHH9Pzzz2vhwoXKL+6mqFLIZ4gRPikpKU6XYFR+vt3s3nSTdNpp9v8Wt/lNSJAeeUTautVunH+/
sFVJeC3zSEDm5pG5eWRuHpmbR+bwGmaAI9yOHTs0YMCA4/9sWZYaN26sV155RR07dnSwMnhdIGDP
9M6YYd/uXFxVq0rXXCMNHCh16sTWRQAAADCHVaAj2MMPP6wOHTooOTlZ1atX16ZNmzRhwgRNnDhR
VapU0bJly9SiRYuQ72eVOZS1vXvthaymT5eWLCnZe7t2tZve3r0l1nADAABexPdz59EAR6FRo0bp
6aef1tVXX6333nsv5Dj+Hwxl4cgR6dNPpbfekjIy7H8ursREu+nt399e3AoAAMDL+H7uPG4+jEJD
hw6VJC1evLjIsVu3blWlSpWUmJiopKSkk35atGgR9LmPQCCgpKQkpaenFziWnp6upKQkBQKBAsdS
UlI4n0vOt3ZtQJ9+ajev9erZs7bvvJOiI0eCPScUkJQkyT5fzZr2YlZLlkh//Wu6Zs5MUm5uZP++
nI/zcT7Ox/k4H+fjfGV5vmPnSEpKUoMGDY5/Hw8EAsrKyirwfpjDDHAU2rdvn2JjY1W5cmUdOnQo
5Dj+woSSyMuTFi+2Z3rffVcqyb+b/X57AauBA6XUVKlKlfDVCQAAEK34fu48ZoCj0LJlyyRJTZo0
KXJsTk5OuMvBHwT7a2Gksixp2TLpr3+VGja0n9OdNKn4zW9ysvSPf9irOM+eLV13nTPNbzRl7hZk
bh6Zm0fm5pG5eWQOr6EBjlA//vhj0NndzZs364477pDP51P//v2LPA/bIJmXmprqdAmFsizp22+l
0aOlxo2lSy6Rnn9e2rGjeO+vXVsaPlxasUL673+lUaOk008Pb81FifTM3YjMzSNz88jcPDI3j8zh
NWyDFKHefvttPf300+rYsaMaNWqk6tWra+PGjfrXv/6l7Oxs9ezZU3fffXeR54mLizNQLX4vLS3N
6RKCWrvWvr35rbek9etL9t4aNaSrr5b69pUuvVSqUCE8NZ6qSM3czcjcPDI3j8zNI3PzyBxewzPA
EWrx4sWaOHGivv32W+3YsUMHDx5UbGysWrVqpQEDBujGG28s8hw8Y4B166T335fefltatapk761S
xX6et29f6fLLpcqVw1MjAACAV/D93HnMAEeojh07qmPHjk6XgShjWdLKldIHH9g/a9eW7P0VK0p/
/rPd9F55Jfv1AgAAwF1ogIEod/So9MUXdsP74Yf2glQlUa6c1K2b3fT26iXFxoanTgAAAMBpNMBA
FDp8WJo71256P/5Y2r27ZO/3+aQOHaTrr5f+8hepbt3w1AkAAABEElaBdjk22jYv2IbqZWHPHumN
N6RrrrEb1quukl59tWTNb5s20j//KWVmSosWSUOHuqP5DVfmCI3MzSNz88jcPDI3j8zhNSyC5WLJ
yckKBALKzs52uhRPSUpK0po1a8rkXL/8In30kT3Tu2CBlJtb8nNcfLG9gvM110jF2Do6KpVl5ige
MjePzM0jc/PI3DwyN4tFsJzHLdAuFx8f73QJnpORkXHK77UsafVqafZsu/Fdvtx+rSTKl5e6dLGb
3quucn6PXhNKkzlODZmbR+bmkbl5ZG4emcNrmAF2Mf7CFB0OHJDmzbOb3tmzpW3bSn6OKlXsrYp6
95Z69pRq1Sr7OgEAAFA6fD93HjPAgGGWJa1ff6LhXbxYyskp+Xlq1ZJSUuyZ3u7dpZiYsq8VAAAA
cBMaYMCAw4elhQtPNL2bNp3aec44w96q6OqrpY4dpQoVyrRMAAAAwNVogIEw+emnEw3v/PnSkSOn
dp7mze2G9+qrpQsvlPys3Q4AAACcEr5Ku1xmZqbTJXjGkSP2s7xNm6bonHPsFZfvuMNugEvS/B5b
xOrJJ6U1a6Qff5Qee8zewojmN7iUlBSnS/AcMjePzM0jc/PI3Dwyh9cwAwycorw8aeVKu+mdN09a
suTUZ3kbNJCuuML+6dZNqlGjbGsFAAAAwCrQrsYqc2XLsuwZ2fnz7YZ34UJp795TO5ffL7Vte6Lp
Pe88yecr03IBAAAQYfh+7jxmgIFCbNlyYoZ3/nxpx45TP1edOtKf/2w3vN27S3FxZVcnAAAAgKLR
AAO/s2vXiRneefOkjRtLd76LLjoxy8sCVgAAAICzaIDhaVlZ0tKl0oIFdsO7alXpzlevntS1qz3T
26OHVL9+2dQJAAAAoPRogF0uJyfH6RIihmXZtzR/+eWJn9I+flG9utSpk3TppfbPuedKGzcGlJiY
WDZFo1gCATI3jczNI3PzyNw8MjePzOE1LILlYsnJyQoEAsrOzna6FEfk5Unff3+i2V2yRPr559Kd
s2JF6ZJLTjS8F11kb1v0e0lJSVqzZk3pPgglQubmkbl5ZG4emZtH5uaRuVksguU8ZoBdLs5DKy0d
PCh9/fWJhnfZMmn//tKd0+eTLrjgRMPbrp0UE1P4e9LS0kr3oSgxMjePzM0jc/PI3DwyN4/M4TXM
ALuY2//C9Ouv9qzusYb322+lo0dLf95zzrGb3a5dpc6dpVq1Sn9OAAAAwO3fz6MBM8CICvv2SStX
Sv/5z4mfLVvK5txnn23P7HbubDe9p59eNucFAAAAEFlogBFxDh+WvvvuRKO7YoW0bp29iFVpVahg
39Lcvr39c8klUt26pT8vAAAAgMhHAwxH5ebaKzH/fmZ39eqyuZVZkmrUsJvcYw3vRRcV/QwvAAAA
AHfyO10AwisrK8vpEo47elRas0aaMUO6806pbVu7QW3dWrr1VmnyZHvmtzTNb8OG0vXXS+np9p6+
WVnSp59K999vb1dkovlNT08P/4fgJGRuHpmbR+bmkbl5ZG4emcNrWATLxZzcBmnnTnsLot//rFkj
lWUpfr+UnHxidrd9eyk+vuzOf6rYTsA8MjePzM0jc/PI3DwyN4/MzWIRLOdxC7TLxYe5IzxyRFq7
tmCzu3Nn2X9WYqJ04YX2bcwXXWTPHFerVvafU1oZGRlOl+A5ZG4emZtH5uaRuXlkbh6Zw2uYAXax
svwLk2VJmZnSf/97cqO7fr2Ul1fq0xfQsOHJze6FF7IdEQAAAKIbM8DOYwYYJzl8WAoE7FWX162z
G9x166Qff5T27g3PZ9aufaLRPdbsNmgQns8CAAAA4F00wB6Uny9t23Zyg3vsJzOzbLYbCqV2ball
y5Nndxs1kny+8H0mAAAAAEg0wK53+LD0+usnz+iuX2+/Hk4VKkhJSXaz26KF/b8tW0qnnUazCwAA
AMAZbIPkcj/9lKn+/aXx46VZs+ytgcq6+W3YULriCumee6Q33rCfEz540N7S6LXXpFGjpB497Nua
vdD8pqSkOF2C55C5eWRuHpmbR+bmkbl5ZA6vYQYYxRYTc2I29/f/GxfndGUAAAAAUDRWgXax5ORk
2du6lWyVuTPPlJo1k5o3P/HTrJn9rK6fewYAAACAU8Iq0M5jBtijqlcv2OA2by6ddZZUtarT1QEA
AABA2aMBdrmKFaXLLivY7LIYFQAAAACvoQF2ucRE6ZNPnK4CAAAAAJzHE50ul5OT43QJnhMIBJwu
wXPI3DwyN4/MzSNz88jcPDKH19AAu1xmZqbTJXhOamqq0yV4DpmbR+bmkbl5ZG4emZtH5vAaGmCX
i2OPIuPS0tKcLsFzyNw8MjePzM0jc/PI3Dwyh9ewDZKLscw6AAAAEDn4fu48ZoABAAAAAJ5AAwwA
AAAA8AQaYAAAAACAJ9AAu1xWVpbTJXhOenq60yV4DpmbR+bmkbl5ZG4emZtH5vAaFsFyseTkZAUC
AWVnZztdiqckJSVpzZo1TpfhKWRuHpmbR+bmkbl5ZG4emZvFIljOYwbY5eLj450uwXMyMjKcLsFz
yNw8MjePzM0jc/PI3Dwyh9cwA+xi/IUJAAAAiBx8P3ceM8AAAAAAAE+gAQYAAAAAeAINMAAAAADA
E2iAI9zPP/+swYMH64wzzlDlypXVuHFjjRgxQnv27CnW+zMzM8NcIf4oJSXF6RI8h8zNI3PzyNw8
MjePzM0jc3hNeacLQGibNm1S27ZttWvXLvXq1UvNmzfX119/reeee05z5szRkiVLVKtWrULPkZOT
Y6haHLN582anS/AcMjePzM0jc/PI3DwyN4/MzcvKynK6BE9jBjiCDRs2TLt27dILL7yg9957T48+
+qj+/e9/a8SIEfrxxx91//33O10igsjLy3O6BM8hc/PI3DwyN4/MzSNz88jcPBpgZ9EAR6hNmzZp
7ty5SkhI0O23337SsXHjxqlq1aqaMWOGDh8+7FCFAAAAABBdaIAj1IIFCyRJ3bt3L3CsWrVqateu
nQ4dOqTly5ebLg0AAAAAohINcIRat26dfD6fmjVrFvT4WWedJUlav369ybIAAAAAIGrRAEeovXv3
SpJq1qwZ9Pix14taDdqyrLItDEVi4THzyNw8MjePzM0jc/PI3Dwyh9ewCrSLZWZmKjc3V8nJyU6X
4ikbN24kc8PI3DwyN4/MzSNz88jcPDI3a+PGjcrNzXW6DE+jAY5Qx2Z4j80E/9Gx12NjY0Oeo2rV
qjp8+LACgUDQ43FxcYqLiytlpfij0047zekSPIfMzSNz88jcPDI3j8zNI/PwyMrKCrrac25uripW
rOhARTiGBjhCNW/eXJZlhXzGd8OGDZIU8hlhSdqxY0dYagMAAACAaOSzeEg0Im3atEmJiYlq3Lix
Nm7ceNKxAwcOqEGDBpKknTt3qkqVKk6UCAAAAABRhUWwIlSTJk3UvXt3bd68WRMmTDjp2JgxY3Tw
4EENGDCA5hcAAAAAiokZ4Ai2adMmtWvXTjt37lRqaqrOOeccLV++XAsXLtTZZ5+tJUuWqFatWk6X
CQAAAABRgQY4wv38888aM2aMPvvsM+3evVsNGjRQ7969NWbMmJBbJAEAAAAACqIBBgAAAAB4As8A
AwAAAAA8gQbYhX7++WcNHjxYZ5xxhipXrqzGjRtrxIgR2rNnj9OluVJCQoL8fn/Qn9NPP93p8qLW
e++9pzvvvFMdO3ZUzZo15ff7NWDAgELfs3TpUl1xxRWqXbu2YmJidN555+m5555Tfn6+oaqjW0ky
37JlS8jr3u/364YbbjBcfXTKysrSlClT1Lt3b5111lmKiYlRbGysOnTooGnTpinUTVpc66eupJlz
rZeN0aNHq1u3boqPj1dMTIxq166t888/Xw899FDQvVIlrvPSKknmXOfh8frrrx/PcNq0aUHHcJ2b
xy3QLrNp0ya1bdtWu3btUq9evdS8eXN9/fXXmj9/PgtnhUnjxo21d+9ejRgxosAXp2rVqmnkyJEO
VRbdWrdure+//17VqlVTw4YN9eOPP+rGG2/Ua6+9FnT8Rx99pGuuuUZVqlTRddddp7i4OH388cf6
8ccf1adPH7399tuGf4PoU5LMt2zZosaNG6tVq1bq1atXgePnnnuuevfubaLsqDZx4kQNGzZMp59+
urp06aL4+Hj9+uuvev/997Vnzx5dc801euedd056D9d66ZQ0c671slGpUiVdcMEFSkpKUr169XTw
4EEtX75c//nPf3TGGWdo+fLlOuOMM46P5zovvZJkznVe9rZu3aqWLVsqPz9fBw4c0OTJkzV48OCT
xnCdO8SCq3Tv3t3y+/1Wenr6Sa+PHDnS8vl81rBhwxyqzL0SEhKsxo0bO12G6yxcuNAKBALH/2+f
z2f1798/6Nh9+/ZZdevWtSpXrmx98803x1/Pzs62LrnkEsvv91tvv/22kbqjWUky37x5s+Xz+axB
gwaZLNF1FixYYH3yyScFXv/111+t+Ph4y+/3W++///7x17nWS6+kmXOtl43s7Oygr99///2Wz+ez
0tLSjr/GdV42SpI513nZu/TSS63ExETr73//u+X3+62pU6eedJzr3DncAu0imzZt0ty5c5WQkKDb
b7/9pGPjxo1T1apVNWPGDB0+fNihCoHi69Spk5o2bVqssbNmzdKuXbt0/fXXq3Xr1sdfr1ixosaP
Hy/LsvTSSy+Fq1TXKEnmKBudO3dWz549C7xer149DR06VJZlaeHChcdf51ovvZJmjrJRsWLFoK9f
e+21kqQNGzYcf43rvGyUJHOUreeee04LFy7UK6+8opiYmKBjuM6dU97pAlB2FixYIEnq3r17gWPV
qlVTu3btNHfuXC1fvlxdunQxXZ6rZWdn64033lBmZqaqVq2qli1bqmPHjvL7+RuTCQsWLJDP51OP
Hj0KHOvYsaNiYmK0dOlS5ebmqkKFCg5U6F7bt2/XpEmTtHv3btWuXVtt27ZVixYtnC7LFY5dq+XL
n/hPNdd6eAXL/Biu9fDIyMiQJJ133nnHX+M6D69gmR/DdV56a9eu1b333qu77rpL7du317x584KO
4zp3Dg2wi6xbt04+n0/NmjULevyss87S3LlztX79ehrgMrZjx46TFguyLEuNGzfWK6+8oo4dOzpY
mTesW7dOkoJe++XKlVPjxo21Zs0abdq0Sc2bNzddnqvNnTtXc+fOPf7PlmWpc+fOmj59us4880wH
K4tueXl5mj59unw+ny6//PLjr3Oth0+ozI/hWi8bTz31lA4ePKi9e/dqAWrERQAADqpJREFUxYoV
+vLLL9WqVSuNHj36+Biu87JVnMyP4Tovnby8PPXv318JCQl65JFHCh3Lde4cGmAX2bt3rySpZs2a
QY8fe53VoMvW4MGD1aFDByUnJ6t69eratGmTJkyYoIkTJ+qKK67QsmXL+OtpmHHtmxcTE6MxY8ao
V69eatKkiSTp+++/19ixYzV//nx169ZN3333napUqeJwpdFp9OjR+uGHH3TllVfqsssuO/4613r4
hMqca71sPf3009q5c+fxf/7zn/+sV199VbVr1z7+Gtd52SpO5lznZWPcuHFatWqVlixZokqVKhU6
luvcOdyfCZTSgw8+qM6dO6tu3bqqXLmykpKS9OKLL2rkyJE6dOiQxo4d63SJQJmrW7euxo4dq1at
WqlGjRqqUaOG2rdvrzlz5uhPf/qTAoGApkyZ4nSZUen555/XP//5TyUlJYVc9Rxlq7DMudbL1i+/
/KK8vDzt2LFD77//vjZu3KhWrVrpu+++c7o01ypO5lznpffVV1/pscce09/+9je1adPG6XJQCBpg
Fzn2l6Jjf1H6o2Ovx8bGGqvJy4YOHSpJWrx4scOVuB/XfuQoV66chgwZIsuyuPZPwYQJE3TXXXfp
3HPP1fz58wtcs1zrZa+ozEPhWi+dunXr6qqrrtLnn3+u3bt3n/QYEdd5eBSWeShc58WTl5enAQMG
qHnz5nrooYdOOmaF2HGW69w5NMAu0rx5c1mWpfXr1wc9fmy1v1DPCKNs1a1bV5J08OBBhytxv2PP
xgS79vPy8vTTTz+pfPnyx2/rQnhx7Z+aZ599Vnfeeadatmyp+fPnq169egXGcK2XreJkXhiu9dKL
j49XUlKSfvjhB2VlZUniOg+3YJkXhuu8aAcOHNCGDRu0du1aVapUSX6///jPsYZ4yJAh8vv9Gjly
pCSucyfRALvIsYWtPv/88wLHDhw4oCVLligmJkYXX3yx6dI8admyZZLEv7gM6Nq1qyzL0meffVbg
2KJFi3To0CG1a9eOVRQN4dovuSeeeEIjR47U+eefrwULFqhOnTpBx3Gtl53iZl4YrvWysX37dkn2
bKPEdW7CHzMvDNd50SpVqqQhQ4bo5ptv1pAhQ076Of/88yVJHTp00JAhQ9S2bVtJXOeOMrzvMMKs
R48elt/vt1544YWTXh8xYoTl8/ms22+/3aHK3Gnt2rXWwYMHC7z+008/WYmJiZbf77cef/xxBypz
l4ULF1o+n8/q379/0OO/30x+xYoVx18/cuSI1bZtW8vv91vvvPOOqXJdoajMv/nmGys/P7/A6//+
97+typUrW36/31q2bFm4y3SFhx56yPL5fFabNm2s3377rdCxXOtloySZc62X3vr16629e/cWeD0/
P9+67777LJ/PZ3Xo0OH461znpVfSzLnOw2fs2LGW3++3pk6detLrXOfO8VlWiBvTEZU2bdqkdu3a
aefOnUpNTdU555yj5cuXa+HChTr77LO1ZMkS1apVy+kyXWPcuHF6+umn1bFjRzVq1EjVq1fXxo0b
9a9//UvZ2dnq2bOn3n///aB7SqJwH330kT788ENJ9jZTc+bMUZMmTdShQwdJUp06dfTkk0+eNL5P
nz6qVKmS+vbtq7i4OGVkZGj9+vXq06eP3nrrLUd+j2hSksy7dOmiDRs26JJLLlHDhg0l2SuGzp8/
Xz6fT+PHj9e9997rzC8SRaZPn65BgwapfPnyuuOOO4KuBpqQkKCBAwce/2eu9dIpaeZc66X33HPP
6d5771X79u3VuHFj1a5dW7/++qsWLVqkTZs26fTTT9e///1vnX322cffw3VeOiXNnOs8fMaNG6dx
48ZpypQpGjx48EnHuM4d4nQHjrK3bds2a/Dgwdbpp59uVapUyUpISLBGjhxp7dmzx+nSXGfRokXW
DTfcYJ1zzjlWrVq1rIoVK1r16tWzunfvbr3++utOlxfVjv3FNNRPkyZNCrxn6dKlVs+ePa24uDgr
JibGatmypfXcc88F/as2CipJ5tOmTbNSUlKsxo0bW9WrV7cqV65sNWrUyLr++uutL7/80sHfIroU
lbnf77e6dOlS4H1c66eupJlzrZfe6tWrreHDh1utW7e26tata1WoUMGKjY212rRpYz300EMhZ+G5
zk9dSTPnOg+fUDPAx3Cdm8cMMAAAAADAE1gECwAAAADgCTTAAAAAAABPoAEGAAAAAHgCDTAAAAAA
wBNogAEAAAAAnkADDAAAAADwBBpgAAAAAIAn0AADAAAAADyBBhgAAAAA4Ak0wAAAAAAAT6ABBgAA
AAB4Ag0wAAAukZeXJ7/fr+7duztdCgAAEYkGGACAP5g2bZr8fr969uwZckzPnj3l9/v18ssvF3qu
Bx54QH6/X/fcc0+Rn3vrrbfK7/frueeeK3HNAACgaD7LsiyniwAAINL06tVLH3/8sSZMmKBhw4ad
dOyll15SWlqaevbsqY8//rjQ82zevFlNmzZV/fr1tXXrVpUrVy7ouEOHDqlBgwbKzc3V9u3bFRsb
W+Ka8/LyVKFCBXXr1k2ff/55id8PAIDbMQMMAEAQkydPVp06dfT3v/9dGzZsOP76+vXrNWrUKNWt
W1dTp04t8jwJCQnq1q2bfv3110Kb5TfffFP79+/XNddcc0rNLwAAKBoNMAAAQdStW1eTJ0/WwYMH
1a9fP+Xn5ysvL0/9+vXT4cOHNWnSJNWrV69Y57r11ltlWZYmT54ccszkyZPl8/l0yy23HH9t7969
+sc//qGuXbuqYcOGqlSpkurXr6+rr75aX3/9dbF/l379+snv92v79u0Fjs2bN09+v1+PPvpogWNZ
WVkaPXq0zjnnHFWpUkW1atVS9+7dNW/evGJ/NgAAkaS80wUAABCpUlNTNXjwYL3yyisaN26cLMvS
ihUrNHjwYF111VXFPs9VV12levXq6fPPP9e2bdvUsGHDk47/8MMP+vrrr3X22WerQ4cOx19fvXq1
xowZo06dOik1NVWxsbHasmWLMjIyNHv2bH366afq2rVrkZ/v8/nk8/mK/4vLvnW7c+fO2rp1qzp2
7KiePXtq//79+uSTT9S9e3dNmzZNAwcOLNE5AQBwGg0wAACFePbZZ7VgwYLjM6RNmjQp8SJV5cuX
10033aQnn3xS06ZN05gxY046PmnSpAKzv5LUokUL/fLLL6pVq9ZJr2/dulVt2rTRiBEjtGrVqlP4
rYrWr18/bdu2TbNmzVLv3r2Pv75371516NBBd9xxh6688krVrl07LJ8PAEA4cAs0AACFqFatmsaM
GaO8vDzl5+frxRdfVNWqVUt8nmPN7SuvvHLS6zk5OXrjjTdUsWLFAjOqNWrUKND8StKZZ56p3r17
a/Xq1dqxY0eJaynKN998o6VLl+q66647qfmVpJo1a2rs2LE6dOiQPvjggzL/bAAAwokZYAAACnHk
yBE9/vjjx28hnjVr1ints9u0aVN16dJFCxYs0Jw5c9SjRw9J0rvvvqusrCz17dtXcXFxBd73xRdf
6Pnnn9dXX32lnTt3Kicn5/gxn8+nn3/+Waeddtop/nbBLVu2TJL022+/ady4cQWO79ixQ5Zlae3a
tWX6uQAAhBsNMAAAhRg1apTWr1+vv/71r1q0aJGmTZumXr16FbpHcCi33HKL5s+frylTphxvgKdM
mSKfz6dbb721wPhZs2bp+uuvV0xMjC677DI1adJEVatWld/v17x58/Tll18qOzu71L/jH+3evVuS
NGfOHM2ZMyfoGJ/Pp4MHD5b5ZwMAEE40wAAAhPD555/rxRdfVMuWLfXEE09o/fr1uvDCC3XLLbdo
9erVQWdsC9O7d2/VqVNHH3/8sf73v/9p3759WrRokc466yx17ty5wPgHH3xQVapU0TfffKPExMST
jmVmZurLL78s1uf6/fYTT0ePHi1wbM+ePQVeq1mzpiQpPT1dQ4cOLdZnAAAQDXgGGACAIH777TcN
GjRIFStW1Ouvv64KFSooOTlZDz/8sHbs2KFhw4aV+JwVKlTQwIEDlZubq1dffVVTp06VZVkaMmRI
0PGbNm3SueeeW6D5zc/PL3bzK+n4c8Rbt24tcOw///lPgRWiL774Ykn27dcAALgJDTAAAEHcdttt
2rFjhx555BElJycff/3uu+9Whw4d9O6772rmzJklPu8tt9xyfE/gV199NejiV8c0atRI69at06+/
/nrS6w8++KDWr19f7M9s06ZN0H2IV61apfT09ALj//SnP6lt27Z655139NprrwU95/fff3/8VmkA
AKIFt0ADAPAHM2bM0LvvvqvOnTtr5MiRJx3z+XyaPn26WrZsqeHDh6tTp04644wzin3uZs2aqWPH
jlq8eLEkqU+fPqpbt27QsSNGjNDw4cPVqlUr/eUvf1H58uX1xRdfaMOGDUpJSdEnn3xSrM/s3bu3
mjZtqhkzZmjLli266KKLju8n3KtXL7399tsF3vPWW2+pW7duGjRokJ599lm1adNGsbGx2rZtm777
7jutXbtW//nPf9gGCQAQVZgBBgDgd7Zu3ao777xTsbGxIWc/ExIS9Mwzz2jPnj26+eabS/wZt956
q3w+n/x+f4G9f3/v9ttv19SpU3Xaaafp1Vdf1cyZM9WkSRN99dVXatmyZdD3+Hy+Arc0V65cWQsW
LFCfPn30/fff68UXX1RmZqZmzZqlIUOGBH3PmWeeqZUrV+rhhx+W3+/XzJkzNWHCBC1fvlxNmjTR
5MmTlZSUVOLfHQAAJ/ksy7KcLgIAAAAAgHBjBhgAAAAA4Ak0wAAAAAAAT6ABBgAAAAB4Ag0wAAAA
AMATaIABAAAAAJ5AAwwAAAAA8AQaYAAAAACAJ9AAAwAAAAA8gQYYAAAAAOAJNMAAAAAAAE+gAQYA
AAAAeAINMAAAAADAE2iAAQAAAACeQAMMAAAAAPAEGmAAAAAAgCfQAAMAAAAAPIEGGAAAAADgCTTA
AAAAAABPoAEGAAAAAHgCDTAAAAAAwBNogAEAAAAAnkADDAAAAADwhP8Pl3WLC8axtDkAAAAASUVO
RK5CYII=
"""
# Decode the embedded base64 PNG payload (defined above) into raw bytes.
# Python 2 `str` objects support the 'base64' codec directly; Python 3
# requires base64.b64decode.  Assumes `sys` and `base64` are imported
# earlier in the file -- TODO confirm.
if sys.version_info < (3,):
    TEST_IMAGE = TEST_IMAGE.decode('base64')
else:
    TEST_IMAGE = base64.b64decode(TEST_IMAGE)
| sonofeft/DigiPlot | digiplot/sample_img.py | Python | gpl-3.0 | 72,038 | 0.000028 |
from __future__ import print_function
from __future__ import unicode_literals
import time
from netmiko.ssh_connection import BaseSSHConnection
from netmiko.netmiko_globals import MAX_BUFFER
from netmiko.ssh_exception import NetMikoTimeoutException, NetMikoAuthenticationException
import paramiko
import socket
class CiscoWlcSSH(BaseSSHConnection):
    """SSH connection class for Cisco WLC (Wireless LAN Controller) devices.

    WLCs present a nonstandard login sequence (they re-prompt for
    User/Password inside the interactive shell), so establish_connection
    sends the credentials manually after opening the channel.
    """

    def establish_connection(self, sleep_time=3, verbose=True, timeout=8, use_keys=False):
        """Establish an SSH connection to the network device.

        Timeout will generate a NetMikoTimeoutException.
        Authentication failure will generate a NetMikoAuthenticationException.

        WLC presents with the following on login:

            login as: user

            (Cisco Controller)

            User: user
            Password:****

        Manually send username/password to work around this.

        Returns the initial channel output (banner and prompt).
        """
        # Create instance of SSHClient object
        self.remote_conn_pre = paramiko.SSHClient()

        # Automatically add untrusted hosts (make sure appropriate for your environment)
        self.remote_conn_pre.set_missing_host_key_policy(paramiko.AutoAddPolicy())

        # Initiate the SSH connection
        try:
            self.remote_conn_pre.connect(hostname=self.ip, port=self.port,
                                         username=self.username, password=self.password,
                                         look_for_keys=use_keys, allow_agent=False,
                                         timeout=timeout)
        except socket.error as e:
            msg = "Connection to device timed-out: {device_type} {ip}:{port}".format(
                device_type=self.device_type, ip=self.ip, port=self.port)
            # Include the underlying error text (mirrors the auth branch).
            msg += '\n' + str(e)
            raise NetMikoTimeoutException(msg)
        except paramiko.ssh_exception.AuthenticationException as e:
            msg = "Authentication failure: unable to connect {device_type} {ip}:{port}".format(
                device_type=self.device_type, ip=self.ip, port=self.port)
            msg += '\n' + str(e)
            raise NetMikoAuthenticationException(msg)

        # Report success only after connect() actually succeeded (the
        # original printed this before attempting the connection, which
        # was misleading on timeout/auth failures).
        if verbose:
            print("SSH connection established to {0}:{1}".format(self.ip, self.port))

        # Use invoke_shell to establish an 'interactive session'
        self.remote_conn = self.remote_conn_pre.invoke_shell()

        # Handle the WLC's extra User/Password prompts
        self.remote_conn.send(self.username + '\n')
        time.sleep(.2)
        self.remote_conn.send(self.password + '\n')

        if verbose:
            print("Interactive SSH session established")

        # Strip the initial router prompt
        time.sleep(sleep_time)
        return self.remote_conn.recv(MAX_BUFFER)

    def session_preparation(self):
        """Prepare the session after the connection has been established.

        Cisco WLC uses "config paging disable" to disable paging.
        """
        self.disable_paging(command="config paging disable\n")
        self.set_base_prompt()

    def cleanup(self):
        """Reset WLC back to normal paging before disconnecting."""
        self.send_command("config paging enable\n")
| enzzzy/netmiko | netmiko/cisco/cisco_wlc_ssh.py | Python | mit | 3,082 | 0.004218 |
# -*- coding: UTF-8 -*-
from .model_wrapper import ModelWrapper
class TagWrapper(ModelWrapper):
    """Wrapper adding shortcut accessors for a tag's related objects.

    Assumes the wrapped model exposes a ``sku`` attribute (presumably
    delegated by ModelWrapper) -- TODO confirm against ModelWrapper.
    """
    @property
    def spu(self):
        # SPU that the tag's SKU belongs to.
        return self.sku.spu
    @property
    def vendor(self):
        # Vendor owning the SPU (delegates through self.spu).
        return self.spu.vendor
| PuZheng/lejian-backend | lejian/apis/tag.py | Python | mit | 229 | 0 |
import csv
import decimal
import os
import datetime
from stocker.common.events import EventStreamNew, EventStockOpen, EventStockClose
from stocker.common.orders import OrderBuy, OrderSell
from stocker.common.utils import Stream
class CompanyProcessor(object):
    """Reads the per-day CSV quote files of a single company.

    The company's data lives under ``<dirname>/<company_id>`` as one
    ``<YYYY-MM-DD>.csv`` file per trading day.
    """

    def __init__(self, dirname, company_id):
        self.dirname = os.path.join(dirname, company_id)
        self.company_id = company_id

    def get_dates(self):
        """Return the available dates (CSV basenames without extension)."""
        # Use the next() builtin instead of the Python-2-only .next()
        # generator method, so this works on both Python 2.6+ and 3.
        _, _, filenames = next(os.walk(self.dirname))
        return [os.path.splitext(name)[0] for name in filenames]

    def get_row(self, date):
        """Yield ``(row, company_id)`` for each transaction row of *date*.

        Rows are produced newest-first (the file is iterated in reverse).
        Rows whose description column (index 5) is missing or does not
        start with 'TRANSAKCJA' are skipped; a missing file yields nothing.
        """
        filename = os.path.join(self.dirname, date) + ".csv"
        try:
            with open(filename, 'r') as f:
                for row in reversed(list(csv.reader(f, delimiter=';'))):
                    try:
                        if row[5].startswith('TRANSAKCJA'):
                            yield (row, self.company_id)
                    except IndexError:
                        pass
        except IOError:
            return
class Processor(object):
    """Merges per-company transaction rows into one time-ordered event
    stream written via Stream.

    For every date with transactions it emits a stock-open event (09:00),
    the interleaved orders of all companies ordered by time of day, and a
    stock-close event (18:00).
    """

    def build_stream(self, dirname_in, filename_out):
        """Build the event stream from *dirname_in* and write it to
        *filename_out*."""
        self.stream = Stream()
        self.stream.begin(filename_out)
        self.__process_companies(dirname_in)
        self.stream.end()

    def __process_companies(self, dirname):
        # One CompanyProcessor per company subdirectory of `dirname`.
        # NOTE: `.next()` is Python-2-only.
        companies = []
        for company in os.walk(dirname).next()[1]:
            companies.append(CompanyProcessor(dirname, company))

        # Union of all trading dates across companies, chronologically.
        dates_set = set()
        for company in companies:
            dates_set.update(company.get_dates())
        dates_ordered = sorted(dates_set, key=lambda date: datetime.datetime.strptime(date, "%Y-%m-%d"))

        for date in dates_ordered:
            self.__process_date(date, companies)

    def __process_date(self, date, companies):
        """Multiplex all companies' rows for one date, earliest-first."""
        # `rows` holds at most one pending (company_id, row, generator)
        # tuple per still-active generator.
        rows = []
        correct_generators = []
        correct_day = False
        generators = [company.get_row(date) for company in companies]

        # Prime each generator with its first row; generators with no
        # rows for this date are dropped immediately.
        for generator in generators:
            try:
                row, company_id = generator.next()
                row = (company_id, row, generator)
                rows.append(row)
                correct_generators.append(generator)
            except StopIteration as e:
                pass

        if correct_generators:
            # correct day (have transactions)
            correct_day = True

        if correct_day:
            self.stream.add_event(EventStockOpen(
                datetime.datetime.combine(datetime.datetime.strptime(date, "%Y-%m-%d"), datetime.time(9, 0))))

        # main loop, multiplexing rows: repeatedly pick the pending row
        # with the earliest time-of-day (row[1][0] is the HH:MM:SS field),
        # emit it, and refill from the same generator.
        while correct_generators:
            row_data = min(rows, key=lambda row: datetime.datetime.strptime(row[1][0], "%H:%M:%S"))
            rows.remove(row_data)
            company_id, row, generator = row_data
            self.__process_row(row, date, company_id)
            try:
                row, company_id = generator.next()
                row = (company_id, row, generator)
                rows.append(row)
            except StopIteration as e:
                correct_generators.remove(generator)

        if correct_day:
            self.stream.add_event(EventStockClose(
                datetime.datetime.combine(datetime.datetime.strptime(date, "%Y-%m-%d"), datetime.time(18, 0))))

    def __process_row(self, row, date, company_id):
        """Convert one CSV row into mirrored buy and sell order events.

        Row layout used: [0]=HH:MM:SS, [1]=price (comma decimal mark),
        [3]=amount.  Both orders share the same limit price and expire
        one day after the transaction timestamp.
        """
        amount = int(row[3])
        limit_price = decimal.Decimal(row[1].replace(',', '.'))
        timestamp = datetime.datetime.strptime("%s %s" % (date, row[0]), "%Y-%m-%d %H:%M:%S")
        expiration_date = timestamp + datetime.timedelta(days=1)

        self.stream.add_event(
            EventStreamNew(timestamp, OrderBuy(company_id, amount, limit_price, expiration_date)))
        self.stream.add_event(
            EventStreamNew(timestamp, OrderSell(company_id, amount, limit_price, expiration_date)))
| donpiekarz/Stocker | stocker/SEP/processor.py | Python | gpl-3.0 | 3,868 | 0.002844 |
#!/usr/bin/python
import serial
import time
import random
import sys
# Serial connection to the LED controller; opened in setup().
s = None
# Physical strip parameters.
num_leds = 93
ticks = 96
# Delay between animation frames in seconds (overridable via argv[2]).
sleep_time = 0.0
def flush_input():
    # Discard any bytes already buffered from the controller.
    s.flushInput()
def wait_for_ack():
    """Busy-wait until the controller replies, then drain the reply bytes."""
    while True:
        pending = s.inWaiting()
        if pending > 0:
            break
    s.read(s.inWaiting())
def command(cmd_text):
    """Send one ':'-terminated command string and wait for the ack."""
    payload = "{0}:".format(cmd_text)
    s.write(payload.encode())
    wait_for_ack()
def setup():
    """Open the serial link and paint the initial six-zone layout."""
    global s, sleep_time
    # Serial port to the controller at 115200 baud.
    s = serial.Serial("/dev/ttyS0", 115200)
    flush_input()
    choose_colors()
    # Initial command sequence: "pau"/"clr" presumably pause and clear the
    # strip -- confirm against the controller protocol.  Zones 6..1 are
    # then assigned red..purple with per-zone copy counts.
    command(":::pau:clr:pau")
    command("6:zon:red:8:cpy")
    command("5:zon:org:6:cpy")
    command("4:zon:yel:4:cpy")
    command("3:zon:grn:3:cpy")
    command("2:zon:blu:2:cpy")
    command("1:zon:pur")
    command("flu")
    # Optional overrides: argv[1] = extra setup command,
    # argv[2] = per-frame sleep time in seconds.
    if len(sys.argv) > 1:
        command(sys.argv[1])
    if len(sys.argv) > 2:
        sleep_time = float(sys.argv[2])
# Number of entries of `colors` eligible for random selection
# (excludes the trailing "blk" and "rnd" entries).
num_colors = 12
colors = [ "red", "org", "yel", "lgr", "grn", "sea", "cyn", "lbl", "blu", "pur", "mag", "pnk", "blk", "rnd" ]
# NOTE(review): `effects`/`effect_index` appear unused in this file.
effects = ['blink1','blink2','blink3','blink4','blink5','blink6']
effect_index = 0
# Current color of each of the six zones; index 0 maps to zone 6.
chosen_colors = [0,1,2,3,4,5]
def random_color():
    """Return one of the first `num_colors` color codes, chosen at random."""
    index = random.randrange(num_colors)
    return colors[index]
def choose_colors():
    """Refill all six zone slots with fresh random colors (in place)."""
    global chosen_colors
    chosen_colors[:] = [random_color() for _ in range(6)]
def shift_colors():
    """Shift each zone's color one slot toward the end (slot 0 duplicated)."""
    global chosen_colors
    chosen_colors[1:] = chosen_colors[:-1]
def clear_colors():
    """Set every zone slot to the string "black".

    NOTE(review): the `colors` table uses "blk"; confirm the controller
    also accepts the full word "black".
    """
    chosen_colors[:] = ["black"] * 6
def place_color(zone, color):
    """Flood a single zone with the given color code."""
    command("%d:zone:%s:flood" % (zone, color))
def place_colors():
    """Push the six chosen colors out to zones 6 (slot 0) down to 1 (slot 5)."""
    for slot, zone in enumerate(range(6, 0, -1)):
        place_color(zone, chosen_colors[slot])
def display():
    # Render the current chosen colors, then flush so the strip updates.
    place_colors()
    command("flush")
# Animation frame counter, advanced once per loop() call.
# NOTE(review): `global` at module level is a no-op; kept for clarity only.
global idx
idx = -1
def loop():
    """Advance the animation one frame.

    Each zone rotates on its own period (outer zones faster); a single
    flush is sent only when at least one zone actually rotated.
    """
    global idx
    idx += 1
    # (zone, period): zone N rotates on frames where idx % period == 0.
    rotations = ((6, 3), (5, 4), (4, 6), (3, 8), (2, 12))
    needs_flush = False
    for zone, period in rotations:
        if idx % period == 0:
            command("%d:zon:rot" % zone)
            needs_flush = True
    if needs_flush:
        command("flu")
    time.sleep(sleep_time)
if __name__ == '__main__':
    setup()
    # Animate forever (until interrupted).
    while True:
        loop()
| jhogsett/linkit | python/gaydar4.py | Python | mit | 3,902 | 0.027166 |
"""Implementation of JSONEncoder
"""
import re
# The C accelerators normally imported from _json are deliberately
# disabled in this build; the pure-Python implementations below are
# always used.
# try:
#     from _json import encode_basestring_ascii as c_encode_basestring_ascii
# except ImportError:
#     c_encode_basestring_ascii = None
c_encode_basestring_ascii = None
# try:
#     from _json import make_encoder as c_make_encoder
# except ImportError:
#     c_make_encoder = None
c_make_encoder = None
def x4(i):
    """Return the last four hex digits of *i*, zero-padded (0x41 -> "0041")."""
    padded = "000%x" % i
    return padded[-4:]
# Characters that must be escaped inside a JSON string.
ESCAPE = re.compile(r'[\x00-\x1f\\"\b\f\n\r\t]')
# For ASCII-only output: backslash, quote, and anything outside space..'~'.
ESCAPE_ASCII = re.compile(r'([\\"]|[^\ -~])')
# Detects non-ASCII bytes in a (Python 2) byte string.
HAS_UTF8 = re.compile(r'[\x80-\xff]')
# Fixed short escapes; extended below with \uXXXX for all control chars.
ESCAPE_DCT = {
    '\\': '\\\\',
    '"': '\\"',
    '\b': '\\b',
    '\f': '\\f',
    '\n': '\\n',
    '\r': '\\r',
    '\t': '\\t',
}
for i in range(0x20):
    # ESCAPE_DCT.setdefault(chr(i), '\\u{0:04x}'.format(i))
    # ESCAPE_DCT.setdefault(chr(i), '\\u%04x' % (i,))
    # ESCAPE_DCT.setdefault(chr(i), '\\u' + x4(i))
    ESCAPE_DCT[chr(i)] = '\\u' + x4(i)

INFINITY = float('inf')
FLOAT_REPR = repr
def encode_basestring(s):
    """Return a JSON representation of a Python string."""
    escaped = ESCAPE.sub(lambda match: ESCAPE_DCT[match.group(0)], s)
    return '"' + escaped + '"'
def py_encode_basestring_ascii(s):
    """Return an ASCII-only JSON representation of a Python string.

    All non-ASCII characters are emitted as \\uXXXX escapes; characters
    outside the BMP are emitted as a surrogate pair.
    """
    # Python-2-specific: a byte `str` containing UTF-8 is decoded to
    # unicode first (str has no .decode on Python 3).
    if isinstance(s, str) and HAS_UTF8.search(s) is not None:
        s = s.decode('utf-8')
    def replace(match):
        s = match.group(0)
        try:
            # Short escape (\n, \", ...) or precomputed \u00XX control escape.
            return ESCAPE_DCT[s]
        except KeyError:
            n = ord(s)
            if n < 0x10000:
                # return '\\u{0:04x}'.format(n)
                #return '\\u%04x' % (n,)
                return '\\u' + x4(n)
            else:
                # surrogate pair: split the supplementary code point into
                # high (0xd800-based) and low (0xdc00-based) halves.
                n -= 0x10000
                s1 = 0xd800 | ((n >> 10) & 0x3ff)
                s2 = 0xdc00 | (n & 0x3ff)
                # return '\\u{0:04x}\\u{1:04x}'.format(s1, s2)
                #return '\\u%04x\\u%04x' % (s1, s2)
                return '\\u' + x4(s1) + '\\u' + x4(s2)
    return '"' + str(ESCAPE_ASCII.sub(replace, s)) + '"'
# Prefer the C implementation when available; in this build
# c_encode_basestring_ascii is forced to None above, so the pure-Python
# version is always selected.
encode_basestring_ascii = (
    c_encode_basestring_ascii or py_encode_basestring_ascii)
class JSONEncoder(object):
    """Extensible JSON <http://json.org> encoder for Python data structures.

    Supports the following objects and types by default:

    +-------------------+---------------+
    | Python            | JSON          |
    +===================+===============+
    | dict              | object        |
    +-------------------+---------------+
    | list, tuple       | array         |
    +-------------------+---------------+
    | str, unicode      | string        |
    +-------------------+---------------+
    | int, long, float  | number        |
    +-------------------+---------------+
    | True              | true          |
    +-------------------+---------------+
    | False             | false         |
    +-------------------+---------------+
    | None              | null          |
    +-------------------+---------------+

    To extend this to recognize other objects, subclass and implement a
    ``.default()`` method with another method that returns a serializable
    object for ``o`` if possible, otherwise it should call the superclass
    implementation (to raise ``TypeError``).
    """
    # Default separators; iterencode may combine these with indentation.
    item_separator = ', '
    key_separator = ': '
    def __init__(self, skipkeys=False, ensure_ascii=True,
            check_circular=True, allow_nan=True, sort_keys=False,
            indent=None, separators=None, encoding='utf-8', default=None):
        """Constructor for JSONEncoder, with sensible defaults.

        If skipkeys is false, then it is a TypeError to attempt
        encoding of keys that are not str, int, long, float or None.  If
        skipkeys is True, such items are simply skipped.

        If *ensure_ascii* is true (the default), all non-ASCII
        characters in the output are escaped with \uXXXX sequences,
        and the results are str instances consisting of ASCII
        characters only.  If ensure_ascii is False, a result may be a
        unicode instance.  This usually happens if the input contains
        unicode strings or the *encoding* parameter is used.

        If check_circular is true, then lists, dicts, and custom encoded
        objects will be checked for circular references during encoding to
        prevent an infinite recursion (which would cause an OverflowError).
        Otherwise, no such check takes place.

        If allow_nan is true, then NaN, Infinity, and -Infinity will be
        encoded as such.  This behavior is not JSON specification compliant,
        but is consistent with most JavaScript based encoders and decoders.
        Otherwise, it will be a ValueError to encode such floats.

        If sort_keys is true, then the output of dictionaries will be
        sorted by key; this is useful for regression tests to ensure
        that JSON serializations can be compared on a day-to-day basis.

        If indent is a non-negative integer, then JSON array
        elements and object members will be pretty-printed with that
        indent level.  An indent level of 0 will only insert newlines.
        None is the most compact representation.  Since the default
        item separator is ', ', the output might include trailing
        whitespace when indent is specified.  You can use
        separators=(',', ': ') to avoid this.

        If specified, separators should be a (item_separator, key_separator)
        tuple.  The default is (', ', ': ').  To get the most compact JSON
        representation you should specify (',', ':') to eliminate whitespace.

        If specified, default is a function that gets called for objects
        that can't otherwise be serialized.  It should return a JSON encodable
        version of the object or raise a ``TypeError``.

        If encoding is not None, then all input strings will be
        transformed into unicode using that encoding prior to JSON-encoding.
        The default is UTF-8.
        """
        self.skipkeys = skipkeys
        self.ensure_ascii = ensure_ascii
        self.check_circular = check_circular
        self.allow_nan = allow_nan
        self.sort_keys = sort_keys
        self.indent = indent
        if separators is not None:
            # Custom (item, key) separator pair overrides the class defaults.
            self.item_separator, self.key_separator = separators
        if default is not None:
            # Instance attribute shadows the default() method below.
            self.default = default
        self.encoding = encoding
    def default(self, o):
        """Implement this method in a subclass such that it returns
        a serializable object for ``o``, or calls the base implementation
        (to raise a ``TypeError``).

        For example, to support arbitrary iterators, you could
        implement default like this::

            def default(self, o):
                try:
                    iterable = iter(o)
                except TypeError:
                    pass
                else:
                    return list(iterable)
                # Let the base class default method raise the TypeError
                return JSONEncoder.default(self, o)
        """
        raise TypeError(repr(o) + " is not JSON serializable")
    def encode(self, o):
        """Return a JSON string representation of a Python data structure.

        >>> JSONEncoder().encode({"foo": ["bar", "baz"]})
        '{"foo": ["bar", "baz"]}'
        """
        # This is for extremely simple cases and benchmarks.
        if isinstance(o, basestring):
            if isinstance(o, str):
                _encoding = self.encoding
                if (_encoding is not None
                        and not (_encoding == 'utf-8')):
                    o = o.decode(_encoding)
            if self.ensure_ascii:
                return encode_basestring_ascii(o)
            else:
                return encode_basestring(o)
        # This doesn't pass the iterator directly to ''.join() because the
        # exceptions aren't as detailed.  The list call should be roughly
        # equivalent to the PySequence_Fast that ''.join() would do.
        chunks = self.iterencode(o, _one_shot=True)
        if not isinstance(chunks, (list, tuple)):
            chunks = list(chunks)
        return ''.join(chunks)
    def iterencode(self, o, _one_shot=False):
        """Encode the given object and yield each string
        representation as available.

        For example::

            for chunk in JSONEncoder().iterencode(bigobject):
                mysocket.write(chunk)
        """
        if self.check_circular:
            # Maps id(container) -> container for cycle detection.
            markers = {}
        else:
            markers = None
        if self.ensure_ascii:
            _encoder = encode_basestring_ascii
        else:
            _encoder = encode_basestring
        if self.encoding != 'utf-8':
            # Wrap the encoder so byte strings are decoded with the
            # configured (non-UTF-8) encoding first.
            def _encoder(o, _orig_encoder=_encoder, _encoding=self.encoding):
                if isinstance(o, str):
                    o = o.decode(_encoding)
                return _orig_encoder(o)
        def floatstr(o, allow_nan=self.allow_nan,
                _repr=FLOAT_REPR, _inf=INFINITY, _neginf=-INFINITY):
            # Check for specials.  Note that this type of test is processor
            # and/or platform-specific, so do tests which don't depend on the
            # internals.
            if o != o:
                # NaN is the only float that is not equal to itself.
                text = 'NaN'
            elif o == _inf:
                text = 'Infinity'
            elif o == _neginf:
                text = '-Infinity'
            else:
                return _repr(o)
            if not allow_nan:
                raise ValueError(
                    "Out of range float values are not JSON compliant: " +
                    repr(o))
            return text
        # Prefer the C-accelerated encoder when it supports the requested
        # options; otherwise build the pure-Python generator.
        if (_one_shot and c_make_encoder is not None
                and self.indent is None and not self.sort_keys):
            _iterencode = c_make_encoder(
                markers, self.default, _encoder, self.indent,
                self.key_separator, self.item_separator, self.sort_keys,
                self.skipkeys, self.allow_nan)
        else:
            _iterencode = _make_iterencode(
                markers, self.default, _encoder, self.indent, floatstr,
                self.key_separator, self.item_separator, self.sort_keys,
                self.skipkeys, _one_shot)
        return _iterencode(o, 0)
def _make_iterencode(markers, _default, _encoder, _indent, _floatstr,
        _key_separator, _item_separator, _sort_keys, _skipkeys, _one_shot,
        ## HACK: hand-optimized bytecode; turn globals into locals
        ValueError=ValueError,
        basestring=basestring,
        dict=dict,
        float=float,
        id=id,
        int=int,
        isinstance=isinstance,
        list=list,
        long=long,
        str=str,
        tuple=tuple,
    ):
    """Build and return the pure-Python ``_iterencode`` generator.

    Used when the C-accelerated encoder is unavailable or the requested
    options (indentation, key sorting) are unsupported by it.  The
    keyword defaults above deliberately rebind builtins as locals so
    lookups inside the hot loops compile to fast local loads.
    """
    def _iterencode_list(lst, _current_indent_level):
        # Yield the JSON encoding of a list or tuple, chunk by chunk.
        if not lst:
            yield '[]'
            return
        if markers is not None:
            # Cycle detection: remember this container while encoding it.
            markerid = id(lst)
            if markerid in markers:
                raise ValueError("Circular reference detected")
            markers[markerid] = lst
        buf = '['
        if _indent is not None:
            _current_indent_level += 1
            newline_indent = '\n' + (' ' * (_indent * _current_indent_level))
            separator = _item_separator + newline_indent
            buf += newline_indent
        else:
            newline_indent = None
            separator = _item_separator
        first = True
        for value in lst:
            if first:
                first = False
            else:
                # After the first element buf degenerates to the separator.
                buf = separator
            if isinstance(value, basestring):
                yield buf + _encoder(value)
            elif value is None:
                yield buf + 'null'
            elif value is True:
                yield buf + 'true'
            elif value is False:
                yield buf + 'false'
            elif isinstance(value, (int, long)):
                yield buf + str(value)
            elif isinstance(value, float):
                yield buf + _floatstr(value)
            else:
                # Container or custom object: emit the pending prefix, then
                # delegate to the appropriate sub-generator.
                yield buf
                if isinstance(value, (list, tuple)):
                    chunks = _iterencode_list(value, _current_indent_level)
                elif isinstance(value, dict):
                    chunks = _iterencode_dict(value, _current_indent_level)
                else:
                    chunks = _iterencode(value, _current_indent_level)
                for chunk in chunks:
                    yield chunk
        if newline_indent is not None:
            _current_indent_level -= 1
            yield '\n' + (' ' * (_indent * _current_indent_level))
        yield ']'
        if markers is not None:
            del markers[markerid]
    def _iterencode_dict(dct, _current_indent_level):
        # Yield the JSON encoding of a dict, chunk by chunk.
        if not dct:
            yield '{}'
            return
        if markers is not None:
            # Cycle detection, as in _iterencode_list.
            markerid = id(dct)
            if markerid in markers:
                raise ValueError("Circular reference detected")
            markers[markerid] = dct
        yield '{'
        if _indent is not None:
            _current_indent_level += 1
            newline_indent = '\n' + (' ' * (_indent * _current_indent_level))
            item_separator = _item_separator + newline_indent
            yield newline_indent
        else:
            newline_indent = None
            item_separator = _item_separator
        first = True
        if _sort_keys:
            items = sorted(dct.items(), key=lambda kv: kv[0])
        else:
            items = dct.iteritems()
        for key, value in items:
            if isinstance(key, basestring):
                # String keys pass through unchanged.
                pass
            # JavaScript is weakly typed for these, so it makes sense to
            # also allow them.  Many encoders seem to do something like this.
            elif isinstance(key, float):
                key = _floatstr(key)
            elif key is True:
                key = 'true'
            elif key is False:
                key = 'false'
            elif key is None:
                key = 'null'
            elif isinstance(key, (int, long)):
                key = str(key)
            elif _skipkeys:
                continue
            else:
                raise TypeError("key " + repr(key) + " is not a string")
            if first:
                first = False
            else:
                yield item_separator
            yield _encoder(key)
            yield _key_separator
            if isinstance(value, basestring):
                yield _encoder(value)
            elif value is None:
                yield 'null'
            elif value is True:
                yield 'true'
            elif value is False:
                yield 'false'
            elif isinstance(value, (int, long)):
                yield str(value)
            elif isinstance(value, float):
                yield _floatstr(value)
            else:
                # Nested container or custom object: delegate.
                if isinstance(value, (list, tuple)):
                    chunks = _iterencode_list(value, _current_indent_level)
                elif isinstance(value, dict):
                    chunks = _iterencode_dict(value, _current_indent_level)
                else:
                    chunks = _iterencode(value, _current_indent_level)
                for chunk in chunks:
                    yield chunk
        if newline_indent is not None:
            _current_indent_level -= 1
            yield '\n' + (' ' * (_indent * _current_indent_level))
        yield '}'
        if markers is not None:
            del markers[markerid]
    def _iterencode(o, _current_indent_level):
        # Top-level dispatch on the value's type.
        if isinstance(o, basestring):
            yield _encoder(o)
        elif o is None:
            yield 'null'
        elif o is True:
            yield 'true'
        elif o is False:
            yield 'false'
        elif isinstance(o, (int, long)):
            yield str(o)
        elif isinstance(o, float):
            yield _floatstr(o)
        elif isinstance(o, (list, tuple)):
            for chunk in _iterencode_list(o, _current_indent_level):
                yield chunk
        elif isinstance(o, dict):
            for chunk in _iterencode_dict(o, _current_indent_level):
                yield chunk
        else:
            # Unknown type: convert via the user-supplied default() hook,
            # guarding against default() returning a cycle.
            if markers is not None:
                markerid = id(o)
                if markerid in markers:
                    raise ValueError("Circular reference detected")
                markers[markerid] = o
            o = _default(o)
            for chunk in _iterencode(o, _current_indent_level):
                yield chunk
            if markers is not None:
                del markers[markerid]
    return _iterencode
| google/grumpy | third_party/stdlib/json/encoder.py | Python | apache-2.0 | 16,692 | 0.001737 |
import pytest
from mpmath import *
def ae(a, b):
    """Approximate equality: |a - b| within 5 digits of the working precision."""
    tolerance = 10 ** (5 - mp.dps)
    return abs(a - b) < tolerance
def test_basic_integrals():
    # Check elementary definite integrals with quadts/quadgl at several
    # working precisions; the defaults are restored afterwards.
    for prec in (15, 30, 100):
        mp.dps = prec
        cubic = lambda t: t**3 - 3*t**2
        assert ae(quadts(cubic, [-2, 4]), -12)
        assert ae(quadgl(cubic, [-2, 4]), -12)
        assert ae(quadts(sin, [0, pi]), 2)
        assert ae(quadts(sin, [0, 2*pi]), 0)
        assert ae(quadts(exp, [-inf, -1]), 1/e)
        assert ae(quadts(lambda t: exp(-t), [0, inf]), 1)
        assert ae(quadts(lambda t: exp(-t*t), [-inf, inf]), sqrt(pi))
        assert ae(quadts(lambda t: 1/(1 + t*t), [-1, 1]), pi/2)
        assert ae(quadts(lambda t: 1/(1 + t*t), [-inf, inf]), pi)
        assert ae(quadts(lambda t: 2*sqrt(1 - t*t), [-1, 1]), pi)
    # Restore the default working precision for the rest of the suite.
    mp.dps = 15
def test_multiple_intervals():
    # Piecewise integration of sign(x) across an interior break point;
    # the value must agree with 0.5 to within twice the reported error.
    value, err = quad(lambda t: sign(t), [-0.5, 0.9, 1], maxdegree=2, error=True)
    assert abs(value - 0.5) < 2*err
def test_quad_symmetry():
    # An odd integrand over a symmetric interval must give exactly zero
    # for both quadrature rules.
    for rule in (quadts, quadgl):
        assert rule(sin, [-1, 1]) == 0
def test_quad_infinite_mirror():
    # Reversing the orientation of an infinite interval flips the sign.
    assert ae(quad(lambda t: exp(-t*t), [inf, -inf]), -sqrt(pi))
    assert ae(quad(lambda t: exp(t), [0, -inf]), -1)
def test_quadgl_linear():
    # Even at maxdegree=1 the rule must integrate a linear function exactly.
    result = quadgl(lambda t: t, [0, 1], maxdegree=1)
    assert result.ae(0.5)
def test_complex_integration():
    # Integrating the identity along the segment 0 -> 1+j gives (1+j)^2/2 = j.
    result = quadts(lambda z: z, [0, 1 + j])
    assert result.ae(j)
def test_quadosc():
    mp.dps = 15
    # Dirichlet integral: the integral of sin(x)/x over [0, inf) is pi/2.
    integrand = lambda t: sin(t)/t
    assert quadosc(integrand, [0, inf], period=2*pi).ae(pi/2)
# Double integrals
def test_double_trivial():
    # The integral of x over the unit square is 1/2; over the symmetric
    # square [-1, 1]^2 it vanishes by oddness.
    f = lambda x, y: x
    assert ae(quadts(f, [0, 1], [0, 1]), 0.5)
    assert ae(quadts(f, [-1, 1], [-1, 1]), 0.0)
def test_double_1():
    # Double integral of cos(x + y/2) over [-pi/2, pi/2] x [0, pi] is 4.
    integrand = lambda x, y: cos(x + y/2)
    assert ae(quadts(integrand, [-pi/2, pi/2], [0, pi]), 4)
def test_double_2():
    # This double integral evaluates to Euler's constant gamma.
    integrand = lambda x, y: (x - 1)/((1 - x*y)*log(x*y))
    assert ae(quadts(integrand, [0, 1], [0, 1]), euler)
def test_double_3():
    integrand = lambda x, y: 1/sqrt(1 + x*x + y*y)
    expected = 4*log(2 + sqrt(3)) - 2*pi/3
    assert ae(quadts(integrand, [-1, 1], [-1, 1]), expected)
def test_double_4():
    integrand = lambda x, y: 1/(1 - x*x * y*y)
    assert ae(quadts(integrand, [0, 1], [0, 1]), pi**2 / 8)
def test_double_5():
    # Classic Basel-related identity: the integral equals pi^2/6.
    integrand = lambda x, y: 1/(1 - x*y)
    assert ae(quadts(integrand, [0, 1], [0, 1]), pi**2 / 6)
def test_double_6():
    # Separable exponential over the first quadrant integrates to 1.
    integrand = lambda x, y: exp(-(x + y))
    assert ae(quadts(integrand, [0, inf], [0, inf]), 1)
def test_double_7():
    # Gaussian over the whole plane integrates to pi.
    integrand = lambda x, y: exp(-x*x - y*y)
    assert ae(quadts(integrand, [-inf, inf], [-inf, inf]), pi)
# Test integrals from "Experimentation in Mathematics" by Borwein,
# Bailey & Girgensohn
def test_expmath_integrals():
    # Definite integrals with known closed forms, taken from
    # "Experimentation in Mathematics" by Borwein, Bailey & Girgensohn,
    # each checked at several working precisions.
    for prec in [15, 30, 50]:
        mp.dps = prec
        assert ae(quadts(lambda x: x/sinh(x), [0, inf]), pi**2 / 4)
        assert ae(quadts(lambda x: log(x)**2 / (1+x**2), [0, inf]), pi**3 / 8)
        assert ae(quadts(lambda x: (1+x**2)/(1+x**4), [0, inf]), pi/sqrt(2))
        assert ae(quadts(lambda x: log(x)/cosh(x)**2, [0, inf]), log(pi)-2*log(2)-euler)
        assert ae(quadts(lambda x: log(1+x**3)/(1-x+x**2), [0, inf]), 2*pi*log(3)/sqrt(3))
        assert ae(quadts(lambda x: log(x)**2 / (x**2+x+1), [0, 1]), 8*pi**3 / (81*sqrt(3)))
        assert ae(quadts(lambda x: log(cos(x))**2, [0, pi/2]), pi/2 * (log(2)**2+pi**2/12))
        assert ae(quadts(lambda x: x**2 / sin(x)**2, [0, pi/2]), pi*log(2))
        assert ae(quadts(lambda x: x**2/sqrt(exp(x)-1), [0, inf]), 4*pi*(log(2)**2 + pi**2/12))
        assert ae(quadts(lambda x: x*exp(-x)*sqrt(1-exp(-2*x)), [0, inf]), pi*(1+2*log(2))/8)
    # Restore the default precision so later tests are unaffected.
    mp.dps = 15
# Do not reach full accuracy
@pytest.mark.xfail
def test_expmath_fail():
    # Known closed forms that the default quadrature settings do not
    # reproduce to full accuracy; marked xfail rather than removed so
    # any future improvement is noticed.
    assert ae(quadts(lambda x: sqrt(tan(x)), [0, pi/2]), pi*sqrt(2)/2)
    assert ae(quadts(lambda x: atan(x)/(x*sqrt(1-x**2)), [0, 1]), pi*log(1+sqrt(2))/2)
    assert ae(quadts(lambda x: log(1+x**2)/x**2, [0, 1]), pi/2-log(2))
    assert ae(quadts(lambda x: x**2/((1+x**4)*sqrt(1-x**4)), [0, 1]), pi/8)
| fredrik-johansson/mpmath | mpmath/tests/test_quad.py | Python | bsd-3-clause | 3,893 | 0.008477 |
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
import sys
from spack import *
class Slepc(Package):
    """Scalable Library for Eigenvalue Problem Computations."""
    homepage = "http://www.grycap.upv.es/slepc"
    url = "http://slepc.upv.es/download/distrib/slepc-3.6.2.tar.gz"
    git = "https://bitbucket.org/slepc/slepc.git"
    # Registered releases; the second argument is the tarball checksum.
    version('develop', branch='master')
    version('3.9.1', 'e174ea7c127d9161eef976b0288f0c56d443a58d6ab2dc8af1e8bd66f156ce17')
    version('3.9.0', '1f3930db56b4065aaf214ea758ddff1a70bf19d45544cbdfd19d2787db4bfe0b')
    version('3.8.2', '1e7d20d20eb26da307d36017461fe4a55f40e947e232739179dbe6412e22ed13')
    version('3.8.0', 'c58ccc4e852d1da01112466c48efa41f0839649f3a265925788237d76cd3d963')
    version('3.7.4', '2fb782844e3bc265a8d181c3c3e2632a4ca073111c874c654f1365d33ca2eb8a')
    version('3.7.3', '3ef9bcc645a10c1779d56b3500472ceb66df692e389d635087d30e7c46424df9')
    version('3.7.1', '670216f263e3074b21e0623c01bc0f562fdc0bffcd7bd42dd5d8edbe73a532c2')
    version('3.6.3', '384939d009546db37bc05ed81260c8b5ba451093bf891391d32eb7109ccff876')
    version('3.6.2', '2ab4311bed26ccf7771818665991b2ea3a9b15f97e29fd13911ab1293e8e65df')
    variant('arpack', default=True, description='Enables Arpack wrappers')
    variant('blopex', default=False, description='Enables BLOPEX wrappers')
    # NOTE: make sure PETSc and SLEPc use the same python.
    depends_on('python@2.6:2.8', type='build')
    # Cannot mix release and development versions of SLEPc and PETSc:
    # each SLEPc minor series is pinned to the matching PETSc series.
    depends_on('petsc@develop', when='@develop')
    depends_on('petsc@3.9:3.9.99', when='@3.9:3.9.99')
    depends_on('petsc@3.8:3.8.99', when='@3.8:3.8.99')
    depends_on('petsc@3.7:3.7.7', when='@3.7.1:3.7.4')
    depends_on('petsc@3.6.3:3.6.4', when='@3.6.2:3.6.3')
    # MPI flavor of arpack-ng must match the PETSc build.
    depends_on('arpack-ng~mpi', when='+arpack^petsc~mpi~int64')
    depends_on('arpack-ng+mpi', when='+arpack^petsc+mpi~int64')
    patch('install_name_371.patch', when='@3.7.1')
    # Arpack can not be used with 64bit integers.
    conflicts('+arpack', when='^petsc+int64')
    # BLOPEX cannot be installed separately; SLEPc expects its sources in
    # its own externalpackages directory at configure time.
    resource(name='blopex',
             url='http://slepc.upv.es/download/external/blopex-1.1.2.tar.gz',
             sha256='0081ee4c4242e635a8113b32f655910ada057c59043f29af4b613508a762f3ac',
             destination=join_path('installed-arch-' + sys.platform + '-c-opt',
                                   'externalpackages'),
             when='+blopex')
    def install(self, spec, prefix):
        """Configure, build and (optionally test then) install SLEPc."""
        # set SLEPC_DIR for installation
        # Note that one should set the current (temporary) directory instead
        # its symlink in spack/stage/ !
        os.environ['SLEPC_DIR'] = os.getcwd()
        options = []
        if '+arpack' in spec:
            options.extend([
                '--with-arpack-dir=%s' % spec['arpack-ng'].prefix.lib,
            ])
            # Link against the serial or parallel ARPACK library names.
            if 'arpack-ng~mpi' in spec:
                options.extend([
                    '--with-arpack-flags=-larpack'
                ])
            else:
                options.extend([
                    '--with-arpack-flags=-lparpack,-larpack'
                ])
        # It isn't possible to install BLOPEX separately and link to it;
        # BLOPEX has to be downloaded with SLEPc at configure time
        if '+blopex' in spec:
            options.append('--download-blopex')
        configure('--prefix=%s' % prefix, *options)
        # SLEPc's build is not parallel-make safe; pass the job count in.
        make('MAKE_NP=%s' % make_jobs, parallel=False)
        if self.run_tests:
            make('test', parallel=False)
        make('install', parallel=False)
    def setup_dependent_environment(self, spack_env, run_env, dependent_spec):
        """Expose SLEPC_DIR to packages that build against SLEPc."""
        # set up SLEPC_DIR for everyone using SLEPc package
        spack_env.set('SLEPC_DIR', self.prefix)
| mfherbst/spack | var/spack/repos/builtin/packages/slepc/package.py | Python | lgpl-2.1 | 4,929 | 0.002435 |
"""Test backend
$Id$"""
import os
import Image
from imagescanner.backends import base
class ScannerManager(base.ScannerManager):
    """Device manager for the test backend.

    Discovery is simulated: refreshing always yields exactly one fake
    scanner device.
    """
    def _refresh(self):
        # Rebuild the device list from scratch with the single fake device.
        self._devices = [Scanner('test-0', "Pyscan", "Test Device")]
class Scanner(base.Scanner):
    """Fake scanner device that "scans" by returning a bundled test image."""
    def __init__(self, scanner_id, manufacturer, name):
        self.id = scanner_id
        self.manufacturer = manufacturer
        self.name = name
    def __repr__(self):
        fields = (self.id, self.manufacturer, self.name)
        return "<%s: %s - %s>" % fields
    def scan(self, dpi=200):
        # dpi is accepted for interface compatibility but has no effect:
        # a fixed image shipped alongside this module is always returned.
        data_dir = os.path.join(os.path.dirname(__file__), 'data')
        return Image.open(os.path.join(data_dir, 'img1.tiff'))
    def status(self):
        # The fake device reports no status information.
        pass
| Eveler/libs | __Python__/edv/edv/imagescanner/backends/test/__init__.py | Python | gpl-3.0 | 751 | 0.007989 |
import numpy as np
def plane_error(results, target):
    """
    Computes the angle between the target orbital plane and the plane
    actually achieved by the final powered stage.

    :param results: Results struct as output by flight_manager (NOT
                    flight_sim_3d); the achieved orbit is read from
                    ``results.powered[results.n - 1].orbit`` (``inc`` and
                    ``lan`` in degrees).
    :param target: Target struct as output by launch_targeting; must expose
                   a unit ``normal`` vector of the desired plane.
    :return: Angle between the two orbital planes, in degrees.
    """
    final_orbit = results.powered[results.n - 1].orbit
    # Convert once; both rotation matrices reuse these values.
    inc = np.deg2rad(final_orbit.inc)
    lan = np.deg2rad(final_orbit.lan)
    # Rotation about the x axis by the achieved inclination.
    Rx = np.array([[1.0, 0.0, 0.0],
                   [0.0, np.cos(inc), -np.sin(inc)],
                   [0.0, np.sin(inc), np.cos(inc)]])
    # Rotation about the z axis by the achieved longitude of ascending node.
    Rz = np.array([[np.cos(lan), -np.sin(lan), 0.0],
                   [np.sin(lan), np.cos(lan), 0.0],
                   [0.0, 0.0, 1.0]])
    # Normal of the achieved plane: rotate the reference normal (0, 0, -1).
    reached = np.matmul(Rz, np.matmul(Rx, np.array([0.0, 0.0, -1.0])))
    # Clamp to arccos's domain: rounding error can push the dot product of
    # two unit vectors slightly outside [-1, 1], which would yield NaN for
    # (nearly) identical planes.
    cos_angle = np.clip(np.vdot(target.normal, reached), -1.0, 1.0)
    return np.rad2deg(np.arccos(cos_angle))
| ubik2/PEGAS-kRPC | kRPC/plane_error.py | Python | mit | 987 | 0.00304 |
import ujson
from mock import patch, MagicMock
from typing import Dict, Optional, Text
from zerver.models import Message
from zerver.lib.webhooks.git import COMMITS_LIMIT
from zerver.lib.test_classes import WebhookTestCase
class GithubWebhookTest(WebhookTestCase):
STREAM_NAME = 'github'
URL_TEMPLATE = "/api/v1/external/github?stream={stream}&api_key={api_key}"
FIXTURE_DIR_NAME = 'github_webhook'
EXPECTED_SUBJECT_REPO_EVENTS = u"public-repo"
EXPECTED_SUBJECT_ISSUE_EVENTS = u"public-repo / Issue #2 Spelling error in the README file"
EXPECTED_SUBJECT_PR_EVENTS = u"public-repo / PR #1 Update the README with new information"
EXPECTED_SUBJECT_DEPLOYMENT_EVENTS = u"public-repo / Deployment on production"
EXPECTED_SUBJECT_ORGANIZATION_EVENTS = u"baxterandthehackers organization"
EXPECTED_SUBJECT_BRANCH_EVENTS = u"public-repo / changes"
EXPECTED_SUBJECT_WIKI_EVENTS = u"public-repo / Wiki Pages"
def test_ping_event(self):
# type: () -> None
expected_message = u"GitHub webhook has been successfully configured by TomaszKolek"
self.send_and_test_stream_message('ping', self.EXPECTED_SUBJECT_REPO_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='ping')
def test_ping_organization_event(self):
# type: () -> None
expected_message = u"GitHub webhook has been successfully configured by eeshangarg"
self.send_and_test_stream_message('ping_organization', 'zulip-test-org', expected_message, HTTP_X_GITHUB_EVENT='ping')
def test_push_delete_branch(self):
# type: () -> None
expected_message = u"eeshangarg [deleted](https://github.com/eeshangarg/public-repo/compare/2e8cf535fb38...000000000000) the branch feature."
self.send_and_test_stream_message('push_delete_branch', u"public-repo / feature", expected_message, HTTP_X_GITHUB_EVENT='push')
def test_push_local_branch_without_commits(self):
# type: () -> None
expected_message = u"eeshangarg [pushed](https://github.com/eeshangarg/public-repo/compare/feature) the branch feature."
self.send_and_test_stream_message('push_local_branch_without_commits', u"public-repo / feature", expected_message, HTTP_X_GITHUB_EVENT='push')
def test_push_1_commit(self):
# type: () -> None
expected_message = u"baxterthehacker [pushed](https://github.com/baxterthehacker/public-repo/compare/9049f1265b7d...0d1a26e67d8f) 1 commit to branch changes.\n\n* Update README.md ([0d1a26e](https://github.com/baxterthehacker/public-repo/commit/0d1a26e67d8f5eaf1f6ba5c57fc3c7d91ac0fd1c))"
self.send_and_test_stream_message('push_1_commit', self.EXPECTED_SUBJECT_BRANCH_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='push')
def test_push_1_commit_without_username(self):
# type: () -> None
expected_message = u"eeshangarg [pushed](https://github.com/eeshangarg/public-repo/compare/0383613da871...2e8cf535fb38) 1 commit to branch changes. Commits by John Snow (1).\n\n* Update the README ([2e8cf53](https://github.com/eeshangarg/public-repo/commit/2e8cf535fb38a3dab2476cdf856efda904ad4c94))"
self.send_and_test_stream_message('push_1_commit_without_username', self.EXPECTED_SUBJECT_BRANCH_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='push')
def test_push_1_commit_filtered_by_branches(self):
# type: () -> None
self.url = self.build_webhook_url('master,changes')
expected_message = u"baxterthehacker [pushed](https://github.com/baxterthehacker/public-repo/compare/9049f1265b7d...0d1a26e67d8f) 1 commit to branch changes.\n\n* Update README.md ([0d1a26e](https://github.com/baxterthehacker/public-repo/commit/0d1a26e67d8f5eaf1f6ba5c57fc3c7d91ac0fd1c))"
self.send_and_test_stream_message('push_1_commit', self.EXPECTED_SUBJECT_BRANCH_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='push')
def test_push_multiple_comitters(self):
# type: () -> None
commits_info = u'* Update README.md ([0d1a26e](https://github.com/baxterthehacker/public-repo/commit/0d1a26e67d8f5eaf1f6ba5c57fc3c7d91ac0fd1c))\n'
expected_message = u"""baxterthehacker [pushed](https://github.com/baxterthehacker/public-repo/compare/9049f1265b7d...0d1a26e67d8f) 6 commits to branch changes. Commits by Tomasz (3), Ben (2) and baxterthehacker (1).\n\n{}* Update README.md ([0d1a26e](https://github.com/baxterthehacker/public-repo/commit/0d1a26e67d8f5eaf1f6ba5c57fc3c7d91ac0fd1c))""".format(commits_info * 5)
self.send_and_test_stream_message('push_multiple_committers', self.EXPECTED_SUBJECT_BRANCH_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='push')
def test_push_multiple_comitters_with_others(self):
# type: () -> None
commits_info = u'* Update README.md ([0d1a26e](https://github.com/baxterthehacker/public-repo/commit/0d1a26e67d8f5eaf1f6ba5c57fc3c7d91ac0fd1c))\n'
expected_message = u"""baxterthehacker [pushed](https://github.com/baxterthehacker/public-repo/compare/9049f1265b7d...0d1a26e67d8f) 10 commits to branch changes. Commits by Tomasz (4), Ben (3), James (2) and others (1).\n\n{}* Update README.md ([0d1a26e](https://github.com/baxterthehacker/public-repo/commit/0d1a26e67d8f5eaf1f6ba5c57fc3c7d91ac0fd1c))""".format(commits_info * 9)
self.send_and_test_stream_message('push_multiple_committers_with_others', self.EXPECTED_SUBJECT_BRANCH_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='push')
def test_push_multiple_comitters_filtered_by_branches(self):
# type: () -> None
self.url = self.build_webhook_url('master,changes')
commits_info = u'* Update README.md ([0d1a26e](https://github.com/baxterthehacker/public-repo/commit/0d1a26e67d8f5eaf1f6ba5c57fc3c7d91ac0fd1c))\n'
expected_message = u"""baxterthehacker [pushed](https://github.com/baxterthehacker/public-repo/compare/9049f1265b7d...0d1a26e67d8f) 6 commits to branch changes. Commits by Tomasz (3), Ben (2) and baxterthehacker (1).\n\n{}* Update README.md ([0d1a26e](https://github.com/baxterthehacker/public-repo/commit/0d1a26e67d8f5eaf1f6ba5c57fc3c7d91ac0fd1c))""".format(commits_info * 5)
self.send_and_test_stream_message('push_multiple_committers', self.EXPECTED_SUBJECT_BRANCH_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='push')
def test_push_multiple_comitters_with_others_filtered_by_branches(self):
# type: () -> None
self.url = self.build_webhook_url('master,changes')
commits_info = u'* Update README.md ([0d1a26e](https://github.com/baxterthehacker/public-repo/commit/0d1a26e67d8f5eaf1f6ba5c57fc3c7d91ac0fd1c))\n'
expected_message = u"""baxterthehacker [pushed](https://github.com/baxterthehacker/public-repo/compare/9049f1265b7d...0d1a26e67d8f) 10 commits to branch changes. Commits by Tomasz (4), Ben (3), James (2) and others (1).\n\n{}* Update README.md ([0d1a26e](https://github.com/baxterthehacker/public-repo/commit/0d1a26e67d8f5eaf1f6ba5c57fc3c7d91ac0fd1c))""".format(commits_info * 9)
self.send_and_test_stream_message('push_multiple_committers_with_others', self.EXPECTED_SUBJECT_BRANCH_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='push')
def test_push_50_commits(self):
# type: () -> None
commit_info = "* Update README.md ([0d1a26e](https://github.com/baxterthehacker/public-repo/commit/0d1a26e67d8f5eaf1f6ba5c57fc3c7d91ac0fd1c))\n"
expected_message = u"baxterthehacker [pushed](https://github.com/baxterthehacker/public-repo/compare/9049f1265b7d...0d1a26e67d8f) 50 commits to branch changes.\n\n{}[and 30 more commit(s)]".format(
commit_info * COMMITS_LIMIT
)
self.send_and_test_stream_message('push_50_commits', self.EXPECTED_SUBJECT_BRANCH_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='push')
def test_push_50_commits_filtered_by_branches(self):
# type: () -> None
self.url = self.build_webhook_url(branches='master,changes')
commit_info = "* Update README.md ([0d1a26e](https://github.com/baxterthehacker/public-repo/commit/0d1a26e67d8f5eaf1f6ba5c57fc3c7d91ac0fd1c))\n"
expected_message = u"baxterthehacker [pushed](https://github.com/baxterthehacker/public-repo/compare/9049f1265b7d...0d1a26e67d8f) 50 commits to branch changes.\n\n{}[and 30 more commit(s)]".format(
commit_info * COMMITS_LIMIT
)
self.send_and_test_stream_message('push_50_commits', self.EXPECTED_SUBJECT_BRANCH_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='push')
def test_commit_comment_msg(self):
# type: () -> None
expected_message = u"baxterthehacker [commented](https://github.com/baxterthehacker/public-repo/commit/9049f1265b7d61be4a8904a9a27120d2064dab3b#commitcomment-11056394) on [9049f12](https://github.com/baxterthehacker/public-repo/commit/9049f1265b7d61be4a8904a9a27120d2064dab3b)\n~~~ quote\nThis is a really good change! :+1:\n~~~"
self.send_and_test_stream_message('commit_comment', self.EXPECTED_SUBJECT_REPO_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='commit_comment')
def test_create_msg(self):
# type: () -> None
expected_message = u"baxterthehacker created tag 0.0.1"
self.send_and_test_stream_message('create', self.EXPECTED_SUBJECT_REPO_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='create')
def test_delete_msg(self):
# type: () -> None
expected_message = u"baxterthehacker deleted tag simple-tag"
self.send_and_test_stream_message('delete', self.EXPECTED_SUBJECT_REPO_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='delete')
def test_deployment_msg(self):
# type: () -> None
expected_message = u"baxterthehacker created new deployment"
self.send_and_test_stream_message('deployment', self.EXPECTED_SUBJECT_DEPLOYMENT_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='deployment')
def test_deployment_status_msg(self):
# type: () -> None
expected_message = u"Deployment changed status to success"
self.send_and_test_stream_message('deployment_status', self.EXPECTED_SUBJECT_DEPLOYMENT_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='deployment_status')
def test_fork_msg(self):
# type: () -> None
expected_message = u"baxterandthehackers forked [public-repo](https://github.com/baxterandthehackers/public-repo)"
self.send_and_test_stream_message('fork', self.EXPECTED_SUBJECT_REPO_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='fork')
def test_issue_comment_msg(self):
# type: () -> None
expected_message = u"baxterthehacker [commented](https://github.com/baxterthehacker/public-repo/issues/2#issuecomment-99262140) on [Issue #2](https://github.com/baxterthehacker/public-repo/issues/2)\n\n~~~ quote\nYou are totally right! I'll get this fixed right away.\n~~~"
self.send_and_test_stream_message('issue_comment', self.EXPECTED_SUBJECT_ISSUE_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='issue_comment')
def test_issue_msg(self):
# type: () -> None
expected_message = u"baxterthehacker opened [Issue #2](https://github.com/baxterthehacker/public-repo/issues/2)\n\n~~~ quote\nIt looks like you accidently spelled 'commit' with two 't's.\n~~~"
self.send_and_test_stream_message('issue', self.EXPECTED_SUBJECT_ISSUE_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='issues')
def test_membership_msg(self):
# type: () -> None
expected_message = u"baxterthehacker added [kdaigle](https://github.com/kdaigle) to Contractors team"
self.send_and_test_stream_message('membership', self.EXPECTED_SUBJECT_ORGANIZATION_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='membership')
def test_member_msg(self):
# type: () -> None
expected_message = u"baxterthehacker added [octocat](https://github.com/octocat) to [public-repo](https://github.com/baxterthehacker/public-repo)"
self.send_and_test_stream_message('member', self.EXPECTED_SUBJECT_REPO_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='member')
def test_pull_request_opened_msg(self):
# type: () -> None
expected_message = u"baxterthehacker opened [PR](https://github.com/baxterthehacker/public-repo/pull/1)\nfrom `changes` to `master`\n\n~~~ quote\nThis is a pretty simple change that we need to pull into master.\n~~~"
self.send_and_test_stream_message('opened_pull_request', self.EXPECTED_SUBJECT_PR_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='pull_request')
def test_pull_request_synchronized_msg(self):
# type: () -> None
expected_message = u"baxterthehacker updated [PR](https://github.com/baxterthehacker/public-repo/pull/1)\nfrom `changes` to `master`"
self.send_and_test_stream_message('synchronized_pull_request', self.EXPECTED_SUBJECT_PR_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='pull_request')
def test_pull_request_closed_msg(self):
# type: () -> None
expected_message = u"baxterthehacker closed without merge [PR](https://github.com/baxterthehacker/public-repo/pull/1)"
self.send_and_test_stream_message('closed_pull_request', self.EXPECTED_SUBJECT_PR_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='pull_request')
def test_pull_request_merged_msg(self):
# type: () -> None
expected_message = u"baxterthehacker merged [PR](https://github.com/baxterthehacker/public-repo/pull/1)"
self.send_and_test_stream_message('merged_pull_request', self.EXPECTED_SUBJECT_PR_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='pull_request')
    def test_public_msg(self):
        # type: () -> None
        # 'public' event: a private repository was made public.
        expected_message = u"baxterthehacker made [the repository](https://github.com/baxterthehacker/public-repo) public"
        self.send_and_test_stream_message('public', self.EXPECTED_SUBJECT_REPO_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='public')
    def test_wiki_pages_msg(self):
        # type: () -> None
        # 'gollum' event: one bullet per wiki page change in the payload.
        expected_message = u"jasonrudolph:\n* created [Home](https://github.com/baxterthehacker/public-repo/wiki/Home)\n* created [Home](https://github.com/baxterthehacker/public-repo/wiki/Home)"
        self.send_and_test_stream_message('wiki_pages', self.EXPECTED_SUBJECT_WIKI_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='gollum')
    def test_watch_msg(self):
        # type: () -> None
        # GitHub's 'watch' event actually fires on starring a repository.
        expected_message = u"baxterthehacker starred [the repository](https://github.com/baxterthehacker/public-repo)"
        self.send_and_test_stream_message('watch_repository', self.EXPECTED_SUBJECT_REPO_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='watch')
    def test_repository_msg(self):
        # type: () -> None
        # 'repository' event: repository created in an organization.
        expected_message = u"baxterthehacker created [the repository](https://github.com/baxterandthehackers/public-repo)"
        self.send_and_test_stream_message('repository', self.EXPECTED_SUBJECT_REPO_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='repository')
    def test_team_add_msg(self):
        # type: () -> None
        # 'team_add' event: a repository is added to a team.
        expected_message = u"[The repository](https://github.com/baxterandthehackers/public-repo) was added to team github"
        self.send_and_test_stream_message('team_add', self.EXPECTED_SUBJECT_REPO_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='team_add')
    def test_release_msg(self):
        # type: () -> None
        # 'release' event: a release was published.
        expected_message = u"baxterthehacker published [the release](https://github.com/baxterthehacker/public-repo/releases/tag/0.0.1)"
        self.send_and_test_stream_message('release', self.EXPECTED_SUBJECT_REPO_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='release')
    def test_page_build_msg(self):
        # type: () -> None
        # NOTE(review): "trigerred" matches the webhook's actual output string;
        # fixing the spelling requires a matching change in the view, not here.
        expected_message = u"Github Pages build, trigerred by baxterthehacker, is built"
        self.send_and_test_stream_message('page_build', self.EXPECTED_SUBJECT_REPO_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='page_build')
    def test_status_msg(self):
        # type: () -> None
        # 'status' event: commit CI status change.
        expected_message = u"[9049f12](https://github.com/baxterthehacker/public-repo/commit/9049f1265b7d61be4a8904a9a27120d2064dab3b) changed its status to success"
        self.send_and_test_stream_message('status', self.EXPECTED_SUBJECT_REPO_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='status')
    def test_pull_request_review_msg(self):
        # type: () -> None
        # 'pull_request_review' event: a review was submitted on a PR.
        expected_message = u"baxterthehacker submitted [PR Review](https://github.com/baxterthehacker/public-repo/pull/1#pullrequestreview-2626884)"
        self.send_and_test_stream_message('pull_request_review', self.EXPECTED_SUBJECT_PR_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='pull_request_review')
    def test_pull_request_review_comment_msg(self):
        # type: () -> None
        # Review comments quote the comment body below the link.
        expected_message = u"baxterthehacker created [PR Review Comment](https://github.com/baxterthehacker/public-repo/pull/1#discussion_r29724692)\n\n~~~ quote\nMaybe you should use more emojji on this line.\n~~~"
        self.send_and_test_stream_message('pull_request_review_comment', self.EXPECTED_SUBJECT_PR_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='pull_request_review_comment')
    def test_push_tag_msg(self):
        # type: () -> None
        # A 'push' whose ref is a tag renders as a tag push, not commits.
        expected_message = u"baxterthehacker pushed tag abc"
        self.send_and_test_stream_message('push_tag', self.EXPECTED_SUBJECT_REPO_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='push')
    def test_pull_request_edited_msg(self):
        # type: () -> None
        # 'edited' action: PR title/body/base changed.
        expected_message = u"baxterthehacker edited [PR](https://github.com/baxterthehacker/public-repo/pull/1)\nfrom `changes` to `master`"
        self.send_and_test_stream_message('edited_pull_request', self.EXPECTED_SUBJECT_PR_EVENTS, expected_message,
                                          HTTP_X_GITHUB_EVENT='pull_request')
    def test_pull_request_assigned_msg(self):
        # type: () -> None
        # 'assigned' action includes the assignee's login.
        expected_message = u"baxterthehacker assigned [PR](https://github.com/baxterthehacker/public-repo/pull/1) to baxterthehacker"
        self.send_and_test_stream_message('assigned_pull_request', self.EXPECTED_SUBJECT_PR_EVENTS, expected_message,
                                          HTTP_X_GITHUB_EVENT='pull_request')
    def test_pull_request_unassigned_msg(self):
        # type: () -> None
        # Uses its own fixture repo, so the subject is spelled out inline
        # instead of using the shared EXPECTED_SUBJECT_PR_EVENTS constant.
        expected_message = u"eeshangarg unassigned [PR](https://github.com/zulip-test-org/helloworld/pull/1)"
        self.send_and_test_stream_message(
            'unassigned_pull_request',
            'helloworld / PR #1 Mention that Zulip rocks!',
            expected_message,
            HTTP_X_GITHUB_EVENT='pull_request'
        )
    @patch('zerver.webhooks.github_webhook.view.check_send_message')
    def test_pull_request_labeled_ignore(self, check_send_message_mock):
        # type: (MagicMock) -> None
        # 'labeled' PR actions must be silently ignored: accepted (200) but
        # no message sent.
        payload = self.get_body('labeled_pull_request')
        result = self.client_post(self.url, payload, HTTP_X_GITHUB_EVENT='pull_request', content_type="application/json")
        self.assertFalse(check_send_message_mock.called)
        self.assert_json_success(result)
    @patch('zerver.webhooks.github_webhook.view.check_send_message')
    def test_pull_request_unlabeled_ignore(self, check_send_message_mock):
        # type: (MagicMock) -> None
        # 'unlabeled' PR actions are ignored: success response, no message.
        payload = self.get_body('unlabeled_pull_request')
        result = self.client_post(self.url, payload, HTTP_X_GITHUB_EVENT='pull_request', content_type="application/json")
        self.assertFalse(check_send_message_mock.called)
        self.assert_json_success(result)
    @patch('zerver.webhooks.github_webhook.view.check_send_message')
    def test_pull_request_request_review_ignore(self, check_send_message_mock):
        # type: (MagicMock) -> None
        # 'review_requested' PR actions are ignored: success response, no message.
        payload = self.get_body('request_review_pull_request')
        result = self.client_post(self.url, payload, HTTP_X_GITHUB_EVENT='pull_request', content_type="application/json")
        self.assertFalse(check_send_message_mock.called)
        self.assert_json_success(result)
    @patch('zerver.webhooks.github_webhook.view.check_send_message')
    def test_pull_request_request_review_remove_ignore(self, check_send_message_mock):
        # type: (MagicMock) -> None
        # 'review_request_removed' PR actions are ignored as well.
        payload = self.get_body('request_review_removed_pull_request')
        result = self.client_post(self.url, payload, HTTP_X_GITHUB_EVENT='pull_request', content_type="application/json")
        self.assertFalse(check_send_message_mock.called)
        self.assert_json_success(result)
    @patch('zerver.webhooks.github_webhook.view.check_send_message')
    def test_push_1_commit_filtered_by_branches_ignore(self, check_send_message_mock):
        # type: (MagicMock) -> None
        # Push to a branch outside the configured 'branches' filter: no message.
        self.url = self.build_webhook_url(branches='master,development')
        payload = self.get_body('push_1_commit')
        result = self.client_post(self.url, payload, HTTP_X_GITHUB_EVENT='push', content_type="application/json")
        self.assertFalse(check_send_message_mock.called)
        self.assert_json_success(result)
    @patch('zerver.webhooks.github_webhook.view.check_send_message')
    def test_push_50_commits_filtered_by_branches_ignore(self, check_send_message_mock):
        # type: (MagicMock) -> None
        # Branch filtering applies regardless of the number of commits pushed.
        self.url = self.build_webhook_url(branches='master,development')
        payload = self.get_body('push_50_commits')
        result = self.client_post(self.url, payload, HTTP_X_GITHUB_EVENT='push', content_type="application/json")
        self.assertFalse(check_send_message_mock.called)
        self.assert_json_success(result)
    @patch('zerver.webhooks.github_webhook.view.check_send_message')
    def test_push_multiple_comitters_filtered_by_branches_ignore(self, check_send_message_mock):
        # type: (MagicMock) -> None
        # Branch filtering also silences multi-committer pushes.
        self.url = self.build_webhook_url(branches='master,development')
        payload = self.get_body('push_multiple_committers')
        result = self.client_post(self.url, payload, HTTP_X_GITHUB_EVENT='push', content_type="application/json")
        self.assertFalse(check_send_message_mock.called)
        self.assert_json_success(result)
    @patch('zerver.webhooks.github_webhook.view.check_send_message')
    def test_push_multiple_comitters_with_others_filtered_by_branches_ignore(self, check_send_message_mock):
        # type: (MagicMock) -> None
        # Same as above, with the 'others' rollup in the committer list.
        self.url = self.build_webhook_url(branches='master,development')
        payload = self.get_body('push_multiple_committers_with_others')
        result = self.client_post(self.url, payload, HTTP_X_GITHUB_EVENT='push', content_type="application/json")
        self.assertFalse(check_send_message_mock.called)
        self.assert_json_success(result)
| verma-varsha/zulip | zerver/webhooks/github_webhook/tests.py | Python | apache-2.0 | 22,570 | 0.004386 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Created by heyu on 17/3/1
"""
try:
    from setuptools import setup
except ImportError:
    # Narrowed from a bare `except:`, which would also have swallowed
    # KeyboardInterrupt/SystemExit; only a missing setuptools should
    # trigger the distutils fallback.
    from distutils.core import setup

setup(
    name="pyutils",
    version="0.0.1",
    author="heyu",
    author_email="gannicus_yu@163.com",
    description="easy and convenient tools written in Python",
    long_description=__doc__,
    install_requires=["MySQL-python", "docopt"],
    url="https://github.com/gannicus-yu/pyutils",
    packages=["myutils"],
    platforms=['all'],
    # test_suite="tests"
)
| gannicus-yu/pyutils | setup.py | Python | apache-2.0 | 546 | 0.001832 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
from common_processing import *
import tarfile
import sys
import glob
def untar(ftp_link, out_folder):
    """Extract the archive named by the basename of *ftp_link* from
    *out_folder* (where it was previously downloaded) into *out_folder*.

    Uses a context manager so the archive file is closed even when
    extractall() raises (the original leaked the handle on error).
    """
    with tarfile.open(out_folder + ftp_link.split("/")[-1]) as tar:
        tar.extractall(path=out_folder)
def process_nodes_dmp(out_folder):
    """
    extract data from file:nodes.dmp
    create 2 map_tables:
        map_organism2organism  (tax_id, ancestor_tax_id, distance)
        map_organism2rank      (tax_id, rank)
    """
    # Accumulate rows in lists and join once: repeated string += is
    # quadratic on a multi-million-line taxonomy dump.
    organism_rows = []
    rank_rows = []
    parent_tax_dict = dict()
    tax_tree_dict = dict()
    # Text mode ('r'/'w' instead of 'rb'/'wb'): the dump is plain text and
    # the original str-on-binary mix breaks under Python 3.
    with open(out_folder + 'nodes.dmp') as f:
        for line in f:
            # nodes.dmp fields are separated by "\t|\t"; only the first
            # three (tax_id, parent_tax_id, rank) are used here.
            fields = line.split("\t|\t")
            tax_id, parent_tax_id, rank = fields[0], fields[1], fields[2]
            rank_rows.append(tax_id + "\t" + rank + "\n")
            parent_tax_dict.setdefault(tax_id, parent_tax_id)
    # Walk each node up to the root (the root's parent is itself) to build
    # the complete ancestor chain per node.
    for tax_id, parent_tax_id in parent_tax_dict.items():
        tax_tree_dict.setdefault(tax_id, []).append(parent_tax_id)
        while parent_tax_dict[tax_tree_dict[tax_id][-1]] != tax_tree_dict[tax_id][-1]:
            tax_tree_dict[tax_id].append(parent_tax_dict[tax_tree_dict[tax_id][-1]])
    for tax_id, parent_tax_ids in tax_tree_dict.items():
        # Distance 0 to itself, then 1, 2, ... up the ancestor chain.
        organism_rows.append('{}\t{}\t{}\n'.format(tax_id, tax_id, 0))
        for level, parent_tax_id in enumerate(parent_tax_ids):
            organism_rows.append('{}\t{}\t{}\n'.format(tax_id, parent_tax_id, level + 1))
    with open(out_folder + "map_organism2organism.tsv", "w") as f:
        f.write(''.join(organism_rows))
    with open(out_folder + "map_organism2rank.tsv", "w") as f:
        f.write(''.join(rank_rows))
def process_names_dmp(out_folder):
    """
    extract data from file:names.dmp
        map_symbol2organism  (tax_id, name, name_class)
    name type included: scientific name, synonym, acronym, anamorph, misspelling, misnomer, common name,
    """
    # List + join instead of quadratic string +=; text mode instead of the
    # original 'rb'/'wb' mix (which breaks under Python 3); the unused
    # `non_unique_name` set is dropped.
    rows = []
    with open(out_folder + "names.dmp") as f:
        for line in f:
            tax_id, name_txt, unique_name, name_class = line.split("\t|\t")
            # name_class still carries the "\t|\n" line terminator;
            # split("|")[0] drops the "|" and the tab-to-newline replace
            # turns the leftover tab into the row's newline.
            rows.append("{}\t{}\t{}".format(tax_id, name_txt, name_class.split("|")[0].replace("\t", "\n")))
    with open(out_folder + "map_symbol2organism.tsv", "w") as f:
        f.write(''.join(rows))
def argument_parser():
    """Build the command-line parser for this script and parse sys.argv."""
    parser = argparse.ArgumentParser(description="download the Taxonomy PubMed from ftp")
    parser.add_argument("-f", "--ftp_link", type=str, help="ftp url link to the file")
    parser.add_argument("-o", "--out_folder", type=str, help="target folder of downloaded file")
    return parser.parse_args()
if __name__ == "__main__":
    # Script entry point: download the NCBI taxdump, unpack it, and emit
    # the mapping tables.
    args = argument_parser()
    # print() call form: the bare `print "..."` statement is a syntax error
    # under Python 3 and the parenthesized form is valid on both 2 and 3.
    print("processing Taxonomy data")
    ftp_download(args.ftp_link, args.out_folder)
    untar(args.ftp_link, args.out_folder)
    process_nodes_dmp(args.out_folder)
    process_names_dmp(args.out_folder)
| TurkuNLP/CAFA3 | sequence_features/process_NCBI_Taxonomy.py | Python | lgpl-3.0 | 3,092 | 0.005498 |
#Copyright ReportLab Europe Ltd. 2000-2004
#see license.txt for license details
#history http://www.reportlab.co.uk/cgi-bin/viewcvs.cgi/public/reportlab/trunk/reportlab/platypus/tableofcontents.py
__version__=''' $Id: tableofcontents.py 3627 2010-01-06 14:06:36Z rgbecker $ '''
__doc__="""Experimental class to generate Tables of Contents easily
This module defines a single TableOfContents() class that can be used to
create automatically a table of tontents for Platypus documents like
this:
story = []
toc = TableOfContents()
story.append(toc)
# some heading paragraphs here...
doc = MyTemplate(path)
doc.multiBuild(story)
The data needed to create the table is a list of (level, text, pageNum)
triplets, plus some paragraph styles for each level of the table itself.
The triplets will usually be created in a document template's method
like afterFlowable(), making notification calls using the notify()
method with appropriate data like this:
(level, text, pageNum) = ...
self.notify('TOCEntry', (level, text, pageNum))
Optionally the list can contain four items in which case the last item
is a destination key which the entry should point to. A bookmark
with this key needs to be created first like this:
key = 'ch%s' % self.seq.nextf('chapter')
self.canv.bookmarkPage(key)
self.notify('TOCEntry', (level, text, pageNum, key))
As the table of contents need at least two passes over the Platypus
story which is why the moultiBuild0() method must be called.
The level<NUMBER>ParaStyle variables are the paragraph styles used
to format the entries in the table of contents. Their indentation
is calculated like this: each entry starts at a multiple of some
constant named delta. If one entry spans more than one line, all
lines after the first are indented by the same constant named
epsilon.
"""
from reportlab.lib import enums
from reportlab.lib.units import cm
from reportlab.lib.utils import commasplit
from reportlab.lib.styles import ParagraphStyle
from reportlab.platypus.paragraph import Paragraph
from reportlab.platypus.doctemplate import IndexingFlowable
from reportlab.platypus.tables import TableStyle, Table
from reportlab.platypus.flowables import Spacer, Flowable
from reportlab.pdfbase.pdfmetrics import stringWidth
from reportlab.pdfgen import canvas
from base64 import encodestring, decodestring
try:
import cPickle as pickle
except ImportError:
import pickle
dumps = pickle.dumps
loads = pickle.loads
def unquote(txt):
    """Undo XML escaping of the apostrophe and double-quote entities in *txt*."""
    from xml.sax.saxutils import unescape
    extra_entities = {"&apos;": "'", "&quot;": '"'}
    return unescape(txt, extra_entities)
# Compatibility shim: very old Pythons lack the builtin `set`; fall back to
# a minimal list-based stand-in that supports only the .add() behaviour
# this module relies on (no duplicate entries).
try:
    set
except:
    class set(list):
        def add(self,x):
            if x not in self:
                list.append(self,x)
def drawPageNumbers(canvas, style, pages, availWidth, availHeight, dot=' . '):
    '''
    Draws pagestr on the canvas using the given style.
    If dot is None, pagestr is drawn at the current position in the canvas.
    If dot is a string, pagestr is drawn right-aligned. If the string is not empty,
    the gap is filled with it.

    *pages* is a list of (pageNumber, destinationKeyOrNone) pairs; entries
    with a key also get a clickable link rectangle over the number.
    '''
    pages.sort()
    pagestr = ', '.join([str(p) for p, _ in pages])
    # NOTE(review): relies on canvas._curr_tx_info, a private hook populated
    # while a Paragraph is being drawn — confirm it is set by all callers.
    x, y = canvas._curr_tx_info['cur_x'], canvas._curr_tx_info['cur_y']
    pagestrw = stringWidth(pagestr, style.fontName, style.fontSize)
    # basestring: this module targets Python 2.
    if isinstance(dot, basestring):
        if dot:
            # Fill the gap with as many copies of `dot` as fit before the
            # right-aligned page numbers.
            dotw = stringWidth(dot, style.fontName, style.fontSize)
            dotsn = int((availWidth-x-pagestrw)/dotw)
        else:
            dotsn = dotw = 0
        text = '%s%s' % (dotsn * dot, pagestr)
        newx = availWidth - dotsn*dotw - pagestrw
        pagex = availWidth - pagestrw
    elif dot is None:
        # No leader: continue in-line right after the entry text.
        text = ', ' + pagestr
        newx = x
        pagex = newx
    else:
        raise TypeError('Argument dot should either be None or an instance of basestring.')
    tx = canvas.beginText(newx, y)
    tx.setFont(style.fontName, style.fontSize)
    tx.setFillColor(style.textColor)
    tx.textLine(text)
    canvas.drawText(tx)
    commaw = stringWidth(', ', style.fontName, style.fontSize)
    # Overlay one link rectangle per keyed page number, advancing by the
    # width of the rendered number plus the ", " separator.
    for p, key in pages:
        if not key:
            continue
        w = stringWidth(str(p), style.fontName, style.fontSize)
        canvas.linkRect('', key, (pagex, y, pagex+w, y+style.leading), relative=1)
        pagex += w + commaw
# Default paragraph styles for tables of contents.
# (This could also be generated automatically or even
# on-demand if it is not known how many levels the
# TOC will finally need to display...)
# delta: extra left indent added per nesting level; epsilon: indent applied
# to continuation lines of a wrapped entry (see module docstring).
delta = 1*cm
epsilon = 0.5*cm
defaultLevelStyles = [
    ParagraphStyle(
        name='Level 0',
        fontName='Times-Roman',
        fontSize=10,
        leading=11,
        firstLineIndent = 0,
        leftIndent = epsilon)]
# Single-column, flush layout used by both the TOC and the index tables.
defaultTableStyle = \
    TableStyle([
        ('VALIGN', (0,0), (-1,-1), 'TOP'),
        ('RIGHTPADDING', (0,0), (-1,-1), 0),
        ('LEFTPADDING', (0,0), (-1,-1), 0),
    ])
class TableOfContents(IndexingFlowable):
    """This creates a formatted table of contents.
    It presumes a correct block of data is passed in.
    The data block contains a list of (level, text, pageNumber)
    triplets. You can supply a paragraph style for each level
    (starting at zero).
    Set dotsMinLevel to determine from which level on a line of
    dots should be drawn between the text and the page number.
    If dotsMinLevel is set to a negative value, no dotted lines are drawn.
    """
    def __init__(self):
        self.rightColumnWidth = 72
        self.levelStyles = defaultLevelStyles
        self.tableStyle = defaultTableStyle
        self.dotsMinLevel = 1
        self._table = None
        self._entries = []
        self._lastEntries = []
    def beforeBuild(self):
        # keep track of the last run
        self._lastEntries = self._entries[:]
        self.clearEntries()
    def isIndexing(self):
        return 1
    def isSatisfied(self):
        # The doctemplate re-runs the build until the entries stop changing.
        return (self._entries == self._lastEntries)
    def notify(self, kind, stuff):
        """The notification hook called to register all kinds of events.
        Here we are interested in 'TOCEntry' events only.
        """
        if kind == 'TOCEntry':
            self.addEntry(*stuff)
    def clearEntries(self):
        self._entries = []
    def getLevelStyle(self, n):
        '''Returns the style for level n, generating and caching styles on demand if not present.'''
        try:
            return self.levelStyles[n]
        except IndexError:
            # Derive the missing level from the previous one, indented by
            # `delta` (recursion fills any gap in one pass).
            prevstyle = self.getLevelStyle(n-1)
            self.levelStyles.append(ParagraphStyle(
                name='%s-%d-indented' % (prevstyle.name, n),
                parent=prevstyle,
                firstLineIndent = prevstyle.firstLineIndent+delta,
                leftIndent = prevstyle.leftIndent+delta))
            return self.levelStyles[n]
    def addEntry(self, level, text, pageNum, key=None):
        """Adds one entry to the table of contents.
        This allows incremental buildup by a doctemplate.
        Requires that enough styles are defined."""
        assert type(level) == type(1), "Level must be an integer"
        self._entries.append((level, text, pageNum, key))
    def addEntries(self, listOfEntries):
        """Bulk creation of entries in the table of contents.
        If you knew the titles but not the page numbers, you could
        supply them to get sensible output on the first run."""
        for entryargs in listOfEntries:
            self.addEntry(*entryargs)
    def wrap(self, availWidth, availHeight):
        "All table properties should be known by now."
        # makes an internal table which does all the work.
        # we draw the LAST RUN's entries! If there are
        # none, we make some dummy data to keep the table
        # from complaining
        if len(self._lastEntries) == 0:
            _tempEntries = [(0,'Placeholder for table of contents',0,None)]
        else:
            _tempEntries = self._lastEntries
        def drawTOCEntryEnd(canvas, kind, label):
            '''Callback to draw dots and page numbers after each entry.'''
            label = label.split(',')
            # label is "page,level,keyRepr"; eval with empty globals turns
            # the repr'd key (or 'None') back into a value.
            page, level, key = int(label[0]), int(label[1]), eval(label[2],{})
            style = self.getLevelStyle(level)
            if self.dotsMinLevel >= 0 and level >= self.dotsMinLevel:
                dot = ' . '
            else:
                dot = ''
            drawPageNumbers(canvas, style, [(page, key)], availWidth, availHeight, dot)
        self.canv.drawTOCEntryEnd = drawTOCEntryEnd
        tableData = []
        for (level, text, pageNum, key) in _tempEntries:
            style = self.getLevelStyle(level)
            if key:
                text = '<a href="#%s">%s</a>' % (key, text)
                # Commas would break the comma-separated onDraw label; the
                # quote replacement keeps the XML attribute well-formed.
                keyVal = repr(key).replace(',','\\x2c').replace('"','\\x2c')
            else:
                keyVal = None
            para = Paragraph('%s<onDraw name="drawTOCEntryEnd" label="%d,%d,%s"/>' % (text, pageNum, level, keyVal), style)
            if style.spaceBefore:
                tableData.append([Spacer(1, style.spaceBefore),])
            tableData.append([para,])
        self._table = Table(tableData, colWidths=(availWidth,), style=self.tableStyle)
        self.width, self.height = self._table.wrapOn(self.canv,availWidth, availHeight)
        return (self.width, self.height)
    def split(self, availWidth, availHeight):
        """At this stage we do not care about splitting the entries,
        we will just return a list of platypus tables. Presumably the
        calling app has a pointer to the original TableOfContents object;
        Platypus just sees tables.
        """
        return self._table.splitOn(self.canv,availWidth, availHeight)
    def drawOn(self, canvas, x, y, _sW=0):
        """Don't do this at home! The standard calls for implementing
        draw(); we are hooking this in order to delegate ALL the drawing
        work to the embedded table object.
        """
        self._table.drawOn(canvas, x, y, _sW)
def makeTuple(x):
    """Coerce *x* to a tuple: iterables are converted, scalars are wrapped."""
    if not hasattr(x, '__iter__'):
        return (x,)
    return tuple(x)
class SimpleIndex(IndexingFlowable):
    """Creates multi level indexes.
    The styling can be cutomized and alphabetic headers turned on and off.
    """
    def __init__(self, **kwargs):
        """
        Constructor of SimpleIndex.
        Accepts the same arguments as the setup method.
        """
        #keep stuff in a dictionary while building
        self._entries = {}
        self._lastEntries = {}
        self._flowable = None
        self.setup(**kwargs)
    def getFormatFunc(self,format):
        # Resolve a page-number formatter (_format_123, _format_I, ...) from
        # the sequencer module; Python 2 `exec ... in locals()` statement.
        try:
            exec 'from reportlab.lib.sequencer import _format_%s as formatFunc' % format in locals()
        except ImportError:
            raise ValueError('Unknown format %r' % format)
        return formatFunc
    def setup(self, style=None, dot=None, tableStyle=None, headers=True, name=None, format='123', offset=0):
        """
        This method makes it possible to change styling and other parameters on an existing object.
        style is the paragraph style to use for index entries.
        dot can either be None or a string. If it's None, entries are immediatly followed by their
        corresponding page numbers. If it's a string, page numbers are aligned on the right side
        of the document and the gap filled with a repeating sequence of the string.
        tableStyle is the style used by the table which the index uses to draw itself. Use this to
        change properties like spacing between elements.
        headers is a boolean. If it is True, alphabetic headers are displayed in the Index.
        name makes it possible to use several indexes in one document. If you want this use this
        parameter to give each index a unique name. You can then index a term by refering to the
        name of the index which it should appear in:
            <index item="term" name="myindex" />
        format can be 'I', 'i', '123',  'ABC', 'abc'
        """
        if style is None:
            style = ParagraphStyle(name='index',
                                   fontName='Times-Roman',
                                   fontSize=11)
        self.textStyle = style
        self.tableStyle = tableStyle or defaultTableStyle
        self.dot = dot
        self.headers = headers
        if name is None:
            from reportlab.platypus.paraparser import DEFAULT_INDEX_NAME as name
        self.name = name
        self.formatFunc = self.getFormatFunc(format)
        self.offset = offset
    def __call__(self,canv,kind,label):
        # onDraw callback: invoked while the document body is drawn; records
        # (terms, formatted page number, bookmark key) for this occurrence.
        try:
            terms, format, offset = loads(decodestring(label))
        except:
            # Plain-text label (not a pickled triple): use instance defaults.
            terms = label
            format = offset = None
        if format is None:
            formatFunc = self.formatFunc
        else:
            formatFunc = self.getFormatFunc(format)
        if offset is None:
            offset = self.offset
        terms = commasplit(terms)
        pns = formatFunc(canv.getPageNumber()-offset)
        key = 'ix_%s_%s_p_%s' % (self.name, label, pns)
        # NOTE(review): relies on the private canvas._curr_tx_info hook set
        # during Paragraph drawing — confirm for non-Paragraph callers.
        info = canv._curr_tx_info
        canv.bookmarkHorizontal(key, info['cur_x'], info['cur_y'] + info['leading'])
        self.addEntry(terms, pns, key)
    def getCanvasMaker(self, canvasmaker=canvas.Canvas):
        # Wrap a canvas factory so every new canvas exposes this index
        # under its configured name (used by the onDraw machinery).
        def newcanvasmaker(*args, **kwargs):
            from reportlab.pdfgen import canvas
            c = canvasmaker(*args, **kwargs)
            setattr(c,self.name,self)
            return c
        return newcanvasmaker
    def isIndexing(self):
        return 1
    def isSatisfied(self):
        # The doctemplate re-runs the build until the entries stop changing.
        return (self._entries == self._lastEntries)
    def beforeBuild(self):
        # keep track of the last run
        self._lastEntries = self._entries.copy()
        self.clearEntries()
    def clearEntries(self):
        self._entries = {}
    def notify(self, kind, stuff):
        """The notification hook called to register all kinds of events.
        Here we are interested in 'IndexEntry' events only.
        """
        if kind == 'IndexEntry':
            (text, pageNum) = stuff
            self.addEntry(text, pageNum)
    def addEntry(self, text, pageNum, key=None):
        """Allows incremental buildup"""
        self._entries.setdefault(makeTuple(text),set([])).add((pageNum, key))
    def split(self, availWidth, availHeight):
        """At this stage we do not care about splitting the entries,
        we will just return a list of platypus tables. Presumably the
        calling app has a pointer to the original TableOfContents object;
        Platypus just sees tables.
        """
        return self._flowable.splitOn(self.canv,availWidth, availHeight)
    def _getlastEntries(self, dummy=[(['Placeholder for index'],enumerate((None,)*3))]):
        '''Return the last run's entries! If there are none, returns dummy.'''
        if not self._lastEntries:
            if self._entries:
                return self._entries.items()
            return dummy
        return self._lastEntries.items()
    def _build(self,availWidth,availHeight):
        # Assemble the internal Table from the last run's entries.
        _tempEntries = self._getlastEntries()
        # Case-insensitive sort; Python 2 only (list.sort(cmp) and cmp()).
        _tempEntries.sort(lambda a,b: cmp([x.upper() for x in a[0]], [x.upper() for x in b[0]]))
        # With headers on, entry levels start below the header level 0.
        leveloffset = self.headers and 1 or 0
        def drawIndexEntryEnd(canvas, kind, label):
            '''Callback to draw dots and page numbers after each entry.'''
            style = self.getLevelStyle(leveloffset)
            pages = loads(decodestring(label))
            drawPageNumbers(canvas, style, pages, availWidth, availHeight, self.dot)
        self.canv.drawIndexEntryEnd = drawIndexEntryEnd
        alpha = ''
        tableData = []
        lastTexts = []
        for texts, pageNumbers in _tempEntries:
            texts = list(texts)
            if self.headers:
                # Emit an alphabetic header row whenever the first letter
                # of the primary term changes.
                alphaStyle = self.getLevelStyle(0)
                nalpha = texts[0][0].upper()
                if alpha != nalpha:
                    alpha = nalpha
                    tableData.append([Spacer(1, alphaStyle.spaceBefore),])
                    tableData.append([Paragraph(alpha, alphaStyle),])
                    tableData.append([Spacer(1, alphaStyle.spaceAfter),])
            # Only render the sub-terms that differ from the previous entry,
            # giving the usual nested-index appearance.
            i, diff = listdiff(lastTexts, texts)
            if diff:
                lastTexts = texts
                texts = texts[i:]
            # Page numbers travel to the onDraw callback as a pickled,
            # base64-encoded label on the last sub-term.
            label = encodestring(dumps(list(pageNumbers))).strip()
            texts[-1] = '%s<onDraw name="drawIndexEntryEnd" label="%s"/>' % (texts[-1], label)
            for text in texts:
                style = self.getLevelStyle(i+leveloffset)
                para = Paragraph(text, style)
                if style.spaceBefore:
                    tableData.append([Spacer(1, style.spaceBefore),])
                tableData.append([para,])
                i += 1
        self._flowable = Table(tableData, colWidths=[availWidth], style=self.tableStyle)
    def wrap(self, availWidth, availHeight):
        "All table properties should be known by now."
        self._build(availWidth,availHeight)
        self.width, self.height = self._flowable.wrapOn(self.canv,availWidth, availHeight)
        return self.width, self.height
    def drawOn(self, canvas, x, y, _sW=0):
        """Don't do this at home! The standard calls for implementing
        draw(); we are hooking this in order to delegate ALL the drawing
        work to the embedded table object.
        """
        self._flowable.drawOn(canvas, x, y, _sW)
    def draw(self):
        # Temporarily lend our canvas to the embedded table if it has none.
        t = self._flowable
        ocanv = getattr(t,'canv',None)
        if not ocanv:
            t.canv = self.canv
        try:
            t.draw()
        finally:
            if not ocanv:
                del t.canv
    def getLevelStyle(self, n):
        '''Returns the style for level n, generating and caching styles on demand if not present.'''
        if not hasattr(self.textStyle, '__iter__'):
            self.textStyle = [self.textStyle]
        try:
            return self.textStyle[n]
        except IndexError:
            self.textStyle = list(self.textStyle)
            prevstyle = self.getLevelStyle(n-1)
            self.textStyle.append(ParagraphStyle(
                name='%s-%d-indented' % (prevstyle.name, n),
                parent=prevstyle,
                firstLineIndent = prevstyle.firstLineIndent+.2*cm,
                leftIndent = prevstyle.leftIndent+.2*cm))
            return self.textStyle[n]
AlphabeticIndex = SimpleIndex
def listdiff(l1, l2):
    """Return (i, tail): i is the length of the common prefix of *l1* and
    *l2*, tail is what remains of *l2* after that prefix."""
    for i, (a, b) in enumerate(zip(l1, l2)):
        if a != b:
            return i, l2[i:]
    shared = min(len(l1), len(l2))
    return shared, l2[shared:]
class ReferenceText(IndexingFlowable):
    """Fakery to illustrate how a reference would work if we could
    put it in a paragraph."""
    def __init__(self, textPattern, targetKey):
        # textPattern is a %-format string receiving the target's page number.
        self.textPattern = textPattern
        self.target = targetKey
        self.paraStyle = ParagraphStyle('tmp')
        self._lastPageNum = None
        # Sentinel until a 'Target' notification supplies the real page.
        self._pageNum = -999
        self._para = None
    def beforeBuild(self):
        # Render this pass with the page number observed on the previous one.
        self._lastPageNum = self._pageNum
    def notify(self, kind, stuff):
        if kind == 'Target':
            (key, pageNum) = stuff
            if key == self.target:
                self._pageNum = pageNum
    def wrap(self, availWidth, availHeight):
        text = self.textPattern % self._lastPageNum
        self._para = Paragraph(text, self.paraStyle)
        return self._para.wrap(availWidth, availHeight)
    def drawOn(self, canvas, x, y, _sW=0):
        self._para.drawOn(canvas, x, y, _sW)
| fergalmoran/Chrome2Kindle | server/reportlab/platypus/tableofcontents.py | Python | mit | 19,645 | 0.006058 |
#!/usr/bin/python3
# -*- coding: utf-8 -*-
import sys
import random
from PyQt5.QtWidgets import QMainWindow, QFrame, QDesktopWidget, QApplication
from PyQt5.QtCore import Qt, QBasicTimer, pyqtSignal
from PyQt5.QtGui import QPainter, QColor
class Tetris(QMainWindow):
    """Main application window: hosts the playing Board and a status bar."""

    def __init__(self):
        super().__init__()
        self.initUI()

    def initUI(self):
        """Create the board, wire its status signal, and show the window."""
        self.tboard = Board(self)
        self.setCentralWidget(self.tboard)
        self.statusbar = self.statusBar()
        # The board reports score / "paused" / "Game over" via this signal.
        self.tboard.msg2Statusbar[str].connect(self.statusbar.showMessage)
        self.tboard.start()
        self.resize(180, 380)
        self.center()
        self.setWindowTitle('Tetris')
        self.show()

    def center(self):
        """Center the window on the screen."""
        screen = QDesktopWidget().screenGeometry()
        size = self.geometry()
        # Integer division: '/' yields a float under Python 3 and
        # QWidget.move() expects int arguments (PyQt5 raises TypeError).
        self.move((screen.width() - size.width()) // 2,
                  (screen.height() - size.height()) // 2)
class Board(QFrame):
msg2Statusbar = pyqtSignal(str)
BoardWidth = 10
BoardHeight = 22
Speed = 300
    def __init__(self, parent):
        """Create the board widget; all game state lives in initBoard()."""
        super().__init__(parent)
        self.initBoard()
    def initBoard(self):
        """Reset all game state: timer, cursor, score, and the cell grid."""
        self.timer = QBasicTimer()
        # True for one tick after lines are cleared (pause before next piece).
        self.isWaitingAfterLine = False
        self.curX = 0
        self.curY = 0
        self.numLinesRemoved = 0
        # Flat list of BoardWidth * BoardHeight shape codes (see shapeAt).
        self.board = []
        self.setFocusPolicy(Qt.StrongFocus)
        self.isStarted = False
        self.isPaused = False
        self.clearBoard()
    def shapeAt(self, x, y):
        """Return the shape code stored at cell (x, y) of the flat board list."""
        return self.board[(y * Board.BoardWidth) + x]
    def setShapeAt(self, x, y, shape):
        """Store *shape* at cell (x, y) of the flat board list."""
        self.board[(y * Board.BoardWidth) + x] = shape
    def squareWidth(self):
        """Width in pixels of one board cell."""
        return self.contentsRect().width() // Board.BoardWidth
    def squareHeight(self):
        """Height in pixels of one board cell."""
        return self.contentsRect().height() // Board.BoardHeight
    def start(self):
        """Start a new game: reset state, spawn a piece, start the timer."""
        if self.isPaused:
            return
        self.isStarted = True
        self.isWaitingAfterLine = False
        self.numLinesRemoved = 0
        self.clearBoard()
        # Status bar shows the running line count.
        self.msg2Statusbar.emit(str(self.numLinesRemoved))
        self.newPiece()
        self.timer.start(Board.Speed, self)
    def pause(self):
        """Toggle pause: stop/restart the timer and update the status text."""
        if not self.isStarted:
            return
        self.isPaused = not self.isPaused
        if self.isPaused:
            self.timer.stop()
            self.msg2Statusbar.emit("paused")
        else:
            self.timer.start(Board.Speed, self)
            self.msg2Statusbar.emit(str(self.numLinesRemoved))
        self.update()
    def paintEvent(self, event):
        """Repaint the settled cells and the currently falling piece."""
        painter = QPainter(self)
        rect = self.contentsRect()
        boardTop = rect.bottom() - Board.BoardHeight * self.squareHeight()
        # Board y grows upward (y=0 is the bottom row), while screen rows are
        # drawn top-down, hence the BoardHeight - i - 1 flip.
        for i in range(Board.BoardHeight):
            for j in range(Board.BoardWidth):
                shape = self.shapeAt(j, Board.BoardHeight - i - 1)
                if shape != Tetrominoe.NoShape:
                    self.drawSquare(painter,
                                    rect.left() + j * self.squareWidth(),
                                    boardTop + i * self.squareHeight(), shape)
        # The falling piece is not in self.board; draw its four cells here.
        if self.curPiece.shape() != Tetrominoe.NoShape:
            for i in range(4):
                x = self.curX + self.curPiece.x(i)
                y = self.curY - self.curPiece.y(i)
                self.drawSquare(
                    painter, rect.left() + x * self.squareWidth(),
                    boardTop + (Board.BoardHeight - y - 1) * self.squareHeight(),
                    self.curPiece.shape())
    def keyPressEvent(self, event):
        """Handle game keys: arrows move/rotate, Space drops, D steps, P pauses."""
        if not self.isStarted or self.curPiece.shape() == Tetrominoe.NoShape:
            super(Board, self).keyPressEvent(event)
            return
        key = event.key()
        # P toggles pause even while a piece is falling.
        if key == Qt.Key_P:
            self.pause()
            return
        if self.isPaused:
            return
        elif key == Qt.Key_Left:
            self.tryMove(self.curPiece, self.curX - 1, self.curY)
        elif key == Qt.Key_Right:
            self.tryMove(self.curPiece, self.curX + 1, self.curY)
        elif key == Qt.Key_Down:
            self.tryMove(self.curPiece.rotateRight(), self.curX, self.curY)
        elif key == Qt.Key_Up:
            self.tryMove(self.curPiece.rotateLeft(), self.curX, self.curY)
        elif key == Qt.Key_Space:
            self.dropDown()
        elif key == Qt.Key_D:
            # Soft drop: advance one line.
            self.oneLineDown()
        else:
            super(Board, self).keyPressEvent(event)
    def timerEvent(self, event):
        """Game tick: spawn the next piece after a line clear, else fall one line."""
        if event.timerId() == self.timer.timerId():
            if self.isWaitingAfterLine:
                self.isWaitingAfterLine = False
                self.newPiece()
            else:
                self.oneLineDown()
        else:
            super(Board, self).timerEvent(event)
    def clearBoard(self):
        """Fill the board list with NoShape for every cell."""
        for i in range(Board.BoardHeight * Board.BoardWidth):
            self.board.append(Tetrominoe.NoShape)
    def dropDown(self):
        """Hard drop: move the piece down until it cannot move, then land it."""
        newY = self.curY
        while newY > 0:
            if not self.tryMove(self.curPiece, self.curX, newY - 1):
                break
            newY -= 1
        self.pieceDropped()
    def oneLineDown(self):
        """Move the piece down one line; land it if the move is blocked."""
        if not self.tryMove(self.curPiece, self.curX, self.curY - 1):
            self.pieceDropped()
    def pieceDropped(self):
        """Fix the current piece into the board, clear lines, maybe spawn next."""
        for i in range(4):
            x = self.curX + self.curPiece.x(i)
            y = self.curY - self.curPiece.y(i)
            self.setShapeAt(x, y, self.curPiece.shape())
        self.removeFullLines()
        # When lines were cleared, removeFullLines defers the next piece to
        # the following timer tick via isWaitingAfterLine.
        if not self.isWaitingAfterLine:
            self.newPiece()
def removeFullLines(self):
numFullLines = 0
rowsToRemove = []
for i in range(Board.BoardHeight):
n = 0
for j in range(Board.BoardWidth):
if not self.shapeAt(j, i) == Tetrominoe.NoShape:
n = n + 1
if n == 10:
rowsToRemove.append(i)
rowsToRemove.reverse()
for m in rowsToRemove:
for k in range(m, Board.BoardHeight):
for l in range(Board.BoardWidth):
self.setShapeAt(l, k, self.shapeAt(l, k + 1))
numFullLines = numFullLines + len(rowsToRemove)
if numFullLines > 0:
self.numLinesRemoved = self.numLinesRemoved + numFullLines
self.msg2Statusbar.emit(str(self.numLinesRemoved))
self.isWaitingAfterLine = True
self.curPiece.setShape(Tetrominoe.NoShape)
self.update()
    def newPiece(self):
        """Spawn a random piece at the top of the board; if it cannot be
        placed, the board is full and the game ends."""
        self.curPiece = Shape()
        self.curPiece.setRandomShape()
        # Start roughly centered; minY() compensates for blocks that
        # extend below the piece's origin.
        self.curX = Board.BoardWidth // 2 + 1
        self.curY = Board.BoardHeight - 1 + self.curPiece.minY()
        if not self.tryMove(self.curPiece, self.curX, self.curY):
            # No room for the new piece: stop the game.
            self.curPiece.setShape(Tetrominoe.NoShape)
            self.timer.stop()
            self.isStarted = False
            self.msg2Statusbar.emit("Game over")
def tryMove(self, newPiece, newX, newY):
for i in range(4):
x = newX + newPiece.x(i)
y = newY - newPiece.y(i)
if x < 0 or x >= Board.BoardWidth or \
y < 0 or y >= Board.BoardHeight:
return False
if self.shapeAt(x, y) != Tetrominoe.NoShape:
return False
self.curPiece = newPiece
self.curX = newX
self.curY = newY
self.update()
return True
    def drawSquare(self, painter, x, y, shape):
        """Paint one board cell at pixel position (x, y) for the given
        shape id, with a light top/left and dark bottom/right bevel.

        colorTable is indexed by the Tetrominoe constants (0 = NoShape).
        """
        colorTable = [0x000000, 0xCC6666, 0x66CC66, 0x6666CC,
                      0xCCCC66, 0xCC66CC, 0x66CCCC, 0xDAAA00]
        color = QColor(colorTable[shape])
        # Fill inset by 1px so the bevel lines stay visible.
        painter.fillRect(x + 1, y + 1, self.squareWidth() - 2,
                         self.squareHeight() - 2, color)
        # Light edges: left and top.
        painter.setPen(color.lighter())
        painter.drawLine(x, y + self.squareHeight() - 1, x, y)
        painter.drawLine(x, y, x + self.squareWidth() - 1, y)
        # Dark edges: bottom and right.
        painter.setPen(color.darker())
        painter.drawLine(
            x + 1, y + self.squareHeight() - 1,
            x + self.squareWidth() - 1, y + self.squareHeight() - 1)
        painter.drawLine(
            x + self.squareWidth() - 1,
            y + self.squareHeight() - 1, x + self.squareWidth() - 1, y + 1)
class Tetrominoe(object):
    """Integer codes for the seven tetromino types; NoShape marks an
    empty board cell. Values index Shape.coordsTable."""
    NoShape = 0
    ZShape = 1
    SShape = 2
    LineShape = 3
    TShape = 4
    SquareShape = 5
    LShape = 6
    MirroredLShape = 7
class Shape(object):
    """A tetromino: a shape id plus four (x, y) block offsets.

    coordsTable is indexed by the Tetrominoe constants; row 0 is the
    all-zero NoShape entry.
    """
    coordsTable = (
        ((0, 0), (0, 0), (0, 0), (0, 0)),
        ((0, -1), (0, 0), (-1, 0), (-1, 1)),
        ((0, -1), (0, 0), (1, 0), (1, 1)),
        ((0, -1), (0, 0), (0, 1), (0, 2)),
        ((-1, 0), (0, 0), (1, 0), (0, 1)),
        ((0, 0), (1, 0), (0, 1), (1, 1)),
        ((-1, -1), (0, -1), (0, 0), (0, 1)),
        ((1, -1), (0, -1), (0, 0), (0, 1))
    )

    def __init__(self):
        self.coords = [[0, 0] for _ in range(4)]
        self.pieceShape = Tetrominoe.NoShape
        self.setShape(Tetrominoe.NoShape)

    def shape(self):
        """Return this piece's Tetrominoe code."""
        return self.pieceShape

    def setShape(self, shape):
        """Load the block offsets for *shape* from coordsTable."""
        for idx, (x, y) in enumerate(Shape.coordsTable[shape]):
            self.coords[idx][0] = x
            self.coords[idx][1] = y
        self.pieceShape = shape

    def setRandomShape(self):
        """Pick one of the seven real shapes (never NoShape)."""
        self.setShape(random.randint(1, 7))

    def x(self, index):
        return self.coords[index][0]

    def y(self, index):
        return self.coords[index][1]

    def setX(self, index, x):
        self.coords[index][0] = x

    def setY(self, index, y):
        self.coords[index][1] = y

    def minX(self):
        return min(self.coords[i][0] for i in range(4))

    def maxX(self):
        return max(self.coords[i][0] for i in range(4))

    def minY(self):
        return min(self.coords[i][1] for i in range(4))

    def maxY(self):
        return max(self.coords[i][1] for i in range(4))

    def rotateLeft(self):
        """Return a new Shape rotated 90° counter-clockwise (the square
        is rotation-invariant and returns itself)."""
        if self.pieceShape == Tetrominoe.SquareShape:
            return self
        rotated = Shape()
        rotated.pieceShape = self.pieceShape
        for i in range(4):
            rotated.setX(i, self.y(i))
            rotated.setY(i, -self.x(i))
        return rotated

    def rotateRight(self):
        """Return a new Shape rotated 90° clockwise (the square is
        rotation-invariant and returns itself)."""
        if self.pieceShape == Tetrominoe.SquareShape:
            return self
        rotated = Shape()
        rotated.pieceShape = self.pieceShape
        for i in range(4):
            rotated.setX(i, -self.y(i))
            rotated.setY(i, self.x(i))
        return rotated
if __name__ == '__main__':
    # Create the Qt application, show the Tetris window (constructed and
    # shown by the Tetris class defined above), and enter the event loop.
    app = QApplication([])
    tetris = Tetris()
    sys.exit(app.exec_())
| quchunguang/test | testpy3/pyqt5_tetris.py | Python | mit | 10,805 | 0.000278 |
import sys
if sys.path[0] != "../..":
sys.path.insert(0, "../..")
import unittest
import io
from pyx.graph import data
class DataTestCase(unittest.TestCase):
    """Exercise the pyx.graph.data providers: points, values, data,
    file, conffile and paramfunction."""
    def testPoints(self):
        # Keyword args map column names onto 1-based column indices.
        # NOTE(review): columndata[0] appears to hold implicit row
        # numbers -- confirm against the pyx data documentation.
        mydata = data.points([[1, 2, 3], [4, 5, 6]], a=1, b=2)
        self.assertEqual(mydata.columndata[0], [1, 2])
        self.assertEqual(mydata.columns["a"], [1, 4])
        self.assertEqual(mydata.columndata[2], [2, 5])
        self.assertEqual("c" in list(mydata.columns.keys()), 0)
    def testValues(self):
        # values() builds columns directly from keyword lists.
        mydata = data.values(a=[1, 4])
        self.assertEqual(mydata.columns["a"], [1, 4])
        self.assertEqual("c" in list(mydata.columns.keys()), 0)
    def testData(self):
        # Derived columns: plain names, $<n> column refs, and $(expr)
        # with a context dict.
        mydata = data.points([[1], [2]], a=1)
        mydata2 = data.data(mydata, a="2*a", b="2*$1*a", c="4*$(i)*a*$(-1)", context={"i":1})
        self.assertEqual(mydata.columns["a"], [1, 2])
        self.assertAlmostEqual(mydata2.columns["a"][0], 2.0)
        self.assertAlmostEqual(mydata2.columns["a"][1], 4.0)
        self.assertAlmostEqual(mydata2.columns["b"][0], 2.0)
        self.assertAlmostEqual(mydata2.columns["b"][1], 8.0)
        self.assertAlmostEqual(mydata2.columns["c"][0], 4.0)
        self.assertAlmostEqual(mydata2.columns["c"][1], 32.0)
        # Chaining: expressions may reference the previous dataset's columns.
        mydata3 = data.data(mydata2, a="b", b="2*c")
        self.assertEqual(mydata3.columns["a"], mydata2.columns["b"])
        self.assertAlmostEqual(mydata3.columns["b"][0], 2*mydata2.columns["c"][0])
        self.assertAlmostEqual(mydata3.columns["b"][1], 2*mydata2.columns["c"][1])
        # context=locals() exposes local names (and even functions) to
        # the column expressions; 'a' below is deliberately shadowed.
        a = "nothing"
        two = 2
        f = lambda x: x*x
        mydata = data.points([[1], [2]], a=1)
        mydata2 = data.data(mydata, b="two*a", c="two*$1*a", d="f($1)", context=locals())
        self.assertEqual(mydata.columndata[0], [1, 2])
        self.assertAlmostEqual(mydata2.columns["b"][0], 2.0)
        self.assertAlmostEqual(mydata2.columns["b"][1], 4.0)
        self.assertAlmostEqual(mydata2.columns["c"][0], 2.0)
        self.assertAlmostEqual(mydata2.columns["c"][1], 8.0)
        self.assertAlmostEqual(mydata2.columns["d"][0], 1.0)
        self.assertAlmostEqual(mydata2.columns["d"][1], 4.0)
    def testFile(self):
        # Quoted fields and embedded quotes must survive parsing.
        testfile = io.StringIO("""#a
0
1 eins
2 "2"
3 x"x""")
        mydata = data.file(testfile, row=0, a="a", b=2)
        self.assertEqual(mydata.columns["row"], [1, 2, 3, 4])
        self.assertAlmostEqual(mydata.columns["a"][0], 0.0)
        self.assertAlmostEqual(mydata.columns["a"][1], 1.0)
        self.assertAlmostEqual(mydata.columns["a"][2], 2.0)
        self.assertAlmostEqual(mydata.columns["a"][3], 3.0)
        self.assertEqual(mydata.columns["b"][0], None)
        self.assertEqual(mydata.columns["b"][1], "eins")
        self.assertEqual(mydata.columns["b"][2], "2")
        self.assertEqual(mydata.columns["b"][3], "x\"x")
        # skiphead/skiptail/every select rows 4, 6, 8 of the 0..9 input.
        testfile = io.StringIO("""#a
0
1
2
3
4
5
6
7
8
9""")
        mydata = data.file(testfile, title="title", skiphead=3, skiptail=2, every=2, row=0)
        self.assertEqual(mydata.columns["row"], [4, 6, 8])
        self.assertEqual(mydata.title, "title")
    def testSec(self):
        # conffile: one row per section; repeated sections merge and a
        # repeated option keeps the last value.
        testfile = io.StringIO("""[sec1]
opt1=a1
opt2=a2
val=1
val=2
[sec2]
opt1=a4
opt2=a5
val=2
val=1
[sec1]
opt3=a3""")
        mydata = data.conffile(testfile, sec=0, a="opt1", b="opt2", c="opt3", d="val")
        self.assertEqual(mydata.columns["sec"], ["sec1", "sec2"])
        self.assertEqual(mydata.columns["a"], ["a1", "a4"])
        self.assertEqual(mydata.columns["b"], ["a2", "a5"])
        self.assertEqual(mydata.columns["c"], ["a3", None])
        self.assertAlmostEqual(mydata.columns["d"][0], 2.0)
        self.assertAlmostEqual(mydata.columns["d"][1], 1.0)
    def testParamfunction(self):
        # Parametric definition: k runs 0..9 and yields x=k, y=-k.
        mydata = data.paramfunction("k", 0, 9, "x, y = k, -k", points=10)
        for i in range(10):
            self.assertEqual(mydata.columns["x"][i], i)
            self.assertEqual(mydata.columns["y"][i], -i)
if __name__ == "__main__":
    # Allow running this test module directly.
    unittest.main()
| mjg/PyX | test/unit/test_data.py | Python | gpl-2.0 | 3,991 | 0.002255 |
from zipline.api import sid, symbol, order, record, get_datetime
import logbook
import pandas as pd
log = logbook.Logger("ZiplineLog")
def initialize(context):
    """Set the TOPIX benchmark and the fixed three-asset universe."""
    context.set_benchmark(symbol('TOPIX'))
    context.assets = [symbol('2121'), symbol('4689'), symbol('7162')]
def handle_data(context, data):
    """Per-bar hook: log the bar time converted to JST and the TOPIX
    price, then sell 10 shares of asset 4689 and record its price.

    Cleanup: removed dead commented-out experiments and collapsed the
    duplicated ``symbol('4689')`` lookups into one.
    """
    log.info(pd.Timestamp(get_datetime()).tz_convert('Asia/Tokyo'))
    log.info(str(data[symbol('TOPIX')].price))
    yahoo = symbol('4689')
    order(yahoo, -10)
    record(Yahoo=data[yahoo].price)
def analyze(context, perf):
    """Post-run hook; intentionally a no-op (debug print left disabled)."""
    pass # print(perf.iloc[-1].T)
| magne-max/zipline-ja | zipline/examples/buy_test.py | Python | apache-2.0 | 888 | 0 |
# -*- coding: utf8 -*-
# Copyright (c) 2017-2021 THL A29 Limited, a Tencent company. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import warnings
from tencentcloud.common.abstract_model import AbstractModel
class AuthTestTidRequest(AbstractModel):
    """Request payload for the AuthTestTid API."""

    def __init__(self):
        # Data (str): encrypted blob generated by the device-side SDK
        # after the test TID parameters have been filled in.
        self.Data = None

    def _deserialize(self, params):
        self.Data = params.get("Data")
        unused = set(params.keys()) - set(vars(self))
        if unused:
            warnings.warn("%s fileds are useless." % ",".join(unused))
class AuthTestTidResponse(AbstractModel):
    """Response payload for the AuthTestTid API."""

    def __init__(self):
        # Pass (bool): authentication result.
        # RequestId (str): unique request id, returned with every response.
        self.Pass = None
        self.RequestId = None

    def _deserialize(self, params):
        for field in ("Pass", "RequestId"):
            setattr(self, field, params.get(field))
class BurnTidNotifyRequest(AbstractModel):
    """Request payload for the BurnTidNotify API."""

    def __init__(self):
        # OrderId (str): order number.
        # Tid (str): TID code.
        self.OrderId = None
        self.Tid = None

    def _deserialize(self, params):
        self.OrderId = params.get("OrderId")
        self.Tid = params.get("Tid")
        unused = set(params.keys()) - set(vars(self))
        if unused:
            warnings.warn("%s fileds are useless." % ",".join(unused))
class BurnTidNotifyResponse(AbstractModel):
    """Response payload for the BurnTidNotify API."""

    def __init__(self):
        # Tid (str): TID whose receipt was acknowledged successfully.
        # RequestId (str): unique request id, returned with every response.
        self.Tid = None
        self.RequestId = None

    def _deserialize(self, params):
        for field in ("Tid", "RequestId"):
            setattr(self, field, params.get(field))
class DeliverTidNotifyRequest(AbstractModel):
    """Request payload for the DeliverTidNotify API."""

    def __init__(self):
        # OrderId (str): order number.
        # Tid (str): TID code.
        self.OrderId = None
        self.Tid = None

    def _deserialize(self, params):
        self.OrderId = params.get("OrderId")
        self.Tid = params.get("Tid")
        unused = set(params.keys()) - set(vars(self))
        if unused:
            warnings.warn("%s fileds are useless." % ",".join(unused))
class DeliverTidNotifyResponse(AbstractModel):
    """Response payload for the DeliverTidNotify API."""

    def __init__(self):
        # RemaindCount (int): remaining empty-delivery quota.
        # Tid (str): TID whose receipt was acknowledged.
        # ProductKey (str): product public key.
        # RequestId (str): unique request id, returned with every response.
        self.RemaindCount = None
        self.Tid = None
        self.ProductKey = None
        self.RequestId = None

    def _deserialize(self, params):
        for field in ("RemaindCount", "Tid", "ProductKey", "RequestId"):
            setattr(self, field, params.get(field))
class DeliverTidsRequest(AbstractModel):
    """Request payload for the DeliverTids API."""

    def __init__(self):
        # OrderId (str): order id.
        # Quantity (int): number of TIDs to deliver, 1-100.
        self.OrderId = None
        self.Quantity = None

    def _deserialize(self, params):
        self.OrderId = params.get("OrderId")
        self.Quantity = params.get("Quantity")
        unused = set(params.keys()) - set(vars(self))
        if unused:
            warnings.warn("%s fileds are useless." % ",".join(unused))
class DeliverTidsResponse(AbstractModel):
    """Response payload for the DeliverTids API."""

    def __init__(self):
        # TidSet (list of TidKeysInfo): delivered TID records; may be None.
        # ProductKey (str): product public key.
        # RequestId (str): unique request id, returned with every response.
        self.TidSet = None
        self.ProductKey = None
        self.RequestId = None

    def _deserialize(self, params):
        raw_tids = params.get("TidSet")
        if raw_tids is not None:
            self.TidSet = []
            for entry in raw_tids:
                info = TidKeysInfo()
                info._deserialize(entry)
                self.TidSet.append(info)
        self.ProductKey = params.get("ProductKey")
        self.RequestId = params.get("RequestId")
class DescribeAvailableLibCountRequest(AbstractModel):
    """Request payload for the DescribeAvailableLibCount API."""

    def __init__(self):
        # OrderId (str): order number.
        self.OrderId = None

    def _deserialize(self, params):
        self.OrderId = params.get("OrderId")
        unused = set(params.keys()) - set(vars(self))
        if unused:
            warnings.warn("%s fileds are useless." % ",".join(unused))
class DescribeAvailableLibCountResponse(AbstractModel):
    """Response payload for the DescribeAvailableLibCount API."""

    def __init__(self):
        # Quantity (int): number of white-box keys still deliverable.
        # RequestId (str): unique request id, returned with every response.
        self.Quantity = None
        self.RequestId = None

    def _deserialize(self, params):
        for field in ("Quantity", "RequestId"):
            setattr(self, field, params.get(field))
class DescribePermissionRequest(AbstractModel):
    """Request payload for the DescribePermission API.

    The API takes no parameters, so this class only inherits the
    AbstractModel plumbing.
    """
class DescribePermissionResponse(AbstractModel):
    """Response payload for the DescribePermission API."""

    def __init__(self):
        # EnterpriseUser (bool): whether the caller is an enterprise user.
        # DownloadPermission (str): console download permission.
        # UsePermission (str): console usage permission.
        # RequestId (str): unique request id, returned with every response.
        self.EnterpriseUser = None
        self.DownloadPermission = None
        self.UsePermission = None
        self.RequestId = None

    def _deserialize(self, params):
        for field in ("EnterpriseUser", "DownloadPermission",
                      "UsePermission", "RequestId"):
            setattr(self, field, params.get(field))
class DownloadTidsRequest(AbstractModel):
    """Request payload for the DownloadTids API."""

    def __init__(self):
        # OrderId (str): order number.
        # Quantity (int): number of TIDs to download, 1-10.
        self.OrderId = None
        self.Quantity = None

    def _deserialize(self, params):
        self.OrderId = params.get("OrderId")
        self.Quantity = params.get("Quantity")
        unused = set(params.keys()) - set(vars(self))
        if unused:
            warnings.warn("%s fileds are useless." % ",".join(unused))
class DownloadTidsResponse(AbstractModel):
    """Response payload for the DownloadTids API."""

    def __init__(self):
        # TidSet (list of TidKeysInfo): downloaded TID records; may be None.
        # RequestId (str): unique request id, returned with every response.
        self.TidSet = None
        self.RequestId = None

    def _deserialize(self, params):
        raw_tids = params.get("TidSet")
        if raw_tids is not None:
            self.TidSet = []
            for entry in raw_tids:
                info = TidKeysInfo()
                info._deserialize(entry)
                self.TidSet.append(info)
        self.RequestId = params.get("RequestId")
class TidKeysInfo(AbstractModel):
    """A system-generated TID together with its key material."""

    # Field names shared by __init__ and _deserialize.
    _KEY_FIELDS = ("Tid", "PublicKey", "PrivateKey", "Psk",
                   "DownloadUrl", "DeviceCode")

    def __init__(self):
        # Tid (str): TID number.
        # PublicKey (str): public key.
        # PrivateKey (str): private key.
        # Psk (str): pre-shared key.
        # DownloadUrl (str): download URL for the software white-box key.
        # DeviceCode (str): device identifier for software hardening.
        for field in TidKeysInfo._KEY_FIELDS:
            setattr(self, field, None)

    def _deserialize(self, params):
        for field in TidKeysInfo._KEY_FIELDS:
            setattr(self, field, params.get(field))
        unused = set(params.keys()) - set(vars(self))
        if unused:
            warnings.warn("%s fileds are useless." % ",".join(unused))
class UploadDeviceUniqueCodeRequest(AbstractModel):
    """Request payload for the UploadDeviceUniqueCode API."""

    def __init__(self):
        # CodeSet (list of str): hardware unique identification codes.
        # OrderId (str): order number the codes are bound to.
        self.CodeSet = None
        self.OrderId = None

    def _deserialize(self, params):
        self.CodeSet = params.get("CodeSet")
        self.OrderId = params.get("OrderId")
        unused = set(params.keys()) - set(vars(self))
        if unused:
            warnings.warn("%s fileds are useless." % ",".join(unused))
class UploadDeviceUniqueCodeResponse(AbstractModel):
    """Response payload for the UploadDeviceUniqueCode API."""

    def __init__(self):
        # Count (int): number of codes uploaded in this call.
        # ExistedCodeSet (list of str): duplicated codes; may be None.
        # LeftQuantity (int): remaining upload quota.
        # IllegalCodeSet (list of str): malformed codes; may be None.
        # RequestId (str): unique request id, returned with every response.
        self.Count = None
        self.ExistedCodeSet = None
        self.LeftQuantity = None
        self.IllegalCodeSet = None
        self.RequestId = None

    def _deserialize(self, params):
        for field in ("Count", "ExistedCodeSet", "LeftQuantity",
                      "IllegalCodeSet", "RequestId"):
            setattr(self, field, params.get(field))
class VerifyChipBurnInfoRequest(AbstractModel):
    """Request payload for the VerifyChipBurnInfo API."""

    def __init__(self):
        # Data (str): verification data.
        self.Data = None

    def _deserialize(self, params):
        self.Data = params.get("Data")
        unused = set(params.keys()) - set(vars(self))
        if unused:
            warnings.warn("%s fileds are useless." % ",".join(unused))
class VerifyChipBurnInfoResponse(AbstractModel):
    """Response payload for the VerifyChipBurnInfo API."""

    def __init__(self):
        # Pass (bool): verification result.
        # VerifiedTimes (int): number of verifications performed so far.
        # LeftTimes (int): number of verifications remaining.
        # RequestId (str): unique request id, returned with every response.
        self.Pass = None
        self.VerifiedTimes = None
        self.LeftTimes = None
        self.RequestId = None

    def _deserialize(self, params):
        for field in ("Pass", "VerifiedTimes", "LeftTimes", "RequestId"):
            setattr(self, field, params.get(field))
from datetime import time
from django.test import TestCase
from django.core.files.uploadedfile import SimpleUploadedFile
from .models import Transcript, TranscriptPhrase
class TranscriptTestCase(TestCase):
    """Round-trip a Transcript through an in-memory uploaded file."""

    def setUp(self):
        contents = 'this is what\'s in the file that isn\'t a file'.encode()
        upload = SimpleUploadedFile('not-really-a-file.txt', contents)
        Transcript.objects.create(
            name='test transcript',
            original_transcript=upload
        )

    def test_transcript_file(self):
        transcript = Transcript.objects.get(name='test transcript')
        self.assertEqual(transcript.name, 'test transcript')
        # Django stores uploads under the field's upload_to prefix.
        self.assertEqual(
            transcript.original_transcript.name,
            'original_transcripts/not-really-a-file.txt',
        )
        self.assertEqual(
            transcript.original_transcript.read(),
            'this is what\'s in the file that isn\'t a file'.encode()
        )

    def tearDown(self):
        # Remove the stored file without re-saving the model (save=False).
        stored = Transcript.objects.get(name='test transcript')
        stored.original_transcript.delete(False)
class TranscriptPhraseTestCase(TestCase):
    """Verify TranscriptPhrase fields and its link to a Transcript."""

    def setUp(self):
        contents = 'this is what\'s in the file that isn\'t a file'.encode()
        upload = SimpleUploadedFile('not-really-a-file2.txt', contents)
        Transcript.objects.create(
            name='test transcript',
            original_transcript=upload
        )
        TranscriptPhrase.objects.create(
            original_phrase='old and wrong',
            time_begin=time(0, 1, 0),
            time_end=time(0, 2, 10),
            transcript=Transcript.objects.get(name='test transcript')
        )

    def test_transcript_phrase(self):
        transcript = Transcript.objects.get(name='test transcript')
        phrase = TranscriptPhrase.objects.get(original_phrase='old and wrong')
        self.assertEqual(phrase.original_phrase, 'old and wrong')
        self.assertEqual(phrase.time_begin, time(0, 1, 0))
        self.assertEqual(phrase.time_end, time(0, 2, 10))
        self.assertEqual(phrase.transcript, transcript)

    def tearDown(self):
        # Remove the stored file without re-saving the model (save=False).
        stored = Transcript.objects.get(name='test transcript')
        stored.original_transcript.delete(False)
| WGBH/FixIt | mla_game/apps/transcript/tests.py | Python | mit | 2,490 | 0 |
"""Tests for the retire_order command"""
from tempfile import NamedTemporaryFile
from django.core.management import call_command
from course_modes.models import CourseMode
from shoppingcart.models import CertificateItem, Order
from student.tests.factories import UserFactory
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory
class TestRetireOrder(ModuleStoreTestCase):
    """Test the retire_order management command."""

    def setUp(self):
        """Create one cart in each state the command must handle."""
        super(TestRetireOrder, self).setUp()
        course = CourseFactory.create()
        self.course_key = course.id
        CourseMode.objects.create(
            course_id=self.course_key,
            mode_slug=CourseMode.HONOR,
            mode_display_name=CourseMode.HONOR
        )
        # set up test carts
        self.cart, __ = self._create_cart()
        self.paying, __ = self._create_cart()
        self.paying.start_purchase()
        self.already_defunct_cart, __ = self._create_cart()
        self.already_defunct_cart.retire()
        self.purchased, self.purchased_item = self._create_cart()
        self.purchased.status = "purchased"
        self.purchased.save()
        self.purchased_item.status = "purchased"
        # Bug fix: persist the *item's* status change. The original code
        # called self.purchased.save() a second time here, so the item's
        # new status was silently lost.
        self.purchased_item.save()

    def test_retire_order(self):
        """Test the retire_order command"""
        # An id guaranteed not to exist, to prove the command skips it.
        nonexistent_id = max(order.id for order in Order.objects.all()) + 1
        order_ids = [
            self.cart.id,
            self.paying.id,
            self.already_defunct_cart.id,
            self.purchased.id,
            nonexistent_id
        ]
        self._create_tempfile_and_call_command(order_ids)
        self.assertEqual(
            Order.objects.get(id=self.cart.id).status, "defunct-cart"
        )
        self.assertEqual(
            Order.objects.get(id=self.paying.id).status, "defunct-paying"
        )
        self.assertEqual(
            Order.objects.get(id=self.already_defunct_cart.id).status,
            "defunct-cart"
        )
        # Purchased orders must never be retired.
        self.assertEqual(
            Order.objects.get(id=self.purchased.id).status, "purchased"
        )

    def _create_tempfile_and_call_command(self, order_ids):
        """
        Takes a list of order_ids, writes them to a tempfile, and then runs the
        "retire_order" command on the tempfile
        """
        with NamedTemporaryFile() as temp:
            temp.write("\n".join(str(order_id) for order_id in order_ids))
            temp.seek(0)
            call_command('retire_order', temp.name)

    def _create_cart(self):
        """Creates a cart and adds a CertificateItem to it"""
        cart = Order.get_cart_for_user(UserFactory.create())
        item = CertificateItem.add_to_order(
            cart, self.course_key, 10, 'honor', currency='usd'
        )
        return cart, item
| miptliot/edx-platform | lms/djangoapps/shoppingcart/management/tests/test_retire_order.py | Python | agpl-3.0 | 2,853 | 0 |
# flake8: noqa
# Package entry point: re-export the public API so users can write
# `from configurations import Configuration`.
from .base import Settings, Configuration
from .decorators import pristinemethod
# Package version string.
__version__ = '0.5'
# Explicit public API for `from configurations import *`.
__all__ = ['Configuration', 'pristinemethod', 'Settings']
| paltman/django-configurations | configurations/__init__.py | Python | bsd-3-clause | 175 | 0 |
# -*- coding: utf-8 -*-
#
# 2016-05-07 Cornelius Kölbel <cornelius.koelbel@netknights.it>
# Add realm dropdown
# 2016-04-06 Cornelius Kölbel <cornelius.koelbel@netknights.it>
# Add time dependency in policy
# 2016-02-22 Cornelius Kölbel <cornelius.koelbel@netknights.it>
# Add RADIUS passthru policy
# 2016-02-05 Cornelius Kölbel <cornelius.koelbel@netknights.it>
# Add tokenwizard in scope UI
# 2015-12-30 Cornelius Kölbel <cornelius.koelbel@netknights.it>
# Add password reset policy
# 2015-12-28 Cornelius Kölbel <cornelius.koelbel@netknights.it>
# Add registration policy
# 2015-12-16 Cornelius Kölbel <cornelius.koelbel@netknights.it>
# Add tokenissuer policy
# 2015-11-29 Cornelius Kölbel <cornelius.koelbel@netknights.it>
# Add getchallenges policy
# 2015-10-31 Cornelius Kölbel <cornelius.koelbel@netknights.it>
# Add last_auth policy.
# 2015-10-30 Cornelius Kölbel <cornelius.koelbel@netknights.it>
# Display user details in token list
# 2015-10-26 Cornelius Kölbel <cornelius.koelbel@netknights.it>
# Add default token type for enrollment
# 2015-10-14 Cornelius Kölbel <cornelius.koelbel@netknights.it>
# Add auth_max_success and auth_max_fail actions to
# scope authorization
# 2015-10-09 Cornelius Kölbel <cornelius.koelbel@netknights.it>
# Add token_page_size and user_page_size policy
# 2015-09-06 Cornelius Kölbel <cornelius.koelbel@netkngihts.it>
# Add challenge_response authentication policy
# 2015-06-30 Cornelius Kölbel <cornelius.koelbel@netknights.it>
# Add the OTP PIN handling
# 2015-06-29 Cornelius Kölbel <cornelius.koelbel@netknights.it>
# Add the mangle policy
# 2015-04-03 Cornelius Kölbel <cornelius.koelbel@netknights.it>
# Add WebUI logout time.
# 2015-03-27 Cornelius Kölbel <cornelius.koelbel@netknights.it>
# Add PIN policies in USER scope
# 2015-02-06 Cornelius Kölbel <cornelius@privacyidea.org>
# Rewrite for flask migration.
# Policies are not handled by decorators as
# 1. precondition for API calls
# 2. internal modifications of LIB-functions
# 3. postcondition for API calls
#
# Jul 07, 2014 add check_machine_policy, Cornelius Kölbel
# May 08, 2014 Cornelius Kölbel
#
# License: AGPLv3
# contact: http://www.privacyidea.org
#
# privacyIDEA is a fork of LinOTP
# Copyright (C) 2010 - 2014 LSE Leading Security Experts GmbH
# License: AGPLv3
# contact: http://www.linotp.org
# http://www.lsexperts.de
# linotp@lsexperts.de
#
# This code is free software; you can redistribute it and/or
# modify it under the terms of the GNU AFFERO GENERAL PUBLIC LICENSE
# License as published by the Free Software Foundation; either
# version 3 of the License, or any later version.
#
# This code is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU AFFERO GENERAL PUBLIC LICENSE for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""
Base function to handle the policy entries in the database.
This module only depends on the db/models.py
The functions of this module are tested in tests/test_lib_policy.py
A policy has the attributes
* name
* scope
* action
* realm
* resolver
* user
* client
* active
``name`` is the unique identifier of a policy. ``scope`` is the area,
where this policy is meant for. This can be values like admin, selfservice,
authentication...
``scope`` takes only one value.
``active`` is bool and indicates, whether a policy is active or not.
``action``, ``realm``, ``resolver``, ``user`` and ``client`` can take a comma
separated list of values.
realm and resolver
------------------
If these are empty '*', this policy matches each requested realm.
user
----
If the user is empty or '*', this policy matches each user.
You can exclude users from matching this policy, by prepending a '-' or a '!'.
``*, -admin`` will match for all users except the admin.
client
------
The client is identified by its IP address. A policy can contain a list of
IP addresses or subnets.
You can exclude clients from subnets by prepending the client with a '-' or
a '!'.
``172.16.0.0/24, -172.16.0.17`` will match each client in the subnet except
the 172.16.0.17.
time
----
You can specify a time in which the policy should be active.
Time formats are
<dow>-<dow>:<hh>:<mm>-<hh>:<mm>, ...
<dow>:<hh>:<mm>-<hh>:<mm>
<dow>:<hh>-<hh>
and any combination of it. "dow" being day of week Mon, Tue, Wed, Thu, Fri,
Sat, Sun.
"""
from .log import log_with
from configobj import ConfigObj
from netaddr import IPAddress
from netaddr import IPNetwork
from gettext import gettext as _
import logging
from ..models import (Policy, db)
from privacyidea.lib.config import (get_token_classes, get_token_types)
from privacyidea.lib.error import ParameterError, PolicyError
from privacyidea.lib.realm import get_realms
from privacyidea.lib.resolver import get_resolver_list
from privacyidea.lib.smtpserver import get_smtpservers
from privacyidea.lib.radiusserver import get_radiusservers
from privacyidea.lib.utils import check_time_in_range
log = logging.getLogger(__name__)
# NOTE(review): readability aliases, presumably passed as the boolean
# "is this parameter optional" argument of parameter-checking helpers --
# confirm at call sites.
optional = True
required = False
class SCOPE(object):
    # Each constant is the literal string used in policy definitions.
    __doc__ = """This is the list of the allowed scopes that can be used in
    policy definitions.
    """
    AUTHZ = "authorization"
    ADMIN = "admin"
    AUTH = "authentication"
    AUDIT = "audit"
    USER = "user" # was selfservice
    ENROLL = "enrollment"
    GETTOKEN = "gettoken"
    WEBUI = "webui"
    REGISTER = "register"
class ACTION(object):
    # Each constant is the literal action name stored in policy entries;
    # renaming a value here would break existing policies in the DB.
    __doc__ = """This is the list of usual actions."""
    ASSIGN = "assign"
    AUDIT = "auditlog"
    AUTHITEMS = "fetch_authentication_items"
    AUTHMAXSUCCESS = "auth_max_success"
    AUTHMAXFAIL = "auth_max_fail"
    AUTOASSIGN = "autoassignment"
    CACONNECTORREAD = "caconnectorread"
    CACONNECTORWRITE = "caconnectorwrite"
    CACONNECTORDELETE = "caconnectordelete"
    CHALLENGERESPONSE = "challenge_response"
    GETCHALLENGES = "getchallenges"
    COPYTOKENPIN = "copytokenpin"
    COPYTOKENUSER = "copytokenuser"
    DEFAULT_TOKENTYPE = "default_tokentype"
    DELETE = "delete"
    DISABLE = "disable"
    EMAILCONFIG = "smtpconfig"
    ENABLE = "enable"
    ENCRYPTPIN = "encrypt_pin"
    GETSERIAL = "getserial"
    GETRANDOM = "getrandom"
    IMPORT = "importtokens"
    LASTAUTH = "last_auth"
    LOGINMODE = "login_mode"
    LOGOUTTIME = "logout_time"
    LOSTTOKEN = 'losttoken'
    LOSTTOKENPWLEN = "losttoken_PW_length"
    LOSTTOKENPWCONTENTS = "losttoken_PW_contents"
    LOSTTOKENVALID = "losttoken_valid"
    MACHINERESOLVERWRITE = "mresolverwrite"
    MACHINERESOLVERDELETE = "mresolverdelete"
    MACHINELIST = "machinelist"
    MACHINETOKENS = "manage_machine_tokens"
    MANGLE = "mangle"
    MAXTOKENREALM = "max_token_per_realm"
    MAXTOKENUSER = "max_token_per_user"
    NODETAILSUCCESS = "no_detail_on_success"
    NODETAILFAIL = "no_detail_on_fail"
    OTPPIN = "otppin"
    OTPPINRANDOM = "otp_pin_random"
    OTPPINMAXLEN = 'otp_pin_maxlength'
    OTPPINMINLEN = 'otp_pin_minlength'
    OTPPINCONTENTS = 'otp_pin_contents'
    PASSNOTOKEN = "passOnNoToken"
    PASSNOUSER = "passOnNoUser"
    PASSTHRU = "passthru"
    PASSWORDRESET = "password_reset"
    PINHANDLING = "pinhandling"
    POLICYDELETE = "policydelete"
    POLICYWRITE = "policywrite"
    POLICYTEMPLATEURL = "policy_template_url"
    REALM = "realm"
    REMOTE_USER = "remote_user"
    REQUIREDEMAIL = "requiredemail"
    RESET = "reset"
    RESOLVERDELETE = "resolverdelete"
    RESOLVERWRITE = "resolverwrite"
    RESOLVER = "resolver"
    RESYNC = "resync"
    REVOKE = "revoke"
    SET = "set"
    SETPIN = "setpin"
    SETREALM = "setrealm"
    SERIAL = "serial"
    SYSTEMDELETE = "configdelete"
    SYSTEMWRITE = "configwrite"
    CONFIGDOCUMENTATION = "system_documentation"
    TOKENISSUER = "tokenissuer"
    TOKENLABEL = "tokenlabel"
    TOKENPAGESIZE = "token_page_size"
    TOKENREALMS = "tokenrealms"
    TOKENTYPE = "tokentype"
    TOKENWIZARD = "tokenwizard"
    TOKENWIZARD2ND = "tokenwizard_2nd_token"
    UNASSIGN = "unassign"
    USERLIST = "userlist"
    USERPAGESIZE = "user_page_size"
    ADDUSER = "adduser"
    DELETEUSER = "deleteuser"
    UPDATEUSER = "updateuser"
    USERDETAILS = "user_details"
    APIKEY = "api_key_required"
    SETHSM = "set_hsm_password"
    SMTPSERVERWRITE = "smtpserver_write"
    RADIUSSERVERWRITE = "radiusserver_write"
    REALMDROPDOWN = "realm_dropdown"
    EVENTHANDLINGWRITE = "eventhandling_write"
class LOGINMODE(object):
    """This is the list of possible values for the login mode."""
    USERSTORE = "userstore"
    PRIVACYIDEA = "privacyIDEA"
    DISABLE = "disable"
class REMOTE_USER(object):
    """The list of possible values for the remote_user policy."""
    DISABLE = "disable"
    ACTIVE = "allowed"
class ACTIONVALUE(object):
    """This is a list of usual action values for e.g. policy
    action-values like otppin."""
    TOKENPIN = "tokenpin"
    USERSTORE = "userstore"
    DISABLE = "disable"
    NONE = "none"
class AUTOASSIGNVALUE(object):
    """This is the possible values for autoassign"""
    USERSTORE = "userstore"
    NONE = "any_pin"
class PolicyClass(object):
"""
The Policy_Object will contain all database policy entries for easy
filtering and mangling.
It will be created at the beginning of the request and is supposed to stay
alive unchanged during the request.
"""
def __init__(self):
"""
Create the Policy_Object from the database table
"""
self.policies = []
# read the policies from the database and store it in the object
policies = Policy.query.all()
for pol in policies:
# read each policy
self.policies.append(pol.get())
@log_with(log)
def get_policies(self, name=None, scope=None, realm=None, active=None,
resolver=None, user=None, client=None, action=None,
adminrealm=None, time=None, all_times=False):
"""
Return the policies of the given filter values
:param name:
:param scope:
:param realm:
:param active:
:param resolver:
:param user:
:param client:
:param action:
:param adminrealm: This is the realm of the admin. This is only
evaluated in the scope admin.
:param time: The optional time, for which the policies should be
fetched. The default time is now()
:type time: datetime
:param all_times: If True the time restriction of the policies is
ignored. Policies of all time ranges will be returned.
:type all_times: bool
:return: list of policies
:rtype: list of dicts
"""
reduced_policies = self.policies
# filter policy for time. If no time is set or is a time is set and
# it matches the time_range, then we add this policy
if not all_times:
reduced_policies = [policy for policy in reduced_policies if
(policy.get("time") and
check_time_in_range(policy.get("time"), time))
or not policy.get("time")]
log.debug("Policies after matching time: {0!s}".format(
reduced_policies))
# Do exact matches for "name", "active" and "scope", as these fields
# can only contain one entry
p = [("name", name), ("active", active), ("scope", scope)]
for searchkey, searchvalue in p:
if searchvalue is not None:
reduced_policies = [policy for policy in reduced_policies if
policy.get(searchkey) == searchvalue]
log.debug("Policies after matching {1!s}: {0!s}".format(
reduced_policies, searchkey))
p = [("action", action), ("user", user), ("resolver", resolver),
("realm", realm)]
# If this is an admin-policy, we also do check the adminrealm
if scope == "admin":
p.append(("adminrealm", adminrealm))
for searchkey, searchvalue in p:
if searchvalue is not None:
new_policies = []
# first we find policies, that really match!
# Either with the real value or with a "*"
# values can be excluded by a leading "!" or "-"
for policy in reduced_policies:
value_found = False
value_excluded = False
# iterate through the list of values:
for value in policy.get(searchkey):
if value and value[0] in ["!", "-"] and \
searchvalue == value[1:]:
value_excluded = True
elif type(searchvalue) == list and value in \
searchvalue + ["*"]:
value_found = True
elif value in [searchvalue, "*"]:
value_found = True
if value_found and not value_excluded:
new_policies.append(policy)
# We also find the policies with no distinct information
# about the request value
for policy in reduced_policies:
if not policy.get(searchkey):
new_policies.append(policy)
reduced_policies = new_policies
log.debug("Policies after matching {1!s}: {0!s}".format(
reduced_policies, searchkey))
# Match the client IP.
# Client IPs may be direct match, may be located in subnets or may
# be excluded by a leading "-" or "!" sign.
# The client definition in the policy may ba a comma separated list.
# It may start with a "-" or a "!" to exclude the client
# from a subnet.
# Thus a client 10.0.0.2 matches a policy "10.0.0.0/8, -10.0.0.1" but
# the client 10.0.0.1 does not match the policy "10.0.0.0/8, -10.0.0.1".
# An empty client definition in the policy matches all clients.
if client is not None:
new_policies = []
for policy in reduced_policies:
client_found = False
client_excluded = False
for polclient in policy.get("client"):
if polclient[0] in ['-', '!']:
# exclude the client?
if IPAddress(client) in IPNetwork(polclient[1:]):
log.debug("the client %s is excluded by %s in "
"policy %s" % (client, polclient, policy))
client_excluded = True
elif IPAddress(client) in IPNetwork(polclient):
client_found = True
if client_found and not client_excluded:
# The client was contained in the defined subnets and was
# not excluded
new_policies.append(policy)
# If there is a policy without any client, we also add it to the
# accepted list.
for policy in reduced_policies:
if not policy.get("client"):
new_policies.append(policy)
reduced_policies = new_policies
log.debug("Policies after matching client".format(
reduced_policies))
return reduced_policies
@log_with(log)
def get_action_values(self, action, scope=SCOPE.AUTHZ, realm=None,
resolver=None, user=None, client=None, unique=False,
allow_white_space_in_action=False):
"""
Get the defined action values for a certain action like
scope: authorization
action: tokentype
would return a list of the tokentypes
scope: authorization
action: serial
would return a list of allowed serials
:param unique: if set, the function will raise an exception if more
than one value is returned
:param allow_white_space_in_action: Some policies like emailtext
would allow entering text with whitespaces. These whitespaces
must not be used to separate action values!
:type allow_white_space_in_action: bool
:return: A list of the allowed tokentypes
:rtype: list
"""
action_values = []
policies = self.get_policies(scope=scope,
action=action, active=True,
realm=realm, resolver=resolver, user=user,
client=client)
for pol in policies:
action_dict = pol.get("action", {})
action_value = action_dict.get(action, "")
"""
We must distinguish actions like:
tokentype=totp hotp motp,
where the string represents a list divided by spaces, and
smstext='your otp is <otp>'
where the spaces are part of the string.
"""
if action_value.startswith("'") and action_value.endswith("'"):
action_values.append(action_dict.get(action)[1:-1])
elif allow_white_space_in_action:
action_values.append(action_dict.get(action))
else:
action_values.extend(action_dict.get(action, "").split())
# reduce the entries to unique entries
action_values = list(set(action_values))
if unique:
if len(action_values) > 1:
raise PolicyError("There are conflicting %s"
" definitions!" % action)
return action_values
@log_with(log)
def ui_get_rights(self, scope, realm, username, client=None):
"""
Get the rights derived from the policies for the given realm and user.
Works for admins and normal users.
It fetches all policies for this user and compiles a maximum list of
allowed rights, that can be used to hide certain UI elements.
:param scope: Can be SCOPE.ADMIN or SCOPE.USER
:param realm: Is either user users realm or the adminrealm
:param username: The loginname of the user
:param client: The HTTP client IP
:return: A list of actions
"""
from privacyidea.lib.auth import ROLE
from privacyidea.lib.token import get_dynamic_policy_definitions
rights = []
userealm = None
adminrealm = None
logged_in_user = {"username": username,
"realm": realm}
if scope == SCOPE.ADMIN:
adminrealm = realm
logged_in_user["role"] = ROLE.ADMIN
elif scope == SCOPE.USER:
userealm = realm
logged_in_user["role"] = ROLE.USER
pols = self.get_policies(scope=scope,
adminrealm=adminrealm,
realm=userealm,
user=username, active=True,
client=client)
for pol in pols:
for action, action_value in pol.get("action").items():
if action_value:
rights.append(action)
# check if we have policies at all:
pols = self.get_policies(scope=scope, active=True)
if not pols:
# We do not have any policies in this scope, so we return all
# possible actions in this scope.
log.debug("No policies defined, so we set all rights.")
static_rights = get_static_policy_definitions(scope).keys()
enroll_rights = get_dynamic_policy_definitions(scope).keys()
rights = static_rights + enroll_rights
# reduce the list
rights = list(set(rights))
log.debug("returning the admin rights: {0!s}".format(rights))
return rights
@log_with(log)
def ui_get_enroll_tokentypes(self, client, logged_in_user):
"""
Return a dictionary of the allowed tokentypes for the logged in user.
This used for the token enrollment UI.
It looks like this:
{"hotp": "HOTP: event based One Time Passwords",
"totp": "TOTP: time based One Time Passwords",
"spass": "SPass: Simple Pass token. Static passwords",
"motp": "mOTP: classical mobile One Time Passwords",
"sshkey": "SSH Public Key: The public SSH key",
"yubikey": "Yubikey AES mode: One Time Passwords with Yubikey",
"remote": "Remote Token: Forward authentication request to another server",
"yubico": "Yubikey Cloud mode: Forward authentication request to YubiCloud",
"radius": "RADIUS: Forward authentication request to a RADIUS server",
"email": "EMail: Send a One Time Passwort to the users email address",
"sms": "SMS: Send a One Time Password to the users mobile phone",
"certificate": "Certificate: Enroll an x509 Certificate Token."}
:param client: Client IP address
:type client: basestring
:param logged_in_user: The Dict of the logged in user
:type logged_in_user: dict
:return: list of token types, the user may enroll
"""
from privacyidea.lib.auth import ROLE
enroll_types = {}
role = logged_in_user.get("role")
if role == ROLE.ADMIN:
admin_realm = logged_in_user.get("realm")
user_realm = None
else:
admin_realm = None
user_realm = logged_in_user.get("realm")
# check, if we have a policy definition at all.
pols = self.get_policies(scope=role, active=True)
tokenclasses = get_token_classes()
for tokenclass in tokenclasses:
# Check if the tokenclass is ui enrollable for "user" or "admin"
if role in tokenclass.get_class_info("ui_enroll"):
enroll_types[tokenclass.get_class_type()] = \
tokenclass.get_class_info("description")
if pols:
# admin policies or user policies are set, so we need to
# test, which tokens are allowed to be enrolled for this user
for tokentype in enroll_types.keys():
# determine, if there is a enrollment policy for this very type
typepols = self.get_policies(scope=role, client=client,
user=logged_in_user.get("username"),
realm=user_realm,
active=True,
action="enroll"+tokentype.upper(),
adminrealm=admin_realm)
if not typepols:
# If there is no policy allowing the enrollment of this
# tokentype, it is deleted.
del(enroll_types[tokentype])
return enroll_types
# --------------------------------------------------------------------------
#
# NEW STUFF
#
#
@log_with(log)
def set_policy(name=None, scope=None, action=None, realm=None, resolver=None,
               user=None, time=None, client=None, active=True, adminrealm=None):
    """
    Function to set a policy.
    If the policy with this name already exists, it updates the policy.

    :param name: The name of the policy
    :param scope: The scope of the policy. Something like "admin", "system",
        "authentication"
    :param action: A scope specific action or a comma separated list of
        actions. A list of action names or a dict mapping action names to
        values (True for plain boolean actions) is also accepted.
    :type action: basestring, list or dict
    :param realm: A realm, for which this policy is valid
    :param resolver: A resolver, for which this policy is valid
    :param user: A username or a list of usernames
    :param time: An optional time range, during which the policy is valid
    :param client: A client IP with optionally a subnet like 172.16.0.0/16
    :param active: If the policy is active or not
    :type active: bool
    :param adminrealm: The realm of the administrator (for admin policies)
    :return: The database ID of the policy
    :rtype: int
    """
    # Normalize a dict action into the "key=value, flag" string form
    if isinstance(action, dict):
        action_list = []
        for k, v in action.items():
            if v is not True:
                # value key
                action_list.append("{0!s}={1!s}".format(k, v))
            else:
                # simple boolean value
                action_list.append(k)
        action = ", ".join(action_list)
    # All list-valued parameters are stored as comma separated strings
    if isinstance(action, list):
        action = ", ".join(action)
    if isinstance(realm, list):
        realm = ", ".join(realm)
    if isinstance(adminrealm, list):
        adminrealm = ", ".join(adminrealm)
    if isinstance(user, list):
        user = ", ".join(user)
    if isinstance(resolver, list):
        resolver = ", ".join(resolver)
    if isinstance(client, list):
        client = ", ".join(client)
    p = Policy(name, action=action, scope=scope, realm=realm,
               user=user, time=time, client=client, active=active,
               resolver=resolver, adminrealm=adminrealm).save()
    return p
@log_with(log)
def enable_policy(name, enable=True):
    """
    Enable or disable the policy with the given name.

    :param name: The name of the policy
    :param enable: True to enable, False to disable the policy
    :return: ID of the policy
    """
    existing = Policy.query.filter(Policy.name == name).first()
    if existing is None:
        raise ParameterError("The policy with name '{0!s}' does not exist".format(name))
    # set_policy() with an existing name updates the stored policy in place
    return set_policy(name=name, active=enable)
@log_with(log)
def delete_policy(name):
    """
    Function to delete one named policy.

    :param name: the name of the policy to be deleted
    :return: the count of the deleted policies.
    :rtype: int
    """
    deleted = Policy.query.filter_by(name=name).delete()
    db.session.commit()
    return deleted
@log_with(log)
def export_policies(policies):
    """
    This function takes a policy list and creates an export file from it.

    :param policies: a policy definition
    :type policies: list of policy dictionaries
    :return: the contents of the file
    :rtype: string
    """
    sections = []
    for policy in (policies or []):
        # one INI-style section per policy, keyed by the policy name
        lines = ["[{0!s}]".format(policy.get("name"))]
        lines.extend("{0!s} = {1!s}".format(key, value)
                     for key, value in policy.items())
        sections.append("\n".join(lines) + "\n\n")
    return "".join(sections)
@log_with(log)
def import_policies(file_contents):
    """
    This function imports policies from a file.
    The file has a config_object format, i.e. the text file has a header

        [<policy_name>]
        key = value

    and key value pairs.

    :param file_contents: The contents of the file
    :type file_contents: basestring
    :return: number of imported policies
    :rtype: int
    """
    policies = ConfigObj(file_contents.split('\n'), encoding="UTF-8")
    res = 0
    # SECURITY NOTE: eval() is used below to parse the list-valued entries
    # of the imported file. Importing a policy file from an untrusted
    # source can therefore execute arbitrary code -- only import trusted
    # files (consider ast.literal_eval as a safer replacement).
    for policy_name, policy in policies.items():
        ret = set_policy(name=policy_name,
                         action=eval(policy.get("action")),
                         scope=policy.get("scope"),
                         realm=eval(policy.get("realm", "[]")),
                         user=eval(policy.get("user", "[]")),
                         resolver=eval(policy.get("resolver", "[]")),
                         client=eval(policy.get("client", "[]")),
                         time=policy.get("time", "")
                         )
        if ret > 0:
            log.debug("import policy {0!s}: {1!s}".format(policy_name, ret))
            res += 1
    return res
@log_with(log)
def get_static_policy_definitions(scope=None):
    """
    These are the static hard coded policy definitions.
    They can be enhanced by token based policy definitions, that can be found
    in lib.token.get_dynamic_policy_definitions.

    :param scope: Optional the scope of the policies
    :type scope: basestring
    :return: allowed scopes with allowed actions, the type of action and a
             description.
    :rtype: dict
    """
    resolvers = get_resolver_list().keys()
    realms = get_realms().keys()
    smtpconfigs = [server.config.identifier for server in get_smtpservers()]
    radiusconfigs = [radius.config.identifier for radius in
                     get_radiusservers()]
    # "userstore" is always offered as the first passthru choice
    radiusconfigs.insert(0, "userstore")
    pol = {
        SCOPE.REGISTER: {
            ACTION.RESOLVER: {'type': 'str',
                              'value': resolvers,
                              'desc': _('Define in which resolver the user '
                                        'should be registered.')},
            ACTION.REALM: {'type': 'str',
                           'value': realms,
                           'desc': _('Define in which realm the user should '
                                     'be registered.')},
            ACTION.EMAILCONFIG: {'type': 'str',
                                 'value': smtpconfigs,
                                 'desc': _('The SMTP server configuration, '
                                           'that should be used to send the '
                                           'registration email.')},
            ACTION.REQUIREDEMAIL: {'type': 'str',
                                   'desc': _('Only users with this email '
                                             'address are allowed to '
                                             'register. This is a regular '
                                             'expression.')}
        },
        SCOPE.ADMIN: {
            ACTION.ENABLE: {'type': 'bool',
                            'desc': _('Admin is allowed to enable tokens.')},
            ACTION.DISABLE: {'type': 'bool',
                             'desc': _('Admin is allowed to disable tokens.')},
            ACTION.SET: {'type': 'bool',
                         'desc': _(
                             'Admin is allowed to set token properties.')},
            ACTION.SETPIN: {'type': 'bool',
                            'desc': _(
                                'Admin is allowed to set the OTP PIN of '
                                'tokens.')},
            ACTION.RESYNC: {'type': 'bool',
                            'desc': _('Admin is allowed to resync tokens.')},
            ACTION.RESET: {'type': 'bool',
                           'desc': _(
                               'Admin is allowed to reset the Failcounter of '
                               'a token.')},
            # BUGFIX: the key was misspelled 'tpye' in the original, so the
            # REVOKE action carried no type information at all.
            ACTION.REVOKE: {'type': 'bool',
                            'desc': _("Admin is allowed to revoke a token")},
            ACTION.ASSIGN: {'type': 'bool',
                            'desc': _(
                                'Admin is allowed to assign a token to a '
                                'user.')},
            ACTION.UNASSIGN: {'type': 'bool',
                              'desc': _(
                                  'Admin is allowed to remove the token from '
                                  'a user, '
                                  'i.e. unassign a token.')},
            ACTION.IMPORT: {'type': 'bool',
                            'desc': _(
                                'Admin is allowed to import token files.')},
            ACTION.DELETE: {'type': 'bool',
                            'desc': _(
                                'Admin is allowed to remove tokens from the '
                                'database.')},
            ACTION.USERLIST: {'type': 'bool',
                              'desc': _(
                                  'Admin is allowed to view the list of the '
                                  'users.')},
            ACTION.MACHINELIST: {'type': 'bool',
                                 'desc': _('The Admin is allowed to list '
                                           'the machines.')},
            ACTION.MACHINETOKENS: {'type': 'bool',
                                   'desc': _('The Admin is allowed to attach '
                                             'and detach tokens to machines.')},
            ACTION.AUTHITEMS: {'type': 'bool',
                               'desc': _('The Admin is allowed to fetch '
                                         'authentication items of tokens '
                                         'assigned to machines.')},
            ACTION.TOKENREALMS: {'type': 'bool',
                                 'desc': _('Admin is allowed to manage the '
                                           'realms of a token.')},
            ACTION.GETSERIAL: {'type': 'bool',
                               'desc': _('Admin is allowed to retrieve a serial'
                                         ' for a given OTP value.'),
                               "group": "tools"},
            ACTION.GETRANDOM: {'type': 'bool',
                               'desc': _('Admin is allowed to retrieve '
                                         'random keys from privacyIDEA.')},
            ACTION.COPYTOKENPIN: {'type': 'bool',
                                  'desc': _(
                                      'Admin is allowed to copy the PIN of '
                                      'one token '
                                      'to another token.'),
                                  "group": "tools"},
            ACTION.COPYTOKENUSER: {'type': 'bool',
                                   'desc': _(
                                       'Admin is allowed to copy the assigned '
                                       'user to another'
                                       ' token, i.e. assign a user ot '
                                       'another token.'),
                                   "group": "tools"},
            ACTION.LOSTTOKEN: {'type': 'bool',
                               'desc': _('Admin is allowed to trigger the '
                                         'lost token workflow.'),
                               "group": "tools"},
            ACTION.SYSTEMWRITE: {'type': 'bool',
                                 "desc": _("Admin is allowed to write and "
                                           "modify the system configuration."),
                                 "group": "system"},
            ACTION.SYSTEMDELETE: {'type': 'bool',
                                  "desc": _("Admin is allowed to delete "
                                            "keys in the system "
                                            "configuration."),
                                  "group": "system"},
            ACTION.CONFIGDOCUMENTATION: {'type': 'bool',
                                         'desc': _('Admin is allowed to '
                                                   'export a documentation '
                                                   'of the complete '
                                                   'configuration including '
                                                   'resolvers and realm.'),
                                         'group': 'system'},
            ACTION.POLICYWRITE: {'type': 'bool',
                                 "desc": _("Admin is allowed to write and "
                                           "modify the policies."),
                                 "group": "system"},
            ACTION.POLICYDELETE: {'type': 'bool',
                                  "desc": _("Admin is allowed to delete "
                                            "policies."),
                                  "group": "system"},
            ACTION.RESOLVERWRITE: {'type': 'bool',
                                   "desc": _("Admin is allowed to write and "
                                             "modify the "
                                             "resolver and realm "
                                             "configuration."),
                                   "group": "system"},
            ACTION.RESOLVERDELETE: {'type': 'bool',
                                    "desc": _("Admin is allowed to delete "
                                              "resolvers and realms."),
                                    "group": "system"},
            ACTION.CACONNECTORWRITE: {'type': 'bool',
                                      "desc": _("Admin is allowed to create new"
                                                " CA Connector definitions "
                                                "and modify existing ones."),
                                      "group": "system"},
            ACTION.CACONNECTORDELETE: {'type': 'bool',
                                       "desc": _("Admin is allowed to delete "
                                                 "CA Connector definitions."),
                                       "group": "system"},
            ACTION.MACHINERESOLVERWRITE: {'type': 'bool',
                                          'desc': _("Admin is allowed to "
                                                    "write and modify the "
                                                    "machine resolvers."),
                                          'group': "system"},
            ACTION.MACHINERESOLVERDELETE: {'type': 'bool',
                                           'desc': _("Admin is allowed to "
                                                     "delete "
                                                     "machine resolvers."),
                                           'group': "system"},
            ACTION.AUDIT: {'type': 'bool',
                           "desc": _("Admin is allowed to view the Audit log."),
                           "group": "system"},
            ACTION.ADDUSER: {'type': 'bool',
                             "desc": _("Admin is allowed to add users in a "
                                       "userstore/UserIdResolver."),
                             "group": "system"},
            ACTION.UPDATEUSER: {'type': 'bool',
                                "desc": _("Admin is allowed to update the "
                                          "users data in a userstore."),
                                "group": "system"},
            ACTION.DELETEUSER: {'type': 'bool',
                                "desc": _("Admin is allowed to delete a user "
                                          "object in a userstore.")},
            ACTION.SETHSM: {'type': 'bool',
                            'desc': _("Admin is allowed to set the password "
                                      "of the HSM/Security Module.")},
            ACTION.GETCHALLENGES: {'type': 'bool',
                                   'desc': _("Admin is allowed to retrieve "
                                             "the list of active "
                                             "challenges.")},
            ACTION.SMTPSERVERWRITE: {'type': 'bool',
                                     'desc': _("Admin is allowed to write new "
                                               "SMTP server definitions.")},
            ACTION.RADIUSSERVERWRITE: {'type': 'bool',
                                       'desc': _("Admin is allowed to write "
                                                 "new RADIUS server "
                                                 "definitions.")},
            ACTION.EVENTHANDLINGWRITE: {'type': 'bool',
                                        'desc': _("Admin is allowed to write "
                                                  "and modify the event "
                                                  "handling configuration.")}
        },
        SCOPE.USER: {
            ACTION.ASSIGN: {
                'type': 'bool',
                'desc': _("The user is allowed to assign an existing token"
                          " that is not yet assigned"
                          " using the token serial number.")},
            ACTION.DISABLE: {'type': 'bool',
                             'desc': _(
                                 'The user is allowed to disable his own '
                                 'tokens.')},
            ACTION.ENABLE: {'type': 'bool',
                            'desc': _(
                                "The user is allowed to enable his own "
                                "tokens.")},
            ACTION.DELETE: {'type': 'bool',
                            "desc": _(
                                "The user is allowed to delete his own "
                                "tokens.")},
            ACTION.UNASSIGN: {'type': 'bool',
                              "desc": _("The user is allowed to unassign his "
                                        "own tokens.")},
            ACTION.RESYNC: {'type': 'bool',
                            "desc": _("The user is allowed to resyncronize his "
                                      "tokens.")},
            ACTION.REVOKE: {'type': 'bool',
                            'desc': _("The user is allowed to revoke a "
                                      "token")},
            ACTION.RESET: {'type': 'bool',
                           'desc': _('The user is allowed to reset the '
                                     'failcounter of his tokens.')},
            ACTION.SETPIN: {'type': 'bool',
                            "desc": _("The user is allowed to set the OTP "
                                      "PIN "
                                      "of his tokens.")},
            ACTION.OTPPINMAXLEN: {'type': 'int',
                                  'value': range(0, 32),
                                  "desc": _("Set the maximum allowed length "
                                            "of the OTP PIN.")},
            ACTION.OTPPINMINLEN: {'type': 'int',
                                  'value': range(0, 32),
                                  "desc": _("Set the minimum required length "
                                            "of the OTP PIN.")},
            ACTION.OTPPINCONTENTS: {'type': 'str',
                                    "desc": _("Specifiy the required "
                                              "contents of the OTP PIN. "
                                              "(c)haracters, (n)umeric, "
                                              "(s)pecial, (o)thers. [+/-]!")},
            ACTION.AUDIT: {
                'type': 'bool',
                'desc': _('Allow the user to view his own token history.')},
            ACTION.USERLIST: {'type': 'bool',
                              'desc': _("The user is allowed to view his "
                                        "own user information.")},
            ACTION.UPDATEUSER: {'type': 'bool',
                                'desc': _("The user is allowed to update his "
                                          "own user information, like changing "
                                          "his password.")},
            ACTION.PASSWORDRESET: {'type': 'bool',
                                   'desc': _("The user is allowed to do a "
                                             "password reset in an editable "
                                             "UserIdResolver.")}
        },
        SCOPE.ENROLL: {
            ACTION.MAXTOKENREALM: {
                'type': 'int',
                'desc': _('Limit the number of allowed tokens in a realm.')},
            ACTION.MAXTOKENUSER: {
                'type': 'int',
                'desc': _('Limit the number of tokens a user may have '
                          'assigned.')},
            ACTION.OTPPINRANDOM: {
                'type': 'int',
                'value': range(0, 32),
                "desc": _("Set a random OTP PIN with this length for a "
                          "token.")},
            ACTION.PINHANDLING: {
                'type': 'str',
                'desc': _('In case of a random OTP PIN use this python '
                          'module to process the PIN.')},
            ACTION.ENCRYPTPIN: {
                'type': 'bool',
                "desc": _("The OTP PIN can be hashed or encrypted. Hashing "
                          "the PIN is the default behaviour.")},
            ACTION.TOKENLABEL: {
                'type': 'str',
                'desc': _("Set label for a new enrolled Google Authenticator. "
                          "Possible tags are <u> (user), <r> ("
                          "realm), <s> (serial).")},
            ACTION.TOKENISSUER: {
                'type': 'str',
                'desc': _("This is the issuer label for new enrolled Google "
                          "Authenticators.")
            },
            ACTION.AUTOASSIGN: {
                'type': 'str',
                'value': [AUTOASSIGNVALUE.NONE, AUTOASSIGNVALUE.USERSTORE],
                'desc': _("Users can assign a token just by using the "
                          "unassigned token to authenticate.")},
            ACTION.LOSTTOKENPWLEN: {
                'type': 'int',
                'value': range(1, 32),
                'desc': _('The length of the password in case of '
                          'temporary token (lost token).')},
            ACTION.LOSTTOKENPWCONTENTS: {
                'type': 'str',
                'desc': _('The contents of the temporary password, '
                          'described by the characters C, c, n, s.')},
            ACTION.LOSTTOKENVALID: {
                'type': 'int',
                'value': range(1, 61),
                'desc': _('The length of the validity for the temporary '
                          'token (in days).')},
        },
        SCOPE.AUTH: {
            ACTION.OTPPIN: {
                'type': 'str',
                'value': [ACTIONVALUE.TOKENPIN, ACTIONVALUE.USERSTORE,
                          ACTIONVALUE.NONE],
                'desc': _('Either use the Token PIN, use the Userstore '
                          'Password or use no fixed password '
                          'component.')},
            ACTION.CHALLENGERESPONSE: {
                'type': 'str',
                'desc': _('This is a whitespace separated list of tokentypes, '
                          'that can be used with challenge response.')
            },
            ACTION.PASSTHRU: {
                'type': 'str',
                'value': radiusconfigs,
                'desc': _('If set, the user in this realm will be '
                          'authenticated against the userstore or against the '
                          'given RADIUS config,'
                          ' if the user has no tokens assigned.')
            },
            ACTION.PASSNOTOKEN: {
                'type': 'bool',
                'desc': _('If the user has no token, the authentication '
                          'request for this user will always be true.')
            },
            ACTION.PASSNOUSER: {
                'type': 'bool',
                'desc': _('If the user does not exist, '
                          'the authentication request for this '
                          'non-existing user will always be true.')
            },
            ACTION.MANGLE: {
                'type': 'str',
                'desc': _('Can be used to modify the parameters pass, '
                          'user and realm in an authentication request. See '
                          'the documentation for an example.')
            }
        },
        SCOPE.AUTHZ: {
            ACTION.AUTHMAXSUCCESS: {
                'type': 'str',
                'desc': _("You can specify how many successful authentication "
                          "requests a user is allowed to do in a given time. "
                          "Specify like 1/5s, 2/10m, 10/1h - s, m, h being "
                          "second, minute and hour.")
            },
            ACTION.AUTHMAXFAIL: {
                'type': 'str',
                'desc': _("You can specify how many failed authentication "
                          "requests a user is allowed to do in a given time. "
                          "Specify like 1/5s, 2/10m, 10/1h - s, m, h being "
                          "second, minute and hour.")
            },
            ACTION.LASTAUTH: {
                'type': 'str',
                'desc': _("You can specify in which time frame the user needs "
                          "to authenticate again with this token. If the user "
                          "authenticates later, authentication will fail. "
                          "Specify like 30h, 7d or 1y.")
            },
            ACTION.TOKENTYPE: {
                'type': 'str',
                'desc': _('The user will only be authenticated with this '
                          'very tokentype.')},
            ACTION.SERIAL: {
                'type': 'str',
                'desc': _('The user will only be authenticated if the serial '
                          'number of the token matches this regexp.')},
            ACTION.SETREALM: {
                'type': 'str',
                'value': realms,
                'desc': _('The Realm of the user is set to this very realm. '
                          'This is important if the user is not contained in '
                          'the default realm and can not pass his realm.')},
            ACTION.NODETAILSUCCESS: {
                'type': 'bool',
                'desc': _('In case of successful authentication additional '
                          'no detail information will be returned.')},
            ACTION.NODETAILFAIL: {
                'type': 'bool',
                'desc': _('In case of failed authentication additional '
                          'no detail information will be returned.')},
            ACTION.APIKEY: {
                'type': 'bool',
                'desc': _('The sending of an API Auth Key is required during '
                          'authentication. This avoids rogue authenticate '
                          'requests against the /validate/check interface.')
            }
        },
        SCOPE.WEBUI: {
            ACTION.LOGINMODE: {
                'type': 'str',
                'desc': _(
                    'If set to "privacyIDEA" the users and admins need to '
                    'authenticate against privacyIDEA when they log in '
                    'to the Web UI. Defaults to "userstore"'),
                'value': [LOGINMODE.USERSTORE, LOGINMODE.PRIVACYIDEA,
                          LOGINMODE.DISABLE],
            },
            ACTION.REMOTE_USER: {
                'type': 'str',
                'value': [REMOTE_USER.ACTIVE, REMOTE_USER.DISABLE],
                'desc': _('The REMOTE_USER set by the webserver can be used '
                          'to login to privacyIDEA or it will be ignored. '
                          'Defaults to "disable".')
            },
            ACTION.LOGOUTTIME: {
                'type': 'int',
                'desc': _("Set the time in seconds after which the user will "
                          "be logged out from the WebUI. Default: 120")
            },
            ACTION.TOKENPAGESIZE: {
                'type': 'int',
                'desc': _("Set how many tokens should be displayed in the "
                          "token view on one page.")
            },
            ACTION.USERPAGESIZE: {
                'type': 'int',
                'desc': _("Set how many users should be displayed in the user "
                          "view on one page.")
            },
            ACTION.USERDETAILS: {
                'type': 'bool',
                'desc': _("Whether the user ID and the resolver should be "
                          "displayed in the token list.")
            },
            ACTION.POLICYTEMPLATEURL: {
                'type': 'str',
                'desc': _("The URL of a repository, where the policy "
                          "templates can be found.  (Default "
                          "https://raw.githubusercontent.com/privacyidea/"
                          "policy-templates/master/templates/)")
            },
            ACTION.TOKENWIZARD: {
                'type': 'bool',
                'desc': _("As long as a user has no token, he will only see"
                          " a token wizard in the UI.")
            },
            ACTION.TOKENWIZARD2ND: {
                'type': 'bool',
                'desc': _("The tokenwizard will be displayed in the token "
                          "menu, even if the user already has a token.")
            },
            ACTION.DEFAULT_TOKENTYPE: {
                'type': 'str',
                'desc': _("This is the default token type in the token "
                          "enrollment dialog."),
                'value': get_token_types()
            },
            ACTION.REALMDROPDOWN: {
                'type': 'bool',
                'desc': _("If this is checked, a dropdown combobox with the "
                          "realms is displayed in the login screen.")
            }
        }
    }
    if scope:
        ret = pol.get(scope, {})
    else:
        ret = pol
    return ret
| jalr/privacyidea | privacyidea/lib/policy.py | Python | agpl-3.0 | 57,193 | 0.000385 |
from . import about
from . import gs
from . import gsq
from . import mg9
from . import nineo
from . import vu
import os
import pprint
import sublime
def gs_init(_={}):
	"""Register every `bi_*` function in this module as a 9o builtin.

	The `bi_` prefix is stripped and the remaining name is mangled:
	double underscores become dots, single underscores become dashes.
	"""
	prefix = 'bi_'
	for nm, f in list(globals().items()):
		if nm.startswith(prefix):
			cmd_name = nm[len(prefix):].replace('__', '.').replace('_', '-')
			nineo.builtin(cmd_name, f)
def _do_cl(c, k):
	"""Shared driver for the `all`/`any`/`each` builtins.

	Queues the argument commands into the command-list bucket *k* and
	resumes *c*. With no arguments the builtin trivially succeeds.
	"""
	if not c.args:
		c.ok = True
	else:
		# `any` starts pessimistic-success inverted: ok is False until a
		# queued command flips it; `all`/`each` start with ok True
		c.ok = (k != 'any')
		for cmd_name in c.args:
			c.cl[k].append(c.sess.cmd(cmd_name, set_stream=c.set_stream))
	c.resume()
def bi_all(c):
	# builtin `all`: succeed only if every listed command succeeds
	_do_cl(c, 'all')
def bi_any(c):
	# builtin `any`: succeed if at least one listed command succeeds
	_do_cl(c, 'any')
def bi_each(c):
	# builtin `each`: run every listed command; queued under the 'each' bucket
	_do_cl(c, 'each')
def bi_version(c):
c.done(about.VERSION)
def bi_true(c):
	# Always succeed (shell-style `true`).
	c.done()
def bi_false(c):
	# Always fail (shell-style `false`).
	c.fail()
def bi_confirm(c):
	"""Show an ok/cancel dialog and resume with the user's choice."""
	if not c.args:
		c.fail('Usage: confirm <message>')
		return
	c.resume(sublime.ok_cancel_dialog(' '.join(c.args)))
def _dbg_c(c, keys):
d = c.__dict__
if keys:
v = {}
for k in keys:
v[k] = d.get(k)
else:
v = d
return pprint.pformat(v)
def bi_gs__cmdump(c):
	"""9o builtin: print and return selected attributes of this command.

	Usage: gs.cmdump <keys-list> [cmd [args...]]
	"""
	if len(c.args) == 0 or not gs.is_a(c.args[0], []):
		c.fail('Usage: gs.cmdump <keys-list> [cmd [args...]]')
		return

	keys = c.args[0]
	# (the unused `args = c.args[1:]` slice was removed; unlike
	# gs.cmdebug, this builtin never runs a child command)
	s = _dbg_c(c, keys)
	print('gs.cmdump: %s' % s)
	c.done(s)
def bi_gs__cmdebug(c):
	# Usage: gs.cmdebug <keys-list> [cmd [args...]]
	# Runs `cmd` and, when it completes, dumps the listed attribute keys
	# of the *child* command object.
	if len(c.args) == 0 or not gs.is_a(c.args[0], []):
		c.fail('Usage: gs.cmdebug <keys-list> [cmd [args...]]')
		return
	keys = c.args[0]
	args = c.args[1:]
	def cb(x):
		print('gs.cmdebug: %s' % _dbg_c(x, keys))
		x.resume()
		# propagate the child's success/failure to this command
		c.resume(x.ok)
	c.sess.cmd(args, cb=cb, set_stream=c.set_stream).start()
def bi_echo(c, ok=True):
	# Write the joined arguments to the session output, then resume with
	# status `ok` (bi_fail reuses this with ok=False).
	c.sess.write(' '.join(c.args))
	c.resume(ok)
def bi_fail(c):
	# Like echo, but the command fails.
	bi_echo(c, False)
def bi_gs__synchk(c):
	"""9o builtin: syntax-check Go source via MarGo's `synchk` call.

	With args, each arg is a filename to check. Without args, the current
	view is checked: by filename if it is saved and clean, otherwise by
	its in-memory source. Each reported error is appended to `c.attrs` as
	a {fn, message, pos} record and the command fails; otherwise it
	succeeds.
	"""
	def f(res, err):
		errs = res.get('Errors', [])
		if errs:
			for e in errs:
				c.attrs.append({
					'fn': e.get('Fn', ''),
					'message': e.get('Message', ''),
					'pos': '%s:%s' % (e.get('Line', -1), e.get('Column', 0)),
				})
			c.fail()
		else:
			c.done()

	# BUG FIX: `vv` was previously bound only in the no-args branch, but
	# it is also needed below to build the highlight ctx; calling
	# `gs.synchk <file>` with no ctx raised NameError.
	vv = c.sess.vv
	if c.args:
		files = [{'Fn': fn} for fn in c.args]
	else:
		fn = vv.fn()
		if fn and not vv.view().is_dirty():
			files = [{'Fn': fn}]
		else:
			files = [{'Src': vv.src()}]

	if not c.hl:
		c.hl = {
			'ctx': 'gs.synchk:%s' % vv.vfn(),
		}

	mg9.acall('synchk', {'Files': files}, f)
def bi_go(c):
	# Dispatch a `go ...` command through the shared exec handler.
	if c.args and c.args[0] in ('build', 'install', 'run', 'test', 'vet'):
		# These sub-commands read files from disk, so flush unsaved buffers.
		c.sess.save_all(c.wd)
		if not c.hl.get('ctx'):
			# Highlight context used to annotate compile/vet errors.
			s = 'compile'
			if c.args[0] == 'vet':
				s = 'vet'
			c.hl['ctx'] = ' '.join(('go', s, c.env.get('_wd_or_vfn', '')))
	# note: do *not* resume c, we're *switching* to exec_c, not *starting* a new command
	nineo.exec_c(c)
def bi_cd(c):
	# Change the 9o session's working directory (resolved relative to the
	# current one) and re-initialize the prompt in the view.
	try:
		wd = gs.abspath(' '.join(c.args), dir=c.wd)
		os.chdir(wd)
		c.sess.wr.vv.view().run_command('gs9o_init', {'wd': wd})
		c.done()
	except Exception as ex:
		c.fail('Cannot chdir: %s' % ex)
def bi_help(c):
	# Open the bundled 9o documentation file.
	vu.open(gs.dist_path('9o.md'))
	c.done()
def bi_share(c):
	# Upload the active Go buffer via MarGo's share service and copy the
	# resulting URL to the clipboard. Refuses to run on non-Go views.
	vv = vu.active()
	view = vv.view()
	if view is None or view.score_selector(0, 'source.go') <= 0:
		c.fail('not sharing non-go src')
		return
	def f(res, err):
		if err:
			c.fail(err)
		else:
			s = res.get('Url', '').strip()
			if s:
				sublime.set_clipboard(s)
				c.done(s + ' (url copied to the clipboard)')
			else:
				c.fail('no url received')
	mg9.share(vv.src(), f)
def bi_gs__build_margo(c):
	# Rebuild the MarGo backend on a worker thread; on success, kill the
	# running server so the new binary is started on the next request.
	def f():
		out = mg9.build_mg()
		if out == 'ok':
			mg9.killSrv()
			c.done('ok')
		else:
			c.fail(out)
	gsq.do('GoSublime', f, msg='Rebuilding MarGo')
| DisposaBoy/GoSublime-next | gosubl/nineo_builtins.py | Python | mit | 3,455 | 0.041389 |
from django.db.models import CharField
from django.utils.translation import ugettext_lazy as _
from localflavor.deprecation import DeprecatedPhoneNumberField
from . import forms
from .au_states import STATE_CHOICES
from .validators import AUBusinessNumberFieldValidator, AUCompanyNumberFieldValidator, AUTaxFileNumberFieldValidator
class AUStateField(CharField):
    """
    A model field that stores the three-letter Australian state abbreviation in the database.

    Choices are fixed to :data:`~localflavor.au.au_states.STATE_CHOICES`.
    """

    description = _("Australian State")

    def __init__(self, *args, **kwargs):
        # Force the fixed choices/length regardless of what the caller passed.
        kwargs.update(choices=STATE_CHOICES, max_length=3)
        super(AUStateField, self).__init__(*args, **kwargs)

    def deconstruct(self):
        # `choices` is (re-)imposed in __init__, so it must not be
        # serialized into migrations.
        name, path, args, kwargs = super(AUStateField, self).deconstruct()
        kwargs.pop('choices')
        return name, path, args, kwargs
class AUPostCodeField(CharField):
    """
    A model field storing the four-digit Australian postcode.

    Rendered in forms as :class:`~localflavor.au.forms.AUPostCodeField`.
    """

    description = _("Australian Postcode")

    def __init__(self, *args, **kwargs):
        kwargs['max_length'] = 4
        super(AUPostCodeField, self).__init__(*args, **kwargs)

    def formfield(self, **kwargs):
        # An explicit form_class from the caller wins over the default.
        kwargs.setdefault('form_class', forms.AUPostCodeField)
        return super(AUPostCodeField, self).formfield(**kwargs)
class AUPhoneNumberField(CharField, DeprecatedPhoneNumberField):
    """
    A model field that checks that the value is a valid Australian phone number (ten digits).

    .. deprecated:: 1.4
        Use the django-phonenumber-field_ library instead.

    .. _django-phonenumber-field: https://github.com/stefanfoulis/django-phonenumber-field
    """

    description = _("Australian Phone number")

    def __init__(self, *args, **kwargs):
        # max_length is 20 (not 10): stored values may contain punctuation
        # and whitespace; the form field performs the actual validation.
        kwargs['max_length'] = 20
        super(AUPhoneNumberField, self).__init__(*args, **kwargs)

    def formfield(self, **kwargs):
        # An explicit form_class from the caller wins over the default.
        defaults = {'form_class': forms.AUPhoneNumberField}
        defaults.update(kwargs)
        return super(AUPhoneNumberField, self).formfield(**defaults)
class AUBusinessNumberField(CharField):
    """
    A model field validated as an Australian Business Number (ABN).

    .. versionadded:: 1.3
    """

    description = _("Australian Business Number")

    validators = [AUBusinessNumberFieldValidator()]

    def __init__(self, *args, **kwargs):
        kwargs['max_length'] = 11
        super(AUBusinessNumberField, self).__init__(*args, **kwargs)

    def formfield(self, **kwargs):
        # An explicit form_class from the caller wins over the default.
        kwargs.setdefault('form_class', forms.AUBusinessNumberField)
        return super(AUBusinessNumberField, self).formfield(**kwargs)

    def to_python(self, value):
        """Normalize the ABN by stripping all whitespace."""
        value = super(AUBusinessNumberField, self).to_python(value)
        return value if value is None else ''.join(value.split())
class AUCompanyNumberField(CharField):
    """
    A model field validated as an Australian Company Number (ACN).

    .. versionadded:: 1.5
    """

    description = _("Australian Company Number")

    validators = [AUCompanyNumberFieldValidator()]

    def __init__(self, *args, **kwargs):
        kwargs['max_length'] = 9
        super(AUCompanyNumberField, self).__init__(*args, **kwargs)

    def formfield(self, **kwargs):
        # An explicit form_class from the caller wins over the default.
        kwargs.setdefault('form_class', forms.AUCompanyNumberField)
        return super(AUCompanyNumberField, self).formfield(**kwargs)

    def to_python(self, value):
        """Normalize the ACN by stripping all whitespace."""
        value = super(AUCompanyNumberField, self).to_python(value)
        return value if value is None else ''.join(value.split())
class AUTaxFileNumberField(CharField):
    """
    A model field validated as an Australian Tax File Number (TFN).

    A TFN is a number issued to a person by the Commissioner of Taxation
    and is used to verify client identity and establish their income
    levels. It is an eight or nine digit number without any embedded
    meaning.

    .. versionadded:: 1.4
    """

    description = _("Australian Tax File Number")

    validators = [AUTaxFileNumberFieldValidator()]

    def __init__(self, *args, **kwargs):
        kwargs['max_length'] = 11
        super(AUTaxFileNumberField, self).__init__(*args, **kwargs)

    def formfield(self, **kwargs):
        # An explicit form_class from the caller wins over the default.
        kwargs.setdefault('form_class', forms.AUTaxFileNumberField)
        return super(AUTaxFileNumberField, self).formfield(**kwargs)

    def to_python(self, value):
        """Normalize the TFN by stripping all whitespace."""
        value = super(AUTaxFileNumberField, self).to_python(value)
        return value if value is None else ''.join(value.split())
| thor/django-localflavor | localflavor/au/models.py | Python | bsd-3-clause | 5,128 | 0.001755 |
'''
A Mini-implementation of the Storlet middleware filter.
@author: josep sampe
'''
from swift.common.utils import get_logger
from swift.common.utils import register_swift_info
from swift.common.swob import Request
from swift.common.utils import config_true_value
from storlets.swift_middleware.handlers.base import SwiftFileManager
from swift.common.swob import wsgify
class StorletFilter(object):
    """Swift WSGI middleware that runs one pre-configured storlet over
    GET/PUT object data.

    Unlike the stock storlet middleware, the storlet to execute and its
    parameters come from ``conf['filter_data']`` (injected by the
    deployer), not from request headers.
    """

    def __init__(self, app, conf):
        self.app = app
        self.conf = conf
        # 'proxy' or 'object': decides how the account is parsed from the
        # request path in __call__.
        self.exec_server = self.conf.get('execution_server')
        self.logger = get_logger(self.conf, log_route='storlet_filter')
        # Storlet descriptor: name, language, main class, dependencies,
        # size and invocation params.
        self.filter_data = self.conf['filter_data']
        self.parameters = self.filter_data['params']
        self.gateway_class = self.conf['storlets_gateway_module']
        self.sreq_class = self.gateway_class.request_class
        self.storlet_container = conf.get('storlet_container')
        self.storlet_dependency = conf.get('storlet_dependency')
        self.log_container = conf.get('storlet_logcontainer')
        self.client_conf_file = '/etc/swift/storlet-proxy-server.conf'
        self.register_info()

    def register_info(self):
        # Advertise this filter in /info.
        register_swift_info('storlet_filter')

    def _setup_gateway(self):
        """
        Setup gateway instance
        """
        self.gateway = self.gateway_class(self.conf, self.logger, self.scope)

    def _augment_storlet_request(self, req):
        """
        Add to request the storlet parameters to be used in case the request
        is forwarded to the data node (GET case)

        :param req: swob.Request augmented in place with X-Storlet-* headers
        """
        req.headers['X-Storlet-Language'] = self.filter_data['language']
        req.headers['X-Storlet-Main'] = self.filter_data['main']
        req.headers['X-Storlet-Dependency'] = self.filter_data['dependencies']
        req.headers['X-Storlet-Content-Length'] = self.filter_data['size']
        req.headers['X-Storlet-Generate-Log'] = False
        req.headers['X-Storlet-X-Timestamp'] = 0

    def _get_storlet_invocation_options(self, req):
        # Translate X-Storlet-* request headers into gateway invocation
        # options (storlet_<key> form), plus scope and a file manager for
        # fetching the storlet/dependency objects.
        options = dict()

        filtered_key = ['X-Storlet-Range', 'X-Storlet-Generate-Log']
        for key in req.headers:
            prefix = 'X-Storlet-'
            if key.startswith(prefix) and key not in filtered_key:
                new_key = 'storlet_' + \
                    key[len(prefix):].lower().replace('-', '_')
                options[new_key] = req.headers.get(key)

        generate_log = req.headers.get('X-Storlet-Generate-Log')
        options['generate_log'] = config_true_value(generate_log)

        options['scope'] = self.scope

        options['file_manager'] = \
            SwiftFileManager(self.account, self.storlet_container,
                             self.storlet_dependency, self.log_container,
                             self.client_conf_file, self.logger)

        return options

    def _build_storlet_request(self, req_resp, params, data_iter):
        # Wrap the request/response data stream into a gateway request
        # object; pass a raw fd when the iterator exposes one.
        storlet_id = self.storlet_name

        new_env = dict(req_resp.environ)
        req = Request.blank(new_env['PATH_INFO'], new_env)
        req.headers['X-Run-Storlet'] = self.storlet_name
        self._augment_storlet_request(req)
        options = self._get_storlet_invocation_options(req)

        if hasattr(data_iter, '_fp'):
            sreq = self.sreq_class(storlet_id, params, dict(),
                                   data_fd=data_iter._fp.fileno(),
                                   options=options)
        else:
            sreq = self.sreq_class(storlet_id, params, dict(),
                                   data_iter, options=options)

        return sreq

    def _call_gateway(self, req_resp, params, crystal_iter):
        # Invoke the storlet and return the transformed data iterator.
        sreq = self._build_storlet_request(req_resp, params, crystal_iter)
        sresp = self.gateway.invocation_flow(sreq)
        return sresp.data_iter

    @wsgify
    def __call__(self, req):
        # On GET: run the storlet over the backend response body.
        # On PUT: run the storlet over the incoming request body.
        # Everything else passes through untouched.
        if req.method in ('GET', 'PUT'):
            # NOTE(review): pop() mutates filter_data, so a second request
            # through this filter instance would raise KeyError on 'name'.
            # Looks unintended — verify against how the filter is deployed.
            storlet = self.filter_data.pop('name')
            params = self.parameters
            self.storlet_name = storlet
            etag = None

            try:
                if self.exec_server == 'proxy':
                    _, self.account, _, _ = req.split_path(4, 4, rest_with_last=True)
                elif self.exec_server == 'object':
                    _, _, self.account, _, _ = req.split_path(5, 5, rest_with_last=True)
            except:
                # No object Request
                # NOTE(review): bare except also hides non-path errors;
                # split_path raises ValueError, which would be the safer
                # thing to catch.
                return req.get_response(self.app)

            # Account name is 'AUTH_<project-id>'; scope is the project id
            # prefix — TODO confirm the [5:18] slice against the auth setup.
            self.scope = self.account[5:18]
            self.logger.info('Go to execute ' + storlet +
                             ' storlet with parameters "' + str(params) + '"')

            self._setup_gateway()

            # Preserve the original ETag: the storlet changes the body, so
            # the backend-computed ETag would no longer match.
            if 'Etag' in req.headers.keys():
                etag = req.headers.pop('Etag')

            if req.method == 'GET':
                response = req.get_response(self.app)
                data_iter = response.app_iter
                response.app_iter = self._call_gateway(response, params, data_iter)
                # Body length is unknown after transformation.
                if 'Content-Length' in response.headers:
                    response.headers.pop('Content-Length')
                if 'Transfer-Encoding' in response.headers:
                    response.headers.pop('Transfer-Encoding')

            elif req.method == 'PUT':
                reader = req.environ['wsgi.input'].read
                data_iter = iter(lambda: reader(65536), '')
                req.environ['wsgi.input'] = self._call_gateway(req, params, data_iter)
                # Stream the transformed body chunked; length is unknown.
                if 'CONTENT_LENGTH' in req.environ:
                    req.environ.pop('CONTENT_LENGTH')
                req.headers['Transfer-Encoding'] = 'chunked'
                response = req.get_response(self.app)

            if etag:
                response.headers['etag'] = etag
            else:
                response.headers['etag'] = ''

            return response

        return req.get_response(self.app)
def filter_factory(global_conf, **local_conf):
    """Paste-deploy entry point: return a StorletFilter factory bound to
    the merged global + local configuration (local values win)."""
    conf = dict(global_conf)
    conf.update(local_conf)

    def storlet_filter(app):
        return StorletFilter(app, conf)
    return storlet_filter
| Crystal-SDS/filter-middleware | crystal_filter_middleware/filters/storlet.py | Python | gpl-3.0 | 6,161 | 0.000812 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from collections import UserList
import logging
logger = logging.getLogger(__name__)
class CycleList(UserList):
    """A list-like object whose iterator cycles over the elements endlessly.

    When elements are removed on the fly (i.e. from inside a ``for`` loop
    over this object), use :meth:`remove_item` so the internal cursor stays
    consistent; otherwise plain ``remove`` is fine.
    """

    def __init__(self, data):
        super().__init__(data)
        # Cursor of the ongoing iteration; pre-set so remove_item() works
        # even when called before/outside iteration (previously it raised
        # AttributeError in that case).
        self.index = 0

    def __iter__(self):
        self.index = 0
        # Stop cleanly when the list is (or becomes) empty instead of
        # raising IndexError; `>=` (not `==`) also survives the list
        # shrinking past the cursor via plain remove().
        while self.data:
            if self.index >= len(self.data):
                self.index = 0
            yield self.data[self.index]
            self.index += 1

    def remove_item(self, item):
        """Remove ``item`` and step the iteration cursor back by one.

        This keeps on-the-fly removal (from inside a ``for`` loop over this
        object) from skipping the element that follows ``item``. Like
        ``list.remove``, raises ValueError if ``item`` is absent; the
        exception is deliberately not caught here.
        """
        self.data.remove(item)
        self.index -= 1
def last_out_game(data, number):
    """Josephus-style elimination: count ``number`` items at a time around
    ``data`` in a circle, removing each item the count lands on, and return
    the last remaining item.
    """
    players = CycleList(data)
    count = 1
    for item in players:
        # BUG FIX: the old calls passed `item` as a %-format argument to a
        # message with no placeholder ("logger.debug('testing', i)"), which
        # broke log-record formatting.
        logger.debug('testing %s', item)
        if len(players.data) <= 1:
            break
        if count == number:
            try:
                players.remove_item(item)
                logger.debug('removing %s', item)
            except ValueError:
                pass
            count = 0
        count += 1
    return players.data[0]
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.