| text | repo_name | path | language | license | size | score |
|---|---|---|---|---|---|---|
| stringlengths 6-947k | stringlengths 5-100 | stringlengths 4-231 | stringclasses 1 value | stringclasses 15 values | int64 6-947k | float64 0-0.34 |
# -*- coding: utf-8 -*-
from django.db import migrations, models
import django.db.models.deletion
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='VideoPipelineIntegration',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('change_date', models.DateTimeField(auto_now_add=True, verbose_name='Change date')),
('enabled', models.BooleanField(default=False, verbose_name='Enabled')),
('api_url', models.URLField(help_text='edx-video-pipeline API URL.', verbose_name='Internal API URL')),
('service_username', models.CharField(default=u'video_pipeline_service_user', help_text='Username created for Video Pipeline Integration, e.g. video_pipeline_service_user.', max_length=100)),
('changed_by', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, editable=False, to=settings.AUTH_USER_MODEL, null=True, verbose_name='Changed by')),
],
options={
'ordering': ('-change_date',),
'abstract': False,
},
),
]
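# Illustrative sketch (not part of the original migration): one way application
# code might read the most recent enabled configuration row, using only the
# fields created above. The import path is assumed from this file's location
# and is hypothetical.
def _example_current_video_pipeline_config():
    from openedx.core.djangoapps.video_pipeline.models import VideoPipelineIntegration
    return (VideoPipelineIntegration.objects
            .filter(enabled=True)
            .order_by('-change_date')
            .first())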
| stvstnfrd/edx-platform | openedx/core/djangoapps/video_pipeline/migrations/0001_initial.py | Python | agpl-3.0 | 1,352 | 0.005178 |
# This file is part of VoltDB.
# Copyright (C) 2008-2014 VoltDB Inc.
#
# This file contains original code and/or modifications of original code.
# Any modifications made by VoltDB Inc. are licensed under the following
# terms and conditions:
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
__author__ = 'scooper'
import sys
import os
import optparse
import shlex
import copy
from voltcli import utility
# Volt CLI command processor
# Individual option variables are added by the option parser. They are available
# externally as module attributes.
#===============================================================================
class BaseOption(object):
#===============================================================================
"""
General CLI option specification (uses optparse keywords for now).
"""
def __init__(self, short_opt, long_opt, dest, help_msg, **kwargs):
self.short_opt = short_opt
self.long_opt = long_opt
self.kwargs = kwargs
self.kwargs['dest'] = dest
# A help message of None makes it a hidden option.
if help_msg is not None:
self.kwargs['help'] = help_msg
if 'default' in self.kwargs:
if utility.is_string(kwargs['default']):
self.kwargs['help'] += ' (default="%s")' % self.kwargs['default']
else:
self.kwargs['help'] += ' (default=%s)' % self.kwargs['default']
else:
self.kwargs['help'] = optparse.SUPPRESS_HELP
def get_option_names(self):
return [a for a in (self.short_opt, self.long_opt) if a is not None]
def get_dest(self):
if 'dest' not in self.kwargs:
utility.abort('%s must specify a "dest" property.' % self.__class__.__name__)
return self.kwargs['dest']
def get_default(self):
return self.kwargs.get('default', None)
def postprocess_value(self, value):
# Hook for massaging the option instance value. Default to NOP.
return value
def __str__(self):
return '%s(%s/%s %s)' % (self.__class__.__name__,
self.short_opt, self.long_opt, self.kwargs)
def __cmp__(self, other):
# Sort options by lowercase letter or word, depending on which is available.
if self.short_opt:
if other.short_opt:
return cmp(self.short_opt.lower(), other.short_opt.lower())
return 1
if other.short_opt:
return -1
if self.long_opt:
if other.long_opt:
return cmp(self.long_opt.lower(), other.long_opt.lower())
return 1
if other.long_opt:
return -1
return 0
def has_value(self):
return ('action' not in self.kwargs or self.kwargs['action'] == 'store')
#===============================================================================
class BooleanOption(BaseOption):
#===============================================================================
"""
Boolean CLI option.
"""
def __init__(self, short_opt, long_opt, dest, help_msg, **kwargs):
BaseOption.__init__(self, short_opt, long_opt, dest, help_msg,
action = 'store_true', **kwargs)
#===============================================================================
class StringOption(BaseOption):
#===============================================================================
"""
CLI string value option.
"""
def __init__(self, short_opt, long_opt, dest, help_msg, **kwargs):
BaseOption.__init__(self, short_opt, long_opt, dest, help_msg, **kwargs)
#===============================================================================
class IntegerOption(BaseOption):
#===============================================================================
"""
Integer CLI option.
"""
def __init__(self, short_opt, long_opt, dest, help_msg, **kwargs):
BaseOption.__init__(self, short_opt, long_opt, dest, help_msg, **kwargs)
def postprocess_value(self, value):
try:
converted = int(value.strip())
except ValueError:
utility.abort('Bad "%s" integer value: %s' % (self.get_dest().upper(), value))
return converted
#===============================================================================
class StringListOption(StringOption):
#===============================================================================
"""
CLI comma-separated string list option.
"""
def __init__(self, short_opt, long_opt, dest, help_msg, **kwargs):
StringOption.__init__(self, short_opt, long_opt, dest, help_msg, **kwargs)
def postprocess_value(self, value):
return [v.strip() for v in value.split(',')]
#===============================================================================
class IntegerListOption(StringOption):
#===============================================================================
"""
CLI comma-separated integer list option.
"""
def __init__(self, short_opt, long_opt, dest, help_msg, **kwargs):
StringOption.__init__(self, short_opt, long_opt, dest, help_msg, **kwargs)
def postprocess_value(self, value):
bad = []
converted = []
for v in value.split(','):
try:
converted.append(int(v.strip()))
except ValueError:
bad.append(v.strip())
if bad:
utility.abort('Bad "%s" integer list value(s):' % self.get_dest().upper(), bad)
return converted
#===============================================================================
class EnumOption(StringOption):
#===============================================================================
"""
Enumeration option for selecting from a list of possible symbols.
"""
def __init__(self, short_opt, long_opt, dest, help_pfx, *values, **kwargs):
if not values or len(values) <= 1:
utility.abort('EnumOption "%s" must specify multiple valid values.' % dest)
self.values = values
help_msg = '%s [%s]' % (help_pfx, '|'.join(self.values))
StringOption.__init__(self, short_opt, long_opt, dest, help_msg, **kwargs)
def postprocess_value(self, value):
if value not in self.values:
utility.abort('EnumOption "%s" value "%s" is not one of the following:'
% (self.get_dest(), value), self.values)
return value
#===============================================================================
class HostOption(StringOption):
#===============================================================================
"""
Comma-separated HOST[:PORT] list option.
"""
def __init__(self, short_opt, long_opt, dest, name, **kwargs):
self.min_count = utility.kwargs_get_integer(kwargs, 'min_count', default = 1)
self.max_count = utility.kwargs_get_integer(kwargs, 'max_count', default = 1)
self.default_port = utility.kwargs_get_integer(kwargs, 'default_port', default = 21212)
if self.max_count == 1:
help_msg = 'the %s HOST[:PORT]' % name
else:
help_msg = 'the comma-separated %s HOST[:PORT] list' % name
if self.default_port:
help_msg += ' (default port=%d)' % self.default_port
StringOption.__init__(self, short_opt, long_opt, dest, help_msg, **kwargs)
def postprocess_value(self, value):
hosts = utility.parse_hosts(value,
min_hosts = self.min_count,
max_hosts = self.max_count,
default_port = self.default_port)
if self.max_count == 1:
return hosts[0]
return hosts
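# Illustrative sketch (not part of the original module): how a verb definition
# might instantiate the option classes above. All flag names, dest names, and
# help strings here are hypothetical.
def _example_build_options():
    return [
        BooleanOption('-v', '--verbose', 'verbose', 'show verbose output'),
        IntegerOption('-t', '--timeout', 'timeout', 'connection timeout in seconds',
                      default = 10),
        StringListOption('-g', '--groups', 'groups', 'comma-separated group names'),
        EnumOption('-f', '--format', 'format', 'output format', 'text', 'json'),
        HostOption('-H', '--host', 'host', 'target'),
    ]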
#===============================================================================
class ArgumentException(Exception):
#===============================================================================
pass
#===============================================================================
class BaseArgument(object):
#===============================================================================
def __init__(self, name, help, **kwargs):
self.name = name
self.help = help
self.min_count = kwargs.get('min_count', 1)
self.max_count = kwargs.get('max_count', 1)
# A max_count value of None is interpreted as infinity.
if self.max_count is None:
self.max_count = sys.maxint
def get(self, value):
utility.abort('BaseArgument subclass must implement a get(value) method: %s'
% self.__class__.__name__)
#===============================================================================
class StringArgument(BaseArgument):
#===============================================================================
def __init__(self, name, help, **kwargs):
BaseArgument.__init__(self, name, help, **kwargs)
def get(self, value):
return str(value)
#===============================================================================
class IntegerArgument(BaseArgument):
#===============================================================================
def __init__(self, name, help, **kwargs):
BaseArgument.__init__(self, name, help, **kwargs)
def get(self, value):
try:
return int(value)
except ValueError, e:
raise ArgumentException('%s value is not a valid integer: %s'
% (self.name.upper(), str(value)))
#===============================================================================
class PathArgument(StringArgument):
#===============================================================================
def __init__(self, name, help, **kwargs):
# For now the only intelligence is to check for absolute paths when required.
# TODO: Add options to check for directories, files, attributes, etc.
self.absolute = utility.kwargs_get_boolean(kwargs, 'absolute', default = False)
self.exists = utility.kwargs_get_boolean(kwargs, 'exists', default = False)
requirements = []
help2 = ''
if self.absolute:
requirements.append('absolute path')
if self.exists:
requirements.append('must exist')
if requirements:
help2 = ' (%s)' % ', '.join(requirements)
StringArgument.__init__(self, name, help + help2, **kwargs)
def get(self, value):
svalue = str(value)
if self.absolute and not svalue.startswith('/'):
raise ArgumentException('%s path is not absolute: %s' % (self.name.upper(), svalue))
if self.exists and not os.path.exists(svalue):
raise ArgumentException('%s path does not exist: %s' % (self.name.upper(), svalue))
return svalue
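# Illustrative sketch (not part of the original module): argument specifications
# as a verb might declare them with the classes above. Names and help text are
# hypothetical.
def _example_build_arguments():
    return [
        StringArgument('name', 'name of the database'),
        IntegerArgument('count', 'number of copies', min_count = 0),
        PathArgument('deployment', 'path to the deployment file', absolute = True),
    ]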
#===============================================================================
class ParsedCommand(object):
#===============================================================================
"""
Holds the result of parsing a CLI command.
"""
def __init__(self, parser, opts, args, verb):
self.opts = opts
self.args = args
self.parser = parser
self.verb = verb
def __str__(self):
return 'ParsedCommand: %s %s %s' % (self.verb.name, self.opts, self.args)
#===============================================================================
class ExtendedHelpOptionParser(optparse.OptionParser):
#===============================================================================
'''
Extends OptionParser in order to support extended help.
'''
def __init__(self, *args, **kwargs):
self.format_epilog_called = False
optparse.OptionParser.__init__(self, *args, **kwargs)
def format_epilog(self, formatter):
"""
OptionParser hook that allows us to append verb descriptions to the
help message.
"""
self.format_epilog_called = True
return self.on_format_epilog()
def print_help(self):
"""
Override OptionParser.print_help() to work around Python 2.4 optparse
not supporting format_epilog().
"""
self.format_epilog_called = False
optparse.OptionParser.print_help(self)
if not self.format_epilog_called:
sys.stdout.write(self.on_format_epilog())
def on_format_epilog(self):
utility.abort('ExtendedHelpOptionParser subclass must override on_format_epilog(): %s'
% self.__class__.__name__)
#===============================================================================
class CLIParser(ExtendedHelpOptionParser):
#===============================================================================
"""
Command/sub-command (verb) argument and option parsing and validation.
"""
def __init__(self, prog, verbs, base_options, usage, description, version):
"""
Command line processor constructor.
"""
self.prog = prog
self.verb = None
self.verbs = verbs
self.verb_names = verbs.keys()
self.base_options = base_options
self.verb_names.sort()
self.base_options.sort()
optparse.OptionParser.__init__(self,
prog = prog,
description = description,
usage = usage,
version = version)
def add_base_options(self):
"""
Add the base options.
"""
for option in self.base_options:
self.add_option(*option.get_option_names(), **option.kwargs)
def add_verb_options(self, verb):
"""
Add options for verb command line.
"""
for option in verb.iter_options():
try:
self.add_option(*option.get_option_names(), **option.kwargs)
except Exception, e:
utility.abort('Exception initializing options for verb "%s".' % verb.name, e)
def process_verb_options(self, verb, opts):
"""
Validate the verb options and post-process the values.
"""
max_width = 0
missing = []
# Post-process the option values, e.g. convert strings to lists as needed.
for o in verb.iter_options():
dest = o.get_dest()
value = getattr(opts, dest)
if not value is None:
setattr(opts, dest, o.postprocess_value(value))
def process_verb_arguments(self, verb, verb_args, verb_opts):
"""
Validate the verb arguments. Check that required arguments are present
and populate verb_opts attributes with scalar values or lists (for
trailing arguments with max_count > 1).
"""
# Add fixed arguments passed in through the decorator to the verb object.
args = copy.copy(verb_args) + verb.command_arguments
# Set attributes for required arguments.
missing = []
exceptions = []
iarg = 0
nargs = verb.get_argument_count()
for arg in verb.iter_arguments():
# It's missing if we've exhausted all the arguments before
# exhausting all the argument specs, unless it's the last argument
# spec and it's optional.
if iarg > len(args) or (iarg == len(args) and arg.min_count > 0):
missing.append((arg.name, arg.help))
else:
value = None
# The last argument can have repeated arguments. If more than
# one are allowed the values are put into a list.
if iarg == nargs - 1 and arg.max_count > 1:
if len(args) - iarg < arg.min_count:
utility.abort('A minimum of %d %s arguments are required.'
% (arg.min_count, arg.name.upper()))
if len(args) - iarg > arg.max_count:
utility.abort('A maximum of %d %s arguments are allowed.'
% (arg.max_count, arg.name.upper()))
# Pass through argument class get() for validation, conversion, etc.
# Skip bad values and report on them at the end.
value = []
for v in args[iarg:]:
try:
value.append(arg.get(v))
except ArgumentException, e:
exceptions.append(e)
iarg = len(args)
elif iarg < len(args):
# All other arguments are treated as scalars.
# Pass through argument class get() for validation, conversion, etc.
try:
value = arg.get(args[iarg])
except ArgumentException, e:
exceptions.append(e)
iarg += 1
if value is not None or arg.min_count == 0:
setattr(verb_opts, arg.name, value)
# Run the gauntlet of error disclosure. Abort and display usage as appropriate.
had_errors = 0
show_usage = False
if exceptions:
msg = 'Argument value %s:' % utility.pluralize('error', len(exceptions))
utility.error(msg, [str(e) for e in exceptions])
had_errors += 1
if iarg < len(args):
self._abort('Extra arguments were provided:', args[iarg:])
had_errors += 1
show_usage = True
if missing:
fmt = '%%-%ds %%s' % max([len(o) for (o, h) in missing])
msg = 'Missing required %s:' % utility.pluralize('argument', len(missing))
utility.error(msg, [fmt % (o.upper(), h) for (o, h) in missing])
had_errors += 1
show_usage = True
if had_errors > 0:
if show_usage:
self._abort()
sys.exit(1)
def initialize_verb(self, verb_name):
"""
Initialize command line options for a specific verb.
"""
# Add the base options that are applicable to all verbs.
self.add_base_options()
# See if we know about the verb.
if verb_name.startswith('-'):
self._abort('The first argument must be a verb, not an option.')
if verb_name not in self.verbs:
self._abort('Unknown verb: %s' % verb_name)
self.verb = self.verbs[verb_name]
# Change the messaging from generic to verb-specific.
self.set_usage(self._get_verb_usage(self.verb, brief=False))
self.set_description(self.verb.cli_spec.get_attr('description', 'No description provided'))
# Parse the command-specific options.
self.add_verb_options(self.verb)
def parse(self, *cmdargs):
"""
Parse command line.
"""
# Need something.
if not cmdargs:
self._abort('No verb was specified.')
pre_opts = preprocess_options(self.base_options, cmdargs)
# Support verb-less options like -h, --help and --version.
if cmdargs[0].startswith('-') and (pre_opts.help or pre_opts.version):
opts, args = self.parse_args(list(cmdargs))
return ParsedCommand(self, opts, args, None)
# Initialize options and arguments.
self.initialize_verb(cmdargs[0])
verb_cmdargs = list(cmdargs[1:])
if self.verb.cli_spec.passthrough:
# Provide all options and arguments without processing the options.
# E.g. Java programs want to handle all the options without interference.
verb_args = verb_cmdargs
verb_opts = None
else:
# Parse the verb command line.
verb_opts, verb_parsed_args = self.parse_args(verb_cmdargs)
# Post-process options.
self.process_verb_options(self.verb, verb_opts)
# Post-process arguments.
self.process_verb_arguments(self.verb, verb_parsed_args, verb_opts)
# The arguments should all be attributes in verb_opts now.
verb_args = []
return ParsedCommand(self, verb_opts, verb_args, self.verb)
def get_usage_string(self):
"""
Get usage string.
"""
# Swap stdout with UsageScraper pseudo-file object so that output is captured.
# Necessary because optparse only sends help to stdout.
class UsageScraper(object):
def __init__(self):
self.usage = []
def write(self, s):
self.usage.append(s)
scraper = UsageScraper()
stdout_save = sys.stdout
try:
sys.stdout = scraper
self.print_help()
finally:
sys.stdout = stdout_save
return ''.join(scraper.usage)
def on_format_epilog(self):
if not self.verb:
return self._format_verb_list()
blocks = []
if self.verb.get_argument_count() > 0:
rows = [(get_argument_usage(a), a.help) for a in self.verb.iter_arguments()]
blocks.append('\n'.join(['Arguments:', utility.format_table(rows, indent = 2)]))
# other_info is used for the multi-verb variation list.
other_info = self.verb.cli_spec.get_attr('other_info', None)
if other_info:
blocks.append(other_info.strip())
# Automatically wrap description2 as a paragraph.
description2 = self.verb.cli_spec.get_attr('description2', None)
if description2:
blocks.append(utility.paragraph(description2))
return '\n%s' % '\n\n'.join(blocks)
def _abort(self, *msgs):
utility.error(*msgs)
sys.stdout.write('\n')
self.print_help()
sys.stdout.write('\n')
sys.exit(1)
def _format_verb_list(self):
rows1 = []
rows2 = []
for verb_name in self.verb_names:
verb = self.verbs[verb_name]
if not verb.cli_spec.hideverb:
usage = self._get_verb_usage(verb, brief=True)
if verb.cli_spec.baseverb:
rows2.append((usage, verb.cli_spec.description))
else:
rows1.append((usage, verb.cli_spec.description))
table1 = utility.format_table(rows1, caption = 'Verb Descriptions', separator = ' ')
table2 = utility.format_table(rows2, caption = 'Common Verbs', separator = ' ')
return '%s\n%s' % (table1, table2)
def _iter_options(self, verb):
options = []
for option in self.base_options:
yield option
if verb:
for option in verb.iter_options():
yield option
def _iter_visible_options(self, verb):
for option in self._iter_options(verb):
if option.kwargs.get('help', None) != optparse.SUPPRESS_HELP:
yield option
def _count_visible_options(self, verb):
return len([o for o in self._iter_visible_options(verb)])
def _get_verb_usage(self, verb, brief=False):
"""
Provide the full usage string, including argument names, for a verb.
"""
args = [get_argument_usage(a) for a in verb.iter_arguments()]
usage = [self.prog, verb.name]
if not brief:
num_visible_options = self._count_visible_options(verb)
if num_visible_options > 0:
usage.append('[ OPTIONS ... ]')
if verb.cli_spec.usage:
usage.append(verb.cli_spec.usage)
if args:
usage.append(' '.join(args))
return ' '.join(usage)
#===============================================================================
class CLISpec(object):
#===============================================================================
def __init__(self, **kwargs):
self._kwargs = kwargs
# Make sure options and arguments are flat lists.
if 'options' in self._kwargs:
self._kwargs['options'] = utility.flatten_to_list(self._kwargs['options'])
else:
self._kwargs['options'] = []
if 'arguments' in self._kwargs:
self._kwargs['arguments'] = utility.flatten_to_list(self._kwargs['arguments'])
else:
self._kwargs['arguments'] = []
def __getattr__(self, name):
return self._kwargs.get(name, None)
def __str__(self):
s = 'CLISpec: [\n'
keys = self._kwargs.keys()
keys.sort()
for key in keys:
s += ' %s: %s\n' % (key, utility.to_display_string(self._kwargs[key]))
s += ']'
return s
def add_to_list(self, name, *args):
utility.kwargs_merge_list(self._kwargs, name, *args)
def get_attr(self, name, default = None):
return utility.kwargs_get(self._kwargs, name, default = default, remove = False)
def pop_attr(self, name, default = None):
return utility.kwargs_get(self._kwargs, name, default = default, remove = True)
def merge_java_options(self, name, *options):
utility.kwargs_merge_java_options(self._kwargs, name, options)
def set_defaults(self, **kwargs):
utility.kwargs_set_defaults(self._kwargs, **kwargs)
def find_option(self, dest_name):
for o in self._kwargs['options']:
if o.get_dest() == dest_name:
return o
return None
def find_argument(self, dest_name):
for a in self._kwargs['arguments']:
if a.name == dest_name:
return a
return None
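# Illustrative sketch (not part of the original module): a CLISpec roughly as a
# verb decorator might build one. It assumes utility.flatten_to_list() accepts a
# plain list; attributes that were never set (e.g. passthrough) read back as
# None through __getattr__. All values are hypothetical.
def _example_build_spec():
    spec = CLISpec(description = 'create a new database',
                   options = [BooleanOption('-f', '--force', 'force',
                                            'overwrite an existing database')],
                   arguments = [StringArgument('name', 'database name')])
    return spec.find_option('force'), spec.passthrough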
#===============================================================================
def get_argument_usage(a):
#===============================================================================
if a.max_count > 1:
ellipsis = ' ...'
else:
ellipsis = ''
if a.min_count == 0:
fmt = '[ %s%s ]'
else:
fmt = '%s%s'
return fmt % (a.name.upper(), ellipsis)
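# Illustrative sketch (not part of the original module): get_argument_usage()
# renders an argument spec for the usage line; an optional, repeatable argument
# comes out as "[ FILE ... ]". The argument below is hypothetical.
def _example_argument_usage():
    files = StringArgument('file', 'input file', min_count = 0, max_count = None)
    return get_argument_usage(files)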
#===============================================================================
def preprocess_options(base_options, cmdargs):
#===============================================================================
"""
Simplistically parses command line options to allow early option checking.
Allows the parsing process to display debug messages. Returns an object
with attributes set for option values.
"""
class OptionValues(object):
pass
option_values = OptionValues()
# Create a base option dictionary indexed by short and long options.
# Add the built-in optparse help and version options so that they can be
# detected as stand-alone options.
options = {}
builtins = [BooleanOption('-h', '--help', 'help', ''),
BooleanOption(None, '--version', 'version', '')]
for opt in list(base_options) + builtins:
setattr(option_values, opt.get_dest(), opt.get_default())
if opt.short_opt:
options[opt.short_opt] = opt
if opt.long_opt:
options[opt.long_opt] = opt
# Walk through the options and arguments and set option values as attributes.
iopt = 0
while iopt < len(cmdargs):
if cmdargs[iopt].startswith('-'):
if cmdargs[iopt] in options:
opt = options[cmdargs[iopt]]
if opt.has_value():
# Option with argument
setattr(option_values, opt.get_dest(), cmdargs[iopt+1])
iopt += 1
else:
# Boolean option
setattr(option_values, opt.get_dest(), True)
iopt += 1
return option_values
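# Illustrative sketch (not part of the original module): an early scan of a raw
# command line for base options before the verb is known. The option and command
# line below are hypothetical.
def _example_preprocess():
    base = [BooleanOption('-d', '--debug', 'debug', 'enable debug output')]
    values = preprocess_options(base, ('-d', 'create', 'mydb'))
    return values.debug    # True: the -d flag was seen before the verb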
| zheguang/voltdb | lib/python/voltcli/cli.py | Python | agpl-3.0 | 28,839 | 0.008808 |
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'LikeCfiItem'
db.create_table(u'catalog_likecfiitem', (
(u'abstractlike_ptr', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['catalog.AbstractLike'], unique=True, primary_key=True)),
('cfi_item', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['catalog.CfiStoreItem'])),
))
db.send_create_signal('catalog', ['LikeCfiItem'])
# Adding model 'CfiStoreItem'
db.create_table(u'catalog_cfistoreitem', (
(u'basemodel_ptr', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['catalog.BaseModel'], unique=True, primary_key=True)),
('item', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['catalog.Product'])),
))
db.send_create_signal('catalog', ['CfiStoreItem'])
def backwards(self, orm):
# Deleting model 'LikeCfiItem'
db.delete_table(u'catalog_likecfiitem')
# Deleting model 'CfiStoreItem'
db.delete_table(u'catalog_cfistoreitem')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'catalog.abstractlike': {
'Meta': {'object_name': 'AbstractLike', '_ormbases': ['catalog.BaseModel']},
u'basemodel_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['catalog.BaseModel']", 'unique': 'True', 'primary_key': 'True'}),
'liked_time': ('django.db.models.fields.DateTimeField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_facebook.FacebookCustomUser']"})
},
'catalog.abstracttop': {
'Meta': {'object_name': 'AbstractTop', '_ormbases': ['catalog.BaseModel']},
u'basemodel_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['catalog.BaseModel']", 'unique': 'True', 'primary_key': 'True'}),
'recorded_time': ('django.db.models.fields.DateTimeField', [], {})
},
'catalog.basemodel': {
'Meta': {'object_name': 'BaseModel'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'catalog.cfistoreitem': {
'Meta': {'object_name': 'CfiStoreItem', '_ormbases': ['catalog.BaseModel']},
u'basemodel_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['catalog.BaseModel']", 'unique': 'True', 'primary_key': 'True'}),
'item': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Product']"}),
'liker': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['django_facebook.FacebookCustomUser']", 'through': "orm['catalog.LikeCfiItem']", 'symmetrical': 'False'})
},
'catalog.comment': {
'Meta': {'object_name': 'Comment', '_ormbases': ['catalog.BaseModel']},
u'basemodel_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['catalog.BaseModel']", 'unique': 'True', 'primary_key': 'True'}),
'body': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_facebook.FacebookCustomUser']"})
},
'catalog.documentation': {
'Meta': {'object_name': 'Documentation', '_ormbases': ['catalog.BaseModel']},
u'basemodel_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['catalog.BaseModel']", 'unique': 'True', 'primary_key': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '1000'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_facebook.FacebookCustomUser']", 'null': 'True', 'blank': 'True'})
},
'catalog.emailcollect': {
'Meta': {'object_name': 'EmailCollect'},
'email': ('django.db.models.fields.EmailField', [], {'max_length': '30'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'catalog.image': {
'Meta': {'object_name': 'Image', '_ormbases': ['catalog.BaseModel']},
u'basemodel_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['catalog.BaseModel']", 'unique': 'True', 'primary_key': 'True'}),
'large_url': ('django.db.models.fields.URLField', [], {'max_length': '1000'}),
'small_url': ('django.db.models.fields.URLField', [], {'max_length': '1000', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'images'", 'null': 'True', 'to': u"orm['django_facebook.FacebookCustomUser']"})
},
'catalog.like': {
'Meta': {'object_name': 'Like', '_ormbases': ['catalog.AbstractLike']},
u'abstractlike_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['catalog.AbstractLike']", 'unique': 'True', 'primary_key': 'True'})
},
'catalog.likecfiitem': {
'Meta': {'object_name': 'LikeCfiItem', '_ormbases': ['catalog.AbstractLike']},
u'abstractlike_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['catalog.AbstractLike']", 'unique': 'True', 'primary_key': 'True'}),
'cfi_item': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.CfiStoreItem']"})
},
'catalog.likemakey': {
'Meta': {'object_name': 'LikeMakey', '_ormbases': ['catalog.AbstractLike']},
u'abstractlike_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['catalog.AbstractLike']", 'unique': 'True', 'primary_key': 'True'}),
'makey': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Makey']"})
},
'catalog.likeproduct': {
'Meta': {'object_name': 'LikeProduct', '_ormbases': ['catalog.AbstractLike']},
u'abstractlike_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['catalog.AbstractLike']", 'unique': 'True', 'primary_key': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Product']"})
},
'catalog.likeproductdescription': {
'Meta': {'object_name': 'LikeProductDescription', '_ormbases': ['catalog.AbstractLike']},
u'abstractlike_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['catalog.AbstractLike']", 'unique': 'True', 'primary_key': 'True'}),
'product_description': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.ProductDescription']"})
},
'catalog.likeproductimage': {
'Meta': {'object_name': 'LikeProductImage', '_ormbases': ['catalog.AbstractLike']},
u'abstractlike_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['catalog.AbstractLike']", 'unique': 'True', 'primary_key': 'True'}),
'image': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.ProductImage']"}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Product']"})
},
'catalog.likeproducttutorial': {
'Meta': {'object_name': 'LikeProductTutorial', '_ormbases': ['catalog.AbstractLike']},
u'abstractlike_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['catalog.AbstractLike']", 'unique': 'True', 'primary_key': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Product']"}),
'tutorial': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Tutorial']"})
},
'catalog.likeshop': {
'Meta': {'object_name': 'LikeShop', '_ormbases': ['catalog.AbstractLike']},
u'abstractlike_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['catalog.AbstractLike']", 'unique': 'True', 'primary_key': 'True'}),
'shop': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Shop']"})
},
'catalog.list': {
'Meta': {'object_name': 'List', '_ormbases': ['catalog.BaseModel']},
'access': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'access'", 'symmetrical': 'False', 'to': u"orm['django_facebook.FacebookCustomUser']"}),
u'basemodel_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['catalog.BaseModel']", 'unique': 'True', 'primary_key': 'True'}),
'is_private': ('django.db.models.fields.BooleanField', [], {}),
'items': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalog.ListItem']", 'symmetrical': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'owner'", 'to': u"orm['django_facebook.FacebookCustomUser']"})
},
'catalog.listgroup': {
'Meta': {'object_name': 'ListGroup', '_ormbases': ['catalog.BaseModel']},
u'basemodel_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['catalog.BaseModel']", 'unique': 'True', 'primary_key': 'True'}),
'lists': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalog.List']", 'symmetrical': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'catalog.listitem': {
'Meta': {'object_name': 'ListItem', '_ormbases': ['catalog.BaseModel']},
u'basemodel_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['catalog.BaseModel']", 'unique': 'True', 'primary_key': 'True'}),
'createdby': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_facebook.FacebookCustomUser']"}),
'note': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Product']"})
},
'catalog.location': {
'Meta': {'object_name': 'Location', '_ormbases': ['catalog.BaseModel']},
u'basemodel_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['catalog.BaseModel']", 'unique': 'True', 'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'catalog.logidenticalproduct': {
'Meta': {'object_name': 'LogIdenticalProduct'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'product1': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'product1'", 'to': "orm['catalog.Product']"}),
'product2': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'product2'", 'to': "orm['catalog.Product']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_facebook.FacebookCustomUser']"})
},
'catalog.makey': {
'Meta': {'object_name': 'Makey', '_ormbases': ['catalog.BaseModel']},
u'basemodel_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['catalog.BaseModel']", 'unique': 'True', 'primary_key': 'True'}),
'collaborators': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'collaborators'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['django_facebook.FacebookCustomUser']"}),
'comments': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'makeycomments'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['catalog.Comment']"}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'documentations': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'makeydocumentations'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['catalog.Documentation']"}),
'images': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'makeyimages'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['catalog.Image']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'notes': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'makeynotes'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['catalog.Note']"}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_facebook.FacebookCustomUser']", 'null': 'True', 'blank': 'True'})
},
'catalog.note': {
'Meta': {'object_name': 'Note', '_ormbases': ['catalog.BaseModel']},
u'basemodel_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['catalog.BaseModel']", 'unique': 'True', 'primary_key': 'True'}),
'body': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '140'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_facebook.FacebookCustomUser']"})
},
'catalog.product': {
'Meta': {'object_name': 'Product', '_ormbases': ['catalog.BaseModel']},
u'basemodel_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['catalog.BaseModel']", 'unique': 'True', 'primary_key': 'True'}),
'identicalto': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Product']", 'null': 'True', 'blank': 'True'}),
'makeys': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'partsused'", 'blank': 'True', 'to': "orm['catalog.Makey']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'sku': ('django.db.models.fields.IntegerField', [], {}),
'tutorials': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalog.Tutorial']", 'symmetrical': 'False', 'blank': 'True'})
},
'catalog.productdescription': {
'Meta': {'object_name': 'ProductDescription', '_ormbases': ['catalog.BaseModel']},
u'basemodel_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['catalog.BaseModel']", 'unique': 'True', 'primary_key': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '100000'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'productdescriptions'", 'to': "orm['catalog.Product']"}),
'shop': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Shop']", 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_facebook.FacebookCustomUser']", 'blank': 'True'}),
'user_or_shop': ('django.db.models.fields.BooleanField', [], {})
},
'catalog.productimage': {
'Meta': {'object_name': 'ProductImage', '_ormbases': ['catalog.BaseModel']},
u'basemodel_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['catalog.BaseModel']", 'unique': 'True', 'primary_key': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'productimages'", 'to': "orm['catalog.Product']"}),
'shop': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Shop']", 'null': 'True', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_facebook.FacebookCustomUser']", 'null': 'True', 'blank': 'True'})
},
'catalog.productshopurl': {
'Meta': {'object_name': 'ProductShopUrl', '_ormbases': ['catalog.BaseModel']},
u'basemodel_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['catalog.BaseModel']", 'unique': 'True', 'primary_key': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'productshopurls'", 'to': "orm['catalog.Product']"}),
'shop': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Shop']"}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'})
},
'catalog.searchlog': {
'Meta': {'object_name': 'SearchLog'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'term': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'time': ('django.db.models.fields.DateTimeField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_facebook.FacebookCustomUser']", 'null': 'True', 'blank': 'True'})
},
'catalog.shop': {
'Meta': {'object_name': 'Shop', '_ormbases': ['catalog.BaseModel']},
u'basemodel_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['catalog.BaseModel']", 'unique': 'True', 'primary_key': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'images': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'shopimages'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['catalog.Image']"}),
'location': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Location']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'})
},
'catalog.toindexstore': {
'Meta': {'object_name': 'ToIndexStore'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Location']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'})
},
'catalog.topmakeys': {
'Meta': {'object_name': 'TopMakeys', '_ormbases': ['catalog.AbstractTop']},
u'abstracttop_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['catalog.AbstractTop']", 'unique': 'True', 'primary_key': 'True'}),
'makey': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Makey']"})
},
'catalog.topproducts': {
'Meta': {'object_name': 'TopProducts', '_ormbases': ['catalog.AbstractTop']},
u'abstracttop_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['catalog.AbstractTop']", 'unique': 'True', 'primary_key': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Product']"})
},
'catalog.topshops': {
'Meta': {'object_name': 'TopShops', '_ormbases': ['catalog.AbstractTop']},
u'abstracttop_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['catalog.AbstractTop']", 'unique': 'True', 'primary_key': 'True'}),
'shop': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Shop']"})
},
'catalog.toptutorials': {
'Meta': {'object_name': 'TopTutorials', '_ormbases': ['catalog.AbstractTop']},
u'abstracttop_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['catalog.AbstractTop']", 'unique': 'True', 'primary_key': 'True'}),
'tutorial': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Tutorial']"})
},
'catalog.topusers': {
'Meta': {'object_name': 'TopUsers', '_ormbases': ['catalog.AbstractTop']},
u'abstracttop_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['catalog.AbstractTop']", 'unique': 'True', 'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_facebook.FacebookCustomUser']"})
},
'catalog.tutorial': {
'Meta': {'object_name': 'Tutorial', '_ormbases': ['catalog.BaseModel']},
u'basemodel_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['catalog.BaseModel']", 'unique': 'True', 'primary_key': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'images': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'tutorialimages'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['catalog.Image']"}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_facebook.FacebookCustomUser']", 'null': 'True', 'blank': 'True'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'django_facebook.facebookcustomuser': {
'Meta': {'object_name': 'FacebookCustomUser'},
'about_me': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'access_token': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'blog_url': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_of_birth': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'facebook_id': ('django.db.models.fields.BigIntegerField', [], {'unique': 'True', 'null': 'True', 'blank': 'True'}),
'facebook_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'facebook_open_graph': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'facebook_profile_url': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'gender': ('django.db.models.fields.CharField', [], {'max_length': '1', 'null': 'True', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'new_token_required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'raw_data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}),
'website_url': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
}
}
complete_apps = ['catalog']
| Makeystreet/makeystreet | woot/apps/catalog/migrations/0036_auto__add_likecfiitem__add_cfistoreitem.py | Python | apache-2.0 | 27,149 | 0.007219 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# vim: ts=4:sw=4:expandtab:
# Copyright 2008 Mark Mitchell
# License: see __license__ below.
__doc__ = """
Reads a GraphicsMagick source file and parses the specially formatted
comment blocks which precede each function and writes the information
obtained from the comment block into a reStructuredText file.
Usage:
format_c_api_docs.py [options] SRCFILE OUTFILE
SRCFILE is the path to a Graphicsmagick API .c file.
For example: ./magick/animate.c
OUTFILE is the path where the reStructuredText file is written.
Options:
-h --help -- Print this help message
-w --whatis-file -- The path to a file containing "whatis" information for
the source files. The format of this file is:
* one line per source file
* source filename (without directory paths) and whatis text
are separated by whitespace
* blank lines are ignored
* lines starting with '#' are ignored
-i --include-rst -- Comma-separated list of file paths to be objects of reST
..include:: directives inserted in OUTFILE.
The default is the single file 'api_hyperlinks.rst'
Example of whatis file format:
animate.c Interactively animate an image sequence
annotate.c Annotate an image with text
"""
__copyright__ = "2008, Mark Mitchell"
__license__ = """
Copyright 2008, Mark Mitchell
Permission is hereby granted, free of charge, to any person obtaining
a copy of this Software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
The Software is provided "as is", without warranty of any kind,
express or implied, including but not limited to the warranties of
merchantability, fitness for a particular purpose and noninfringement.
In no event shall the authors or copyright holders be liable for any
claim, damages or other liability, whether in an action of contract,
tort or otherwise, arising from, out of or in connection with Software
or the use or other dealings in the Software.
"""
import sys
import getopt
import os, os.path
import re
import textwrap
# Key words to replace with HTML links
keywords = {
'AffineMatrix' : '`AffineMatrix`_',
'BlobInfo' : '`BlobInfo`_',
'Cache' : '`Cache`_',
'ChannelType' : '`ChannelType`_',
'ChromaticityInfo' : '`ChromaticityInfo`_',
'ClassType' : '`ClassType`_',
'ClipPathUnits' : '`ClipPathUnits`_',
'ColorPacket' : '`ColorPacket`_',
'ColorspaceType' : '`ColorspaceType`_',
'ComplianceType' : '`ComplianceType`_',
'CompositeOperator' : '`CompositeOperator`_',
'CompressionType' : '`CompressionType`_',
'DecorationType' : '`DecorationType`_',
'DrawContext' : '`DrawContext`_',
'DrawInfo' : '`DrawInfo`_',
'ErrorHandler' : '`ErrorHandler`_',
'ExceptionInfo' : '`ExceptionInfo`_',
'ExceptionType' : '`ExceptionType`_',
'FillRule' : '`FillRule`_',
'FilterTypes' : '`FilterTypes`_',
'FrameInfo' : '`FrameInfo`_',
'GravityType' : '`GravityType`_',
'Image' : '`Image`_',
'ImageInfo' : '`ImageInfo`_',
'ImageType' : '`ImageType`_',
'InterlaceType' : '`InterlaceType`_',
'LayerType' : '`LayerType`_',
'MagickInfo' : '`MagickInfo`_',
'MonitorHandler' : '`MonitorHandler`_',
'MontageInfo' : '`MontageInfo`_',
'NoiseType' : '`NoiseType`_',
'PaintMethod' : '`PaintMethod`_',
'PixelPacket' : '`PixelPacket`_',
'PointInfo' : '`PointInfo`_',
'ProfileInfo' : '`ProfileInfo`_',
'QuantizeInfo' : '`QuantizeInfo`_',
'Quantum' : '`Quantum`_',
'QuantumType' : '`QuantumType`_',
'RectangleInfo' : '`RectangleInfo`_',
'RegistryType' : '`RegistryType`_',
'RenderingIntent' : '`RenderingIntent`_',
'ResolutionType' : '`ResolutionType`_',
'ResourceType' : '`ResourceType`_',
'SegmentInfo' : '`SegmentInfo`_',
'SignatureInfo' : '`SignatureInfo`_',
'StorageType' : '`StorageType`_',
'StreamHandler' : '`StreamHandler`_',
'StretchType' : '`StretchType`_',
'StyleType' : '`StyleType`_',
'TypeMetric' : '`TypeMetric`_',
'ViewInfo' : '`ViewInfo`_',
'VirtualPixelMethod' : '`VirtualPixelMethod`_',
'MagickXResourceInfo' : '`MagickXResourceInfo`_',
}
state_init = 0
state_found_fcncomment = 1
state_found_fcntitle = 2
state_found_fcndoc = 3
state_more_prototype = 4
state_found_prototype = 5
state_found_private = 6
state_parmdescr = 7
def warn(msg):
print >> sys.stderr, msg
def debugtrace(msg):
print >> sys.stdout, msg
def nodebugtrace(msg):
pass
dtrace = nodebugtrace
#dtrace = debugtrace
# extract and save function title. example:
# + X M a g i c k C o m m a n d %
# % X A n i m a t e B a c k g r o u n d I m a g e %
# Lines starting with '+' are private APIs which should not appear
# in the output.
re_func_title = re.compile(r'^[+|%]\s+((\w )+)\s*%')
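# Illustrative sketch (not part of the original script): how re_func_title picks
# the function name out of a title line. The sample line is hypothetical but
# follows the spaced-letter format shown above.
def _example_match_title():
    line = '%   A n i m a t e I m a g e s                                         %'
    m = re_func_title.search(line)
    if m:
        return re.sub(' ', '', m.group(1))    # 'AnimateImages'
    return None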
def proto_pretty(line):
"""fixes up inconsistent spaces in C function prototypes"""
line = re.sub(r',', ' , ', line)
line = re.sub(r'\(', ' ( ', line)
line = re.sub(r'\)', ' ) ', line)
line = re.sub(r'\*', ' * ', line)
line = re.sub(r'\s+', ' ', line)
line = re.sub(r'\(\s+\*', '(*', line)
line = re.sub(r' ,', ',', line)
line = re.sub(r' \(', '(', line)
line = re.sub(r'\) ', ')', line)
line = re.sub(r' \* ', ' *', line)
line = re.sub('^\s*', '', line)
return line
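# Illustrative sketch (not part of the original script): proto_pretty() is meant
# to normalize the spacing around commas, parentheses and '*' in a prototype
# line pulled from a comment block. The input string below is hypothetical.
def _example_proto_pretty():
    raw = 'MagickPassFail  AnimateImages( const ImageInfo *image_info ,Image *image )'
    return proto_pretty(raw)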
class Paragraph:
"Paragraphs consist of one or more lines of text."
def __init__(self):
self.lines = []
def __str__(self):
#return '\n'.join(self.lines)
return '\n'.join([line.strip() for line in self.lines])
class Prototype:
def __init__(self):
self.lines = []
def __str__(self):
proto = ' '.join(self.lines)
proto = proto_pretty(proto)
# escape all the '*' chars
proto = re.sub(r'\*', '\\*', proto)
# escape all the '_' chars
proto = re.sub(r'_', '\\_', proto)
# now replace keywords with hyperlinks
for k,v in keywords.iteritems():
proto = re.sub(r'^%s ' % k, '%s ' % v, proto)
proto = re.sub(r' %s ' % k, ' %s ' % v, proto)
# make some attempt to wrap the text nicely
openparen_index = proto.find('(')
if openparen_index > 0:
fcn = proto[:openparen_index+1]
indent_len = len(fcn) + 3
toomuch = (2 * fcn.count('\\')) + (3 * fcn.count('`_'))
if toomuch > 0: # account for the space following the opening paren
toomuch -= 1
indent_len -= toomuch
params = proto[openparen_index+1:].split(',')
params = [p.strip() for p in params]
max_param_len = 0
for x in params:
if len(x) > max_param_len:
max_param_len = len(x)
wrap_width = max(96, max_param_len + indent_len)
proto_lines = []
line = fcn + ' '
while params:
x = params.pop(0)
if len(line) + len(x) > wrap_width:
proto_lines.append(line)
line = ' ' * indent_len
line += x
if params:
line += ', '
proto_lines.append(line)
proto = '\n '.join(proto_lines)
return ".. parsed-literal::\n\n %s" % proto
class ListItem:
"""List items are used for parameter descriptions, and consist of the
parameter name and one or more lines of description text."""
def __init__(self, name):
self.name = name
self.lines = []
def __str__(self):
s = []
s.append('%s:' % self.name)
for line in self.lines:
s.append(' %s' % line.strip())
return '\n'.join(s)
class Function:
def __init__(self, name):
self.name = name
self.prototype = None
# Description is a list, the items of which are either Paragraph or
# ListItem or Prototype instances.
self.description = []
def __str__(self):
lines = []
lines.append('')
lines.append('')
lines.append(self.name)
lines.append('=' * len(self.name))
lines.append('')
lines.append('Synopsis')
lines.append('--------')
lines.append(str(self.prototype))
lines.append('')
lines.append('Description')
lines.append('-----------')
for item in self.description:
lines.append(str(item))
lines.append('')
return '\n'.join(lines)
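# Illustrative sketch (not part of the original script): how parse() assembles a
# Function from a comment block, and how str() then renders it as
# reStructuredText. The API name, prototype and text are hypothetical.
def _example_build_function():
    func = Function('AnimateImages')
    summary = Paragraph()
    summary.lines.append('AnimateImages() repeatedly displays an image sequence.')
    func.description.append(summary)
    proto = Prototype()
    proto.lines.append('MagickPassFail AnimateImages(const ImageInfo *image_info,'
                       ' Image *image)')
    func.description.append(proto)
    func.prototype = proto
    parm = ListItem('image_info')
    parm.lines.append('The image info.')
    func.description.append(parm)
    return str(func)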
def parse(srcfilepath):
list_item = None
proto = None
para = None
func = None
functions = []
state = state_init
linecnt = 0
ftitle = None
f = file(srcfilepath, 'r')
for line in f:
linecnt += 1
if not (line.startswith('%') or line.startswith('+') or re.search(r'\*/', line)):
continue
line = line.strip()
if state == state_init:
# Find first line of function title/comment block
if line.startswith('%%%%%%%%'):
dtrace('Line %d: start of function comment block ############' % linecnt)
state = state_found_fcncomment
continue
elif state == state_found_fcncomment:
# Search for the function name, with spaces between each letter
if line.startswith('%%%%%%%%'):
warn('Line %d: WARNING: no function name found, found start of function comment block instead.' % linecnt)
state = state_init
continue
m = re_func_title.search(line)
if m:
if line.startswith('+'):
dtrace('Line %d: private API' % linecnt)
# private API, skip it
state = state_found_private
else:
# public API, process it
ftitle = re.sub(' ', '', m.group(1))
dtrace('Line %d: public API %s' % (linecnt, ftitle))
func = Function(ftitle)
functions.append(func)
state = state_found_fcntitle
continue
elif state == state_found_private:
# skip to end of function title block
if line.startswith('%%%%%%%%'):
dtrace('Line %d: end of private function comment block' % linecnt)
state = state_init
continue
elif state == state_found_fcntitle:
# skip to first line following end of function title block.
# lines of the function title block start with and end with '%'.
if not re.match(r'%.+%', line):
dtrace('Line %d: end of public function comment block %s' % (linecnt, ftitle))
state = state_found_fcndoc
# fall through
elif state == state_found_fcndoc:
# extract function prototype
if line.startswith('% '):
line = re.sub(r'^%\s{0,2}', '', line, 1)
# if empty args (), it's not the prototype, but the one-line summary
if re.search(r'%s\(\)' % ftitle, line):
if para is None:
dtrace('Line %d: found_fcndoc start paragraph ()' % linecnt)
para = Paragraph()
func.description.append(para)
para.lines.append(line)
# is this only line of prototype?
elif re.search(r'%s\([^)]+\)$' % ftitle, line):
if para:
dtrace('Line %d: found_fcndoc end paragraph by proto ()' % linecnt)
para = None
dtrace('Line %d: one-line prototype' % linecnt)
proto = Prototype()
proto.lines.append(line)
func.description.append(proto)
func.prototype = proto
proto = None
state = state_found_prototype
# is this first line of multiline prototype?
elif re.search(r'%s\([^)]*$' % ftitle, line):
if para:
dtrace('Line %d: found_fcndoc end paragraph by proto (' % linecnt)
para = None
dtrace('Line %d: first line of multi-line prototype' % linecnt)
proto = Prototype()
proto.lines.append(line)
func.description.append(proto)
func.prototype = proto
state = state_more_prototype
else:
if para is None:
dtrace('Line %d: found_fcndoc start paragraph' % linecnt)
para = Paragraph()
func.description.append(para)
para.lines.append(line)
else:
if line.startswith('%%%%%%%%'):
warn('Line %d: WARNING: no prototype found for %s, found start of function comment block instead.' % (linecnt, ftitle))
state = state_found_fcncomment
continue
if line.strip() == '%':
# empty line terminates paragraph
if para:
dtrace('Line %d: found_fcndoc end paragraph by blank line' % linecnt)
para = None
if proto:
dtrace('Line %d: found_fcndoc end proto by blank line' % linecnt)
proto = None
continue
elif state == state_more_prototype:
if re.match(r'%.+%', line):
# really this should raise a warning of "incomplete prototype"
continue
line = re.sub(r'^%\s{0,2}', '', line, 1)
if re.search(r'^\s*$', line):
dtrace('Line %d: end of more prototype' % linecnt)
state = state_found_prototype
else:
func.prototype.lines.append(line)
continue
elif state == state_found_prototype:
dtrace('Line %d: found prototype of function %s' % (linecnt, ftitle))
func.prototype.lines.append(';')
#print 'Function %s' % func.name
#print 'Synopsis'
#print ' '.join(func.prototype)
#print
# Process parm description.
# Description consists of two kinds of texts: paragraphs, and lists.
# Lists consist of list items. List items are one or more lines.
# List items are separated by blank lines. The first line of a list
# item starts with 'o '.
# Paragraphs consist of one or more lines which don't start with 'o '.
# Paragraphs are separated from each other and from adjacent list items
# by blank lines.
# In theory, a line which starts with 'o ' which is not preceded by a
# blank line is illegal syntax.
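            # Illustrative fragment of that format (assumed), after the leading '%'
            # prefix has been stripped from each line:
            #
            #   A paragraph line describing the behaviour.
            #
            #     o image: the image.
            #       second line of the same list item.
            #
            #     o exception: return any errors or warnings in this structure.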
para = None
state = state_parmdescr
# fall through
elif state == state_parmdescr:
if line.endswith('*/'):
# end of function comment block
dtrace('Line %d: end of parmdescr ************' % linecnt)
if list_item:
func.description.append(list_item)
list_item = None
if para:
func.description.append(para)
dtrace('Line %d: parmdescr end paragraph ()' % linecnt)
para = None
func = None
state = state_init
continue
line = re.sub(r'^%\s{0,2}', '', line, 1)
if line:
# look for list item, which starts with 'o'
m = re.search(r'^\s+o\s+([^:]+:|o|[0-9]\.)\s(.*)', line)
if m:
# first line of list item
if list_item: # if blank lines separate list items, this should never evaluate true
dtrace('Line %d: surprising end of list item' % linecnt)
func.description.append(list_item)
list_item = None
dtrace('Line %d: start list item' % linecnt)
list_item = ListItem(m.group(1).strip().rstrip(':'))
list_item.lines.append(m.group(2))
else:
# either a line of paragraph or subsequent line of list item
if list_item:
# subsequent line of list item
list_item.lines.append(line)
else:
# line of paragraph
if list_item: # if blank lines after list items, this should never evaluate true
dtrace('Line %d: end of list item, end of list' % linecnt)
func.description.append(list_item)
list_item = None
if para is None:
dtrace('Line %d: parmdescr start paragraph' % linecnt)
para = Paragraph()
para.lines.append(line)
else:
# empty line, two cases:
# 1. terminate multi-line list item
# 2. terminate multi-line paragraph
if list_item:
dtrace('Line %d: parmdescr end of list item by blank line' % linecnt)
func.description.append(list_item)
list_item = None
elif para:
# terminate any paragraph
dtrace('Line %d: parmdescr end of paragraph by blank line' % linecnt)
func.description.append(para)
para = None
continue
f.close()
return functions
def process_srcfile(srcfilepath, basename, whatis, outfile, include_rst):
"""outfile is a file object open for writing"""
functions = parse(srcfilepath)
print >> outfile, "=" * len(basename)
print >> outfile, basename
print >> outfile, "=" * len(basename)
if whatis:
print >> outfile, "-" * len(whatis)
print >> outfile, whatis
print >> outfile, "-" * len(whatis)
print >> outfile
print >> outfile, '.. contents:: :depth: 1'
print >> outfile
for x in include_rst:
print >> outfile, '.. include:: %s' % x
print >> outfile
# print all functions found in this source file
for func in functions:
print >> outfile, func
#para = para.strip() # trim leading and trailing whitespace
#para = re.sub(r'\s+', ' ', para) # canonicalize inner whitespace
#para = re.sub(r"""([a-zA-Z0-9][.!?][)'"]*) """, '\1 ', para) # Fix sentence ends
def find_val(key, keyval_file):
val = None
f = file(keyval_file, 'r')
cnt = 0
for line in f:
cnt += 1
if not line.strip():
continue
if line.startswith('#'):
continue
try:
k, v = line.split(None, 1)
except ValueError:
print >> sys.stderr, "Line %u of %s: improper format" % (cnt, keyval_file)
return None
if k == key:
val = v
break
f.close()
    return val.strip() if val is not None else None
def main(argv=None):
if argv is None:
argv = sys.argv[1:]
# parse command line options
try:
opts, posn_args = getopt.getopt(argv, 'hw:i:',
['help',
'whatis-file=',
'include-rst=',
])
except getopt.GetoptError, msg:
print msg
print __doc__
return 1
# process options
whatis_file = None
include_rst = ['api_hyperlinks.rst']
for opt, val in opts:
if opt in ("-h", "--help"):
print __doc__
return 0
if opt in ("-w", "--whatis-file"):
whatis_file = val
if opt in ("-i", "--include-rst"):
include_rst = [x for x in val.split(',') if x]
if len(posn_args) != 2:
print >> sys.stderr, 'Missing arguments'
print >> sys.stderr, __doc__
return 1
srcfile_path = posn_args[0]
outfile_path = posn_args[1]
srcfile = os.path.basename(srcfile_path)
base, ext = os.path.splitext(srcfile)
if whatis_file:
whatis = find_val(srcfile, whatis_file)
else:
whatis = None
fout = file(outfile_path, 'w')
process_srcfile(srcfile_path, base, whatis, fout, include_rst)
fout.close()
return 0
if __name__ == '__main__':
sys.exit(main())
| kazuyaujihara/osra_vs | GraphicsMagick/scripts/format_c_api_doc.py | Python | gpl-2.0 | 21,967 | 0.005281 |
#!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4 encoding=utf-8
import copy
from rapidsms.connection import Connection
from rapidsms.person import Person
from datetime import datetime
from rapidsms import utils
class StatusCodes:
'''Enum for representing status types of a message or response.'''
NONE = "None" # we don't know. the default
OK = "Ok" # is great success!
APP_ERROR = "Application Error" # application specific errors - e.g. bad data
GENERIC_ERROR = "Generic error" # generic errors - e.g. a catch all responder
class Message(object):
def __init__(self, connection=None, text=None, person=None, date=None):
if connection == None and person == None:
raise Exception("Message __init__() must take one of: connection, person")
self._connection = connection
self.text = text
self.date = ( datetime.utcnow() if date is None
else utils.to_naive_utc_dt(date) )
self.person = person
self.responses = []
self.status = StatusCodes.NONE
# a message is considered "unprocessed" until
# rapidsms has dispatched it to all apps, and
# flushed the responses out
self.processed = False
def __unicode__(self):
return self.text
@property
def connection(self):
# connection is read-only, since it's an
# immutable property of this object
if self._connection is not None:
return self._connection
else:
return self.person.connection
@property
def peer (self):
# return the identity (e.g. phone number) of
# the other end of this message's connection
return self.connection.identity
def send(self):
"""Send this message via self.connection.backend, returning
True if the message was sent successfully."""
return self.connection.backend.router.outgoing(self)
def flush_responses (self):
"""Sends all responses added to this message (via the
Message.respond method) in the order which they were
added, and clears self.responses"""
# keep on iterating until all of
# the messages have been sent
while self.responses:
self.responses.pop(0).send()
def error(self, text, level):
"""Apps send error messages here rather than through respond
        so users only receive one - the one with the highest level of specificity"""
#TODO implement this
pass
def respond(self, text, status = StatusCodes.NONE):
"""Send the given text back to the original caller of this
message on the same route that it came in on"""
if self.connection:
response = self.get_response(text, status)
self.responses.append(response)
return True
else:
return False
def get_response(self, text, status):
response = copy.copy(self)
response.text = text
response.status = status
return response
def forward (self, identity, text=None):
if self.connection:
target = self.connection.fork(identity)
if text is None: text = self.text
message = type(self)(connection=target, text=text)
self.responses.append(message)
return True
else:
return False
class EmailMessage(Message):
"""Email version of a message object, with some extra stuff that can
be consumed by email backends/apps."""
def __init__(self, connection=None, text=None, person=None, date=None,
subject=None, mime_type="text/plain"):
super(EmailMessage, self).__init__(connection=connection, text=text,
person=person, date=date)
self.subject = subject
self.mime_type = mime_type
def get_response(self, text, status):
response = Message.get_response(self, text, status)
response.subject = "re: %s" % self.subject
return response
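# Minimal usage sketch (illustrative; 'some_connection' is assumed to be a
# Connection wired to a backend by the surrounding RapidSMS runtime):
#   msg = Message(connection=some_connection, text="ping")
#   msg.respond("pong")       # queue a response on the same route
#   msg.flush_responses()     # send queued responses in order and clear them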
| icomms/rapidsms | lib/rapidsms/message.py | Python | lgpl-3.0 | 4,140 | 0.008454 |
from pathlib import Path
from jobman.jobman import JobMan
from mc.clients.job_record_client import JobRecordClient
from mc.clients.flow_record_client import FlowRecordClient
from mc.flows.flow_engine import FlowEngine
from mc.db.db import Db
from mc.runners.flow_runner import FlowRunner
from mc.runners.jobman_job_runner.job_runner import JobRunner
class HoustonUtils(object):
JOBS_SUBDIRS = ['pending', 'queued', 'executed', 'archive']
def __init__(self, houston=None):
self.houston = houston
@property
def cfg(self): return self.houston.cfg
@property
def db(self):
if not hasattr(self, '_db'):
self._db = self.generate_db(db_uri=self.cfg['MC_DB_URI'])
return self._db
def generate_db(self, db_uri=None, schema=None):
return Db(db_uri=db_uri, schema=schema)
@db.setter
    def db(self, value): self._db = value
def ensure_queues(self):
self.ensure_queue(queue_cfg=self.cfg['FLOW_QUEUE'])
self.ensure_queue(queue_cfg=self.cfg['JOB_QUEUE'])
def ensure_queue(self, queue_cfg=None):
try:
self.db.get_item_by_key(item_type='queue', key=queue_cfg['key'])
except self.db.ItemNotFoundError:
self.db.create_item(
item_type='queue',
item_kwargs={
'key': queue_cfg['key'],
**queue_cfg.get('queue_kwargs', {})
}
)
@property
def flow_runner(self):
if not hasattr(self, '_flow_runner'):
self._flow_runner = FlowRunner(
flow_engine=self.flow_engine,
flow_record_client=self.flow_record_client,
task_ctx={
'mc.flow_record_client': self.flow_record_client,
'mc.job_record_client': self.job_record_client,
}
)
return self._flow_runner
@flow_runner.setter
def flow_runner(self, new_value): self._flow_runner = new_value
@property
def flow_engine(self):
if not hasattr(self, '_flow_engine'):
self._flow_engine = FlowEngine()
return self._flow_engine
@flow_engine.setter
def flow_engine(self, new_value): self._flow_engine = new_value
@property
def flow_record_client(self):
if not hasattr(self, '_flow_record_client'):
self._flow_record_client = self._get_mc_client(record_type='flow')
return self._flow_record_client
@flow_record_client.setter
def flow_record_client(self, new_value):
self._flow_record_client = new_value
@property
def job_record_client(self):
if not hasattr(self, '_job_record_client'):
self._job_record_client = self._get_mc_client(record_type='job')
return self._job_record_client
def _get_mc_client(self, record_type=None):
client_cls = None
if record_type == 'flow':
client_cls = FlowRecordClient
elif record_type == 'job':
client_cls = JobRecordClient
assert client_cls is not None
queue_cfg = self.cfg[record_type.upper() + '_QUEUE']
return client_cls(mc_db=self.db,
use_locks=self.cfg.get('USE_LOCKS', True),
queue_key=queue_cfg['key'])
@job_record_client.setter
def job_record_client(self, new_value): self._job_record_client = new_value
@property
    def job_runner(self):
if not hasattr(self, '_job_runner'):
self._job_runner = JobRunner(
artifact_handler=self.cfg['ARTIFACT_HANDLER'],
job_record_client=self.job_record_client,
jobman=self.jobman,
jobdirs_dir=self.cfg.get('JOBDIRS_DIR', None),
build_jobdir_fn=self.build_jobdir,
)
return self._job_runner
@job_runner.setter
def job_runner(self, new_value): self._job_runner = new_value
@property
def jobman(self):
if not hasattr(self, '_jobman'):
self._jobman = JobMan.from_cfg(cfg=self.cfg['JOBMAN_CFG'])
return self._jobman
@jobman.setter
def jobman(self, new_value): self._jobman = new_value
def build_jobdir(self, *args, **kwargs):
try:
build_jobdir_fn = self.cfg['BUILD_JOBDIR_FN']
except:
def build_jobdir_fn(*args, **kwargs):
return self.houston.run_command('build_job_dir')
return build_jobdir_fn(*args, **kwargs)
def has_unfinished_mc_records(self):
unfinished_records = self.get_unfinished_mc_records()
for record_type, records in unfinished_records.items():
if len(records) > 0:
return True
return False
def get_unfinished_mc_records(self):
return {
record_type: self._get_unfinished_mc_items(item_type=record_type)
for record_type in ['flow', 'job']
}
def _get_unfinished_mc_items(self, item_type=None):
return self.db.query_items(item_type=item_type, query={
'filters': [
{'field': 'status', 'op': '! IN',
'arg': ['FAILED', 'COMPLETED']}
]
})
def ensure_job_dirs(self):
for dir in self.job_dirs.values():
Path(dir).mkdir(parents=True, exist_ok=True)
@property
def job_dirs(self):
if not hasattr(self, '_job_dirs'):
self._job_dirs = {'root': self.cfg.get('JOB_DIRS_ROOT', None)}
for jobs_subdir in self.JOBS_SUBDIRS:
self._job_dirs[jobs_subdir] = str(Path(self._job_dirs['root'],
jobs_subdir))
return self._job_dirs
@job_dirs.setter
def job_dirs(self, value): self._job_dirs = value
@property
def archiver(self):
if not hasattr(self, '_archiver'):
self._archiver = self._generate_archiver()
return self._archiver
def _generate_archiver(self):
from mc.utils.archivers.dir_archiver import DirArchiver
return DirArchiver(root_dir=self.job_dirs['archive'])
@property
def entity_selector(self):
if not hasattr(self, '_entity_selector'):
from mc.utils.selectors.basic_entity_selector import (
BasicEntitySelector)
self._entity_selector = BasicEntitySelector(db=self.db)
return self._entity_selector
@property
def request_selector(self):
if not hasattr(self, '_request_selector'):
from mc.utils.selectors.basic_request_selector import (
BasicRequestSelector)
self._request_selector = BasicRequestSelector(db=self.db)
return self._request_selector
| aspuru-guzik-group/mission_control | mc/houston/utils.py | Python | apache-2.0 | 6,799 | 0.000147 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2013 The Plaso Project Authors.
# Please see the AUTHORS file for details on individual authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for the Windows prefetch parser."""
import unittest
# pylint: disable=unused-import
from plaso.formatters import winprefetch as winprefetch_formatter
from plaso.lib import eventdata
from plaso.lib import timelib_test
from plaso.parsers import test_lib
from plaso.parsers import winprefetch
class WinPrefetchParserTest(test_lib.ParserTestCase):
"""Tests for the Windows prefetch parser."""
def setUp(self):
"""Sets up the needed objects used throughout the test."""
self._parser = winprefetch.WinPrefetchParser()
def testParse17(self):
"""Tests the Parse function on a version 17 Prefetch file."""
test_file = self._GetTestFilePath(['CMD.EXE-087B4001.pf'])
event_queue_consumer = self._ParseFile(self._parser, test_file)
event_objects = self._GetEventObjectsFromQueue(event_queue_consumer)
self.assertEquals(len(event_objects), 2)
# The prefetch last run event.
event_object = event_objects[1]
self.assertEquals(event_object.version, 17)
expected_timestamp = timelib_test.CopyStringToTimestamp(
'2013-03-10 10:11:49.281250')
self.assertEquals(event_object.timestamp, expected_timestamp)
self.assertEquals(
event_object.timestamp_desc, eventdata.EventTimestamp.LAST_RUNTIME)
self.assertEquals(event_object.executable, u'CMD.EXE')
self.assertEquals(event_object.prefetch_hash, 0x087b4001)
self.assertEquals(event_object.volume_serial_numbers[0], 0x24cb074b)
expected_mapped_files = [
u'\\DEVICE\\HARDDISKVOLUME1\\WINDOWS\\SYSTEM32\\NTDLL.DLL',
u'\\DEVICE\\HARDDISKVOLUME1\\WINDOWS\\SYSTEM32\\KERNEL32.DLL',
u'\\DEVICE\\HARDDISKVOLUME1\\WINDOWS\\SYSTEM32\\UNICODE.NLS',
u'\\DEVICE\\HARDDISKVOLUME1\\WINDOWS\\SYSTEM32\\LOCALE.NLS',
u'\\DEVICE\\HARDDISKVOLUME1\\WINDOWS\\SYSTEM32\\SORTTBLS.NLS',
u'\\DEVICE\\HARDDISKVOLUME1\\WINDOWS\\SYSTEM32\\MSVCRT.DLL',
u'\\DEVICE\\HARDDISKVOLUME1\\WINDOWS\\SYSTEM32\\CMD.EXE',
u'\\DEVICE\\HARDDISKVOLUME1\\WINDOWS\\SYSTEM32\\USER32.DLL',
u'\\DEVICE\\HARDDISKVOLUME1\\WINDOWS\\SYSTEM32\\GDI32.DLL',
u'\\DEVICE\\HARDDISKVOLUME1\\WINDOWS\\SYSTEM32\\SHIMENG.DLL',
u'\\DEVICE\\HARDDISKVOLUME1\\WINDOWS\\APPPATCH\\SYSMAIN.SDB',
u'\\DEVICE\\HARDDISKVOLUME1\\WINDOWS\\APPPATCH\\ACGENRAL.DLL',
u'\\DEVICE\\HARDDISKVOLUME1\\WINDOWS\\SYSTEM32\\ADVAPI32.DLL',
u'\\DEVICE\\HARDDISKVOLUME1\\WINDOWS\\SYSTEM32\\RPCRT4.DLL',
u'\\DEVICE\\HARDDISKVOLUME1\\WINDOWS\\SYSTEM32\\WINMM.DLL',
u'\\DEVICE\\HARDDISKVOLUME1\\WINDOWS\\SYSTEM32\\OLE32.DLL',
u'\\DEVICE\\HARDDISKVOLUME1\\WINDOWS\\SYSTEM32\\OLEAUT32.DLL',
u'\\DEVICE\\HARDDISKVOLUME1\\WINDOWS\\SYSTEM32\\MSACM32.DLL',
u'\\DEVICE\\HARDDISKVOLUME1\\WINDOWS\\SYSTEM32\\VERSION.DLL',
u'\\DEVICE\\HARDDISKVOLUME1\\WINDOWS\\SYSTEM32\\SHELL32.DLL',
u'\\DEVICE\\HARDDISKVOLUME1\\WINDOWS\\SYSTEM32\\SHLWAPI.DLL',
u'\\DEVICE\\HARDDISKVOLUME1\\WINDOWS\\SYSTEM32\\USERENV.DLL',
u'\\DEVICE\\HARDDISKVOLUME1\\WINDOWS\\SYSTEM32\\UXTHEME.DLL',
u'\\DEVICE\\HARDDISKVOLUME1\\WINDOWS\\SYSTEM32\\CTYPE.NLS',
u'\\DEVICE\\HARDDISKVOLUME1\\WINDOWS\\SYSTEM32\\SORTKEY.NLS',
(u'\\DEVICE\\HARDDISKVOLUME1\\WINDOWS\\WINSXS\\X86_MICROSOFT.WINDOWS.'
u'COMMON-CONTROLS_6595B64144CCF1DF_6.0.2600.2180_X-WW_A84F1FF9\\'
u'COMCTL32.DLL'),
u'\\DEVICE\\HARDDISKVOLUME1\\WINDOWS\\WINDOWSSHELL.MANIFEST',
u'\\DEVICE\\HARDDISKVOLUME1\\WINDOWS\\SYSTEM32\\COMCTL32.DLL',
(u'\\DEVICE\\HARDDISKVOLUME1\\D50FF1E628137B1A251B47AB9466\\UPDATE\\'
u'UPDATE.EXE.MANIFEST'),
u'\\DEVICE\\HARDDISKVOLUME1\\$MFT',
(u'\\DEVICE\\HARDDISKVOLUME1\\WINDOWS\\IE7\\SPUNINST\\SPUNINST.EXE.'
u'MANIFEST'),
(u'\\DEVICE\\HARDDISKVOLUME1\\D50FF1E628137B1A251B47AB9466\\UPDATE\\'
u'IERESETICONS.EXE'),
u'\\DEVICE\\HARDDISKVOLUME1\\WINDOWS\\IE7\\SPUNINST\\IERESETICONS.EXE']
self.assertEquals(event_object.mapped_files, expected_mapped_files)
# The volume creation event.
event_object = event_objects[0]
expected_timestamp = timelib_test.CopyStringToTimestamp(
'2013-03-10 10:19:46.234375')
self.assertEquals(event_object.timestamp, expected_timestamp)
self.assertEquals(
event_object.timestamp_desc, eventdata.EventTimestamp.CREATION_TIME)
expected_msg = (
u'\\DEVICE\\HARDDISKVOLUME1 '
u'Serial number: 0x24CB074B '
u'Origin: CMD.EXE-087B4001.pf')
expected_msg_short = (
u'\\DEVICE\\HARDDISKVOLUME1 '
u'Origin: CMD.EXE-087B4001.pf')
self._TestGetMessageStrings(event_object, expected_msg, expected_msg_short)
def testParse23(self):
"""Tests the Parse function on a version 23 Prefetch file."""
test_file = self._GetTestFilePath(['PING.EXE-B29F6629.pf'])
event_queue_consumer = self._ParseFile(self._parser, test_file)
event_objects = self._GetEventObjectsFromQueue(event_queue_consumer)
self.assertEquals(len(event_objects), 2)
# The prefetch last run event.
event_object = event_objects[1]
self.assertEquals(event_object.version, 23)
expected_timestamp = timelib_test.CopyStringToTimestamp(
'2012-04-06 19:00:55.932955')
self.assertEquals(event_object.timestamp, expected_timestamp)
self.assertEquals(
event_object.timestamp_desc, eventdata.EventTimestamp.LAST_RUNTIME)
self.assertEquals(event_object.executable, u'PING.EXE')
self.assertEquals(event_object.prefetch_hash, 0xb29f6629)
self.assertEquals(
event_object.path, u'\\WINDOWS\\SYSTEM32\\PING.EXE')
self.assertEquals(event_object.run_count, 14)
self.assertEquals(
event_object.volume_device_paths[0], u'\\DEVICE\\HARDDISKVOLUME1')
self.assertEquals(event_object.volume_serial_numbers[0], 0xac036525)
expected_msg = (
u'Prefetch [PING.EXE] was executed - run count 14 path: '
u'\\WINDOWS\\SYSTEM32\\PING.EXE '
u'hash: 0xB29F6629 '
u'volume: 1 [serial number: 0xAC036525, '
u'device path: \\DEVICE\\HARDDISKVOLUME1]')
expected_msg_short = u'PING.EXE was run 14 time(s)'
self._TestGetMessageStrings(event_object, expected_msg, expected_msg_short)
# The volume creation event.
event_object = event_objects[0]
expected_timestamp = timelib_test.CopyStringToTimestamp(
'2010-11-10 17:37:26.484375')
self.assertEquals(event_object.timestamp, expected_timestamp)
self.assertEquals(
event_object.timestamp_desc, eventdata.EventTimestamp.CREATION_TIME)
def testParse23MultiVolume(self):
"""Tests the Parse function on a mulit volume version 23 Prefetch file."""
test_file = self._GetTestFilePath(['WUAUCLT.EXE-830BCC14.pf'])
event_queue_consumer = self._ParseFile(self._parser, test_file)
event_objects = self._GetEventObjectsFromQueue(event_queue_consumer)
self.assertEquals(len(event_objects), 6)
# The prefetch last run event.
event_object = event_objects[5]
self.assertEquals(event_object.version, 23)
expected_timestamp = timelib_test.CopyStringToTimestamp(
'2012-03-15 21:17:39.807996')
self.assertEquals(event_object.timestamp, expected_timestamp)
self.assertEquals(
event_object.timestamp_desc, eventdata.EventTimestamp.LAST_RUNTIME)
self.assertEquals(event_object.executable, u'WUAUCLT.EXE')
self.assertEquals(event_object.prefetch_hash, 0x830bcc14)
self.assertEquals(
event_object.path, u'\\WINDOWS\\SYSTEM32\\WUAUCLT.EXE')
self.assertEquals(event_object.run_count, 25)
self.assertEquals(
event_object.volume_device_paths[0], u'\\DEVICE\\HARDDISKVOLUME1')
self.assertEquals(event_object.volume_serial_numbers[0], 0xac036525)
expected_msg = (
u'Prefetch [WUAUCLT.EXE] was executed - run count 25 path: '
u'\\WINDOWS\\SYSTEM32\\WUAUCLT.EXE '
u'hash: 0x830BCC14 '
u'volume: 1 [serial number: 0xAC036525, '
u'device path: \\DEVICE\\HARDDISKVOLUME1], '
u'volume: 2 [serial number: 0xAC036525, '
u'device path: \\DEVICE\\HARDDISKVOLUMESHADOWCOPY2], '
u'volume: 3 [serial number: 0xAC036525, '
u'device path: \\DEVICE\\HARDDISKVOLUMESHADOWCOPY4], '
u'volume: 4 [serial number: 0xAC036525, '
u'device path: \\DEVICE\\HARDDISKVOLUMESHADOWCOPY7], '
u'volume: 5 [serial number: 0xAC036525, '
u'device path: \\DEVICE\\HARDDISKVOLUMESHADOWCOPY8]')
expected_msg_short = u'WUAUCLT.EXE was run 25 time(s)'
self._TestGetMessageStrings(event_object, expected_msg, expected_msg_short)
# The volume creation event.
event_object = event_objects[0]
expected_timestamp = timelib_test.CopyStringToTimestamp(
'2010-11-10 17:37:26.484375')
self.assertEquals(event_object.timestamp, expected_timestamp)
self.assertEquals(
event_object.timestamp_desc, eventdata.EventTimestamp.CREATION_TIME)
expected_msg = (
u'\\DEVICE\\HARDDISKVOLUME1 '
u'Serial number: 0xAC036525 '
u'Origin: WUAUCLT.EXE-830BCC14.pf')
expected_msg_short = (
u'\\DEVICE\\HARDDISKVOLUME1 '
u'Origin: WUAUCLT.EXE-830BCC14.pf')
self._TestGetMessageStrings(event_object, expected_msg, expected_msg_short)
def testParse26(self):
"""Tests the Parse function on a version 26 Prefetch file."""
test_file = self._GetTestFilePath(['TASKHOST.EXE-3AE259FC.pf'])
event_queue_consumer = self._ParseFile(self._parser, test_file)
event_objects = self._GetEventObjectsFromQueue(event_queue_consumer)
self.assertEquals(len(event_objects), 5)
# The prefetch last run event.
event_object = event_objects[1]
self.assertEquals(event_object.version, 26)
expected_timestamp = timelib_test.CopyStringToTimestamp(
'2013-10-04 15:40:09.037833')
self.assertEquals(event_object.timestamp, expected_timestamp)
self.assertEquals(
event_object.timestamp_desc, eventdata.EventTimestamp.LAST_RUNTIME)
self.assertEquals(event_object.executable, u'TASKHOST.EXE')
self.assertEquals(event_object.prefetch_hash, 0x3ae259fc)
# The prefetch previous last run event.
event_object = event_objects[2]
expected_timestamp = timelib_test.CopyStringToTimestamp(
'2013-10-04 15:28:09.010356')
self.assertEquals(event_object.timestamp, expected_timestamp)
self.assertEquals(
event_object.timestamp_desc,
u'Previous {0:s}'.format(eventdata.EventTimestamp.LAST_RUNTIME))
expected_mapped_files = [
(u'\\DEVICE\\HARDDISKVOLUME2\\WINDOWS\\SYSTEM32\\NTDLL.DLL '
u'[MFT entry: 46299, sequence: 1]'),
u'\\DEVICE\\HARDDISKVOLUME2\\WINDOWS\\SYSTEM32\\TASKHOST.EXE',
(u'\\DEVICE\\HARDDISKVOLUME2\\WINDOWS\\SYSTEM32\\KERNEL32.DLL '
u'[MFT entry: 45747, sequence: 1]'),
(u'\\DEVICE\\HARDDISKVOLUME2\\WINDOWS\\SYSTEM32\\KERNELBASE.DLL '
u'[MFT entry: 45734, sequence: 1]'),
(u'\\DEVICE\\HARDDISKVOLUME2\\WINDOWS\\SYSTEM32\\LOCALE.NLS '
u'[MFT entry: 45777, sequence: 1]'),
(u'\\DEVICE\\HARDDISKVOLUME2\\WINDOWS\\SYSTEM32\\MSVCRT.DLL '
u'[MFT entry: 46033, sequence: 1]'),
(u'\\DEVICE\\HARDDISKVOLUME2\\WINDOWS\\SYSTEM32\\RPCRT4.DLL '
u'[MFT entry: 46668, sequence: 1]'),
(u'\\DEVICE\\HARDDISKVOLUME2\\WINDOWS\\SYSTEM32\\COMBASE.DLL '
u'[MFT entry: 44616, sequence: 1]'),
(u'\\DEVICE\\HARDDISKVOLUME2\\WINDOWS\\SYSTEM32\\OLEAUT32.DLL '
u'[MFT entry: 46309, sequence: 1]'),
(u'\\DEVICE\\HARDDISKVOLUME2\\WINDOWS\\SYSTEM32\\OLE32.DLL '
u'[MFT entry: 46348, sequence: 1]'),
(u'\\DEVICE\\HARDDISKVOLUME2\\WINDOWS\\SYSTEM32\\RPCSS.DLL '
u'[MFT entry: 46654, sequence: 1]'),
(u'\\DEVICE\\HARDDISKVOLUME2\\WINDOWS\\SYSTEM32\\KERNEL.APPCORE.DLL '
u'[MFT entry: 45698, sequence: 1]'),
(u'\\DEVICE\\HARDDISKVOLUME2\\WINDOWS\\SYSTEM32\\CRYPTBASE.DLL '
u'[MFT entry: 44560, sequence: 1]'),
(u'\\DEVICE\\HARDDISKVOLUME2\\WINDOWS\\SYSTEM32\\BCRYPTPRIMITIVES.DLL '
u'[MFT entry: 44355, sequence: 1]'),
(u'\\DEVICE\\HARDDISKVOLUME2\\WINDOWS\\SYSTEM32\\USER32.DLL '
u'[MFT entry: 47130, sequence: 1]'),
(u'\\DEVICE\\HARDDISKVOLUME2\\WINDOWS\\SYSTEM32\\GDI32.DLL '
u'[MFT entry: 45344, sequence: 1]'),
(u'\\DEVICE\\HARDDISKVOLUME2\\WINDOWS\\SYSTEM32\\EN-US\\'
u'TASKHOST.EXE.MUI'),
(u'\\DEVICE\\HARDDISKVOLUME2\\WINDOWS\\SYSTEM32\\SECHOST.DLL '
u'[MFT entry: 46699, sequence: 1]'),
(u'\\DEVICE\\HARDDISKVOLUME2\\WINDOWS\\SYSTEM32\\CLBCATQ.DLL '
u'[MFT entry: 44511, sequence: 1]'),
(u'\\DEVICE\\HARDDISKVOLUME2\\WINDOWS\\SYSTEM32\\RACENGN.DLL '
u'[MFT entry: 46549, sequence: 1]'),
(u'\\DEVICE\\HARDDISKVOLUME2\\WINDOWS\\SYSTEM32\\NTMARTA.DLL '
u'[MFT entry: 46262, sequence: 1]'),
(u'\\DEVICE\\HARDDISKVOLUME2\\WINDOWS\\SYSTEM32\\WEVTAPI.DLL '
u'[MFT entry: 47223, sequence: 1]'),
u'\\DEVICE\\HARDDISKVOLUME2\\$MFT',
(u'\\DEVICE\\HARDDISKVOLUME2\\WINDOWS\\SYSTEM32\\SQMAPI.DLL '
u'[MFT entry: 46832, sequence: 1]'),
(u'\\DEVICE\\HARDDISKVOLUME2\\WINDOWS\\SYSTEM32\\AEPIC.DLL '
u'[MFT entry: 43991, sequence: 1]'),
(u'\\DEVICE\\HARDDISKVOLUME2\\WINDOWS\\SYSTEM32\\WINTRUST.DLL '
u'[MFT entry: 47372, sequence: 1]'),
(u'\\DEVICE\\HARDDISKVOLUME2\\WINDOWS\\SYSTEM32\\SLWGA.DLL '
u'[MFT entry: 46762, sequence: 1]'),
(u'\\DEVICE\\HARDDISKVOLUME2\\WINDOWS\\SYSTEM32\\DXGI.DLL '
u'[MFT entry: 44935, sequence: 1]'),
(u'\\DEVICE\\HARDDISKVOLUME2\\WINDOWS\\SYSTEM32\\ESENT.DLL '
u'[MFT entry: 45256, sequence: 1]'),
(u'\\DEVICE\\HARDDISKVOLUME2\\WINDOWS\\SYSTEM32\\WMICLNT.DLL '
u'[MFT entry: 47413, sequence: 1]'),
(u'\\DEVICE\\HARDDISKVOLUME2\\WINDOWS\\SYSTEM32\\ADVAPI32.DLL '
u'[MFT entry: 43994, sequence: 1]'),
(u'\\DEVICE\\HARDDISKVOLUME2\\WINDOWS\\SYSTEM32\\SFC_OS.DLL '
u'[MFT entry: 46729, sequence: 1]'),
(u'\\DEVICE\\HARDDISKVOLUME2\\WINDOWS\\SYSTEM32\\VERSION.DLL '
u'[MFT entry: 47120, sequence: 1]'),
(u'\\DEVICE\\HARDDISKVOLUME2\\WINDOWS\\SYSTEM32\\CRYPT32.DLL '
u'[MFT entry: 44645, sequence: 1]'),
(u'\\DEVICE\\HARDDISKVOLUME2\\WINDOWS\\SYSTEM32\\MSASN1.DLL '
u'[MFT entry: 45909, sequence: 1]'),
(u'\\DEVICE\\HARDDISKVOLUME2\\WINDOWS\\SYSTEM32\\WTSAPI32.DLL '
u'[MFT entry: 47527, sequence: 1]'),
(u'\\DEVICE\\HARDDISKVOLUME2\\WINDOWS\\SYSTEM32\\SPPC.DLL '
u'[MFT entry: 46803, sequence: 1]'),
(u'\\DEVICE\\HARDDISKVOLUME2\\WINDOWS\\SYSTEM32\\POWRPROF.DLL '
u'[MFT entry: 46413, sequence: 1]'),
(u'\\DEVICE\\HARDDISKVOLUME2\\WINDOWS\\SYSTEM32\\PROFAPI.DLL '
u'[MFT entry: 46441, sequence: 1]'),
(u'\\DEVICE\\HARDDISKVOLUME2\\PROGRAMDATA\\MICROSOFT\\RAC\\STATEDATA\\'
u'RACMETADATA.DAT [MFT entry: 39345, sequence: 2]'),
(u'\\DEVICE\\HARDDISKVOLUME2\\WINDOWS\\GLOBALIZATION\\SORTING\\'
u'SORTDEFAULT.NLS [MFT entry: 37452, sequence: 1]'),
(u'\\DEVICE\\HARDDISKVOLUME2\\WINDOWS\\SYSTEM32\\RACRULES.XML '
u'[MFT entry: 46509, sequence: 1]'),
(u'\\DEVICE\\HARDDISKVOLUME2\\WINDOWS\\SYSTEM32\\TASKSCHD.DLL '
u'[MFT entry: 47043, sequence: 1]'),
(u'\\DEVICE\\HARDDISKVOLUME2\\WINDOWS\\SYSTEM32\\SSPICLI.DLL '
u'[MFT entry: 46856, sequence: 1]'),
(u'\\DEVICE\\HARDDISKVOLUME2\\WINDOWS\\SYSTEM32\\XMLLITE.DLL '
u'[MFT entry: 47569, sequence: 1]'),
(u'\\DEVICE\\HARDDISKVOLUME2\\PROGRAMDATA\\MICROSOFT\\RAC\\STATEDATA\\'
u'RACWMIEVENTDATA.DAT [MFT entry: 23870, sequence: 3]'),
(u'\\DEVICE\\HARDDISKVOLUME2\\PROGRAMDATA\\MICROSOFT\\RAC\\STATEDATA\\'
u'RACWMIDATABOOKMARKS.DAT [MFT entry: 23871, sequence: 2]'),
(u'\\DEVICE\\HARDDISKVOLUME2\\WINDOWS\\SYSTEM32\\TPMTASKS.DLL '
u'[MFT entry: 47003, sequence: 1]'),
(u'\\DEVICE\\HARDDISKVOLUME2\\WINDOWS\\SYSTEM32\\NCRYPT.DLL '
u'[MFT entry: 46073, sequence: 1]'),
(u'\\DEVICE\\HARDDISKVOLUME2\\WINDOWS\\SYSTEM32\\BCRYPT.DLL '
u'[MFT entry: 44346, sequence: 1]'),
(u'\\DEVICE\\HARDDISKVOLUME2\\WINDOWS\\SYSTEM32\\NTASN1.DLL '
u'[MFT entry: 46261, sequence: 1]')]
self.assertEquals(event_object.mapped_files, expected_mapped_files)
# The volume creation event.
event_object = event_objects[0]
expected_timestamp = timelib_test.CopyStringToTimestamp(
'2013-10-04 15:57:26.146547')
self.assertEquals(event_object.timestamp, expected_timestamp)
self.assertEquals(
event_object.timestamp_desc, eventdata.EventTimestamp.CREATION_TIME)
if __name__ == '__main__':
unittest.main()
| cvandeplas/plaso | plaso/parsers/winprefetch_test.py | Python | apache-2.0 | 17,693 | 0.000396 |
from data import *
# white
pvals = {
PAWN: 100,\
BISHOP: 300,\
KNIGHT: 300,\
ROOK: 500,\
QUEEN: 900,\
-PAWN: -100,\
-BISHOP: -300,\
-KNIGHT: -300,\
-ROOK: -500,\
-QUEEN: -900,\
KING: 10000,\
-KING: -10000,\
EMPTY: 0,\
}
def value(state):
return state.som * sum(pvals[state.board[cord]] for cord in fcords)
def game_lost(state):
try:
state.board.index(KING*state.som)
return False
except ValueError:
return True
def game_drawn(state):
if state.turn >= 80:
return True
else:
return False
| edrex/minichess | minichess/eval.py | Python | gpl-2.0 | 605 | 0.028099 |
#
# Copyright 2018 Analytics Zoo Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import sys
from zoo.pipeline.api.utils import remove_batch
from .engine.topology import KerasNet
from bigdl.util.common import to_list
from zoo.common.utils import callZooFunc
if sys.version >= '3':
long = int
unicode = str
class Sequential(KerasNet):
"""
Container for a sequential model.
# Arguments
name: String to specify the name of the sequential model. Default is None.
>>> sequential = Sequential(name="seq1")
creating: createZooKerasSequential
"""
def __init__(self, jvalue=None, **kwargs):
super(Sequential, self).__init__(jvalue, **kwargs)
# TODO: expose is_built from scala side
def is_built(self):
try:
self.get_output_shape()
return True
except:
return False
def add(self, model):
from zoo.pipeline.api.autograd import Lambda
if (isinstance(model, Lambda)):
if not self.is_built():
if not model.input_shape:
raise Exception("You should specify inputShape for the first layer")
input_shapes = model.input_shape
else:
input_shapes = self.get_output_shape()
model = model.create(remove_batch(input_shapes))
self.value.add(model.value)
return self
@staticmethod
def from_jvalue(jvalue, bigdl_type="float"):
"""
Create a Python Model base on the given java value
:param jvalue: Java object create by Py4j
:return: A Python Model
"""
model = Sequential(jvalue=jvalue)
model.value = jvalue
return model
class Model(KerasNet):
"""
Container for a graph model.
# Arguments
input: An input node or a list of input nodes.
output: An output node or a list of output nodes.
name: String to specify the name of the graph model. Default is None.
"""
def __init__(self, input, output, jvalue=None, **kwargs):
super(Model, self).__init__(jvalue,
to_list(input),
to_list(output),
**kwargs)
def save_graph_topology(self, log_path, backward=False):
"""
Save the current model graph to a folder, which can be displayed in TensorBoard
by running the command:
tensorboard --logdir log_path
# Arguments
log_path: The path to save the model graph.
        backward: Whether to also include the backward graph. Default is False.
"""
callZooFunc(self.bigdl_type, "zooSaveGraphTopology",
self.value,
log_path,
backward)
def new_graph(self, outputs):
value = callZooFunc(self.bigdl_type, "newGraph", self.value, outputs)
return self.from_jvalue(value)
def freeze_up_to(self, names):
callZooFunc(self.bigdl_type, "freezeUpTo", self.value, names)
def unfreeze(self, names):
callZooFunc(self.bigdl_type, "unFreeze", self.value, names)
@staticmethod
def from_jvalue(jvalue, bigdl_type="float"):
"""
Create a Python Model base on the given java value
:param jvalue: Java object create by Py4j
:return: A Python Model
"""
model = Model([], [], jvalue=jvalue)
model.value = jvalue
return model
| intel-analytics/analytics-zoo | pyzoo/zoo/pipeline/api/keras/models.py | Python | apache-2.0 | 3,962 | 0.000757 |
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Custom neural network layers.
Low-level primitives such as custom convolution with custom initialization.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import functools
import tensorflow as tf
def downscale2d(x, n):
"""Box downscaling.
Args:
x: 4D tensor in NHWC format.
n: integer scale.
Returns:
4D tensor down scaled by a factor n.
"""
if n <= 1:
return x
if n % 2 == 0:
x = tf.nn.avg_pool(x, [1, 2, 2, 1], [1, 2, 2, 1], 'VALID')
return downscale2d(x, n // 2)
return tf.nn.avg_pool(x, [1, n, n, 1], [1, n, n, 1], 'VALID')
def upscale2d(x, n):
"""Box upscaling (also called nearest neighbors).
Args:
x: 4D tensor in NHWC format.
n: integer scale (must be a power of 2).
Returns:
4D tensor up scaled by a factor n.
"""
if n == 1:
return x
return tf.batch_to_space(tf.tile(x, [n**2, 1, 1, 1]), [[0, 0], [0, 0]], n)
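# Shape sketch for the two helpers above (illustrative):
#   x = tf.zeros([1, 32, 32, 3])
#   downscale2d(x, 4)   # -> shape [1, 8, 8, 3]
#   upscale2d(x, 4)     # -> shape [1, 128, 128, 3]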
class HeModifiedNormalInitializer(tf.initializers.random_normal):
def __init__(self, slope):
self.slope = slope
def get_config(self):
return dict(slope=self.slope)
def __call__(self, shape, dtype=None, partition_info=None):
del partition_info
dtype = dtype or tf.float32
std = tf.rsqrt((1. + self.slope**2) *
tf.cast(tf.reduce_prod(shape[:-1]), tf.float32))
return tf.random_normal(shape, stddev=std, dtype=dtype)
def encoder(x, scales, depth, latent, scope):
activation = tf.nn.leaky_relu
conv_op = functools.partial(
tf.layers.conv2d, padding='same',
kernel_initializer=HeModifiedNormalInitializer(0.2))
with tf.variable_scope(scope, reuse=tf.AUTO_REUSE):
y = conv_op(x, depth, 1)
for scale in range(scales):
y = conv_op(y, depth << scale, 3, activation=activation)
y = conv_op(y, depth << scale, 3, activation=activation)
y = downscale2d(y, 2)
y = conv_op(y, depth << scales, 3, activation=activation)
y = conv_op(y, latent, 3)
return y
def decoder(x, scales, depth, colors, scope):
activation = tf.nn.leaky_relu
conv_op = functools.partial(
tf.layers.conv2d, padding='same',
kernel_initializer=HeModifiedNormalInitializer(0.2))
y = x
with tf.variable_scope(scope, reuse=tf.AUTO_REUSE):
for scale in range(scales - 1, -1, -1):
y = conv_op(y, depth << scale, 3, activation=activation)
y = conv_op(y, depth << scale, 3, activation=activation)
y = upscale2d(y, 2)
y = conv_op(y, depth, 3, activation=activation)
y = conv_op(y, colors, 3)
return y
| brain-research/acai | lib/layers.py | Python | apache-2.0 | 3,324 | 0 |
#! /usr/bin/env python
import os
from function import function
import speech_recognition as sr
from server import EmailServer
from pygame import mixer
from subprocess import call
from send import SendEmail
from detect import face_rec
from face import face_detect
from trainer import face_train
mixer.init()
r = sr.Recognizer()
with sr.Microphone() as source:
r.adjust_for_ambient_noise(source)
while True:
def get_speech():
with sr.Microphone() as source:
audio = r.listen(source)
try:
recon=r.recognize_google(audio)
print recon
return recon
except:
recon=r.recognize_sphinx(audio)
call(["espeak","-s","160","i can't understand what you said, please say again"])
return get_speech()
def import_con():
call(["espeak","-s","160","Do you want to import your contact?"])
speech = get_speech()
if "yes" in speech.lower():
conn.import_contact()
rec = face_rec()
if rec.rec() != "0":
computername = rec.rec()
else:
call(["espeak","-s","160","This is Your First Time using me"])
call(["espeak","-s","160","Do you want to create a new account?"])
speech = get_speech()
if "yes" in speech.lower() or "yeah" in speech.lower():
det = face_detect()
det.new()
server_ad = function()
server_ad.add_user()
train = face_train()
train.train()
rec = face_rec()
computername = rec.rec()
else:
break
call(["espeak","-s","160","Hello "+computername+" can i help you?"])
speech = get_speech()
if "email" in speech.lower():
try:
server = function()
if server.get_last_id() == "0":
id=1
else:
id= server.get_last_id()
email,passwd = server.get_login_passwd(id)
email_server = email.split("@")[1].split(".")[0]
adress,port = server.GetServer(email_server,'imap')
print adress
print port
call(["espeak","-s","160","ok i will check it for you"])
conn = EmailServer()
conn.login_server(email.rstrip(),passwd,adress,port)
conn.inbox()
import_con()
listid = conn.returnid()
nb = server.get_email_nb(id)
up_nb = conn.emailnumber()
server.update_email_nb(id,up_nb)
conn.access_server(listid,nb)
except sr.UnknownValueError:
call(["espeak","there is errer"])
elif "send" in speech.lower() or "reply" in speech.lower() or "response" in speech.lower():
try:
call(["espeak","-s","160","you want to send email?"])
speech = get_speech()
if "yes" in speech.lower() or "yeah" in speech.lower():
call(["espeak","-s","160","ok i will send email for you"])
server_ad = function()
adress,port = server_ad.GetServer('gmail','smtp')
name,email,passwd = server_ad.get_login_passwd(2)
call(["espeak","-s","160","what's the subject of this email?"])
sub = get_speech()
call(["espeak","-s","160","what you want to say to him?"])
body = get_speech()
call(["espeak","-s","160","to who you want to send it?"])
to_txt = get_speech()
to = server_ad.get_to(2,to_txt)
send = SendEmail()
send.Send(email.rstrip(),passwd,sub,body,to,adress,port)
except sr.UnknownValueError:
call(["espeak","-s","160","there is errer"])
elif "add" in speech.lower() and "server" in speech.lower():
try:
call(["espeak","-s","160","are you sure you want to add new server?"])
speech = get_speech()
if "yes" in speech.lower():
server_ad = function()
server_ad.AddServer()
except sr.UnknownValueError:
call(["espeak","-s","160","there is errer"])
elif "no" in speech.lower() or "quit" in speech.lower() or "close" in speech.lower():
call(["espeak","-s","160","ok Good By."])
call(["espeak","-s","160","if you need me please run me any time"])
break
| yahya-idriss/Python-Personal-assistant | start.py | Python | mit | 4,231 | 0.038762 |
from django.contrib import admin
from image_cropping import ImageCroppingMixin
from imageboard.models import Image
class ImageAdmin(ImageCroppingMixin, admin.ModelAdmin):
list_display = ['__str__', 'tag_list', 'owner', 'created', 'updated', 'visible', 'get_image_url']
list_filter = ['owner', 'visible', 'created', 'updated']
list_editable = ['visible']
def save_model(self, request, obj, form, change):
obj.owner = request.user.profile
obj.save()
def get_queryset(self, request):
return super(ImageAdmin, self).get_queryset(request).prefetch_related('tags')
def tag_list(self, obj):
return u", ".join(o.name for o in obj.tags.all())
def get_image_url(self, obj):
return '<a href="{0}"><img src="{0}" width="100px"></a>'.format(obj.img.url)
get_image_url.allow_tags = True
get_image_url.short_description = 'Превью'
tag_list.short_description = 'Теги'
admin.site.register(Image, ImageAdmin)
| andrewnsk/dorokhin.moscow | imageboard/admin.py | Python | mit | 990 | 0.004082 |
#!/usr/bin/python
# Google Spreadsheet BMP Sensor Data-logging Example
# Depends on the 'gspread' package being installed. If you have pip installed
# execute:
# sudo pip install gspread
# Copyright (c) 2014 Adafruit Industries
# Author: Tony DiCola
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import sys
import time
import datetime
import json
import ssl
import Adafruit_BMP.BMP085 as BMP085
from Adafruit_LED_Backpack.SevenSegment import SevenSegment
from phant import Phant
LOGGING = True
COUNT = 0
# How long to wait (in seconds) between data uploads.
FREQUENCY_SECONDS = 300
# How long to wait (in seconds) to display F or C.
ALTERNATE_TEMP_SCALE_SECONDS = 5
# Approximately how often measurements are made (in seconds)
MEASUREMENT_INTERVAL = 2 * ALTERNATE_TEMP_SCALE_SECONDS
# How seldom to upload the sensor data, if LOGGING is on
COUNT_INTERVAL = FREQUENCY_SECONDS / MEASUREMENT_INTERVAL
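# With the defaults above: MEASUREMENT_INTERVAL = 2 * 5 = 10 seconds, so
# COUNT_INTERVAL = 300 / 10 = 30 measurement cycles between uploads.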
# Create sensor instance with default I2C bus (On Raspberry Pi either 0 or
# 1 based on the revision, on Beaglebone Black default to 1).
bmp = BMP085.BMP085(mode=BMP085.BMP085_HIGHRES, address=0x77)
# Initialize a LED display
#segment = SevenSegment(address=0x70)
segment = SevenSegment(address=0x71)
print segment
# Read in Phant config
json_keys_file2 = 'data.crookster.org.json'
p2=Phant(jsonPath=json_keys_file2)
print 'Logging sensor measurements taken every {2} seconds to "{0}" every {1} seconds.'.format(p2.title, FREQUENCY_SECONDS, MEASUREMENT_INTERVAL)
print p2
print 'Press Ctrl-C to quit.'
while True:
error_tables = {}
try:
# Attempt to get sensor readings.
temp = bmp.read_temperature()
pressure = bmp.read_pressure()
altitude = bmp.read_altitude()
temp_in_F = (temp * 9.0 / 5.0) + 32.0
print "Temperature: %.2f C" % temp
print "Temperature: %.2f F" % temp_in_F
print "Pressure: %.2f hPa" % (pressure / 100.0)
print "Altitude: %.2f m" % altitude
print "Press CTRL+C to exit"
print ""
for display_tmp_in_F in [False, True]:
if display_tmp_in_F:
if round(temp_in_F * 10.0) < 1000.0:
segment.set_digit(0, int(round(temp_in_F) / 10)) # Tens
segment.set_digit(1, int(round(temp_in_F) % 10)) # Ones
segment.set_digit(2, int(int(round(temp_in_F * 10.0)) % 10)) # Tenth
segment.set_digit(3, 'F')
segment.set_colon(True)
else:
segment.set_digit(0, int(round(temp_in_F) / 100)) # Hundreds
segment.set_digit(1, int(round(temp_in_F - 100.0) / 10)) # Tens
segment.set_digit(2, int(round(temp_in_F) % 10)) # Ones
segment.set_digit(3, 'F')
segment.set_colon(False)
else:
# write degrees
segment.set_digit(0, int(round(temp) / 10)) # Tens
segment.set_digit(1, int(round(temp) % 10)) # Ones
segment.set_digit(2, int(int(round(temp * 10.0)) % 10)) # Tenth
segment.set_digit(3, 'C')
segment.set_colon(True)
segment.write_display()
time.sleep(ALTERNATE_TEMP_SCALE_SECONDS)
if LOGGING:
ambient_temp_C = temp
ambient_temp_F = temp_in_F
ambient_pressure = pressure / 100.0
fields = (ambient_pressure, ambient_temp_C, ambient_temp_F, altitude)
print fields
if (COUNT % COUNT_INTERVAL) == 0:
p2.log(altitude, ambient_pressure, ambient_temp_C, ambient_temp_F)
print 'Wrote a row to {0}'.format(p2.title)
print(p2.remaining_bytes, p2.cap)
else:
print 'at {0} seconds out of {1}'.format((COUNT * MEASUREMENT_INTERVAL) % FREQUENCY_SECONDS, FREQUENCY_SECONDS )
COUNT = COUNT + 1
except KeyboardInterrupt:
segment.clear()
segment.write_display()
if 'KI' not in error_tables:
error_tables['KI'] = 1
else:
error_tables['KI'] += 1
print error_tables
sys.exit(0)
except ssl.SSLError:
# we had a network issue, try again later
if 'ssl.SSLError' not in error_tables:
error_tables['ssl.SSLError'] = 1
else:
error_tables['ssl.SSLError'] += 1
segment.clear()
segment.write_display()
print error_tables
# except:
# print "unhandled exception, skipping"
# if 'Unhandled' not in error_tables:
# error_tables['Unhandled'] = 1
# else:
# error_tables['Unhandled'] += 1
# print error_tables
finally:
segment.clear()
segment.write_display()
print error_tables
| dpcrook/timetemp | install/Archive/logging_sparkfun.py | Python | mit | 5,870 | 0.005281 |
from tweepy import Stream
from tweepy import OAuthHandler
from tweepy.streaming import StreamListener
import sqlite3 as sql3
import time
import json
import time
from datetime import datetime
import os
#import sys
#consumer key, consumer secret, access token, access secret.
ckey = os.environ.get('TWITTER_CKEY')
csecret = os.environ.get('TWITTER_CSECRET')
atoken = os.environ.get('TWITTER_TOKEN')
asecret = os.environ.get('TWITTER_SECRET')
new = 0
con = sql3.connect("tweets.db")
cur = con.cursor()
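# The INSERTs below assume these tables already exist in tweets.db
# (illustrative schema only, not part of the original source):
#   CREATE TABLE twitter_tweets (tweetID TEXT, time TEXT, username TEXT, screen_name TEXT);
#   CREATE TABLE twitter_tags (tweetID TEXT, hashtag TEXT);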
def newTweets():
if new == 0:
return
class Listener(StreamListener):
def on_data(self, data):
all_data = json.loads(data)
id = all_data["id_str"]
timestamp = time.strftime('%Y.%m.%d %H:%M', time.strptime(all_data["created_at"],'%a %b %d %H:%M:%S +0000 %Y'))
name = all_data["user"]["name"]
screen_name = all_data["user"]["screen_name"]
tagit = all_data["entities"]["hashtags"]
cur.execute("INSERT INTO twitter_tweets (tweetID, time, username, screen_name) VALUES (?, ?, ?, ?)",
(id, timestamp, name, screen_name))
for text in tagit:
cur.execute("INSERT INTO twitter_tags (tweetID, hashtag) VALUES (?, ?)",
(id, text["text"]))
con.commit()
print((id ,screen_name))
# print tagit
return True
def on_error(self, status):
if status == 420:
#returning False in on_data disconnects the stream
return False
print status
#auth = OAuthHandler(ckey, csecret)
#auth.set_access_token(atoken, asecret)
#twitterStream = Stream(auth, Listener())
#twitterStream.filter(track=["#car"])
def runStream():
auth = OAuthHandler(ckey, csecret)
auth.set_access_token(atoken, asecret)
twitterStream = Stream(auth, Listener())
twitterStream.filter(track=["#car"])
if __name__ == '__main__':
runStream()
| ssmdevelopers/jklhoods | streamtweet.py | Python | gpl-2.0 | 1,889 | 0.022763 |
from __future__ import (absolute_import, division, print_function)
import filecmp
import numpy as np
import os
import stresstesting
import tempfile
import mantid.simpleapi as mantid
import mantid.kernel as kernel
from tube_calib_fit_params import TubeCalibFitParams
from tube_calib import getCalibratedPixelPositions, getPoints
from tube_spec import TubeSpec
from ideal_tube import IdealTube
import tube
class WishCalibration(stresstesting.MantidStressTest):
"""
Runs the WISH calibration script and checks the result produced is sane
"""
def __init__(self):
super(WishCalibration, self).__init__()
self.calibration_table = None
self.correction_table = None
self.calibration_ref_name = "WishCalibrate_correction_table.csv"
self.correction_ref_name = "WishCalibrate_calibration_table.csv"
self.calibration_out_path = tempfile.NamedTemporaryFile().name
self.correction_out_path = tempfile.NamedTemporaryFile().name
def skipTests(self):
return True
def cleanup(self):
mantid.mtd.clear()
try:
os.remove(self.calibration_out_path)
os.remove(self.correction_out_path)
except OSError:
print("Failed to remove an temp output file in WishCalibration")
def requiredFiles(self):
return [self.calibration_ref_name, self.correction_ref_name]
def validate(self):
calibration_ref_path = mantid.FileFinder.getFullPath(self.calibration_ref_name)
correction_ref_path = mantid.FileFinder.getFullPath(self.correction_ref_name)
cal_result = filecmp.cmp(calibration_ref_path, self.calibration_out_path, False)
cor_result = filecmp.cmp(correction_ref_path, self.correction_out_path, False)
if not cal_result:
print("Calibration did not match in WishCalibrate")
if not cor_result:
print("Correction did not match in WishCalibrate")
return cal_result and cor_result
def runTest(self):
# This script calibrates WISH using known peak positions from
# neutron absorbing bands. The workspace with suffix "_calib"
        # contains calibrated data. The workspace with suffix "_corrected"
# contains calibrated data with known problematic tubes also corrected
ws = mantid.LoadNexusProcessed(Filename="WISH30541_integrated.nxs")
        # This array defines the positions of peaks on the detector in
        # meters from the center (0).
        # For WISH this is calculated as follows:
        # Height of all 7 bands = 0.26m => adjacent bands are separated by 0.260 / 6 = 0.0433m
        # The bands are on a cylinder of diameter 0.923m, so the angle subtended is
        # (0.0433 * n) / (0.923 / 2) where n is the number of bands above (or below) the
        # center band.
        # Putting this together with the distance to the detector tubes (2.2m) we get
        # the following: (0.0433 * n) / 0.4615 * 2200 = expected peak position in mm
        # From this we can show there should be 5 peaks (peaks 6 + 7 are too high/low)
        # at: 0, 0.206, 0.413 m respectively (this is symmetrical so +/-)
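        # Worked check of the numbers above (illustrative):
        #   n = 1: (0.0433 / 0.4615) * 2200 ~ 206 mm = 0.206 m
        #   n = 2: (0.0866 / 0.4615) * 2200 ~ 413 mm = 0.413 m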
peak_positions = np.array([-0.413, -0.206, 0, 0.206, 0.413])
funcForm = 5 * [1] # 5 gaussian peaks
fitPar = TubeCalibFitParams([59, 161, 258, 353, 448])
fitPar.setAutomatic(True)
instrument = ws.getInstrument()
spec = TubeSpec(ws)
spec.setTubeSpecByString(instrument.getFullName())
idealTube = IdealTube()
idealTube.setArray(peak_positions)
# First calibrate all of the detectors
calibrationTable, peaks = tube.calibrate(ws, spec, peak_positions, funcForm, margin=15,
outputPeak=True, fitPar=fitPar)
self.calibration_table = calibrationTable
def findBadPeakFits(peaksTable, threshold=10):
""" Find peaks whose fit values fall outside of a given tolerance
of the mean peak centers across all tubes.
            Tubes are defined as having a bad fit if the absolute difference
between the fitted peak centers for a specific tube and the
mean of the fitted peak centers for all tubes differ more than
the threshold parameter.
            @param peaksTable: the table containing fitted peak centers
            @param threshold: the tolerance on the difference from the mean value
            @return A list of expected peak positions and a list of indices of tubes
to correct
"""
n = len(peaksTable)
num_peaks = peaksTable.columnCount() - 1
column_names = ['Peak%d' % i for i in range(1, num_peaks + 1)]
data = np.zeros((n, num_peaks))
for i, row in enumerate(peaksTable):
data_row = [row[name] for name in column_names]
data[i, :] = data_row
# data now has all the peaks positions for each tube
# the mean value is the expected value for the peak position for each tube
expected_peak_pos = np.mean(data, axis=0)
# calculate how far from the expected position each peak position is
distance_from_expected = np.abs(data - expected_peak_pos)
check = np.where(distance_from_expected > threshold)[0]
problematic_tubes = list(set(check))
print("Problematic tubes are: " + str(problematic_tubes))
return expected_peak_pos, problematic_tubes
def correctMisalignedTubes(ws, calibrationTable, peaksTable, spec, idealTube, fitPar, threshold=10):
""" Correct misaligned tubes due to poor fitting results
during the first round of calibration.
Misaligned tubes are first identified according to a tolerance
applied to the absolute difference between the fitted tube
positions and the mean across all tubes.
The FindPeaks algorithm is then used to find a better fit
with the ideal tube positions as starting parameters
for the peak centers.
From the refitted peaks the positions of the detectors in the
tube are recalculated.
@param ws: the workspace to get the tube geometry from
            @param calibrationTable: the calibration table output from running calibration
@param peaksTable: the table containing the fitted peak centers from calibration
@param spec: the tube spec for the instrument
@param idealTube: the ideal tube for the instrument
@param fitPar: the fitting parameters for calibration
            @param threshold: tolerance defining whether a peak is outside of the acceptable range
@return table of corrected detector positions
"""
table_name = calibrationTable.name() + 'Corrected'
corrections_table = mantid.CreateEmptyTableWorkspace(OutputWorkspace=table_name)
corrections_table.addColumn('int', "Detector ID")
corrections_table.addColumn('V3D', "Detector Position")
mean_peaks, bad_tubes = findBadPeakFits(peaksTable, threshold)
for index in bad_tubes:
print("Refitting tube %s" % spec.getTubeName(index))
tube_dets, _ = spec.getTube(index)
getPoints(ws, idealTube.getFunctionalForms(), fitPar, tube_dets)
tube_ws = mantid.mtd['TubePlot']
fit_ws = mantid.FindPeaks(InputWorkspace=tube_ws, WorkspaceIndex=0,
PeakPositions=fitPar.getPeaks(), PeaksList='RefittedPeaks')
centers = [row['centre'] for row in fit_ws]
detIDList, detPosList = getCalibratedPixelPositions(ws, centers, idealTube.getArray(), tube_dets)
for id, pos in zip(detIDList, detPosList):
corrections_table.addRow({'Detector ID': id, 'Detector Position': kernel.V3D(*pos)})
return corrections_table
corrected_calibration_table = correctMisalignedTubes(ws, calibrationTable, peaks, spec, idealTube, fitPar)
self.correction_table = corrected_calibration_table
tube.saveCalibration(self.correction_table.getName(), out_path=self.calibration_out_path)
tube.saveCalibration(self.calibration_table.getName(), out_path=self.correction_out_path)
| wdzhou/mantid | Testing/SystemTests/tests/analysis/WishCalibrate.py | Python | gpl-3.0 | 8,447 | 0.003315 |
from SuperDiffer import app, db
from SuperDiffer.id import controllers as ID
from flask import Flask, render_template, request, abort, jsonify
import json,base64,pdb
"""Routes to allow clients to add left and right base64 encoded on JSON values and fetch their diff"""
#References: https://blog.miguelgrinberg.com/post/designing-a-restful-api-with-python-and-flask
@app.route('/v1/diff/<int:id>', methods=['GET'])
def diff_right_left(id):
    """Calculates the diff between left and right descriptors of a given ID and removes those descriptors if they're found (even if the data length is not the same and no diff is made)"""
all_diff_data = ID.diff(id, ["left","right"])
if not all_diff_data or not all_diff_data["left_right"]:
abort(400)
ID.remove_all(id, ["left","right"])
return jsonify(all_diff_data["left_right"])
@app.route('/v1/diff/<int:id>/left', methods=['POST'])
def add_left_to_id(id):
"""Add a JSON base64 value (in the format: {"data":"base64value"}) to the left descriptor of a given ID"""
return _add_data_to_id_description(id, "left", request.json)
@app.route('/v1/diff/<int:id>/right', methods=['POST'])
def add_right_to_id(id):
"""Add a JSON base64 value (in the format: {"data":"base64value"}) to the right descriptor of a given ID"""
return _add_data_to_id_description(id, "right", request.json)
def _is_base64(value):
"""Returns true only if value only has base64 chars (A-Z,a-z,0-9,+ or /)"""
#http://stackoverflow.com/questions/12315398/verify-is-a-string-is-encoded-in-base64-python
try:
enc = base64.b64encode(base64.b64decode(value)).strip()
return enc == value
except TypeError:
return False
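# Illustration: _is_base64("aGVsbG8=") returns True; input with characters outside the
# base64 alphabet or with broken padding returns False, either because the re-encoded
# value differs from the input or because b64decode raises TypeError (Python 2).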
def _add_data_to_id_description(id, descriptor, request_json):
"""Add a base64 value obtained from a JSON in the format {"data":"base64value"}) to the given descriptor of a given ID"""
if not "data" in request_json:# no data key on json ? abort !
abort(400)
    try:# arrays or other objects that don't have an encode method should not be accepted - abort !
no_unicode_data = request_json["data"].encode("utf-8")
except:
abort(400)
if not _is_base64(no_unicode_data):# no base64 value on data key ? abort !
abort(400)
if not ID.add(id, descriptor, no_unicode_data):# add failed due to some database problem ? yeah, abort !
abort(400)
return "Created", 201 #yey!
@app.errorhandler(404)
def not_found(error):
return render_template('404.html'), 404
| gpaOliveira/SuperDiffer | SuperDiffer/routes.py | Python | mit | 2,533 | 0.013423 |
#!/usr/bin/env python
# vim: sts=4 sw=4 et
# This is a component of EMC
# probe.py Copyright 2010 Michael Haberler
#
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
'''
gladevcp probe demo example
Michael Haberler 11/2010
'''
import os,sys
from gladevcp.persistence import IniFile,widget_defaults,set_debug,select_widgets
import hal
import hal_glib
import gtk
import glib
import linuxcnc
debug = 0
class EmcInterface(object):
def __init__(self):
try:
emcIniFile = linuxcnc.ini(os.environ['INI_FILE_NAME'])
linuxcnc.nmlfile = os.path.join(os.path.dirname(os.environ['INI_FILE_NAME']), emcIniFile.find("EMC", "NML_FILE"))
self.s = linuxcnc.stat();
self.c = linuxcnc.command()
except Exception, msg:
print "cant initialize EmcInterface: %s - EMC not running?" %(msg)
def running(self,do_poll=True):
if do_poll: self.s.poll()
return self.s.task_mode == linuxcnc.MODE_AUTO and self.s.interp_state != linuxcnc.INTERP_IDLE
def manual_ok(self,do_poll=True):
if do_poll: self.s.poll()
if self.s.task_state != linuxcnc.STATE_ON: return False
return self.s.interp_state == linuxcnc.INTERP_IDLE
def ensure_mode(self,m, *p):
'''
If emc is not already in one of the modes given, switch it to the first mode
example:
ensure_mode(linuxcnc.MODE_MDI)
ensure_mode(linuxcnc.MODE_AUTO, linuxcnc.MODE_MDI)
'''
self.s.poll()
if self.s.task_mode == m or self.s.task_mode in p: return True
if self.running(do_poll=False): return False
self.c.mode(m)
self.c.wait_complete()
return True
def active_codes(self):
self.s.poll()
return self.s.gcodes
def get_current_system(self):
for i in self.active_codes():
if i >= 540 and i <= 590:
return i/10 - 53
elif i >= 590 and i <= 593:
return i - 584
return 1
def mdi_command(self,command, wait=True):
#ensure_mode(emself.c.MODE_MDI)
self.c.mdi(command)
if wait: self.c.wait_complete()
def emc_status(self):
'''
return tuple (task mode, task state, exec state, interp state) as strings
'''
self.s.poll()
task_mode = ['invalid', 'MANUAL', 'AUTO', 'MDI'][self.s.task_mode]
task_state = ['invalid', 'ESTOP', 'ESTOP_RESET', 'OFF', 'ON'][self.s.task_state]
exec_state = ['invalid', 'ERROR', 'DONE',
'WAITING_FOR_MOTION',
'WAITING_FOR_MOTION_QUEUE',
'WAITING_FOR_IO',
'WAITING_FOR_PAUSE',
'WAITING_FOR_MOTION_AND_IO',
'WAITING_FOR_DELAY',
'WAITING_FOR_SYSTEM_CMD' ][self.s.exec_state]
interp_state = ['invalid', 'IDLE', 'READING', 'PAUSED', 'WAITING'][self.s.interp_state]
return (task_mode, task_state, exec_state, interp_state)
class HandlerClass:
def on_manual_mode(self,widget,data=None):
if self.e.ensure_mode(linuxcnc.MODE_MANUAL):
print "switched to manual mode"
else:
print "cant switch to manual in this state"
def on_mdi_mode(self,widget,data=None):
if self.e.ensure_mode(linuxcnc.MODE_MDI):
print "switched to MDI mode"
else:
print "cant switch to MDI in this state"
def _query_emc_status(self,data=None):
(task_mode, task_state, exec_state, interp_state) = self.e.emc_status()
self.builder.get_object('task_mode').set_label("Task mode: " + task_mode)
self.builder.get_object('task_state').set_label("Task state: " + task_state)
self.builder.get_object('exec_state').set_label("Exec state: " + exec_state)
self.builder.get_object('interp_state').set_label("Interp state: " + interp_state)
return True
def on_probe(self,widget,data=None):
label = widget.get_label()
axis = ord(label[0].lower()) - ord('x')
direction = 1.0
if label[1] == '-':
direction = -1.0
self.e.s.poll()
self.start_feed = self.e.s.settings[1]
# determine system we are touching off - 1...g54 etc
self.current_system = self.e.get_current_system()
# remember current abs or rel mode - g91
self.start_relative = (910 in self.e.active_codes())
self.previous_mode = self.e.s.task_mode
if self.e.s.task_state != linuxcnc.STATE_ON:
print "machine not turned on"
return
if not self.e.s.homed[axis]:
print "%s axis not homed" %(chr(axis + ord('X')))
return
if self.e.running(do_poll=False):
            print "cant do that now - interpreter running"
return
self.e.ensure_mode(linuxcnc.MODE_MDI)
self.e.mdi_command("#<_Probe_System> = %d " % (self.current_system ),wait=False)
self.e.mdi_command("#<_Probe_Axis> = %d " % (axis),wait=False)
self.e.mdi_command("#<_Probe_Speed> = %s " % (self.builder.get_object('probe_feed').get_value()),wait=False)
self.e.mdi_command("#<_Probe_Diameter> = %s " % (self.builder.get_object('probe_diameter').get_value() ),wait=False)
self.e.mdi_command("#<_Probe_Distance> = %s " % (self.builder.get_object('probe_travel').get_value() * direction),wait=False)
self.e.mdi_command("#<_Probe_Retract> = %s " % (self.builder.get_object('retract').get_value() * direction * -1.0),wait=False)
self.e.mdi_command("O<probe> call",wait=False)
self.e.mdi_command('F%f' % (self.start_feed),wait=False)
self.e.mdi_command('G91' if self.start_relative else 'G90',wait=False)
# self.e.ensure_mode(self.previous_mode)
def on_destroy(self,obj,data=None):
self.ini.save_state(self)
def on_restore_defaults(self,button,data=None):
'''
example callback for 'Reset to defaults' button
currently unused
'''
self.ini.create_default_ini()
self.ini.restore_state(self)
def __init__(self, halcomp,builder,useropts):
self.halcomp = halcomp
self.builder = builder
self.ini_filename = __name__ + '.ini'
self.defaults = { IniFile.vars: dict(),
IniFile.widgets : widget_defaults(select_widgets(self.builder.get_objects(), hal_only=False,output_only = True))
}
self.ini = IniFile(self.ini_filename,self.defaults,self.builder)
self.ini.restore_state(self)
self.e = EmcInterface()
glib.timeout_add_seconds(1, self._query_emc_status)
def get_handlers(halcomp,builder,useropts):
global debug
for cmd in useropts:
exec cmd in globals()
set_debug(debug)
return [HandlerClass(halcomp,builder,useropts)]
| araisrobo/linuxcnc | configs/gladevcp/probe/probe.py | Python | lgpl-2.1 | 7,589 | 0.011859 |
#!/usr/bin/env python
# Copyright 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Barbican Keystone notification listener server.
"""
import eventlet
import os
import sys
# Oslo messaging notification server uses eventlet.
#
# To have remote debugging, thread module needs to be disabled.
# eventlet.monkey_patch(thread=False)
eventlet.monkey_patch()
# 'Borrowed' from the Glance project:
# If ../barbican/__init__.py exists, add ../ to Python search path, so that
# it will override what happens to be installed in /usr/(local/)lib/python...
possible_topdir = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
os.pardir,
os.pardir))
if os.path.exists(os.path.join(possible_topdir, 'barbican', '__init__.py')):
sys.path.insert(0, possible_topdir)
from barbican.common import config
from barbican.openstack.common import log
from barbican.openstack.common import service
from barbican import queue
from barbican.queue import keystone_listener
from oslo_config import cfg
def fail(returncode, e):
sys.stderr.write("ERROR: {0}\n".format(e))
sys.exit(returncode)
if __name__ == '__main__':
try:
config.parse_args()
config.setup_remote_pydev_debug()
# Import and configure logging.
log.setup('barbican')
LOG = log.getLogger(__name__)
LOG.info("Booting up Barbican Keystone listener node...")
# Queuing initialization
CONF = cfg.CONF
queue.init(CONF)
if getattr(getattr(CONF, queue.KS_NOTIFICATIONS_GRP_NAME), 'enable'):
service.launch(
keystone_listener.MessageServer(CONF)
).wait()
else:
LOG.info("Exiting as Barbican Keystone listener is not enabled...")
except RuntimeError as e:
fail(1, e)
| jmvrbanac/barbican | bin/barbican-keystone-listener.py | Python | apache-2.0 | 2,403 | 0.002913 |
import requests
from allauth.socialaccount.providers.oauth2.views import (
OAuth2Adapter,
OAuth2CallbackView,
OAuth2LoginView,
)
from .provider import FakeProvider
class FakeOAuth2Adapter(OAuth2Adapter):
provider_id = FakeProvider.id
access_token_url = 'https://localhost/o/oauth2/token'
authorize_url = 'https://localhost/o/oauth2/auth'
profile_url = 'https://localhost/oauth2/v1/userinfo'
def complete_login(self, request, app, token, **kwargs):
resp = requests.get(self.profile_url,
params={'access_token': token.token,
'alt': 'json'})
extra_data = resp.json()
return self.get_provider().sociallogin_from_response(
request, extra_data)
oauth2_login = OAuth2LoginView.adapter_view(FakeOAuth2Adapter)
oauth2_callback = OAuth2CallbackView.adapter_view(FakeOAuth2Adapter)
| AltSchool/django-allauth | allauth/socialaccount/providers/fake/views.py | Python | mit | 910 | 0 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
'''
@date: 2017-04-06
@author: Heysion Yuan
@copyright: 2017, Heysion Yuan <heysions@gmail.com>
@license: GPLv3
'''
from dab.webui import WebBase
from dab.core.db.models import Task
class TaskIndex(WebBase):
def get(self):
dataset = self.get_task_top_all()
self.render("taskindex.html",tasklist=dataset)
# task_items = [
# {"id":1,"name":"deepin-auto-build","createtime":"2017","state":"success","resultinfo":"info"},
# {"id":2,"name":"deepin-auto-build","createtime":"2017","state":"success","resultinfo":"info"}
# ]
# self.render("task.html", tasklist=task_items)
# pass
def get_task_top_all(self):
dataset = Task.select(Task.id,Task.src_name,
Task.create_time,Task.state,
Task.owner_name)
datalist = []
if dataset :
for data in dataset:
                datalist.append({"id":data.id,"name":data.src_name,
                                 "createtime":data.create_time,"state":data.state,
                                 "resultinfo":data.owner_name})
return datalist
class TaskInfo(WebBase):
pass
class TaskNew(WebBase):
def get(self):
self.render("tasknew.html")
pass
def post(self):
req_data = { k: self.get_argument(k) for k in self.request.arguments }
if not ("arches" in req_data.keys()):
self.render("404.html")
if not ("name" in req_data and req_data["name"] is not None) :
self.render("404.html")
self.save_new_task(req_data)
self.render("/taskindex")
def save_new_task(self,data):
new_task = Task.select(Task.name).where(Task.name==data["name"])
if not new_task :
new_task = Task.create(name=data["name"],
suite=data["suite"],
codename=data["codename"],
architectures=data["arches"],
workdir=data["workbase"],
description=data["description"])
new_task.save()
else:
return None
return new_task
| heysion/deepin-auto-build | dab/webui/taskctrl.py | Python | gpl-3.0 | 2,297 | 0.01219 |
"""
:mod: DataIntegrityHandler
.. module: DataIntegrityHandler
:synopsis: DataIntegrityHandler is the implementation of the Data Integrity service in
the DISET framework
"""
# from DIRAC
from DIRAC import S_OK
from DIRAC.Core.DISET.RequestHandler import RequestHandler
from DIRAC.DataManagementSystem.DB.DataIntegrityDB import DataIntegrityDB
class DataIntegrityHandlerMixin:
"""
.. class:: DataIntegrityHandler
Implementation of the Data Integrity service in the DISET framework.
"""
@classmethod
def initializeHandler(cls, serviceInfoDict):
"""Initialization of DB object"""
cls.dataIntegrityDB = DataIntegrityDB()
return S_OK()
types_removeProblematic = [[int, list]]
def export_removeProblematic(self, fileID):
"""Remove the file with the supplied FileID from the database"""
if isinstance(fileID, list):
fileIDs = fileID
else:
fileIDs = [int(fileID)]
self.log.info("DataIntegrityHandler.removeProblematic: Attempting to remove problematic.")
res = self.dataIntegrityDB.removeProblematic(fileIDs)
if not res["OK"]:
self.log.error("DataIntegrityHandler.removeProblematic: Failed to remove problematic.", res["Message"])
return res
types_getProblematic = []
def export_getProblematic(self):
"""Get the next problematic to resolve from the IntegrityDB"""
self.log.info("DataIntegrityHandler.getProblematic: Getting file to resolve.")
res = self.dataIntegrityDB.getProblematic()
if not res["OK"]:
self.log.error(
"DataIntegrityHandler.getProblematic: Failed to get problematic file to resolve.", res["Message"]
)
return res
types_getPrognosisProblematics = [str]
def export_getPrognosisProblematics(self, prognosis):
"""Get problematic files from the problematics table of the IntegrityDB"""
self.log.info("DataIntegrityHandler.getPrognosisProblematics: Getting files with %s prognosis." % prognosis)
res = self.dataIntegrityDB.getPrognosisProblematics(prognosis)
if not res["OK"]:
self.log.error(
"DataIntegrityHandler.getPrognosisProblematics: Failed to get prognosis files.", res["Message"]
)
return res
types_setProblematicStatus = [int, str]
def export_setProblematicStatus(self, fileID, status):
"""Update the status of the problematics with the provided fileID"""
self.log.info("DataIntegrityHandler.setProblematicStatus: Setting file %s status to %s." % (fileID, status))
res = self.dataIntegrityDB.setProblematicStatus(fileID, status)
if not res["OK"]:
self.log.error("DataIntegrityHandler.setProblematicStatus: Failed to set status.", res["Message"])
return res
types_incrementProblematicRetry = [int]
def export_incrementProblematicRetry(self, fileID):
"""Update the retry count for supplied file ID."""
self.log.info("DataIntegrityHandler.incrementProblematicRetry: Incrementing retries for file %s." % (fileID))
res = self.dataIntegrityDB.incrementProblematicRetry(fileID)
if not res["OK"]:
self.log.error(
"DataIntegrityHandler.incrementProblematicRetry: Failed to increment retries.", res["Message"]
)
return res
types_insertProblematic = [str, dict]
def export_insertProblematic(self, source, fileMetadata):
"""Insert problematic files into the problematics table of the IntegrityDB"""
self.log.info("DataIntegrityHandler.insertProblematic: Inserting problematic file to integrity DB.")
res = self.dataIntegrityDB.insertProblematic(source, fileMetadata)
if not res["OK"]:
self.log.error("DataIntegrityHandler.insertProblematic: Failed to insert.", res["Message"])
return res
types_changeProblematicPrognosis = []
def export_changeProblematicPrognosis(self, fileID, newPrognosis):
"""Change the prognosis for the supplied file"""
self.log.info("DataIntegrityHandler.changeProblematicPrognosis: Changing problematic prognosis.")
res = self.dataIntegrityDB.changeProblematicPrognosis(fileID, newPrognosis)
if not res["OK"]:
self.log.error("DataIntegrityHandler.changeProblematicPrognosis: Failed to update.", res["Message"])
return res
types_getTransformationProblematics = [int]
def export_getTransformationProblematics(self, transID):
"""Get the problematics for a given transformation"""
self.log.info("DataIntegrityHandler.getTransformationProblematics: Getting problematics for transformation.")
res = self.dataIntegrityDB.getTransformationProblematics(transID)
if not res["OK"]:
self.log.error("DataIntegrityHandler.getTransformationProblematics: Failed.", res["Message"])
return res
types_getProblematicsSummary = []
def export_getProblematicsSummary(self):
"""Get a summary from the Problematics table from the IntegrityDB"""
self.log.info("DataIntegrityHandler.getProblematicsSummary: Getting problematics summary.")
res = self.dataIntegrityDB.getProblematicsSummary()
if res["OK"]:
for prognosis, statusDict in res["Value"].items():
self.log.info("DataIntegrityHandler.getProblematicsSummary: %s." % prognosis)
for status, count in statusDict.items():
self.log.info("DataIntegrityHandler.getProblematicsSummary: \t%-10s %-10s." % (status, str(count)))
else:
self.log.error("DataIntegrityHandler.getProblematicsSummary: Failed to get summary.", res["Message"])
return res
types_getDistinctPrognosis = []
def export_getDistinctPrognosis(self):
"""Get a list of the distinct prognosis from the IntegrityDB"""
self.log.info("DataIntegrityHandler.getDistinctPrognosis: Getting distinct prognosis.")
res = self.dataIntegrityDB.getDistinctPrognosis()
if res["OK"]:
for prognosis in res["Value"]:
self.log.info("DataIntegrityHandler.getDistinctPrognosis: \t%s." % prognosis)
else:
self.log.error("DataIntegrityHandler.getDistinctPrognosis: Failed to get unique prognosis.", res["Message"])
return res
class DataIntegrityHandler(DataIntegrityHandlerMixin, RequestHandler):
pass
| DIRACGrid/DIRAC | src/DIRAC/DataManagementSystem/Service/DataIntegrityHandler.py | Python | gpl-3.0 | 6,511 | 0.004147 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2014 Cubic ERP SAC (<http://cubicerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from osv import osv, fields
class res_company(osv.osv):
_name = "res.company"
_inherit = 'res.company'
_columns = {
'currency2_id' : fields.many2one('res.currency', string="Secondary Currency"),
} | jolevq/odoopub | extra-addons/customize/res_company.py | Python | agpl-3.0 | 1,213 | 0.003298 |
XXXXXXX | dnaextrim/django_adminlte_x | adminlte/static/plugins/datatables/extensions/ColReorder/examples/predefined.html.py | Python | mit | 16,794 | 0.037752 |
import numpy as np
import pdb
from scipy import linalg as splinalg
# A = np.array([
# [1, 1, -2, 1, 3, -1],
# [2, -1, 1, 2, 1, -3],
# [1, 3, -3, -1, 2, 1],
# [5, 2, -1, -1, 2, 1],
# [-3, -1, 2, 3, 1, 3],
# [4, 3, 1, -6, -3, -2]
# ], dtype=float)
# b = np.array([4, 20, -15, -3, 16, -27], dtype=float)
A = np.array([
[8,4,4],
[2,-4,1],
[2,-1,3]
], dtype = float)
b = np.array([
80, 7, 22
], dtype=float)
# A = np.array([
# [3,-0.1,-0.2],
# [0.1,7,-0.3],
# [0.3,-0.2,10]
# ], dtype = float)
# b = np.array([
# 7.85, -19.3, 71.4
# ], dtype=float)
# Simplest version
def gauss1(A, b):
assert A.shape[0] == len(b), "A and b must have the same length"
dim = A.shape[0]
x = np.zeros(dim)
# Elimination
for i in range(dim - 1):
for j in range(i + 1, dim):
c = A[j, i] / A[i, i]
A[j, :] -= (c * A[i, :])
b[j] -= (c * b[i])
# Substitution
x[-1] = b[-1] / A[-1, -1]
for i in range(dim - 2, -1, -1):
sum = b[i]
for j in range(dim - 1, i - 1, -1):
sum -= x[j] * A[i, j]
x[i] = sum / A[i, i]
return x
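# A minimal sketch of how gauss1 can be used (illustrative only; gauss1 has no
# pivoting, so it assumes no zero pivot is encountered, and it modifies A and b
# in place):
#   gauss1(np.array([[2., 1.], [1., 3.]]), np.array([3., 5.]))
# returns approximately [0.8, 1.4].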
def gauss(A, b, tol, err):
assert A.shape[0] == len(b), "A and b must have the same length"
dim = A.shape[0]
x = np.zeros(dim)
pv = np.arange(0, dim, 1)
err = 0
# Eliminate everything but the last row (dim-1)
for i in range(dim - 1):
# Store the current pivot from the pivot list
pvt = pv[i]
# Store the value of the current pivot
pvv = A[pvt, i]
# Search the other row specified in the pivot list
for k in pv:
# Check if the other rows have larger pivot values
val = A[k, i]
# print("val ({0}) > pvv({1})".format(val, pvv))
if val > pvv:
# We found a larger row, store the value and so we can check the others
pvv = val
pvt = k
# Did we find a new pivot that is in a row below us?
if pvt > pv[i]:
# If we did switch the indices in the pivot list
#print("We switched row {0} with pivot {1} for row {2} with pivot {3}".format(pv[i], A[pv[i], i], pvt, A[pvt,i]))
tmp = pv[i]
pv[i] = pvt
pv[pvt] = tmp
# print(pv)
# Check if the current pivot is close to 0
# if it is, break and set the error flag
if np.abs(A[pv[i], i]) < tol:
err = -1
break
# Here we actually perform the actual elimination
for j in range(i + 1, dim):
# print("c = {0}/{1}".format(A[pv[j], i], A[pv[i], i]))
c = A[pv[j], i] / A[pv[i], i]
# print(A[pv[j], i:])
# print((c * A[pv[i], i:]))
A[pv[j], i:] -= (c * A[pv[i], i:])
# print(A[pv[j], :])
b[pv[j]] -= (c * b[pv[i]])
# print(A)
#print(b)
# Quit here is the system is singular
if err == -1:
return x
# Now we begin back substitution by calculating the last x value
x[-1] = b[pv[-1]] / A[pv[-1], -1]
# Now we solve the remaining equations
# dim-2 starts means we begin at second row from the end and go until the 0th row
for i in range(dim - 2, -1, -1):
# Grab the corresponding b value
sum = b[pv[i]]
# Now we sum from the last column (dim -1 ) to the current column (i-1)
for j in range(dim - 1, i - 1, -1):
sum -= x[j] * A[pv[i], j]
x[i] = sum / A[pv[i], i]
return x
def lu_factor(A, tol, err):
"""Returns the matrix A with the LU matrices and a pivot vector containing information on how the matrix was eliminated.
    Passing these values to lu_solve with a b vector will solve the equation"""
dim = A.shape[0]
pv = np.arange(0, dim, 1)
err = 0
# Eliminate everything but the last row (dim-1)
for i in range(dim - 1):
# Store the current pivot from the pivot list
pvt = pv[i]
# Store the value of the current pivot
pvv = A[pvt, i]
# Search the other row specified in the pivot list
for k in pv:
# Check if the other rows have larger pivot values
val = A[k, i]
# print("val ({0}) > pvv({1})".format(val, pvv))
if val > pvv:
# We found a larger row, store the value and so we can check the others
pvv = val
pvt = k
# Did we find a new pivot?
if pvt > pv[i]:
# If we did switch the indices in the pivot list
# print("We switched row {0} with pivot {1} for row {2} with pivot {3}".format(pv[i], A[pv[i], i], pvt, A[pvt,i]))
tmp = pv[i]
pv[i] = pvt
pv[pvt] = tmp
# print(pv)
# Check if the current pivot is close to 0
# if it is, break and set the error flag
if np.abs(A[pv[i], i]) < tol:
err = -1
break
# Here we actually perform the actual elimination
for j in range(i + 1, dim):
# print("c = {0}/{1}".format(A[pv[j], i], A[pv[i], i]))
c = A[pv[j], i] / A[pv[i], i]
# print(A[pv[j], i:])
# print((c * A[pv[i], i:]))
A[pv[j], i:] -= (c * A[pv[i], i:])
# print(A[pv[j], :])
#print("Replacing index {0},{1} with value {2} with {3}".format(pv[j], i, A[pv[j], i], c))
A[pv[j], i] = c
# print(A)
# Quit here if the system is singular
if err == -1:
return None
else:
return (A, pv)
def lu_solve(A, pv, b):
""" Solves the system Ax=b given the output from lu_factor"""
dim = A.shape[0]
x = np.zeros(dim)
for i in range(dim - 1):
for j in range(i + 1, dim):
#All of our c's are stored in A from the output of LU factor
c = A[pv[j], i]
#Calculate the b vector that would result from the typical elimination procedure
b[pv[j]] -= (c * b[pv[i]])
#print(d)
x[-1] = b[pv[-1]] / A[pv[-1], -1]
# Now we solve the remaining equations, this is the same as Gaussian back substitution
    # Starting at dim-2 means we begin at the second row from the end and go until the 0th row
for i in range(dim - 2, -1, -1):
# Grab the corresponding b value
sum = b[pv[i]]
# Now we sum from the last column (dim -1 ) to the current column (i-1)
for j in range(dim - 1, i - 1, -1):
sum -= x[j] * A[pv[i], j]
x[i] = sum / A[pv[i], i]
return x
def inv(A, tol, err):
"""We always assume square matrices"""
dim = A.shape[0]
A1 = np.zeros(A.shape)
A, pvt = lu_factor(A, tol, err)
if err == -1:
return None
for i in range(dim):
b = np.zeros(dim)
b[i] = 1
x = lu_solve(A, pvt, b)
A1[:, i] = np.copy(x)
return A1
def gauss_seidel(A, b, x, tol, maxi, lam):
""" x should contain initial guesses (can be 0)"""
dim = A.shape[0]
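    # lam is a relaxation factor: lam = 1 gives plain Gauss-Seidel, lam > 1
    # over-relaxation (SOR) and lam < 1 under-relaxation.  maxi caps the number of
    # iterations and tol is the approximate percent-error stopping criterion.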
    # Divide each row by its diagonal element
for i in range(dim):
tmp = A[i,i]
for j in range(dim):
A[i,j] /= tmp
b[i] /= tmp
# print(A)
for i in range(dim):
acc = b[i]
for j in range(dim):
if i == j:
# print("Skipping i = {0} and j = {1}".format(i, j))
continue
else:
acc -= A[i, j] * x[j]
# print("Old x = {0}, new x = {1}".format(x[i], acc))
x[i] = acc
for i in range(maxi):
flag = 1
for k in range(dim):
acc = b[k]
oldx = x[k]
for j in range(dim):
if k == j:
continue
else:
# print('k = {0}, j={1}'.format(k, j))
acc -= (A[k,j] * x[j])
# print(acc)
# print("Old x = {0}, new x = {1}".format(oldx, (lam * acc) + ((1-lam) * oldx)))
x[k] = (lam * acc) + ((1-lam) * oldx)
if flag ==1 and x[k] != 0:
ea = abs((x[k] - oldx)/x[k]) * 100
# print("Error is equal to {0}".format(ea))
if ea > tol:
flag = 0
if flag == 1:
print('Breaking with ea = {0} and num iterations: {1}'.format(ea, i))
break
return x
e = 0
x2 = gauss(np.copy(A), np.copy(b), 0.001, e)
aa, pv = lu_factor(np.copy(A), 0.001, e)
x1 = lu_solve(aa, pv, np.copy(b))
print(np.dot(A,x2))
print(np.dot(A,x1))
x3 = gauss_seidel(np.copy(A), np.copy(b), np.zeros(A.shape[0]), 0.0001, 25, 1.03)
print(np.dot(A,x3))
| Seek/LaTechNumeric | linearalg/linalg.py | Python | mit | 8,666 | 0.006577 |
# -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2017, Shoop Commerce Ltd. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import unicode_literals
from django import forms
from django.conf import settings
from django.contrib import messages
from django.contrib.auth import get_user_model
from django.contrib.auth.models import Group as PermissionGroup
from django.forms.models import modelform_factory
from django.http.response import HttpResponseRedirect
from django.utils.encoding import force_text
from django.utils.translation import ugettext_lazy as _
from django.views.generic.edit import UpdateView
from shuup.admin.forms.fields import Select2MultipleField
from shuup.admin.toolbar import get_default_edit_toolbar
from shuup.admin.utils.urls import get_model_url
class PermissionChangeFormBase(forms.ModelForm):
old_password = forms.CharField(
label=_("Your Password"),
widget=forms.PasswordInput,
help_text=_("For security purposes, we need your current password.")
)
def __init__(self, changing_user, *args, **kwargs):
super(PermissionChangeFormBase, self).__init__(*args, **kwargs)
self.changing_user = changing_user
if getattr(self.instance, 'is_superuser', False) and not getattr(self.changing_user, 'is_superuser', False):
self.fields.pop("is_superuser")
if not (
self.changing_user == self.instance or
getattr(self.instance, 'is_superuser', False)
):
# Only require old password when editing
self.fields.pop("old_password")
initial_groups = self._get_initial_groups()
permission_groups_field = Select2MultipleField(
model=PermissionGroup,
initial=[group.pk for group in initial_groups],
required=False,
label=_("Permission Groups"),
help_text=_(
"The permission groups that this user belongs to. "
"Permission groups are configured through Contacts - Permission Groups."
)
)
permission_groups_field.widget.choices = [(group.pk, force_text(group)) for group in initial_groups]
self.fields["permission_groups"] = permission_groups_field
def _get_initial_groups(self):
if self.instance.pk and hasattr(self.instance, "groups"):
return self.instance.groups.all()
else:
return []
def clean_old_password(self):
"""
Validates that the old_password field is correct.
"""
old_password = self.cleaned_data["old_password"]
if not self.changing_user.check_password(old_password):
raise forms.ValidationError(
_("Your old password was entered incorrectly. Please enter it again."),
code='password_incorrect',
)
return old_password
def clean_members(self):
members = self.cleaned_data.get("members", [])
return get_user_model().objects.filter(pk__in=members).all()
def clean_permission_groups(self):
permission_groups = self.cleaned_data.get("permission_groups", [])
return PermissionGroup.objects.filter(pk__in=permission_groups)
def clean(self):
for field in ("is_staff", "is_superuser"):
flag = self.cleaned_data[field]
if self.changing_user == self.instance and not flag:
self.add_error(field, _("You can't unset this status for yourself."))
return self.cleaned_data
def save(self):
obj = super(PermissionChangeFormBase, self).save()
obj.groups.clear()
obj.groups = self.cleaned_data["permission_groups"]
class UserChangePermissionsView(UpdateView):
template_name = "shuup/admin/users/change_permissions.jinja"
model = settings.AUTH_USER_MODEL
title = _("Change User Permissions")
def get_form_class(self):
return modelform_factory(
model=get_user_model(),
form=PermissionChangeFormBase,
fields=("is_staff", "is_superuser")
)
def get_queryset(self):
return get_user_model().objects.all()
def get_toolbar(self):
toolbar = get_default_edit_toolbar(
self,
"permissions_form",
discard_url=get_model_url(self.object),
with_split_save=False
)
return toolbar
def get_form_kwargs(self):
kwargs = super(UserChangePermissionsView, self).get_form_kwargs()
kwargs["changing_user"] = self.request.user
return kwargs
def get_context_data(self, **kwargs):
context = super(UserChangePermissionsView, self).get_context_data(**kwargs)
context["toolbar"] = self.get_toolbar()
context["title"] = _("Change Permissions: %s") % self.object
return context
def form_valid(self, form):
form.save()
messages.success(self.request, _("Permissions changed for %s.") % self.object)
return HttpResponseRedirect(self.get_success_url())
def get_success_url(self):
return get_model_url(self.object)
| suutari-ai/shoop | shuup/admin/modules/users/views/permissions.py | Python | agpl-3.0 | 5,257 | 0.001332 |
import socket
import random
import time
# A blocking server that simply sends hello to anyone who connects to it
def blocking_server(bind='0.0.0.0', port=8080, queued_connections=5):
"""
    This sets up a blocking socket. We will be listening for incoming
    connections.
"""
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
# sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.bind((bind, port))
sock.listen(queued_connections) # max num of queued connections
return sock
def handle_connections(server):
"""
    Accept connections and send data on a server socket that is already set
    up. Accepting a connection creates a separate socket for the peer whose
    connection we accepted.
"""
# Accept a connection. The socket must be bound to an address and listening
# for connections. The return value is a pair (conn, address) where conn is
# a new socket object usable to send and receive data on the connection,
# and address is the address bound to the socket on the other end of the
# connection.
sock, addr = server.accept() # this is a blocking call
time.sleep(random.randint(1,5))
sock.sendall("Hello") # this is a blocking call
sock.close()
server = blocking_server()
while True:
handle_connections(server)
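# A minimal client sketch (illustrative, not part of this module) that talks to the
# server above:
#   import socket
#   client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
#   client.connect(('127.0.0.1', 8080))
#   print client.recv(1024)  # blocks until the server sends "Hello"
#   client.close()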
| mr-uuid/snippets | python/sockets/servers/blocking.py | Python | mit | 1,330 | 0.001504 |
#!/usr/bin/env python3
'''Test for DDNS forwarding'''
from dnstest.test import Test
t = Test()
master = t.server("knot")
slave = t.server("knot")
zone = t.zone("example.com.")
t.link(zone, master, slave, ddns=True)
t.start()
master.zones_wait(zone)
seri = slave.zones_wait(zone)
# OK
update = slave.update(zone)
update.add("forwarded.example.com.", 1, "TXT", "forwarded")
update.send("NOERROR")
resp = master.dig("forwarded.example.com.", "TXT")
resp.check("forwarded")
slave.zones_wait(zone, seri)
t.xfr_diff(master, slave, zone)
# NAME out of zone
update = slave.update(zone)
update.add("forwarded.", 1, "TXT", "forwarded")
update.send("NOTZONE")
resp = master.dig("forwarded.", "TXT")
resp.check(rcode="REFUSED")
t.sleep(3)
t.xfr_diff(master, slave, zone)
t.end()
| CZ-NIC/knot | tests-extra/tests/ddns/forward/test.py | Python | gpl-3.0 | 777 | 0 |
# Main network and testnet3 definitions
params = {
'bitcoin_main': {
'pubkey_address': 50,
'script_address': 9,
'genesis_hash': '00000c7c73d8ce604178dae13f0fc6ec0be3275614366d44b1b4b5c6e238c60c'
},
'bitcoin_test': {
'pubkey_address': 88,
'script_address': 188,
'genesis_hash': '000003ae7f631de18a457fa4fa078e6fa8aff38e258458f8189810de5d62cede'
}
}
| mazaclub/tate-server | src/networks.py | Python | agpl-3.0 | 413 | 0.004843 |
'''
'''
__version__ = '0.1-dev'
device_config_name = 'Devices'
exp_config_name = 'experiment'
| matham/sniffer | sniffer/__init__.py | Python | mit | 97 | 0 |
# Valid-IP-checker-
# This program checks whether a given IP address is a valid IPv4 address
def ip_checkv4(ip):
parts=ip.split(".")
if len(parts)<4 or len(parts)>4:
  return "invalid IP: it must have exactly 4 octets"
else:
while len(parts)== 4:
a=int(parts[0])
b=int(parts[1])
c=int(parts[2])
d=int(parts[3])
if a<= 0 or a == 127 :
return "invalid IP address"
elif d == 0:
return "host id should not be 0 or less than zero "
elif a>=255:
return "should not be 255 or greater than 255 or less than 0 A"
elif b>=255 or b<0:
return "should not be 255 or greater than 255 or less than 0 B"
elif c>=255 or c<0:
return "should not be 255 or greater than 255 or less than 0 C"
   elif d>=255 or d<0:
return "should not be 255 or greater than 255 or less than 0 D"
else:
return "Valid IP address ", ip
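# Illustrative behaviour: ip_checkv4("192.168.1.10") returns the "Valid IP address"
# tuple, while ip_checkv4("256.1.1.1") returns an error string.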
p=raw_input("Enter IP address")
print ip_checkv4(p)
| 0101a/Valid-IP-checker- | Ipv4.py | Python | gpl-2.0 | 918 | 0.051198 |
"""
Helper functions for creating Form classes from Django models
and database field objects.
"""
from __future__ import unicode_literals
from collections import OrderedDict
from itertools import chain
from django.core.exceptions import (
NON_FIELD_ERRORS, FieldError, ImproperlyConfigured, ValidationError,
)
from django.forms.fields import ChoiceField, Field
from django.forms.forms import BaseForm, DeclarativeFieldsMetaclass
from django.forms.formsets import BaseFormSet, formset_factory
from django.forms.utils import ErrorList
from django.forms.widgets import (
HiddenInput, MultipleHiddenInput, SelectMultiple,
)
from django.utils import six
from django.utils.encoding import force_text, smart_text
from django.utils.text import capfirst, get_text_list
from django.utils.translation import ugettext, ugettext_lazy as _
__all__ = (
'ModelForm', 'BaseModelForm', 'model_to_dict', 'fields_for_model',
'save_instance', 'ModelChoiceField', 'ModelMultipleChoiceField',
'ALL_FIELDS', 'BaseModelFormSet', 'modelformset_factory',
'BaseInlineFormSet', 'inlineformset_factory', 'modelform_factory',
)
ALL_FIELDS = '__all__'
def construct_instance(form, instance, fields=None, exclude=None):
"""
Constructs and returns a model instance from the bound ``form``'s
``cleaned_data``, but does not save the returned instance to the
database.
"""
from django.db import models
opts = instance._meta
cleaned_data = form.cleaned_data
file_field_list = []
for f in opts.fields:
if not f.editable or isinstance(f, models.AutoField) \
or f.name not in cleaned_data:
continue
if fields is not None and f.name not in fields:
continue
if exclude and f.name in exclude:
continue
# Defer saving file-type fields until after the other fields, so a
# callable upload_to can use the values from other fields.
if isinstance(f, models.FileField):
file_field_list.append(f)
else:
f.save_form_data(instance, cleaned_data[f.name])
for f in file_field_list:
f.save_form_data(instance, cleaned_data[f.name])
return instance
def save_instance(form, instance, fields=None, fail_message='saved',
commit=True, exclude=None, construct=True):
"""
Saves bound Form ``form``'s cleaned_data into model instance ``instance``.
If commit=True, then the changes to ``instance`` will be saved to the
database. Returns ``instance``.
If construct=False, assume ``instance`` has already been constructed and
just needs to be saved.
"""
if construct:
instance = construct_instance(form, instance, fields, exclude)
opts = instance._meta
if form.errors:
raise ValueError("The %s could not be %s because the data didn't"
" validate." % (opts.object_name, fail_message))
# Wrap up the saving of m2m data as a function.
def save_m2m():
cleaned_data = form.cleaned_data
# Note that for historical reasons we want to include also
# virtual_fields here. (GenericRelation was previously a fake
# m2m field).
for f in chain(opts.many_to_many, opts.virtual_fields):
if not hasattr(f, 'save_form_data'):
continue
if fields and f.name not in fields:
continue
if exclude and f.name in exclude:
continue
if f.name in cleaned_data:
f.save_form_data(instance, cleaned_data[f.name])
if commit:
# If we are committing, save the instance and the m2m data immediately.
instance.save()
save_m2m()
else:
# We're not committing. Add a method to the form to allow deferred
# saving of m2m data.
form.save_m2m = save_m2m
return instance
# ModelForms #################################################################
def model_to_dict(instance, fields=None, exclude=None):
"""
Returns a dict containing the data in ``instance`` suitable for passing as
a Form's ``initial`` keyword argument.
``fields`` is an optional list of field names. If provided, only the named
fields will be included in the returned dict.
``exclude`` is an optional list of field names. If provided, the named
fields will be excluded from the returned dict, even if they are listed in
the ``fields`` argument.
"""
# avoid a circular import
from django.db.models.fields.related import ManyToManyField
opts = instance._meta
data = {}
for f in chain(opts.concrete_fields, opts.virtual_fields, opts.many_to_many):
if not getattr(f, 'editable', False):
continue
if fields and f.name not in fields:
continue
if exclude and f.name in exclude:
continue
if isinstance(f, ManyToManyField):
# If the object doesn't have a primary key yet, just use an empty
# list for its m2m fields. Calling f.value_from_object will raise
# an exception.
if instance.pk is None:
data[f.name] = []
else:
# MultipleChoiceWidget needs a list of pks, not object instances.
qs = f.value_from_object(instance)
if qs._result_cache is not None:
data[f.name] = [item.pk for item in qs]
else:
data[f.name] = list(qs.values_list('pk', flat=True))
else:
data[f.name] = f.value_from_object(instance)
return data
def fields_for_model(model, fields=None, exclude=None, widgets=None,
formfield_callback=None, localized_fields=None,
labels=None, help_texts=None, error_messages=None,
field_classes=None):
"""
Returns a ``OrderedDict`` containing form fields for the given model.
``fields`` is an optional list of field names. If provided, only the named
fields will be included in the returned fields.
``exclude`` is an optional list of field names. If provided, the named
fields will be excluded from the returned fields, even if they are listed
in the ``fields`` argument.
``widgets`` is a dictionary of model field names mapped to a widget.
``formfield_callback`` is a callable that takes a model field and returns
a form field.
``localized_fields`` is a list of names of fields which should be localized.
``labels`` is a dictionary of model field names mapped to a label.
``help_texts`` is a dictionary of model field names mapped to a help text.
``error_messages`` is a dictionary of model field names mapped to a
dictionary of error messages.
``field_classes`` is a dictionary of model field names mapped to a form
field class.
"""
field_list = []
ignored = []
opts = model._meta
# Avoid circular import
from django.db.models.fields import Field as ModelField
sortable_virtual_fields = [f for f in opts.virtual_fields
if isinstance(f, ModelField)]
for f in sorted(chain(opts.concrete_fields, sortable_virtual_fields, opts.many_to_many)):
if not getattr(f, 'editable', False):
continue
if fields is not None and f.name not in fields:
continue
if exclude and f.name in exclude:
continue
kwargs = {}
if widgets and f.name in widgets:
kwargs['widget'] = widgets[f.name]
if localized_fields == ALL_FIELDS or (localized_fields and f.name in localized_fields):
kwargs['localize'] = True
if labels and f.name in labels:
kwargs['label'] = labels[f.name]
if help_texts and f.name in help_texts:
kwargs['help_text'] = help_texts[f.name]
if error_messages and f.name in error_messages:
kwargs['error_messages'] = error_messages[f.name]
if field_classes and f.name in field_classes:
kwargs['form_class'] = field_classes[f.name]
if formfield_callback is None:
formfield = f.formfield(**kwargs)
elif not callable(formfield_callback):
raise TypeError('formfield_callback must be a function or callable')
else:
formfield = formfield_callback(f, **kwargs)
if formfield:
field_list.append((f.name, formfield))
else:
ignored.append(f.name)
field_dict = OrderedDict(field_list)
if fields:
field_dict = OrderedDict(
[(f, field_dict.get(f)) for f in fields
if ((not exclude) or (exclude and f not in exclude)) and (f not in ignored)]
)
return field_dict
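# Illustrative sketch (``Book`` is a hypothetical model): the returned
# OrderedDict maps model field names to form fields, with the per-field
# overrides (widgets, labels, help_texts, ...) already applied:
#
#     form_fields = fields_for_model(
#         Book,
#         fields=['title', 'published'],
#         labels={'title': 'Book title'},
#     )
#     # form_fields['title'] is a forms.CharField labelled 'Book title'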
class ModelFormOptions(object):
def __init__(self, options=None):
self.model = getattr(options, 'model', None)
self.fields = getattr(options, 'fields', None)
self.exclude = getattr(options, 'exclude', None)
self.widgets = getattr(options, 'widgets', None)
self.localized_fields = getattr(options, 'localized_fields', None)
self.labels = getattr(options, 'labels', None)
self.help_texts = getattr(options, 'help_texts', None)
self.error_messages = getattr(options, 'error_messages', None)
self.field_classes = getattr(options, 'field_classes', None)
class ModelFormMetaclass(DeclarativeFieldsMetaclass):
def __new__(mcs, name, bases, attrs):
formfield_callback = attrs.pop('formfield_callback', None)
new_class = super(ModelFormMetaclass, mcs).__new__(mcs, name, bases, attrs)
if bases == (BaseModelForm,):
return new_class
opts = new_class._meta = ModelFormOptions(getattr(new_class, 'Meta', None))
# We check if a string was passed to `fields` or `exclude`,
# which is likely to be a mistake where the user typed ('foo') instead
# of ('foo',)
for opt in ['fields', 'exclude', 'localized_fields']:
value = getattr(opts, opt)
if isinstance(value, six.string_types) and value != ALL_FIELDS:
msg = ("%(model)s.Meta.%(opt)s cannot be a string. "
"Did you mean to type: ('%(value)s',)?" % {
'model': new_class.__name__,
'opt': opt,
'value': value,
})
raise TypeError(msg)
if opts.model:
# If a model is defined, extract form fields from it.
if opts.fields is None and opts.exclude is None:
raise ImproperlyConfigured(
"Creating a ModelForm without either the 'fields' attribute "
"or the 'exclude' attribute is prohibited; form %s "
"needs updating." % name
)
if opts.fields == ALL_FIELDS:
# Sentinel for fields_for_model to indicate "get the list of
# fields from the model"
opts.fields = None
fields = fields_for_model(opts.model, opts.fields, opts.exclude,
opts.widgets, formfield_callback,
opts.localized_fields, opts.labels,
opts.help_texts, opts.error_messages,
opts.field_classes)
# make sure opts.fields doesn't specify an invalid field
none_model_fields = [k for k, v in six.iteritems(fields) if not v]
missing_fields = (set(none_model_fields) -
set(new_class.declared_fields.keys()))
if missing_fields:
message = 'Unknown field(s) (%s) specified for %s'
message = message % (', '.join(missing_fields),
opts.model.__name__)
raise FieldError(message)
# Override default model fields with any custom declared ones
# (plus, include all the other declared fields).
fields.update(new_class.declared_fields)
else:
fields = new_class.declared_fields
new_class.base_fields = fields
return new_class
class BaseModelForm(BaseForm):
def __init__(self, data=None, files=None, auto_id='id_%s', prefix=None,
initial=None, error_class=ErrorList, label_suffix=None,
empty_permitted=False, instance=None):
opts = self._meta
if opts.model is None:
raise ValueError('ModelForm has no model class specified.')
if instance is None:
# if we didn't get an instance, instantiate a new one
self.instance = opts.model()
object_data = {}
else:
self.instance = instance
object_data = model_to_dict(instance, opts.fields, opts.exclude)
# if initial was provided, it should override the values from instance
if initial is not None:
object_data.update(initial)
# self._validate_unique will be set to True by BaseModelForm.clean().
# It is False by default so overriding self.clean() and failing to call
# super will stop validate_unique from being called.
self._validate_unique = False
super(BaseModelForm, self).__init__(data, files, auto_id, prefix, object_data,
error_class, label_suffix, empty_permitted)
# Apply ``limit_choices_to`` to each field.
for field_name in self.fields:
formfield = self.fields[field_name]
if hasattr(formfield, 'queryset') and hasattr(formfield, 'get_limit_choices_to'):
limit_choices_to = formfield.get_limit_choices_to()
if limit_choices_to is not None:
formfield.queryset = formfield.queryset.complex_filter(limit_choices_to)
def _get_validation_exclusions(self):
"""
For backwards-compatibility, several types of fields need to be
excluded from model validation. See the following tickets for
details: #12507, #12521, #12553
"""
exclude = []
# Build up a list of fields that should be excluded from model field
# validation and unique checks.
for f in self.instance._meta.fields:
field = f.name
# Exclude fields that aren't on the form. The developer may be
# adding these values to the model after form validation.
if field not in self.fields:
exclude.append(f.name)
# Don't perform model validation on fields that were defined
# manually on the form and excluded via the ModelForm's Meta
# class. See #12901.
elif self._meta.fields and field not in self._meta.fields:
exclude.append(f.name)
elif self._meta.exclude and field in self._meta.exclude:
exclude.append(f.name)
# Exclude fields that failed form validation. There's no need for
# the model fields to validate them as well.
elif field in self._errors.keys():
exclude.append(f.name)
# Exclude empty fields that are not required by the form, if the
# underlying model field is required. This keeps the model field
# from raising a required error. Note: don't exclude the field from
# validation if the model field allows blanks. If it does, the blank
# value may be included in a unique check, so cannot be excluded
# from validation.
else:
form_field = self.fields[field]
field_value = self.cleaned_data.get(field)
if not f.blank and not form_field.required and field_value in form_field.empty_values:
exclude.append(f.name)
return exclude
def clean(self):
self._validate_unique = True
return self.cleaned_data
def _update_errors(self, errors):
# Override any validation error messages defined at the model level
# with those defined at the form level.
opts = self._meta
for field, messages in errors.error_dict.items():
if (field == NON_FIELD_ERRORS and opts.error_messages and
NON_FIELD_ERRORS in opts.error_messages):
error_messages = opts.error_messages[NON_FIELD_ERRORS]
elif field in self.fields:
error_messages = self.fields[field].error_messages
else:
continue
for message in messages:
if (isinstance(message, ValidationError) and
message.code in error_messages):
message.message = error_messages[message.code]
self.add_error(None, errors)
def _post_clean(self):
opts = self._meta
exclude = self._get_validation_exclusions()
# Foreign Keys being used to represent inline relationships
# are excluded from basic field value validation. This is for two
# reasons: firstly, the value may not be supplied (#12507; the
# case of providing new values to the admin); secondly the
# object being referred to may not yet fully exist (#12749).
# However, these fields *must* be included in uniqueness checks,
# so this can't be part of _get_validation_exclusions().
for name, field in self.fields.items():
if isinstance(field, InlineForeignKeyField):
exclude.append(name)
# Update the model instance with self.cleaned_data.
self.instance = construct_instance(self, self.instance, opts.fields, exclude)
try:
self.instance.full_clean(exclude=exclude, validate_unique=False)
except ValidationError as e:
self._update_errors(e)
# Validate uniqueness if needed.
if self._validate_unique:
self.validate_unique()
def validate_unique(self):
"""
Calls the instance's validate_unique() method and updates the form's
validation errors if any were raised.
"""
exclude = self._get_validation_exclusions()
try:
self.instance.validate_unique(exclude=exclude)
except ValidationError as e:
self._update_errors(e)
def save(self, commit=True):
"""
Saves this ``form``'s cleaned_data into model instance
``self.instance``.
If commit=True, then the changes to ``instance`` will be saved to the
database. Returns ``instance``.
"""
if self.instance.pk is None:
fail_message = 'created'
else:
fail_message = 'changed'
return save_instance(self, self.instance, self._meta.fields,
fail_message, commit, self._meta.exclude,
construct=False)
save.alters_data = True
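# Illustrative sketch of the commit=False contract documented above
# (``ArticleForm``/``request`` are hypothetical): with commit=False the
# instance comes back unsaved and the deferred save_m2m() added by
# save_instance() must be called once the instance has a primary key:
#
#     form = ArticleForm(request.POST)
#     if form.is_valid():
#         article = form.save(commit=False)
#         article.reviewer = request.user
#         article.save()
#         form.save_m2m()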
class ModelForm(six.with_metaclass(ModelFormMetaclass, BaseModelForm)):
pass
def modelform_factory(model, form=ModelForm, fields=None, exclude=None,
formfield_callback=None, widgets=None, localized_fields=None,
labels=None, help_texts=None, error_messages=None,
field_classes=None):
"""
Returns a ModelForm containing form fields for the given model.
``fields`` is an optional list of field names. If provided, only the named
fields will be included in the returned fields. If omitted or '__all__',
all fields will be used.
``exclude`` is an optional list of field names. If provided, the named
fields will be excluded from the returned fields, even if they are listed
in the ``fields`` argument.
``widgets`` is a dictionary of model field names mapped to a widget.
``localized_fields`` is a list of names of fields which should be localized.
``formfield_callback`` is a callable that takes a model field and returns
a form field.
``labels`` is a dictionary of model field names mapped to a label.
``help_texts`` is a dictionary of model field names mapped to a help text.
``error_messages`` is a dictionary of model field names mapped to a
dictionary of error messages.
``field_classes`` is a dictionary of model field names mapped to a form
field class.
"""
# Create the inner Meta class. FIXME: ideally, we should be able to
# construct a ModelForm without creating and passing in a temporary
# inner class.
# Build up a list of attributes that the Meta object will have.
attrs = {'model': model}
if fields is not None:
attrs['fields'] = fields
if exclude is not None:
attrs['exclude'] = exclude
if widgets is not None:
attrs['widgets'] = widgets
if localized_fields is not None:
attrs['localized_fields'] = localized_fields
if labels is not None:
attrs['labels'] = labels
if help_texts is not None:
attrs['help_texts'] = help_texts
if error_messages is not None:
attrs['error_messages'] = error_messages
if field_classes is not None:
attrs['field_classes'] = field_classes
# If parent form class already has an inner Meta, the Meta we're
# creating needs to inherit from the parent's inner meta.
parent = (object,)
if hasattr(form, 'Meta'):
parent = (form.Meta, object)
Meta = type(str('Meta'), parent, attrs)
# Give this new form class a reasonable name.
class_name = model.__name__ + str('Form')
# Class attributes for the new form class.
form_class_attrs = {
'Meta': Meta,
'formfield_callback': formfield_callback
}
if (getattr(Meta, 'fields', None) is None and
getattr(Meta, 'exclude', None) is None):
raise ImproperlyConfigured(
"Calling modelform_factory without defining 'fields' or "
"'exclude' explicitly is prohibited."
)
# Instantiate type(form) in order to use the same metaclass as form.
return type(form)(class_name, (form,), form_class_attrs)
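# Illustrative sketch (``Author`` is a hypothetical model): modelform_factory
# builds a ModelForm subclass on the fly instead of declaring a Meta class:
#
#     AuthorForm = modelform_factory(Author, fields=['name', 'birth_date'])
#     form = AuthorForm(data={'name': 'Ada', 'birth_date': '1815-12-10'})
#     if form.is_valid():
#         form.save()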
# ModelFormSets ##############################################################
class BaseModelFormSet(BaseFormSet):
"""
A ``FormSet`` for editing a queryset and/or adding new objects to it.
"""
model = None
def __init__(self, data=None, files=None, auto_id='id_%s', prefix=None,
queryset=None, **kwargs):
self.queryset = queryset
self.initial_extra = kwargs.pop('initial', None)
defaults = {'data': data, 'files': files, 'auto_id': auto_id, 'prefix': prefix}
defaults.update(kwargs)
super(BaseModelFormSet, self).__init__(**defaults)
def initial_form_count(self):
"""Returns the number of forms that are required in this FormSet."""
if not (self.data or self.files):
return len(self.get_queryset())
return super(BaseModelFormSet, self).initial_form_count()
def _existing_object(self, pk):
if not hasattr(self, '_object_dict'):
self._object_dict = {o.pk: o for o in self.get_queryset()}
return self._object_dict.get(pk)
def _get_to_python(self, field):
"""
If the field is a related field, fetch the concrete field's (that
is, the ultimate pointed-to field's) to_python.
"""
while field.remote_field is not None:
field = field.remote_field.get_related_field()
return field.to_python
def _construct_form(self, i, **kwargs):
if self.is_bound and i < self.initial_form_count():
pk_key = "%s-%s" % (self.add_prefix(i), self.model._meta.pk.name)
pk = self.data[pk_key]
pk_field = self.model._meta.pk
to_python = self._get_to_python(pk_field)
pk = to_python(pk)
kwargs['instance'] = self._existing_object(pk)
if i < self.initial_form_count() and 'instance' not in kwargs:
kwargs['instance'] = self.get_queryset()[i]
if i >= self.initial_form_count() and self.initial_extra:
# Set initial values for extra forms
try:
kwargs['initial'] = self.initial_extra[i - self.initial_form_count()]
except IndexError:
pass
return super(BaseModelFormSet, self)._construct_form(i, **kwargs)
def get_queryset(self):
if not hasattr(self, '_queryset'):
if self.queryset is not None:
qs = self.queryset
else:
qs = self.model._default_manager.get_queryset()
# If the queryset isn't already ordered we need to add an
# artificial ordering here to make sure that all formsets
# constructed from this queryset have the same form order.
if not qs.ordered:
qs = qs.order_by(self.model._meta.pk.name)
# Removed queryset limiting here. As per discussion re: #13023
# on django-dev, max_num should not prevent existing
# related objects/inlines from being displayed.
self._queryset = qs
return self._queryset
def save_new(self, form, commit=True):
"""Saves and returns a new model instance for the given form."""
return form.save(commit=commit)
def save_existing(self, form, instance, commit=True):
"""Saves and returns an existing model instance for the given form."""
return form.save(commit=commit)
def save(self, commit=True):
"""Saves model instances for every form, adding and changing instances
as necessary, and returns the list of instances.
"""
if not commit:
self.saved_forms = []
def save_m2m():
for form in self.saved_forms:
form.save_m2m()
self.save_m2m = save_m2m
return self.save_existing_objects(commit) + self.save_new_objects(commit)
save.alters_data = True
def clean(self):
self.validate_unique()
def validate_unique(self):
# Collect unique_checks and date_checks to run from all the forms.
all_unique_checks = set()
all_date_checks = set()
forms_to_delete = self.deleted_forms
valid_forms = [form for form in self.forms if form.is_valid() and form not in forms_to_delete]
for form in valid_forms:
exclude = form._get_validation_exclusions()
unique_checks, date_checks = form.instance._get_unique_checks(exclude=exclude)
all_unique_checks = all_unique_checks.union(set(unique_checks))
all_date_checks = all_date_checks.union(set(date_checks))
errors = []
# Do each of the unique checks (unique and unique_together)
for uclass, unique_check in all_unique_checks:
seen_data = set()
for form in valid_forms:
                # get the data for each field in unique_check
row_data = (form.cleaned_data[field]
for field in unique_check if field in form.cleaned_data)
# Reduce Model instances to their primary key values
row_data = tuple(d._get_pk_val() if hasattr(d, '_get_pk_val') else d
for d in row_data)
if row_data and None not in row_data:
# if we've already seen it then we have a uniqueness failure
if row_data in seen_data:
# poke error messages into the right places and mark
# the form as invalid
errors.append(self.get_unique_error_message(unique_check))
form._errors[NON_FIELD_ERRORS] = self.error_class([self.get_form_error()])
# remove the data from the cleaned_data dict since it was invalid
for field in unique_check:
if field in form.cleaned_data:
del form.cleaned_data[field]
# mark the data as seen
seen_data.add(row_data)
# iterate over each of the date checks now
for date_check in all_date_checks:
seen_data = set()
uclass, lookup, field, unique_for = date_check
for form in valid_forms:
# see if we have data for both fields
if (form.cleaned_data and form.cleaned_data[field] is not None
and form.cleaned_data[unique_for] is not None):
# if it's a date lookup we need to get the data for all the fields
if lookup == 'date':
date = form.cleaned_data[unique_for]
date_data = (date.year, date.month, date.day)
# otherwise it's just the attribute on the date/datetime
# object
else:
date_data = (getattr(form.cleaned_data[unique_for], lookup),)
data = (form.cleaned_data[field],) + date_data
# if we've already seen it then we have a uniqueness failure
if data in seen_data:
# poke error messages into the right places and mark
# the form as invalid
errors.append(self.get_date_error_message(date_check))
form._errors[NON_FIELD_ERRORS] = self.error_class([self.get_form_error()])
# remove the data from the cleaned_data dict since it was invalid
del form.cleaned_data[field]
# mark the data as seen
seen_data.add(data)
if errors:
raise ValidationError(errors)
def get_unique_error_message(self, unique_check):
if len(unique_check) == 1:
return ugettext("Please correct the duplicate data for %(field)s.") % {
"field": unique_check[0],
}
else:
return ugettext("Please correct the duplicate data for %(field)s, "
"which must be unique.") % {
"field": get_text_list(unique_check, six.text_type(_("and"))),
}
def get_date_error_message(self, date_check):
return ugettext("Please correct the duplicate data for %(field_name)s "
"which must be unique for the %(lookup)s in %(date_field)s.") % {
'field_name': date_check[2],
'date_field': date_check[3],
'lookup': six.text_type(date_check[1]),
}
def get_form_error(self):
return ugettext("Please correct the duplicate values below.")
def save_existing_objects(self, commit=True):
self.changed_objects = []
self.deleted_objects = []
if not self.initial_forms:
return []
saved_instances = []
forms_to_delete = self.deleted_forms
for form in self.initial_forms:
obj = form.instance
if form in forms_to_delete:
# If the pk is None, it means that the object can't be
# deleted again. Possible reason for this is that the
# object was already deleted from the DB. Refs #14877.
if obj.pk is None:
continue
self.deleted_objects.append(obj)
if commit:
obj.delete()
elif form.has_changed():
self.changed_objects.append((obj, form.changed_data))
saved_instances.append(self.save_existing(form, obj, commit=commit))
if not commit:
self.saved_forms.append(form)
return saved_instances
def save_new_objects(self, commit=True):
self.new_objects = []
for form in self.extra_forms:
if not form.has_changed():
continue
# If someone has marked an add form for deletion, don't save the
# object.
if self.can_delete and self._should_delete_form(form):
continue
self.new_objects.append(self.save_new(form, commit=commit))
if not commit:
self.saved_forms.append(form)
return self.new_objects
def add_fields(self, form, index):
"""Add a hidden field for the object's primary key."""
from django.db.models import AutoField, OneToOneField, ForeignKey
self._pk_field = pk = self.model._meta.pk
# If a pk isn't editable, then it won't be on the form, so we need to
# add it here so we can tell which object is which when we get the
        # data back. Generally, pk.editable should be false, but for some
        # reason auto_created pk fields and AutoFields have editable set to
        # True, so check for those cases as well.
def pk_is_not_editable(pk):
return ((not pk.editable) or (pk.auto_created or isinstance(pk, AutoField))
or (pk.remote_field and pk.remote_field.parent_link and pk_is_not_editable(pk.remote_field.model._meta.pk)))
if pk_is_not_editable(pk) or pk.name not in form.fields:
if form.is_bound:
# If we're adding the related instance, ignore its primary key
# as it could be an auto-generated default which isn't actually
# in the database.
pk_value = None if form.instance._state.adding else form.instance.pk
else:
try:
if index is not None:
pk_value = self.get_queryset()[index].pk
else:
pk_value = None
except IndexError:
pk_value = None
if isinstance(pk, OneToOneField) or isinstance(pk, ForeignKey):
qs = pk.remote_field.model._default_manager.get_queryset()
else:
qs = self.model._default_manager.get_queryset()
qs = qs.using(form.instance._state.db)
if form._meta.widgets:
widget = form._meta.widgets.get(self._pk_field.name, HiddenInput)
else:
widget = HiddenInput
form.fields[self._pk_field.name] = ModelChoiceField(qs, initial=pk_value, required=False, widget=widget)
super(BaseModelFormSet, self).add_fields(form, index)
def modelformset_factory(model, form=ModelForm, formfield_callback=None,
formset=BaseModelFormSet, extra=1, can_delete=False,
can_order=False, max_num=None, fields=None, exclude=None,
widgets=None, validate_max=False, localized_fields=None,
labels=None, help_texts=None, error_messages=None,
min_num=None, validate_min=False, field_classes=None):
"""
Returns a FormSet class for the given Django model class.
"""
meta = getattr(form, 'Meta', None)
if meta is None:
meta = type(str('Meta'), (object,), {})
if (getattr(meta, 'fields', fields) is None and
getattr(meta, 'exclude', exclude) is None):
raise ImproperlyConfigured(
"Calling modelformset_factory without defining 'fields' or "
"'exclude' explicitly is prohibited."
)
form = modelform_factory(model, form=form, fields=fields, exclude=exclude,
formfield_callback=formfield_callback,
widgets=widgets, localized_fields=localized_fields,
labels=labels, help_texts=help_texts,
error_messages=error_messages, field_classes=field_classes)
FormSet = formset_factory(form, formset, extra=extra, min_num=min_num, max_num=max_num,
can_order=can_order, can_delete=can_delete,
validate_min=validate_min, validate_max=validate_max)
FormSet.model = model
return FormSet
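# Illustrative sketch (``Author`` is a hypothetical model): the generated
# formset edits an existing queryset and appends ``extra`` blank forms:
#
#     AuthorFormSet = modelformset_factory(Author, fields=['name'], extra=2,
#                                          can_delete=True)
#     formset = AuthorFormSet(queryset=Author.objects.order_by('name'))
#     if formset.is_valid():
#         formset.save()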
# InlineFormSets #############################################################
class BaseInlineFormSet(BaseModelFormSet):
"""A formset for child objects related to a parent."""
def __init__(self, data=None, files=None, instance=None,
save_as_new=False, prefix=None, queryset=None, **kwargs):
if instance is None:
self.instance = self.fk.remote_field.model()
else:
self.instance = instance
self.save_as_new = save_as_new
if queryset is None:
queryset = self.model._default_manager
if self.instance.pk is not None:
qs = queryset.filter(**{self.fk.name: self.instance})
else:
qs = queryset.none()
super(BaseInlineFormSet, self).__init__(data, files, prefix=prefix,
queryset=qs, **kwargs)
def initial_form_count(self):
if self.save_as_new:
return 0
return super(BaseInlineFormSet, self).initial_form_count()
def _construct_form(self, i, **kwargs):
form = super(BaseInlineFormSet, self)._construct_form(i, **kwargs)
if self.save_as_new:
            # Remove the primary key from the form's data; we are only
            # creating new instances.
form.data[form.add_prefix(self._pk_field.name)] = None
# Remove the foreign key from the form's data
form.data[form.add_prefix(self.fk.name)] = None
# Set the fk value here so that the form can do its validation.
fk_value = self.instance.pk
if self.fk.remote_field.field_name != self.fk.remote_field.model._meta.pk.name:
fk_value = getattr(self.instance, self.fk.remote_field.field_name)
fk_value = getattr(fk_value, 'pk', fk_value)
setattr(form.instance, self.fk.get_attname(), fk_value)
return form
@classmethod
def get_default_prefix(cls):
return cls.fk.remote_field.get_accessor_name(model=cls.model).replace('+', '')
def save_new(self, form, commit=True):
# Ensure the latest copy of the related instance is present on each
# form (it may have been saved after the formset was originally
# instantiated).
setattr(form.instance, self.fk.name, self.instance)
# Use commit=False so we can assign the parent key afterwards, then
# save the object.
obj = form.save(commit=False)
pk_value = getattr(self.instance, self.fk.remote_field.field_name)
setattr(obj, self.fk.get_attname(), getattr(pk_value, 'pk', pk_value))
if commit:
obj.save()
# form.save_m2m() can be called via the formset later on if commit=False
if commit and hasattr(form, 'save_m2m'):
form.save_m2m()
return obj
def add_fields(self, form, index):
super(BaseInlineFormSet, self).add_fields(form, index)
if self._pk_field == self.fk:
name = self._pk_field.name
kwargs = {'pk_field': True}
else:
# The foreign key field might not be on the form, so we poke at the
# Model field to get the label, since we need that for error messages.
name = self.fk.name
kwargs = {
'label': getattr(form.fields.get(name), 'label', capfirst(self.fk.verbose_name))
}
if self.fk.remote_field.field_name != self.fk.remote_field.model._meta.pk.name:
kwargs['to_field'] = self.fk.remote_field.field_name
# If we're adding a new object, ignore a parent's auto-generated key
# as it will be regenerated on the save request.
if self.instance._state.adding:
if kwargs.get('to_field') is not None:
to_field = self.instance._meta.get_field(kwargs['to_field'])
else:
to_field = self.instance._meta.pk
if to_field.has_default():
setattr(self.instance, to_field.attname, None)
form.fields[name] = InlineForeignKeyField(self.instance, **kwargs)
# Add the generated field to form._meta.fields if it's defined to make
# sure validation isn't skipped on that field.
if form._meta.fields:
if isinstance(form._meta.fields, tuple):
form._meta.fields = list(form._meta.fields)
form._meta.fields.append(self.fk.name)
def get_unique_error_message(self, unique_check):
unique_check = [field for field in unique_check if field != self.fk.name]
return super(BaseInlineFormSet, self).get_unique_error_message(unique_check)
def _get_foreign_key(parent_model, model, fk_name=None, can_fail=False):
"""
Finds and returns the ForeignKey from model to parent if there is one
(returns None if can_fail is True and no such field exists). If fk_name is
provided, assume it is the name of the ForeignKey field. Unless can_fail is
True, an exception is raised if there is no ForeignKey from model to
parent_model.
"""
# avoid circular import
from django.db.models import ForeignKey
opts = model._meta
if fk_name:
fks_to_parent = [f for f in opts.fields if f.name == fk_name]
if len(fks_to_parent) == 1:
fk = fks_to_parent[0]
if not isinstance(fk, ForeignKey) or \
(fk.remote_field.model != parent_model and
fk.remote_field.model not in parent_model._meta.get_parent_list()):
raise ValueError(
"fk_name '%s' is not a ForeignKey to '%s'." % (fk_name, parent_model._meta.label)
)
elif len(fks_to_parent) == 0:
raise ValueError(
"'%s' has no field named '%s'." % (model._meta.label, fk_name)
)
else:
# Try to discover what the ForeignKey from model to parent_model is
fks_to_parent = [
f for f in opts.fields
if isinstance(f, ForeignKey)
and (f.remote_field.model == parent_model
or f.remote_field.model in parent_model._meta.get_parent_list())
]
if len(fks_to_parent) == 1:
fk = fks_to_parent[0]
elif len(fks_to_parent) == 0:
if can_fail:
return
raise ValueError(
"'%s' has no ForeignKey to '%s'." % (
model._meta.label,
parent_model._meta.label,
)
)
else:
raise ValueError(
"'%s' has more than one ForeignKey to '%s'." % (
model._meta.label,
parent_model._meta.label,
)
)
return fk
def inlineformset_factory(parent_model, model, form=ModelForm,
formset=BaseInlineFormSet, fk_name=None,
fields=None, exclude=None, extra=3, can_order=False,
can_delete=True, max_num=None, formfield_callback=None,
widgets=None, validate_max=False, localized_fields=None,
labels=None, help_texts=None, error_messages=None,
min_num=None, validate_min=False, field_classes=None):
"""
Returns an ``InlineFormSet`` for the given kwargs.
You must provide ``fk_name`` if ``model`` has more than one ``ForeignKey``
to ``parent_model``.
"""
fk = _get_foreign_key(parent_model, model, fk_name=fk_name)
# enforce a max_num=1 when the foreign key to the parent model is unique.
if fk.unique:
max_num = 1
kwargs = {
'form': form,
'formfield_callback': formfield_callback,
'formset': formset,
'extra': extra,
'can_delete': can_delete,
'can_order': can_order,
'fields': fields,
'exclude': exclude,
'min_num': min_num,
'max_num': max_num,
'widgets': widgets,
'validate_min': validate_min,
'validate_max': validate_max,
'localized_fields': localized_fields,
'labels': labels,
'help_texts': help_texts,
'error_messages': error_messages,
'field_classes': field_classes,
}
FormSet = modelformset_factory(model, **kwargs)
FormSet.fk = fk
return FormSet
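# Illustrative sketch (``Author``/``Book`` are hypothetical, Book having a
# ForeignKey to Author): the inline formset is bound to one parent instance;
# fk_name is only required when several ForeignKeys point at the parent:
#
#     BookFormSet = inlineformset_factory(Author, Book, fields=['title'], extra=1)
#     formset = BookFormSet(request.POST, instance=author)
#     if formset.is_valid():
#         formset.save()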
# Fields #####################################################################
class InlineForeignKeyField(Field):
"""
    A basic integer field that validates that the given value corresponds to
    the parent instance in an inline.
"""
widget = HiddenInput
default_error_messages = {
'invalid_choice': _('The inline foreign key did not match the parent instance primary key.'),
}
def __init__(self, parent_instance, *args, **kwargs):
self.parent_instance = parent_instance
self.pk_field = kwargs.pop("pk_field", False)
self.to_field = kwargs.pop("to_field", None)
if self.parent_instance is not None:
if self.to_field:
kwargs["initial"] = getattr(self.parent_instance, self.to_field)
else:
kwargs["initial"] = self.parent_instance.pk
kwargs["required"] = False
super(InlineForeignKeyField, self).__init__(*args, **kwargs)
def clean(self, value):
if value in self.empty_values:
if self.pk_field:
return None
            # if there is no value, act as we did before.
return self.parent_instance
        # ensure that we compare the values as equal types.
if self.to_field:
orig = getattr(self.parent_instance, self.to_field)
else:
orig = self.parent_instance.pk
if force_text(value) != force_text(orig):
raise ValidationError(self.error_messages['invalid_choice'], code='invalid_choice')
return self.parent_instance
def has_changed(self, initial, data):
return False
class ModelChoiceIterator(object):
def __init__(self, field):
self.field = field
self.queryset = field.queryset
def __iter__(self):
if self.field.empty_label is not None:
yield ("", self.field.empty_label)
for obj in self.queryset.iterator():
yield self.choice(obj)
def __len__(self):
return (len(self.queryset) +
(1 if self.field.empty_label is not None else 0))
def choice(self, obj):
return (self.field.prepare_value(obj), self.field.label_from_instance(obj))
class ModelChoiceField(ChoiceField):
"""A ChoiceField whose choices are a model QuerySet."""
# This class is a subclass of ChoiceField for purity, but it doesn't
# actually use any of ChoiceField's implementation.
default_error_messages = {
'invalid_choice': _('Select a valid choice. That choice is not one of'
' the available choices.'),
}
def __init__(self, queryset, empty_label="---------",
required=True, widget=None, label=None, initial=None,
help_text='', to_field_name=None, limit_choices_to=None,
*args, **kwargs):
if required and (initial is not None):
self.empty_label = None
else:
self.empty_label = empty_label
# Call Field instead of ChoiceField __init__() because we don't need
# ChoiceField.__init__().
Field.__init__(self, required, widget, label, initial, help_text,
*args, **kwargs)
self.queryset = queryset
self.limit_choices_to = limit_choices_to # limit the queryset later.
self.to_field_name = to_field_name
def get_limit_choices_to(self):
"""
Returns ``limit_choices_to`` for this form field.
If it is a callable, it will be invoked and the result will be
returned.
"""
if callable(self.limit_choices_to):
return self.limit_choices_to()
return self.limit_choices_to
def __deepcopy__(self, memo):
result = super(ChoiceField, self).__deepcopy__(memo)
# Need to force a new ModelChoiceIterator to be created, bug #11183
result.queryset = result.queryset
return result
def _get_queryset(self):
return self._queryset
def _set_queryset(self, queryset):
self._queryset = queryset
self.widget.choices = self.choices
queryset = property(_get_queryset, _set_queryset)
    # This method is used by ModelChoiceIterator to create object labels.
    # Override it to customize the label.
def label_from_instance(self, obj):
"""
This method is used to convert objects into strings; it's used to
generate the labels for the choices presented by this object. Subclasses
can override this method to customize the display of the choices.
"""
return smart_text(obj)
def _get_choices(self):
# If self._choices is set, then somebody must have manually set
# the property self.choices. In this case, just return self._choices.
if hasattr(self, '_choices'):
return self._choices
# Otherwise, execute the QuerySet in self.queryset to determine the
# choices dynamically. Return a fresh ModelChoiceIterator that has not been
# consumed. Note that we're instantiating a new ModelChoiceIterator *each*
# time _get_choices() is called (and, thus, each time self.choices is
# accessed) so that we can ensure the QuerySet has not been consumed. This
# construct might look complicated but it allows for lazy evaluation of
# the queryset.
return ModelChoiceIterator(self)
choices = property(_get_choices, ChoiceField._set_choices)
def prepare_value(self, value):
if hasattr(value, '_meta'):
if self.to_field_name:
return value.serializable_value(self.to_field_name)
else:
return value.pk
return super(ModelChoiceField, self).prepare_value(value)
def to_python(self, value):
if value in self.empty_values:
return None
try:
key = self.to_field_name or 'pk'
value = self.queryset.get(**{key: value})
except (ValueError, TypeError, self.queryset.model.DoesNotExist):
raise ValidationError(self.error_messages['invalid_choice'], code='invalid_choice')
return value
def validate(self, value):
return Field.validate(self, value)
def has_changed(self, initial, data):
initial_value = initial if initial is not None else ''
data_value = data if data is not None else ''
return force_text(self.prepare_value(initial_value)) != force_text(data_value)
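# Illustrative sketch (``Author`` is a hypothetical model): choices are taken
# from the queryset and label_from_instance controls how each object is shown:
#
#     class AuthorChoiceField(ModelChoiceField):
#         def label_from_instance(self, obj):
#             return obj.get_full_name()
#
#     field = AuthorChoiceField(queryset=Author.objects.all(), empty_label='(none)')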
class ModelMultipleChoiceField(ModelChoiceField):
"""A MultipleChoiceField whose choices are a model QuerySet."""
widget = SelectMultiple
hidden_widget = MultipleHiddenInput
default_error_messages = {
'list': _('Enter a list of values.'),
'invalid_choice': _('Select a valid choice. %(value)s is not one of the'
' available choices.'),
'invalid_pk_value': _('"%(pk)s" is not a valid value for a primary key.')
}
def __init__(self, queryset, required=True, widget=None, label=None,
initial=None, help_text='', *args, **kwargs):
super(ModelMultipleChoiceField, self).__init__(queryset, None,
required, widget, label, initial, help_text, *args, **kwargs)
def to_python(self, value):
if not value:
return []
return list(self._check_values(value))
def clean(self, value):
if self.required and not value:
raise ValidationError(self.error_messages['required'], code='required')
elif not self.required and not value:
return self.queryset.none()
if not isinstance(value, (list, tuple)):
raise ValidationError(self.error_messages['list'], code='list')
qs = self._check_values(value)
# Since this overrides the inherited ModelChoiceField.clean
# we run custom validators here
self.run_validators(value)
return qs
def _check_values(self, value):
"""
Given a list of possible PK values, returns a QuerySet of the
corresponding objects. Raises a ValidationError if a given value is
invalid (not a valid PK, not in the queryset, etc.)
"""
key = self.to_field_name or 'pk'
        # Deduplicate the given values to avoid creating many querysets or
        # requiring the database backend to deduplicate them efficiently.
try:
value = frozenset(value)
except TypeError:
# list of lists isn't hashable, for example
raise ValidationError(
self.error_messages['list'],
code='list',
)
for pk in value:
try:
self.queryset.filter(**{key: pk})
except (ValueError, TypeError):
raise ValidationError(
self.error_messages['invalid_pk_value'],
code='invalid_pk_value',
params={'pk': pk},
)
qs = self.queryset.filter(**{'%s__in' % key: value})
pks = set(force_text(getattr(o, key)) for o in qs)
for val in value:
if force_text(val) not in pks:
raise ValidationError(
self.error_messages['invalid_choice'],
code='invalid_choice',
params={'value': val},
)
return qs
def prepare_value(self, value):
if (hasattr(value, '__iter__') and
not isinstance(value, six.text_type) and
not hasattr(value, '_meta')):
return [super(ModelMultipleChoiceField, self).prepare_value(v) for v in value]
return super(ModelMultipleChoiceField, self).prepare_value(value)
def has_changed(self, initial, data):
if initial is None:
initial = []
if data is None:
data = []
if len(initial) != len(data):
return True
initial_set = set(force_text(value) for value in self.prepare_value(initial))
data_set = set(force_text(value) for value in data)
return data_set != initial_set
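# Illustrative sketch (``Tag`` is a hypothetical model): clean() validates a
# list of primary keys against the queryset and returns the matching QuerySet:
#
#     field = ModelMultipleChoiceField(queryset=Tag.objects.all())
#     tags = field.clean(['1', '3'])   # QuerySet of the Tag rows with pk 1 and 3
#     field.clean(['999'])             # raises ValidationError (invalid_choice)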
def modelform_defines_fields(form_class):
return (form_class is not None and (
hasattr(form_class, '_meta') and
(form_class._meta.fields is not None or
form_class._meta.exclude is not None)
))
| rsvip/Django | django/forms/models.py | Python | bsd-3-clause | 55,046 | 0.001508 |
# -*- coding: utf-8 -*-
"""
Created on Tue Nov 03 13:06:56 2015
@author: Eunice
"""
if __name__ == '__main__':
import sys
sys.path.append("..")
from engine import bar
# The modules above are imported for testing purposes only
from engine.broker.fillstrategy import DefaultStrategy
from engine.broker.backtesting import TradePercentage
from engine import strategy
from engine.technical import ma
from engine.technical import cross
class thrSMA(strategy.BacktestingStrategy):
def __init__(self, feed, instrument, short_l, mid_l, long_l, up_cum):
strategy.BacktestingStrategy.__init__(self, feed)
self.__instrument = instrument
self.getBroker().setFillStrategy(DefaultStrategy(None))
self.getBroker().setCommission(TradePercentage(0.001))
self.__position = None
self.__prices = feed[instrument].getPriceDataSeries()
self.__malength1 = int(short_l)
self.__malength2 = int(mid_l)
self.__malength3 = int(long_l)
self.__circ = int(up_cum)
self.__ma1 = ma.SMA(self.__prices, self.__malength1)
self.__ma2 = ma.SMA(self.__prices, self.__malength2)
self.__ma3 = ma.SMA(self.__prices, self.__malength3)
def getPrice(self):
return self.__prices
def getSMA(self):
return self.__ma1,self.__ma2, self.__ma3
def onEnterCanceled(self, position):
self.__position = None
def onEnterOK(self):
pass
def onExitOk(self, position):
self.__position = None
#self.info("long close")
def onExitCanceled(self, position):
self.__position.exitMarket()
def buyCon1(self):
if cross.cross_above(self.__ma1, self.__ma2) > 0:
return True
def buyCon2(self):
m1 = 0
m2 = 0
for i in range(self.__circ):
if self.__ma1[-i-1] > self.__ma3[-i-1]:
m1 += 1
if self.__ma2[-i-1] > self.__ma3[-i-1]:
m2 += 1
if m1 >= self.__circ and m2 >= self.__circ:
return True
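    # Worked example of the confirmation window above (numbers are illustrative):
    # with up_cum = 3, buyCon2() only returns True when both the short MA and the
    # mid MA have stayed above the long MA on each of the last 3 bars, e.g.
    # ma1 = [10.2, 10.3, 10.4], ma2 = [10.0, 10.1, 10.1], ma3 = [9.8, 9.8, 9.9].
    # A single dip of either average below ma3 inside that window blocks the entry.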
def sellCon1(self):
if cross.cross_below(self.__ma1, self.__ma2) > 0:
return True
def onBars(self, bars):
# If a position was not opened, check if we should enter a long position.
        if self.__ma2[-1] is None:
return
if self.__position is not None:
if not self.__position.exitActive() and cross.cross_below(self.__ma1, self.__ma2) > 0:
self.__position.exitMarket()
#self.info("sell %s" % (bars.getDateTime()))
if self.__position is None:
if self.buyCon1() and self.buyCon2():
shares = int(self.getBroker().getCash() * 0.2 / bars[self.__instrument].getPrice())
self.__position = self.enterLong(self.__instrument, shares)
print bars[self.__instrument].getDateTime(), bars[self.__instrument].getPrice()
#self.info("buy %s" % (bars.getDateTime()))
def testStrategy():
from engine import bar
strat = thrSMA
instrument = '600288'
market = 'SH'
fromDate = '20150101'
toDate ='20150601'
frequency = bar.Frequency.MINUTE
paras = [2, 20, 60, 10]
plot = True
    ############################################# path setup #############################
import os
print os.path.split(os.path.realpath(__file__))
if frequency == bar.Frequency.MINUTE:
path = os.path.join(os.environ.get('STRATEGYPATH'), '..', 'histdata', 'minute')
elif frequency == bar.Frequency.DAY:
path = os.path.join(os.environ.get('STRATEGYPATH'), '..', 'histdata', 'day')
filepath = os.path.join(path, instrument + market + ".csv")
    ############################################# don't change #############################
from engine.barfeed.csvfeed import Feed
barfeed = Feed(frequency)
barfeed.setDateTimeFormat('%Y-%m-%d %H:%M:%S')
barfeed.loadBars(instrument, market, fromDate, toDate, filepath)
engine_id = instrument + '.' + market
strat = strat(barfeed, engine_id, *paras)
from engine.stratanalyzer import returns
from engine.stratanalyzer import sharpe
from engine.stratanalyzer import drawdown
from engine.stratanalyzer import trades
retAnalyzer = returns.Returns()
strat.attachAnalyzer(retAnalyzer)
sharpeRatioAnalyzer = sharpe.SharpeRatio()
strat.attachAnalyzer(sharpeRatioAnalyzer)
drawDownAnalyzer = drawdown.DrawDown()
strat.attachAnalyzer(drawDownAnalyzer)
tradesAnalyzer = trades.Trades()
strat.attachAnalyzer(tradesAnalyzer)
strat.run()
    # Sharpe ratio
sharp = sharpeRatioAnalyzer.getSharpeRatio(0.05)
    # Maximum drawdown
maxdd = drawDownAnalyzer.getMaxDrawDown()
    # Cumulative return
return_ = retAnalyzer.getCumulativeReturns()[-1]
    # Cumulative return curve
return_list = []
for item in retAnalyzer.getCumulativeReturns():
return_list.append(item)
def run_strategy(ticker, account_id, paras):
print ticker
print account_id
print paras
strat = testStrategy()
if __name__ == "__main__":
testStrategy()
| Yam-cn/potato | stratlib/thrSMA.py | Python | apache-2.0 | 5,091 | 0.004953 |
from captcha.conf import settings
from captcha.helpers import captcha_image_url
from captcha.models import CaptchaStore
from django.http import HttpResponse, Http404
from django.shortcuts import get_object_or_404
import random
import re
import tempfile
import os
import subprocess
try:
from cStringIO import StringIO
except ImportError:
from io import BytesIO as StringIO
try:
from PIL import Image, ImageDraw, ImageFont
except ImportError:
import Image
import ImageDraw
import ImageFont
try:
import json
except ImportError:
from django.utils import simplejson as json
NON_DIGITS_RX = re.compile(r'[^\d]')
# Distance of the drawn text from the top of the captcha image
from_top = 4
def getsize(font, text):
if hasattr(font, 'getoffset'):
return [x + y + z for x, y, z in zip(font.getsize(text), font.getoffset(text), (0, from_top))]
else:
return font.getsize(text)
def captcha_image(request, key, scale=1):
store = get_object_or_404(CaptchaStore, hashkey=key)
text = store.challenge
if settings.CAPTCHA_FONT_PATH.lower().strip().endswith('ttf'):
font = ImageFont.truetype(settings.CAPTCHA_FONT_PATH, settings.CAPTCHA_FONT_SIZE * scale)
else:
font = ImageFont.load(settings.CAPTCHA_FONT_PATH)
size = getsize(font, text)
size = (size[0] * 2, int(size[1] * 1.4))
image = Image.new('RGB', size, settings.CAPTCHA_BACKGROUND_COLOR)
try:
PIL_VERSION = int(NON_DIGITS_RX.sub('', Image.VERSION))
    except Exception:
PIL_VERSION = 116
xpos = 2
charlist = []
for char in text:
if char in settings.CAPTCHA_PUNCTUATION and len(charlist) >= 1:
charlist[-1] += char
else:
charlist.append(char)
for char in charlist:
fgimage = Image.new('RGB', size, settings.CAPTCHA_FOREGROUND_COLOR)
charimage = Image.new('L', getsize(font, ' %s ' % char), '#000000')
chardraw = ImageDraw.Draw(charimage)
chardraw.text((0, 0), ' %s ' % char, font=font, fill='#ffffff')
if settings.CAPTCHA_LETTER_ROTATION:
if PIL_VERSION >= 116:
charimage = charimage.rotate(random.randrange(*settings.CAPTCHA_LETTER_ROTATION), expand=0, resample=Image.BICUBIC)
else:
charimage = charimage.rotate(random.randrange(*settings.CAPTCHA_LETTER_ROTATION), resample=Image.BICUBIC)
charimage = charimage.crop(charimage.getbbox())
maskimage = Image.new('L', size)
maskimage.paste(charimage, (xpos, from_top, xpos + charimage.size[0], from_top + charimage.size[1]))
size = maskimage.size
image = Image.composite(fgimage, image, maskimage)
xpos = xpos + 2 + charimage.size[0]
image = image.crop((0, 0, xpos + 1, size[1]))
draw = ImageDraw.Draw(image)
for f in settings.noise_functions():
draw = f(draw, image)
for f in settings.filter_functions():
image = f(image)
out = StringIO()
image.save(out, "PNG")
out.seek(0)
response = HttpResponse(content_type='image/png')
response.write(out.read())
response['Content-length'] = out.tell()
return response
def captcha_audio(request, key):
if settings.CAPTCHA_FLITE_PATH:
store = get_object_or_404(CaptchaStore, hashkey=key)
text = store.challenge
if 'captcha.helpers.math_challenge' == settings.CAPTCHA_CHALLENGE_FUNCT:
text = text.replace('*', 'times').replace('-', 'minus')
else:
text = ', '.join(list(text))
path = str(os.path.join(tempfile.gettempdir(), '%s.wav' % key))
subprocess.call([settings.CAPTCHA_FLITE_PATH, "-t", text, "-o", path])
if os.path.isfile(path):
response = HttpResponse()
f = open(path, 'rb')
response['Content-Type'] = 'audio/x-wav'
response.write(f.read())
f.close()
os.unlink(path)
return response
raise Http404
def captcha_refresh(request):
""" Return json with new captcha for ajax refresh request """
if not request.is_ajax():
raise Http404
new_key = CaptchaStore.generate_key()
to_json_response = {
'key': new_key,
'image_url': captcha_image_url(new_key),
}
return HttpResponse(json.dumps(to_json_response), content_type='application/json')
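# Illustrative sketch of the JSON handed back to the AJAX caller (hash values
# and the URL pattern are placeholders, derived from captcha_image_url):
#
#     {
#         "key": "40ac2b3e...",
#         "image_url": "/captcha/image/40ac2b3e.../"
#     }
#
# A client would typically swap the rendered <img> src for image_url and store
# the new key in the hidden captcha field before resubmitting the form.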
| viaregio/django-simple-captcha | captcha/views.py | Python | mit | 4,373 | 0.002058 |
""" This module contains data dump creation and import functions.
Read more about the data dumps in our documentation here:
https://listenbrainz.readthedocs.io/en/production/dev/listenbrainz-dumps.html
"""
# listenbrainz-server - Server for the ListenBrainz project
#
# Copyright (C) 2017 MetaBrainz Foundation Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import logging
import os
import shutil
import sqlalchemy
import subprocess
import sys
import tarfile
import tempfile
import time
import ujson
from datetime import datetime
from flask import current_app
from listenbrainz import DUMP_LICENSE_FILE_PATH
import listenbrainz.db as db
from listenbrainz.db import DUMP_DEFAULT_THREAD_COUNT
from listenbrainz.utils import create_path, log_ioerrors
from listenbrainz import config
# this dict contains the tables dumped in public dump as keys
# and a tuple of columns that should be dumped as values
PUBLIC_TABLES = {
'"user"': (
'id',
'created',
'musicbrainz_id',
'musicbrainz_row_id',
# the following are dummy values for columns that we do not want to
# dump in the public dump
'\'\'', # auth token
'to_timestamp(0)', # last_login
'to_timestamp(0)', # latest_import
),
'statistics.user': (
'user_id',
'artist',
'release',
'recording',
'last_updated',
),
'statistics.artist': (
'id',
'msid',
'name',
'release',
'recording',
'listener',
'listen_count',
'last_updated',
),
'statistics.release': (
'id',
'msid',
'name',
'recording',
'listener',
'listen_count',
'last_updated',
),
'statistics.recording': (
'id',
'msid',
'name',
'listener',
'listen_count',
'last_updated',
),
'recording_feedback': (
'id',
'user_id',
'recording_msid',
'score',
'created'
),
}
# this dict contains the tables dumped in the private dump as keys
# and a tuple of columns that should be dumped as values
PRIVATE_TABLES = {
'"user"': (
'id',
'created',
'musicbrainz_id',
'auth_token',
'last_login',
'latest_import',
'musicbrainz_row_id',
'gdpr_agreed',
),
'api_compat.token': (
'id',
'user_id',
'token',
'api_key',
'ts',
),
'api_compat.session': (
'id',
'user_id',
'sid',
'api_key',
'ts',
),
}
def dump_postgres_db(location, dump_time=datetime.today(), threads=None):
""" Create postgres database dump in the specified location
Arguments:
location: Directory where the final dump will be stored
dump_time: datetime object representing when the dump was started
threads: Maximal number of threads to run during compression
Returns:
a tuple: (path to private dump, path to public dump)
"""
current_app.logger.info('Beginning dump of PostgreSQL database...')
current_app.logger.info('dump path: %s', location)
current_app.logger.info('Creating dump of private data...')
try:
private_dump = create_private_dump(location, dump_time, threads)
except IOError as e:
current_app.logger.critical(
'IOError while creating private dump: %s', str(e), exc_info=True)
current_app.logger.info('Removing created files and giving up...')
shutil.rmtree(location)
return
except Exception as e:
current_app.logger.critical(
'Unable to create private db dump due to error %s', str(e), exc_info=True)
current_app.logger.info('Removing created files and giving up...')
shutil.rmtree(location)
return
current_app.logger.info(
'Dump of private data created at %s!', private_dump)
current_app.logger.info('Creating dump of public data...')
try:
public_dump = create_public_dump(location, dump_time, threads)
except IOError as e:
current_app.logger.critical(
'IOError while creating public dump: %s', str(e), exc_info=True)
current_app.logger.info('Removing created files and giving up...')
shutil.rmtree(location)
return
except Exception as e:
current_app.logger.critical(
'Unable to create public dump due to error %s', str(e), exc_info=True)
current_app.logger.info('Removing created files and giving up...')
shutil.rmtree(location)
return
current_app.logger.info('Dump of public data created at %s!', public_dump)
current_app.logger.info(
'ListenBrainz PostgreSQL data dump created at %s!', location)
return private_dump, public_dump
def dump_feedback_for_spark(location, dump_time=datetime.today(), threads=None):
""" Dump user/recommendation feedback from postgres into spark format.
Arguments:
location: Directory where the final dump will be stored
dump_time: datetime object representing when the dump was started
threads: Maximal number of threads to run during compression
Returns:
path to feedback dump
"""
current_app.logger.info('Beginning dump of feedback data...')
current_app.logger.info('dump path: %s', location)
try:
feedback_dump = create_feedback_dump(location, dump_time, threads)
except IOError as e:
current_app.logger.critical(
'IOError while creating feedback dump: %s', str(e), exc_info=True)
current_app.logger.info('Removing created files and giving up...')
shutil.rmtree(location)
return
except Exception as e:
current_app.logger.critical(
'Unable to create feedback dump due to error %s', str(e), exc_info=True)
current_app.logger.info('Removing created files and giving up...')
shutil.rmtree(location)
return
current_app.logger.info(
'Dump of feedback data created at %s!', feedback_dump)
return feedback_dump
def _create_dump(location, dump_type, tables, dump_time, threads=DUMP_DEFAULT_THREAD_COUNT):
""" Creates a dump of the provided tables at the location passed
Arguments:
location: the path where the dump should be created
            dump_type: the type of data dump being made - private, public or feedback
tables: a dict containing the names of the tables to be dumped as keys and the columns
to be dumped as values
dump_time: the time at which the dump process was started
threads: the maximum number of threads to use for compression
Returns:
the path to the archive file created
"""
archive_name = 'listenbrainz-{dump_type}-dump-{time}'.format(
dump_type=dump_type,
time=dump_time.strftime('%Y%m%d-%H%M%S')
)
archive_path = os.path.join(location, '{archive_name}.tar.xz'.format(
archive_name=archive_name,
))
with open(archive_path, 'w') as archive:
pxz_command = ['pxz', '--compress',
'-T{threads}'.format(threads=threads)]
pxz = subprocess.Popen(
pxz_command, stdin=subprocess.PIPE, stdout=archive)
with tarfile.open(fileobj=pxz.stdin, mode='w|') as tar:
temp_dir = tempfile.mkdtemp()
try:
schema_seq_path = os.path.join(temp_dir, "SCHEMA_SEQUENCE")
with open(schema_seq_path, "w") as f:
f.write(str(db.SCHEMA_VERSION))
tar.add(schema_seq_path,
arcname=os.path.join(archive_name, "SCHEMA_SEQUENCE"))
timestamp_path = os.path.join(temp_dir, "TIMESTAMP")
with open(timestamp_path, "w") as f:
f.write(dump_time.isoformat(" "))
tar.add(timestamp_path,
arcname=os.path.join(archive_name, "TIMESTAMP"))
tar.add(DUMP_LICENSE_FILE_PATH,
arcname=os.path.join(archive_name, "COPYING"))
except IOError as e:
current_app.logger.error(
'IOError while adding dump metadata: %s', str(e), exc_info=True)
raise
except Exception as e:
current_app.logger.error(
'Exception while adding dump metadata: %s', str(e), exc_info=True)
raise
archive_tables_dir = os.path.join(temp_dir, 'lbdump', 'lbdump')
create_path(archive_tables_dir)
with db.engine.connect() as connection:
if dump_type == "feedback":
dump_user_feedback(connection, location=archive_tables_dir)
else:
with connection.begin() as transaction:
cursor = connection.connection.cursor()
for table in tables:
try:
copy_table(
cursor=cursor,
location=archive_tables_dir,
columns=','.join(tables[table]),
table_name=table,
)
except IOError as e:
current_app.logger.error(
'IOError while copying table %s', table, exc_info=True)
raise
except Exception as e:
current_app.logger.error(
'Error while copying table %s: %s', table, str(e), exc_info=True)
raise
transaction.rollback()
tar.add(archive_tables_dir, arcname=os.path.join(
                archive_name, 'lbdump'))
shutil.rmtree(temp_dir)
pxz.stdin.close()
pxz.wait()
return archive_path
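# Illustrative sketch of the archive produced above (timestamps are example
# values; a private dump started on 2019-01-01 00:00:00 is shown):
#
#     listenbrainz-private-dump-20190101-000000.tar.xz
#         listenbrainz-private-dump-20190101-000000/SCHEMA_SEQUENCE   # db.SCHEMA_VERSION
#         listenbrainz-private-dump-20190101-000000/TIMESTAMP         # dump start time
#         listenbrainz-private-dump-20190101-000000/COPYING           # dump license
#         listenbrainz-private-dump-20190101-000000/lbdump/...        # one file per table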
def create_private_dump(location, dump_time, threads=DUMP_DEFAULT_THREAD_COUNT):
""" Create postgres database dump for private data in db.
This includes dumps of the following tables:
"user",
api_compat.token,
api_compat.session
"""
return _create_dump(
location=location,
dump_type='private',
tables=PRIVATE_TABLES,
dump_time=dump_time,
threads=threads,
)
def create_public_dump(location, dump_time, threads=DUMP_DEFAULT_THREAD_COUNT):
""" Create postgres database dump for statistics and user info in db.
This includes a sanitized dump of the "user" table and dumps of all tables
in the statistics schema:
statistics.user
statistics.artist
statistics.release
statistics.recording
"""
return _create_dump(
location=location,
dump_type='public',
tables=PUBLIC_TABLES,
dump_time=dump_time,
threads=threads,
)
def create_feedback_dump(location, dump_time, threads=DUMP_DEFAULT_THREAD_COUNT):
""" Create a spark format dump of user listen and user recommendation feedback.
"""
return _create_dump(
location=location,
dump_type='feedback',
tables=[],
dump_time=dump_time,
threads=threads,
)
def dump_user_feedback(connection, location):
""" Carry out the actual dumping of user listen and user recommendation feedback.
"""
with connection.begin() as transaction:
# First dump the user feedback
result = connection.execute(sqlalchemy.text("""
SELECT musicbrainz_id, recording_msid, score, r.created,
EXTRACT(YEAR FROM r.created) AS year,
EXTRACT(MONTH FROM r.created) AS month,
EXTRACT(DAY FROM r.created) AS day
FROM recording_feedback r
JOIN "user"
ON r.user_id = "user".id
ORDER BY created"""))
last_day = ()
todays_items = []
while True:
row = result.fetchone()
today = (row[4], row[5], row[6]) if row else ()
if (not row or today != last_day) and len(todays_items) > 0:
full_path = os.path.join(location, "feedback", "listens", "%02d" % int(last_day[0]),
"%02d" % int(last_day[1]), "%02d" % int(last_day[2]))
os.makedirs(full_path)
with open(os.path.join(full_path, "data.json"), "wb") as f:
for item in todays_items:
f.write(bytes(ujson.dumps(item) + "\n", "utf-8"))
todays_items = []
if not row:
break
todays_items.append({'user_name': row[0],
'recording_msid': str(row[1]),
'feedback': row[2],
'created': row[3].isoformat()})
last_day = today
# Now dump the recommendation feedback
result = connection.execute(sqlalchemy.text("""
SELECT musicbrainz_id, recording_mbid, rating, r.created,
EXTRACT(YEAR FROM r.created) AS year,
EXTRACT(MONTH FROM r.created) AS month,
EXTRACT(DAY FROM r.created) AS day
FROM recommendation_feedback r
JOIN "user"
ON r.user_id = "user".id
ORDER BY created"""))
last_day = ()
todays_items = []
while True:
row = result.fetchone()
today = (row[4], row[5], row[6]) if row else ()
if (not row or today != last_day) and len(todays_items) > 0:
full_path = os.path.join(location, "feedback", "recommendation", "%02d" % int(last_day[0]),
"%02d" % int(last_day[1]), "%02d" % int(last_day[2]))
os.makedirs(full_path)
with open(os.path.join(full_path, "data.json"), "wb") as f:
for item in todays_items:
f.write(bytes(ujson.dumps(item) + "\n", "utf-8"))
todays_items = []
if not row:
break
todays_items.append({'user_name': row[0],
'mb_recording_mbid': str(row[1]),
'feedback': row[2],
'created': row[3].isoformat()})
last_day = today
transaction.rollback()
def copy_table(cursor, location, columns, table_name):
""" Copies a PostgreSQL table to a file
Arguments:
cursor: a psycopg cursor
location: the directory where the table should be copied
        columns: a comma separated string listing the columns of the table
that should be dumped
table_name: the name of the table to be copied
"""
with open(os.path.join(location, table_name), 'w') as f:
cursor.copy_to(f, '(SELECT {columns} FROM {table})'.format(
columns=columns,
table=table_name
))
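# Illustrative use of copy_table (not part of the original module; the target
# directory is an example and must already exist):
#
#     with db.engine.connect() as connection:
#         cursor = connection.connection.cursor()
#         copy_table(cursor=cursor, location='/tmp/lbdump',
#                    columns='id, created', table_name='data_dump')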
def add_dump_entry(timestamp):
""" Adds an entry to the data_dump table with specified time.
Args:
timestamp: the unix timestamp to be added
Returns:
id (int): the id of the new entry added
"""
with db.engine.connect() as connection:
result = connection.execute(sqlalchemy.text("""
INSERT INTO data_dump (created)
VALUES (TO_TIMESTAMP(:ts))
RETURNING id
"""), {
'ts': timestamp,
})
return result.fetchone()['id']
def get_dump_entries():
""" Returns a list of all dump entries in the data_dump table
"""
with db.engine.connect() as connection:
result = connection.execute(sqlalchemy.text("""
SELECT id, created
FROM data_dump
ORDER BY created DESC
"""))
return [dict(row) for row in result]
def get_dump_entry(dump_id):
with db.engine.connect() as connection:
result = connection.execute(sqlalchemy.text("""
SELECT id, created
FROM data_dump
WHERE id = :dump_id
"""), {
'dump_id': dump_id,
})
if result.rowcount > 0:
return dict(result.fetchone())
return None
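# Illustrative round trip through the dump bookkeeping helpers (not part of the
# original module; assumes a configured db.engine):
#
#     import time
#     dump_id = add_dump_entry(int(time.time()))
#     entry = get_dump_entry(dump_id)     # {'id': dump_id, 'created': <timestamp>}
#     latest = get_dump_entries()[0]      # newest first because of ORDER BY created DESC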
def import_postgres_dump(private_dump_archive_path=None, public_dump_archive_path=None, threads=DUMP_DEFAULT_THREAD_COUNT):
""" Imports postgres dump created by dump_postgres_db present at location.
    Arguments:
        private_dump_archive_path: path to the private dump archive, or None to skip importing it
        public_dump_archive_path: path to the public dump archive, or None to skip importing it
        threads: the number of threads to use while decompressing the archives, defaults to
                db.DUMP_DEFAULT_THREAD_COUNT
"""
if private_dump_archive_path:
current_app.logger.info(
'Importing private dump %s...', private_dump_archive_path)
try:
_import_dump(private_dump_archive_path,
'private', PRIVATE_TABLES, threads)
current_app.logger.info(
'Import of private dump %s done!', private_dump_archive_path)
except IOError as e:
current_app.logger.critical(
'IOError while importing private dump: %s', str(e), exc_info=True)
raise
except SchemaMismatchException as e:
current_app.logger.critical(
'SchemaMismatchException: %s', str(e), exc_info=True)
raise
except Exception as e:
current_app.logger.critical(
'Error while importing private dump: %s', str(e), exc_info=True)
raise
current_app.logger.info(
'Private dump %s imported!', private_dump_archive_path)
if public_dump_archive_path:
current_app.logger.info(
'Importing public dump %s...', public_dump_archive_path)
tables_to_import = PUBLIC_TABLES.copy()
if private_dump_archive_path:
# if the private dump exists and has been imported, we need to
# ignore the sanitized user table in the public dump
# so remove it from tables_to_import
del tables_to_import['"user"']
try:
_import_dump(public_dump_archive_path, 'public',
tables_to_import, threads)
current_app.logger.info(
'Import of Public dump %s done!', public_dump_archive_path)
except IOError as e:
current_app.logger.critical(
'IOError while importing public dump: %s', str(e), exc_info=True)
raise
except SchemaMismatchException as e:
current_app.logger.critical(
'SchemaMismatchException: %s', str(e), exc_info=True)
raise
except Exception as e:
current_app.logger.critical(
'Error while importing public dump: %s', str(e), exc_info=True)
raise
current_app.logger.info(
'Public dump %s imported!', public_dump_archive_path)
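# Illustrative call (not part of the original module; the archive paths are
# hypothetical):
#
#     import_postgres_dump(
#         private_dump_archive_path='/data/listenbrainz-private-dump.tar.xz',
#         public_dump_archive_path='/data/listenbrainz-public-dump.tar.xz',
#         threads=4,
#     )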
def _import_dump(archive_path, dump_type, tables, threads=DUMP_DEFAULT_THREAD_COUNT):
""" Import dump present in passed archive path into postgres db.
Arguments:
archive_path: path to the .tar.xz archive to be imported
dump_type (str): type of dump to be imported ('private' or 'public')
tables: dict of tables present in the archive with table name as key and
columns to import as values
threads (int): the number of threads to use while decompressing, defaults to
db.DUMP_DEFAULT_THREAD_COUNT
"""
pxz_command = ['pxz', '--decompress', '--stdout',
archive_path, '-T{threads}'.format(threads=threads)]
pxz = subprocess.Popen(pxz_command, stdout=subprocess.PIPE)
connection = db.engine.raw_connection()
try:
cursor = connection.cursor()
with tarfile.open(fileobj=pxz.stdout, mode='r|') as tar:
for member in tar:
file_name = member.name.split('/')[-1]
if file_name == 'SCHEMA_SEQUENCE':
# Verifying schema version
schema_seq = int(tar.extractfile(member).read().strip())
if schema_seq != db.SCHEMA_VERSION:
                        raise SchemaMismatchException('Incorrect schema version! Expected: %d, got: %d. '
                                                      'Please get the latest version of the dump.'
% (db.SCHEMA_VERSION, schema_seq))
else:
current_app.logger.info('Schema version verified.')
else:
if file_name in tables:
current_app.logger.info(
'Importing data into %s table...', file_name)
try:
cursor.copy_from(tar.extractfile(member), '%s' % file_name,
columns=tables[file_name])
connection.commit()
except IOError as e:
current_app.logger.critical(
'IOError while extracting table %s: %s', file_name, str(e), exc_info=True)
raise
except Exception as e:
current_app.logger.critical(
'Exception while importing table %s: %s', file_name, str(e), exc_info=True)
raise
current_app.logger.info('Imported table %s', file_name)
finally:
connection.close()
pxz.stdout.close()
try:
_update_sequences()
except Exception as e:
current_app.logger.critical(
'Exception while trying to update sequences: %s', str(e), exc_info=True)
raise
def _update_sequence(seq_name, table_name):
""" Update the specified sequence's value to the maximum value of ID in the table.
Args:
seq_name (str): the name of the sequence to be updated.
table_name (str): the name of the table from which the maximum value is to be retrieved
"""
with db.engine.connect() as connection:
connection.execute(sqlalchemy.text("""
SELECT setval('{seq_name}', max(id))
FROM {table_name}
""".format(seq_name=seq_name, table_name=table_name)))
def _update_sequences():
""" Update all sequences to the maximum value of id in the table.
"""
# user_id_seq
current_app.logger.info('Updating user_id_seq...')
_update_sequence('user_id_seq', '"user"')
# token_id_seq
current_app.logger.info('Updating token_id_seq...')
_update_sequence('api_compat.token_id_seq', 'api_compat.token')
# session_id_seq
current_app.logger.info('Updating session_id_seq...')
_update_sequence('api_compat.session_id_seq', 'api_compat.session')
# artist_id_seq
current_app.logger.info('Updating artist_id_seq...')
_update_sequence('statistics.artist_id_seq', 'statistics.artist')
# release_id_seq
current_app.logger.info('Updating release_id_seq...')
_update_sequence('statistics.release_id_seq', 'statistics.release')
# recording_id_seq
current_app.logger.info('Updating recording_id_seq...')
_update_sequence('statistics.recording_id_seq', 'statistics.recording')
# data_dump_id_seq
current_app.logger.info('Updating data_dump_id_seq...')
_update_sequence('data_dump_id_seq', 'data_dump')
class SchemaMismatchException(Exception):
pass
| Freso/listenbrainz-server | listenbrainz/db/dump.py | Python | gpl-2.0 | 24,816 | 0.001531 |
import asyncio
import logging
import uuid
from collections import defaultdict
from dask.utils import parse_timedelta, stringify
from .client import Client, Future
from .worker import get_client, get_worker
logger = logging.getLogger(__name__)
class QueueExtension:
"""An extension for the scheduler to manage queues
This adds the following routes to the scheduler
* queue_create
* queue_release
* queue_put
* queue_get
    * queue_qsize
"""
def __init__(self, scheduler):
self.scheduler = scheduler
self.queues = dict()
self.client_refcount = dict()
self.future_refcount = defaultdict(lambda: 0)
self.scheduler.handlers.update(
{
"queue_create": self.create,
"queue_put": self.put,
"queue_get": self.get,
"queue_qsize": self.qsize,
}
)
self.scheduler.stream_handlers.update(
{"queue-future-release": self.future_release, "queue_release": self.release}
)
self.scheduler.extensions["queues"] = self
def create(self, name=None, client=None, maxsize=0):
logger.debug(f"Queue name: {name}")
if name not in self.queues:
self.queues[name] = asyncio.Queue(maxsize=maxsize)
self.client_refcount[name] = 1
else:
self.client_refcount[name] += 1
def release(self, name=None, client=None):
if name not in self.queues:
return
self.client_refcount[name] -= 1
if self.client_refcount[name] == 0:
del self.client_refcount[name]
futures = self.queues[name]._queue
del self.queues[name]
keys = [d["value"] for d in futures if d["type"] == "Future"]
if keys:
self.scheduler.client_releases_keys(keys=keys, client="queue-%s" % name)
async def put(self, name=None, key=None, data=None, client=None, timeout=None):
if key is not None:
record = {"type": "Future", "value": key}
self.future_refcount[name, key] += 1
self.scheduler.client_desires_keys(keys=[key], client="queue-%s" % name)
else:
record = {"type": "msgpack", "value": data}
await asyncio.wait_for(self.queues[name].put(record), timeout=timeout)
def future_release(self, name=None, key=None, client=None):
self.future_refcount[name, key] -= 1
if self.future_refcount[name, key] == 0:
self.scheduler.client_releases_keys(keys=[key], client="queue-%s" % name)
del self.future_refcount[name, key]
async def get(self, name=None, client=None, timeout=None, batch=False):
def process(record):
"""Add task status if known"""
if record["type"] == "Future":
record = record.copy()
key = record["value"]
ts = self.scheduler.tasks.get(key)
state = ts.state if ts is not None else "lost"
record["state"] = state
if state == "erred":
record["exception"] = ts.exception_blame.exception
record["traceback"] = ts.exception_blame.traceback
return record
if batch:
q = self.queues[name]
out = []
if batch is True:
while not q.empty():
record = await q.get()
out.append(record)
else:
if timeout is not None:
msg = (
"Dask queues don't support simultaneous use of "
"integer batch sizes and timeouts"
)
raise NotImplementedError(msg)
for i in range(batch):
record = await q.get()
out.append(record)
out = [process(o) for o in out]
return out
else:
record = await asyncio.wait_for(self.queues[name].get(), timeout=timeout)
record = process(record)
return record
def qsize(self, name=None, client=None):
return self.queues[name].qsize()
class Queue:
"""Distributed Queue
This allows multiple clients to share futures or small bits of data between
each other with a multi-producer/multi-consumer queue. All metadata is
sequentialized through the scheduler.
Elements of the Queue must be either Futures or msgpack-encodable data
(ints, strings, lists, dicts). All data is sent through the scheduler so
it is wise not to send large objects. To share large objects scatter the
data and share the future instead.
.. warning::
This object is experimental
Parameters
----------
name: string (optional)
Name used by other clients and the scheduler to identify the queue. If
not given, a random name will be generated.
client: Client (optional)
Client used for communication with the scheduler.
If not given, the default global client will be used.
maxsize: int (optional)
Number of items allowed in the queue. If 0 (the default), the queue
size is unbounded.
Examples
--------
>>> from dask.distributed import Client, Queue # doctest: +SKIP
>>> client = Client() # doctest: +SKIP
>>> queue = Queue('x') # doctest: +SKIP
>>> future = client.submit(f, x) # doctest: +SKIP
>>> queue.put(future) # doctest: +SKIP
See Also
--------
Variable: shared variable between clients
"""
def __init__(self, name=None, client=None, maxsize=0):
try:
self.client = client or Client.current()
except ValueError:
            # No default client available; fall back to the worker's client
self.client = get_worker().client
self.name = name or "queue-" + uuid.uuid4().hex
self.maxsize = maxsize
if self.client.asynchronous:
self._started = asyncio.ensure_future(self._start())
else:
self.client.sync(self._start)
async def _start(self):
await self.client.scheduler.queue_create(name=self.name, maxsize=self.maxsize)
return self
def __await__(self):
if hasattr(self, "_started"):
return self._started.__await__()
else:
async def _():
return self
return _().__await__()
async def _put(self, value, timeout=None):
if isinstance(value, Future):
await self.client.scheduler.queue_put(
key=stringify(value.key), timeout=timeout, name=self.name
)
else:
await self.client.scheduler.queue_put(
data=value, timeout=timeout, name=self.name
)
def put(self, value, timeout=None, **kwargs):
"""Put data into the queue
Parameters
----------
timeout : number or string or timedelta, optional
Time in seconds to wait before timing out.
Instead of number of seconds, it is also possible to specify
a timedelta in string format, e.g. "200ms".
"""
timeout = parse_timedelta(timeout)
return self.client.sync(self._put, value, timeout=timeout, **kwargs)
def get(self, timeout=None, batch=False, **kwargs):
"""Get data from the queue
Parameters
----------
timeout : number or string or timedelta, optional
Time in seconds to wait before timing out.
Instead of number of seconds, it is also possible to specify
a timedelta in string format, e.g. "200ms".
batch : boolean, int (optional)
If True then return all elements currently waiting in the queue.
            If an integer then return that many elements from the queue.
            If False (default) then return one item at a time.
"""
timeout = parse_timedelta(timeout)
return self.client.sync(self._get, timeout=timeout, batch=batch, **kwargs)
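    # Illustrative batch retrieval (not part of the original module; assumes a
    # running client and a Queue instance named q):
    #
    #     q.put(1); q.put(2); q.put(3)
    #     q.get(batch=2)      # -> [1, 2]
    #     q.get(batch=True)   # -> [3], i.e. everything still waiting
    #
    # Combining a timeout with an integer batch size raises NotImplementedError.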
def qsize(self, **kwargs):
"""Current number of elements in the queue"""
return self.client.sync(self._qsize, **kwargs)
async def _get(self, timeout=None, batch=False):
resp = await self.client.scheduler.queue_get(
timeout=timeout, name=self.name, batch=batch
)
def process(d):
if d["type"] == "Future":
value = Future(d["value"], self.client, inform=True, state=d["state"])
if d["state"] == "erred":
value._state.set_error(d["exception"], d["traceback"])
self.client._send_to_scheduler(
{"op": "queue-future-release", "name": self.name, "key": d["value"]}
)
else:
value = d["value"]
return value
if batch is False:
result = process(resp)
else:
result = list(map(process, resp))
return result
async def _qsize(self):
result = await self.client.scheduler.queue_qsize(name=self.name)
return result
def close(self):
if self.client.status == "running": # TODO: can leave zombie futures
self.client._send_to_scheduler({"op": "queue_release", "name": self.name})
def __getstate__(self):
return (self.name, self.client.scheduler.address)
def __setstate__(self, state):
name, address = state
try:
client = get_client(address)
assert client.scheduler.address == address
except (AttributeError, AssertionError):
client = Client(address, set_as_default=False)
self.__init__(name=name, client=client)
| dask/distributed | distributed/queues.py | Python | bsd-3-clause | 9,788 | 0.001124 |
#! /usr/bin/env python
# This will run as a production ready server if something like eventlet is installed
import argparse
import json
import os
import threading
import time
import sys
from gevent import ssl
from geventwebsocket import WebSocketServer, WebSocketApplication, Resource
from readscale import set_scale
clients = 0
scale = None
class WeightApp(WebSocketApplication):
def setup_scale(self):
global scale
try:
scale = set_scale()
except ValueError:
scale = None
sys.stdout.write("\rPlease ensure that scale is connected and not in use by another process")
sys.stdout.flush()
def on_open(self):
print "Connected!"
global clients
if clients:
clients += 1
return
clients += 1
self.send_weight()
def on_message(self, message, *args, **kwargs):
if message:
print message
def on_close(self, reason):
print 'Disconnected'
global clients
clients -= 1
if reason:
print reason
def send_weight(self, reschedule=0.2):
"""
        Broadcast the current weight on the scale to connected listeners
        :param reschedule: delay in seconds before re-running this method;
            a falsy value disables rescheduling
:return: None
"""
global scale
fakeweight = {
'lbs': 'Please connect scale',
'ozs': 'Please connect scale',
}
if not scale:
self.setup_scale()
if scale:
try:
scale.update()
weight = {
'lbs': scale.pounds,
'ozs': scale.ounces
}
except IOError:
self.setup_scale()
weight = fakeweight
else:
weight = fakeweight
if clients:
self.ws.send(json.dumps(weight))
if reschedule and clients:
threading.Timer(reschedule, self.send_weight).start()
def static_wsgi_app(environ, start_response):
"""
Serve a test page
:param environ:
:param start_response:
:return:
"""
    start_response("200 OK", [('Content-Type', 'text/html')])
with open("templates/index.html", 'r') as f:
retval = [bytes(line) for line in f.readlines()]
return retval
def parse_args():
"""
Parse cmd line arguments
:return: arguments
"""
parser = argparse.ArgumentParser(description='Serve USB scale weights over WebSockets')
parser.add_argument('-k', '--key', help='Server private key for SSL')
parser.add_argument('-c', '--cert', help='Server certificate for SSL')
return parser.parse_args()
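# Illustrative invocations (not part of the original script; the key and
# certificate paths are examples):
#
#     python scaleServer.py                                # plain ws:// on localhost:8000
#     python scaleServer.py -k server.key -c server.crt    # serve over TLS (wss://)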
def validate_file(_file):
"""
Check to see if a file exists
:param _file: path to file
    :return: True if the file exists; raises RuntimeError if it does not
"""
if not os.path.isfile(_file):
raise RuntimeError("The file provided does not exist! {}".format(_file))
return True
if __name__ == '__main__':
args = parse_args()
server_args = []
server_kwargs = dict()
try:
scale = set_scale()
except ValueError:
print "ERROR: Unable to connect to the scale!!"
scale = None
if not args.cert and not args.key:
pass
elif validate_file(args.cert) and validate_file(args.key):
server_kwargs.update({'keyfile': args.key,
'certfile': args.cert})
server_args.append(('localhost', 8000))
server_args.append(
Resource([
('/', static_wsgi_app),
('/data', WeightApp)
])
)
WebSocketServer(*server_args, **server_kwargs).serve_forever()
| sparkfun/usb-scale | scaleServer.py | Python | gpl-3.0 | 3,755 | 0.001065 |
from twisted.internet.defer import inlineCallbacks, fail, succeed
from globaleaks import models
from globaleaks.orm import transact
from globaleaks.tests import helpers
from globaleaks.jobs.delivery_sched import DeliverySchedule
from globaleaks.jobs.notification_sched import NotificationSchedule, MailGenerator
class TestNotificationSchedule(helpers.TestGLWithPopulatedDB):
@inlineCallbacks
def setUp(self):
yield helpers.TestGLWithPopulatedDB.setUp(self)
yield self.perform_full_submission_actions()
@transact
def get_scheduled_email_count(self, store):
return store.find(models.Mail).count()
@inlineCallbacks
def test_notification_schedule_success(self):
count = yield self.get_scheduled_email_count()
self.assertEqual(count, 0)
yield DeliverySchedule().run()
notification_schedule = NotificationSchedule()
notification_schedule.skip_sleep = True
yield notification_schedule.run()
count = yield self.get_scheduled_email_count()
self.assertEqual(count, 0)
@inlineCallbacks
def test_notification_schedule_failure(self):
count = yield self.get_scheduled_email_count()
self.assertEqual(count, 0)
yield DeliverySchedule().run()
notification_schedule = NotificationSchedule()
notification_schedule.skip_sleep = True
def sendmail(x, y, z):
return fail(True)
notification_schedule.sendmail = sendmail
for i in range(0, 10):
yield notification_schedule.run()
count = yield self.get_scheduled_email_count()
self.assertEqual(count, 40)
yield notification_schedule.run()
count = yield self.get_scheduled_email_count()
self.assertEqual(count, 0)
| vodkina/GlobaLeaks | backend/globaleaks/tests/jobs/test_notification_sched.py | Python | agpl-3.0 | 1,811 | 0.000552 |
#
# Copyright 2014 Red Hat, Inc
#
# Author: Chris Dent <chdent@redhat.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Sample data for test_ipmi.
This data is provided as a sample of the data expected from the ipmitool
driver in the Ironic project, which is the publisher of the notifications
being tested.
"""
TEMPERATURE_DATA = {
'DIMM GH VR Temp (0x3b)': {
'Status': 'ok',
'Deassertions Enabled': 'unc+ ucr+ unr+',
'Sensor Reading': '26 (+/- 0.500) degrees C',
'Entity ID': '20.6 (Power Module)',
'Assertions Enabled': 'unc+ ucr+ unr+',
'Positive Hysteresis': '4.000',
'Assertion Events': '',
'Upper non-critical': '95.000',
'Event Message Control': 'Per-threshold',
'Upper non-recoverable': '105.000',
'Normal Maximum': '112.000',
'Maximum sensor range': 'Unspecified',
'Sensor Type (Analog)': 'Temperature',
'Readable Thresholds': 'unc ucr unr',
'Negative Hysteresis': 'Unspecified',
'Threshold Read Mask': 'unc ucr unr',
'Upper critical': '100.000',
'Sensor ID': 'DIMM GH VR Temp (0x3b)',
'Settable Thresholds': '',
'Minimum sensor range': 'Unspecified',
'Nominal Reading': '16.000'
},
'CPU1 VR Temp (0x36)': {
'Status': 'ok',
'Deassertions Enabled': 'unc+ ucr+ unr+',
'Sensor Reading': '32 (+/- 0.500) degrees C',
'Entity ID': '20.1 (Power Module)',
'Assertions Enabled': 'unc+ ucr+ unr+',
'Positive Hysteresis': '4.000',
'Assertion Events': '',
'Upper non-critical': '95.000',
'Event Message Control': 'Per-threshold',
'Upper non-recoverable': '105.000',
'Normal Maximum': '112.000',
'Maximum sensor range': 'Unspecified',
'Sensor Type (Analog)': 'Temperature',
'Readable Thresholds': 'unc ucr unr',
'Negative Hysteresis': 'Unspecified',
'Threshold Read Mask': 'unc ucr unr',
'Upper critical': '100.000',
'Sensor ID': 'CPU1 VR Temp (0x36)',
'Settable Thresholds': '',
'Minimum sensor range': 'Unspecified',
'Nominal Reading': '16.000'
},
'DIMM EF VR Temp (0x3a)': {
'Status': 'ok',
'Deassertions Enabled': 'unc+ ucr+ unr+',
'Sensor Reading': '26 (+/- 0.500) degrees C',
'Entity ID': '20.5 (Power Module)',
'Assertions Enabled': 'unc+ ucr+ unr+',
'Positive Hysteresis': '4.000',
'Assertion Events': '',
'Upper non-critical': '95.000',
'Event Message Control': 'Per-threshold',
'Upper non-recoverable': '105.000',
'Normal Maximum': '112.000',
'Maximum sensor range': 'Unspecified',
'Sensor Type (Analog)': 'Temperature',
'Readable Thresholds': 'unc ucr unr',
'Negative Hysteresis': 'Unspecified',
'Threshold Read Mask': 'unc ucr unr',
'Upper critical': '100.000',
'Sensor ID': 'DIMM EF VR Temp (0x3a)',
'Settable Thresholds': '',
'Minimum sensor range': 'Unspecified',
'Nominal Reading': '16.000'
},
'CPU2 VR Temp (0x37)': {
'Status': 'ok',
'Deassertions Enabled': 'unc+ ucr+ unr+',
'Sensor Reading': '31 (+/- 0.500) degrees C',
'Entity ID': '20.2 (Power Module)',
'Assertions Enabled': 'unc+ ucr+ unr+',
'Positive Hysteresis': '4.000',
'Assertion Events': '',
'Upper non-critical': '95.000',
'Event Message Control': 'Per-threshold',
'Upper non-recoverable': '105.000',
'Normal Maximum': '112.000',
'Maximum sensor range': 'Unspecified',
'Sensor Type (Analog)': 'Temperature',
'Readable Thresholds': 'unc ucr unr',
'Negative Hysteresis': 'Unspecified',
'Threshold Read Mask': 'unc ucr unr',
'Upper critical': '100.000',
'Sensor ID': 'CPU2 VR Temp (0x37)',
'Settable Thresholds': '',
'Minimum sensor range': 'Unspecified',
'Nominal Reading': '16.000'
},
'Ambient Temp (0x32)': {
'Status': 'ok',
'Sensor Reading': '25 (+/- 0) degrees C',
'Entity ID': '12.1 (Front Panel Board)',
'Assertions Enabled': 'unc+ ucr+ unr+',
'Event Message Control': 'Per-threshold',
'Assertion Events': '',
'Upper non-critical': '43.000',
'Deassertions Enabled': 'unc+ ucr+ unr+',
'Upper non-recoverable': '50.000',
'Positive Hysteresis': '4.000',
'Maximum sensor range': 'Unspecified',
'Sensor Type (Analog)': 'Temperature',
'Readable Thresholds': 'unc ucr unr',
'Negative Hysteresis': 'Unspecified',
'Threshold Read Mask': 'unc ucr unr',
'Upper critical': '46.000',
'Sensor ID': 'Ambient Temp (0x32)',
'Settable Thresholds': '',
'Minimum sensor range': 'Unspecified',
'Nominal Reading': '25.000'
},
'Mezz Card Temp (0x35)': {
'Status': 'Disabled',
'Sensor Reading': 'Disabled',
'Entity ID': '44.1 (I/O Module)',
'Event Message Control': 'Per-threshold',
'Upper non-critical': '70.000',
'Upper non-recoverable': '85.000',
'Positive Hysteresis': '4.000',
'Maximum sensor range': 'Unspecified',
'Sensor Type (Analog)': 'Temperature',
'Readable Thresholds': 'unc ucr unr',
'Negative Hysteresis': 'Unspecified',
'Threshold Read Mask': 'unc ucr unr',
'Upper critical': '80.000',
'Sensor ID': 'Mezz Card Temp (0x35)',
'Settable Thresholds': '',
'Minimum sensor range': 'Unspecified',
'Nominal Reading': '25.000'
},
'PCH Temp (0x3c)': {
'Status': 'ok',
'Deassertions Enabled': 'unc+ ucr+ unr+',
'Sensor Reading': '46 (+/- 0.500) degrees C',
'Entity ID': '45.1 (Processor/IO Module)',
'Assertions Enabled': 'unc+ ucr+ unr+',
'Positive Hysteresis': '4.000',
'Assertion Events': '',
'Upper non-critical': '93.000',
'Event Message Control': 'Per-threshold',
'Upper non-recoverable': '103.000',
'Normal Maximum': '112.000',
'Maximum sensor range': 'Unspecified',
'Sensor Type (Analog)': 'Temperature',
'Readable Thresholds': 'unc ucr unr',
'Negative Hysteresis': 'Unspecified',
'Threshold Read Mask': 'unc ucr unr',
'Upper critical': '98.000',
'Sensor ID': 'PCH Temp (0x3c)',
'Settable Thresholds': '',
'Minimum sensor range': 'Unspecified',
'Nominal Reading': '16.000'
},
'DIMM CD VR Temp (0x39)': {
'Status': 'ok',
'Deassertions Enabled': 'unc+ ucr+ unr+',
'Sensor Reading': '27 (+/- 0.500) degrees C',
'Entity ID': '20.4 (Power Module)',
'Assertions Enabled': 'unc+ ucr+ unr+',
'Positive Hysteresis': '4.000',
'Assertion Events': '',
'Upper non-critical': '95.000',
'Event Message Control': 'Per-threshold',
'Upper non-recoverable': '105.000',
'Normal Maximum': '112.000',
'Maximum sensor range': 'Unspecified',
'Sensor Type (Analog)': 'Temperature',
'Readable Thresholds': 'unc ucr unr',
'Negative Hysteresis': 'Unspecified',
'Threshold Read Mask': 'unc ucr unr',
'Upper critical': '100.000',
'Sensor ID': 'DIMM CD VR Temp (0x39)',
'Settable Thresholds': '',
'Minimum sensor range': 'Unspecified',
'Nominal Reading': '16.000'
},
'PCI Riser 2 Temp (0x34)': {
'Status': 'ok',
'Deassertions Enabled': 'unc+ ucr+ unr+',
'Sensor Reading': '30 (+/- 0) degrees C',
'Entity ID': '16.2 (System Internal Expansion Board)',
'Assertions Enabled': 'unc+ ucr+ unr+',
'Positive Hysteresis': '4.000',
'Assertion Events': '',
'Upper non-critical': '70.000',
'Event Message Control': 'Per-threshold',
'Upper non-recoverable': '85.000',
'Normal Maximum': '112.000',
'Maximum sensor range': 'Unspecified',
'Sensor Type (Analog)': 'Temperature',
'Readable Thresholds': 'unc ucr unr',
'Negative Hysteresis': 'Unspecified',
'Threshold Read Mask': 'unc ucr unr',
'Upper critical': '80.000',
'Sensor ID': 'PCI Riser 2 Temp (0x34)',
'Settable Thresholds': '',
'Minimum sensor range': 'Unspecified',
'Nominal Reading': '16.000'
},
'DIMM AB VR Temp (0x38)': {
'Status': 'ok',
'Deassertions Enabled': 'unc+ ucr+ unr+',
'Sensor Reading': '28 (+/- 0.500) degrees C',
'Entity ID': '20.3 (Power Module)',
'Assertions Enabled': 'unc+ ucr+ unr+',
'Positive Hysteresis': '4.000',
'Assertion Events': '',
'Upper non-critical': '95.000',
'Event Message Control': 'Per-threshold',
'Upper non-recoverable': '105.000',
'Normal Maximum': '112.000',
'Maximum sensor range': 'Unspecified',
'Sensor Type (Analog)': 'Temperature',
'Readable Thresholds': 'unc ucr unr',
'Negative Hysteresis': 'Unspecified',
'Threshold Read Mask': 'unc ucr unr',
'Upper critical': '100.000',
'Sensor ID': 'DIMM AB VR Temp (0x38)',
'Settable Thresholds': '',
'Minimum sensor range': 'Unspecified',
'Nominal Reading': '16.000'
},
'PCI Riser 1 Temp (0x33)': {
'Status': 'ok',
'Deassertions Enabled': 'unc+ ucr+ unr+',
'Sensor Reading': '38 (+/- 0) degrees C',
'Entity ID': '16.1 (System Internal Expansion Board)',
'Assertions Enabled': 'unc+ ucr+ unr+',
'Positive Hysteresis': '4.000',
'Assertion Events': '',
'Upper non-critical': '70.000',
'Event Message Control': 'Per-threshold',
'Upper non-recoverable': '85.000',
'Normal Maximum': '112.000',
'Maximum sensor range': 'Unspecified',
'Sensor Type (Analog)': 'Temperature',
'Readable Thresholds': 'unc ucr unr',
'Negative Hysteresis': 'Unspecified',
'Threshold Read Mask': 'unc ucr unr',
'Upper critical': '80.000',
'Sensor ID': 'PCI Riser 1 Temp (0x33)',
'Settable Thresholds': '',
'Minimum sensor range': 'Unspecified',
'Nominal Reading': '16.000'
},
}
CURRENT_DATA = {
'Avg Power (0x2e)': {
'Status': 'ok',
'Sensor Reading': '130 (+/- 0) Watts',
'Entity ID': '21.0 (Power Management)',
'Assertions Enabled': '',
'Event Message Control': 'Per-threshold',
'Readable Thresholds': 'No Thresholds',
'Positive Hysteresis': 'Unspecified',
'Sensor Type (Analog)': 'Current',
'Negative Hysteresis': 'Unspecified',
'Maximum sensor range': 'Unspecified',
'Sensor ID': 'Avg Power (0x2e)',
'Assertion Events': '',
'Minimum sensor range': '2550.000',
'Settable Thresholds': 'No Thresholds'
}
}
FAN_DATA = {
'Fan 4A Tach (0x46)': {
'Status': 'ok',
'Sensor Reading': '6900 (+/- 0) RPM',
'Entity ID': '29.4 (Fan Device)',
'Assertions Enabled': 'lcr-',
'Normal Minimum': '2580.000',
'Positive Hysteresis': '120.000',
'Assertion Events': '',
'Event Message Control': 'Per-threshold',
'Normal Maximum': '15300.000',
'Deassertions Enabled': 'lcr-',
'Sensor Type (Analog)': 'Fan',
'Lower critical': '1920.000',
'Negative Hysteresis': '120.000',
'Threshold Read Mask': 'lcr',
'Maximum sensor range': 'Unspecified',
'Readable Thresholds': 'lcr',
'Sensor ID': 'Fan 4A Tach (0x46)',
'Settable Thresholds': '',
'Minimum sensor range': 'Unspecified',
'Nominal Reading': '4020.000'
},
'Fan 5A Tach (0x48)': {
'Status': 'ok',
'Sensor Reading': '7140 (+/- 0) RPM',
'Entity ID': '29.5 (Fan Device)',
'Assertions Enabled': 'lcr-',
'Normal Minimum': '2580.000',
'Positive Hysteresis': '120.000',
'Assertion Events': '',
'Event Message Control': 'Per-threshold',
'Normal Maximum': '15300.000',
'Deassertions Enabled': 'lcr-',
'Sensor Type (Analog)': 'Fan',
'Lower critical': '1920.000',
'Negative Hysteresis': '120.000',
'Threshold Read Mask': 'lcr',
'Maximum sensor range': 'Unspecified',
'Readable Thresholds': 'lcr',
'Sensor ID': 'Fan 5A Tach (0x48)',
'Settable Thresholds': '',
'Minimum sensor range': 'Unspecified',
'Nominal Reading': '4020.000'
},
'Fan 3A Tach (0x44)': {
'Status': 'ok',
'Sensor Reading': '6900 (+/- 0) RPM',
'Entity ID': '29.3 (Fan Device)',
'Assertions Enabled': 'lcr-',
'Normal Minimum': '2580.000',
'Positive Hysteresis': '120.000',
'Assertion Events': '',
'Event Message Control': 'Per-threshold',
'Normal Maximum': '15300.000',
'Deassertions Enabled': 'lcr-',
'Sensor Type (Analog)': 'Fan',
'Lower critical': '1920.000',
'Negative Hysteresis': '120.000',
'Threshold Read Mask': 'lcr',
'Maximum sensor range': 'Unspecified',
'Readable Thresholds': 'lcr',
'Sensor ID': 'Fan 3A Tach (0x44)',
'Settable Thresholds': '',
'Minimum sensor range': 'Unspecified',
'Nominal Reading': '4020.000'
},
'Fan 1A Tach (0x40)': {
'Status': 'ok',
'Sensor Reading': '6960 (+/- 0) RPM',
'Entity ID': '29.1 (Fan Device)',
'Assertions Enabled': 'lcr-',
'Normal Minimum': '2580.000',
'Positive Hysteresis': '120.000',
'Assertion Events': '',
'Event Message Control': 'Per-threshold',
'Normal Maximum': '15300.000',
'Deassertions Enabled': 'lcr-',
'Sensor Type (Analog)': 'Fan',
'Lower critical': '1920.000',
'Negative Hysteresis': '120.000',
'Threshold Read Mask': 'lcr',
'Maximum sensor range': 'Unspecified',
'Readable Thresholds': 'lcr',
'Sensor ID': 'Fan 1A Tach (0x40)',
'Settable Thresholds': '',
'Minimum sensor range': 'Unspecified',
'Nominal Reading': '4020.000'
},
'Fan 3B Tach (0x45)': {
'Status': 'ok',
'Sensor Reading': '7104 (+/- 0) RPM',
'Entity ID': '29.3 (Fan Device)',
'Assertions Enabled': 'lcr-',
'Normal Minimum': '2752.000',
'Positive Hysteresis': '128.000',
'Assertion Events': '',
'Event Message Control': 'Per-threshold',
'Normal Maximum': '16320.000',
'Deassertions Enabled': 'lcr-',
'Sensor Type (Analog)': 'Fan',
'Lower critical': '1920.000',
'Negative Hysteresis': '128.000',
'Threshold Read Mask': 'lcr',
'Maximum sensor range': 'Unspecified',
'Readable Thresholds': 'lcr',
'Sensor ID': 'Fan 3B Tach (0x45)',
'Settable Thresholds': '',
'Minimum sensor range': 'Unspecified',
'Nominal Reading': '3968.000'
},
'Fan 2A Tach (0x42)': {
'Status': 'ok',
'Sensor Reading': '7080 (+/- 0) RPM',
'Entity ID': '29.2 (Fan Device)',
'Assertions Enabled': 'lcr-',
'Normal Minimum': '2580.000',
'Positive Hysteresis': '120.000',
'Assertion Events': '',
'Event Message Control': 'Per-threshold',
'Normal Maximum': '15300.000',
'Deassertions Enabled': 'lcr-',
'Sensor Type (Analog)': 'Fan',
'Lower critical': '1920.000',
'Negative Hysteresis': '120.000',
'Threshold Read Mask': 'lcr',
'Maximum sensor range': 'Unspecified',
'Readable Thresholds': 'lcr',
'Sensor ID': 'Fan 2A Tach (0x42)',
'Settable Thresholds': '',
'Minimum sensor range': 'Unspecified',
'Nominal Reading': '4020.000'
},
'Fan 4B Tach (0x47)': {
'Status': 'ok',
'Sensor Reading': '7488 (+/- 0) RPM',
'Entity ID': '29.4 (Fan Device)',
'Assertions Enabled': 'lcr-',
'Normal Minimum': '2752.000',
'Positive Hysteresis': '128.000',
'Assertion Events': '',
'Event Message Control': 'Per-threshold',
'Normal Maximum': '16320.000',
'Deassertions Enabled': 'lcr-',
'Sensor Type (Analog)': 'Fan',
'Lower critical': '1920.000',
'Negative Hysteresis': '128.000',
'Threshold Read Mask': 'lcr',
'Maximum sensor range': 'Unspecified',
'Readable Thresholds': 'lcr',
'Sensor ID': 'Fan 4B Tach (0x47)',
'Settable Thresholds': '',
'Minimum sensor range': 'Unspecified',
'Nominal Reading': '3968.000'
},
'Fan 2B Tach (0x43)': {
'Status': 'ok',
'Sensor Reading': '7168 (+/- 0) RPM',
'Entity ID': '29.2 (Fan Device)',
'Assertions Enabled': 'lcr-',
'Normal Minimum': '2752.000',
'Positive Hysteresis': '128.000',
'Assertion Events': '',
'Event Message Control': 'Per-threshold',
'Normal Maximum': '16320.000',
'Deassertions Enabled': 'lcr-',
'Sensor Type (Analog)': 'Fan',
'Lower critical': '1920.000',
'Negative Hysteresis': '128.000',
'Threshold Read Mask': 'lcr',
'Maximum sensor range': 'Unspecified',
'Readable Thresholds': 'lcr',
'Sensor ID': 'Fan 2B Tach (0x43)',
'Settable Thresholds': '',
'Minimum sensor range': 'Unspecified',
'Nominal Reading': '3968.000'
},
'Fan 5B Tach (0x49)': {
'Status': 'ok',
'Sensor Reading': '7296 (+/- 0) RPM',
'Entity ID': '29.5 (Fan Device)',
'Assertions Enabled': 'lcr-',
'Normal Minimum': '2752.000',
'Positive Hysteresis': '128.000',
'Assertion Events': '',
'Event Message Control': 'Per-threshold',
'Normal Maximum': '16320.000',
'Deassertions Enabled': 'lcr-',
'Sensor Type (Analog)': 'Fan',
'Lower critical': '1920.000',
'Negative Hysteresis': '128.000',
'Threshold Read Mask': 'lcr',
'Maximum sensor range': 'Unspecified',
'Readable Thresholds': 'lcr',
'Sensor ID': 'Fan 5B Tach (0x49)',
'Settable Thresholds': '',
'Minimum sensor range': 'Unspecified',
'Nominal Reading': '3968.000'
},
'Fan 1B Tach (0x41)': {
'Status': 'ok',
'Sensor Reading': '7296 (+/- 0) RPM',
'Entity ID': '29.1 (Fan Device)',
'Assertions Enabled': 'lcr-',
'Normal Minimum': '2752.000',
'Positive Hysteresis': '128.000',
'Assertion Events': '',
'Event Message Control': 'Per-threshold',
'Normal Maximum': '16320.000',
'Deassertions Enabled': 'lcr-',
'Sensor Type (Analog)': 'Fan',
'Lower critical': '1920.000',
'Negative Hysteresis': '128.000',
'Threshold Read Mask': 'lcr',
'Maximum sensor range': 'Unspecified',
'Readable Thresholds': 'lcr',
'Sensor ID': 'Fan 1B Tach (0x41)',
'Settable Thresholds': '',
'Minimum sensor range': 'Unspecified',
'Nominal Reading': '3968.000'
},
'Fan 6B Tach (0x4b)': {
'Status': 'ok',
'Sensor Reading': '7616 (+/- 0) RPM',
'Entity ID': '29.6 (Fan Device)',
'Assertions Enabled': 'lcr-',
'Normal Minimum': '2752.000',
'Positive Hysteresis': '128.000',
'Assertion Events': '',
'Event Message Control': 'Per-threshold',
'Normal Maximum': '16320.000',
'Deassertions Enabled': 'lcr-',
'Sensor Type (Analog)': 'Fan',
'Lower critical': '1920.000',
'Negative Hysteresis': '128.000',
'Threshold Read Mask': 'lcr',
'Maximum sensor range': 'Unspecified',
'Readable Thresholds': 'lcr',
'Sensor ID': 'Fan 6B Tach (0x4b)',
'Settable Thresholds': '',
'Minimum sensor range': 'Unspecified',
'Nominal Reading': '3968.000'
},
'Fan 6A Tach (0x4a)': {
'Status': 'ok',
'Sensor Reading': '7080 (+/- 0) RPM',
'Entity ID': '29.6 (Fan Device)',
'Assertions Enabled': 'lcr-',
'Normal Minimum': '2580.000',
'Positive Hysteresis': '120.000',
'Assertion Events': '',
'Event Message Control': 'Per-threshold',
'Normal Maximum': '15300.000',
'Deassertions Enabled': 'lcr-',
'Sensor Type (Analog)': 'Fan',
'Lower critical': '1920.000',
'Negative Hysteresis': '120.000',
'Threshold Read Mask': 'lcr',
'Maximum sensor range': 'Unspecified',
'Readable Thresholds': 'lcr',
'Sensor ID': 'Fan 6A Tach (0x4a)',
'Settable Thresholds': '',
'Minimum sensor range': 'Unspecified',
'Nominal Reading': '4020.000'
}
}
VOLTAGE_DATA = {
'Planar 12V (0x18)': {
'Status': 'ok',
'Sensor Reading': '12.312 (+/- 0) Volts',
'Entity ID': '7.1 (System Board)',
'Assertions Enabled': 'lcr- ucr+',
'Event Message Control': 'Per-threshold',
'Assertion Events': '',
'Maximum sensor range': 'Unspecified',
'Positive Hysteresis': '0.108',
'Deassertions Enabled': 'lcr- ucr+',
'Sensor Type (Analog)': 'Voltage',
'Lower critical': '10.692',
'Negative Hysteresis': '0.108',
'Threshold Read Mask': 'lcr ucr',
'Upper critical': '13.446',
'Readable Thresholds': 'lcr ucr',
'Sensor ID': 'Planar 12V (0x18)',
'Settable Thresholds': 'lcr ucr',
'Minimum sensor range': 'Unspecified',
'Nominal Reading': '12.042'
},
'Planar 3.3V (0x16)': {
'Status': 'ok',
'Sensor Reading': '3.309 (+/- 0) Volts',
'Entity ID': '7.1 (System Board)',
'Assertions Enabled': 'lcr- ucr+',
'Event Message Control': 'Per-threshold',
'Assertion Events': '',
'Maximum sensor range': 'Unspecified',
'Positive Hysteresis': '0.028',
'Deassertions Enabled': 'lcr- ucr+',
'Sensor Type (Analog)': 'Voltage',
'Lower critical': '3.039',
'Negative Hysteresis': '0.028',
'Threshold Read Mask': 'lcr ucr',
'Upper critical': '3.564',
'Readable Thresholds': 'lcr ucr',
'Sensor ID': 'Planar 3.3V (0x16)',
'Settable Thresholds': 'lcr ucr',
'Minimum sensor range': 'Unspecified',
'Nominal Reading': '3.309'
},
'Planar VBAT (0x1c)': {
'Status': 'ok',
'Sensor Reading': '3.137 (+/- 0) Volts',
'Entity ID': '7.1 (System Board)',
'Assertions Enabled': 'lnc- lcr-',
'Event Message Control': 'Per-threshold',
'Assertion Events': '',
'Readable Thresholds': 'lcr lnc',
'Positive Hysteresis': '0.025',
'Deassertions Enabled': 'lnc- lcr-',
'Sensor Type (Analog)': 'Voltage',
'Lower critical': '2.095',
'Negative Hysteresis': '0.025',
'Lower non-critical': '2.248',
'Maximum sensor range': 'Unspecified',
'Sensor ID': 'Planar VBAT (0x1c)',
'Settable Thresholds': 'lcr lnc',
'Threshold Read Mask': 'lcr lnc',
'Minimum sensor range': 'Unspecified',
'Nominal Reading': '3.010'
},
'Planar 5V (0x17)': {
'Status': 'ok',
'Sensor Reading': '5.062 (+/- 0) Volts',
'Entity ID': '7.1 (System Board)',
'Assertions Enabled': 'lcr- ucr+',
'Event Message Control': 'Per-threshold',
'Assertion Events': '',
'Maximum sensor range': 'Unspecified',
'Positive Hysteresis': '0.045',
'Deassertions Enabled': 'lcr- ucr+',
'Sensor Type (Analog)': 'Voltage',
'Lower critical': '4.475',
'Negative Hysteresis': '0.045',
'Threshold Read Mask': 'lcr ucr',
'Upper critical': '5.582',
'Readable Thresholds': 'lcr ucr',
'Sensor ID': 'Planar 5V (0x17)',
'Settable Thresholds': 'lcr ucr',
'Minimum sensor range': 'Unspecified',
'Nominal Reading': '4.995'
}
}
SENSOR_DATA = {
'message_id': 'f22188ca-c068-47ce-a3e5-0e27ffe234c6',
'publisher_id': 'f23188ca-c068-47ce-a3e5-0e27ffe234c6',
'payload': {
'instance_uuid': 'f11251ax-c568-25ca-4582-0x27add644c6',
'timestamp': '20140223134852',
'node_uuid': 'f4982fd2-2f2b-4bb5-9aff-48aac801d1ad',
'event_type': 'hardware.ipmi.metrics.update',
'payload': {
'Temperature': TEMPERATURE_DATA,
'Current': CURRENT_DATA,
'Fan': FAN_DATA,
'Voltage': VOLTAGE_DATA
}
}
}
EMPTY_PAYLOAD = {
'message_id': 'f22188ca-c068-47ce-a3e5-0e27ffe234c6',
'publisher_id': 'f23188ca-c068-47ce-a3e5-0e27ffe234c6',
'payload': {
'instance_uuid': 'f11251ax-c568-25ca-4582-0x27add644c6',
'timestamp': '20140223134852',
'node_uuid': 'f4982fd2-2f2b-4bb5-9aff-48aac801d1ad',
'event_type': 'hardware.ipmi.metrics.update',
'payload': {
}
}
}
MISSING_SENSOR = {
'message_id': 'f22188ca-c068-47ce-a3e5-0e27ffe234c6',
'publisher_id': 'f23188ca-c068-47ce-a3e5-0e27ffe234c6',
'payload': {
'instance_uuid': 'f11251ax-c568-25ca-4582-0x27add644c6',
'timestamp': '20140223134852',
'node_uuid': 'f4982fd2-2f2b-4bb5-9aff-48aac801d1ad',
'event_type': 'hardware.ipmi.metrics.update',
'payload': {
'Temperature': {
'PCI Riser 1 Temp (0x33)': {
'Status': 'ok',
'Deassertions Enabled': 'unc+ ucr+ unr+',
'Entity ID': '16.1 (System Internal Expansion Board)',
'Assertions Enabled': 'unc+ ucr+ unr+',
'Positive Hysteresis': '4.000',
'Assertion Events': '',
'Upper non-critical': '70.000',
'Event Message Control': 'Per-threshold',
'Upper non-recoverable': '85.000',
'Normal Maximum': '112.000',
'Maximum sensor range': 'Unspecified',
'Sensor Type (Analog)': 'Temperature',
'Readable Thresholds': 'unc ucr unr',
'Negative Hysteresis': 'Unspecified',
'Threshold Read Mask': 'unc ucr unr',
'Upper critical': '80.000',
'Sensor ID': 'PCI Riser 1 Temp (0x33)',
'Settable Thresholds': '',
'Minimum sensor range': 'Unspecified',
'Nominal Reading': '16.000'
},
}
}
}
}
BAD_SENSOR = {
'message_id': 'f22188ca-c068-47ce-a3e5-0e27ffe234c6',
'publisher_id': 'f23188ca-c068-47ce-a3e5-0e27ffe234c6',
'payload': {
'instance_uuid': 'f11251ax-c568-25ca-4582-0x27add644c6',
'timestamp': '20140223134852',
'node_uuid': 'f4982fd2-2f2b-4bb5-9aff-48aac801d1ad',
'event_type': 'hardware.ipmi.metrics.update',
'payload': {
'Temperature': {
'PCI Riser 1 Temp (0x33)': {
'Status': 'ok',
'Deassertions Enabled': 'unc+ ucr+ unr+',
'Sensor Reading': 'some bad stuff',
'Entity ID': '16.1 (System Internal Expansion Board)',
'Assertions Enabled': 'unc+ ucr+ unr+',
'Positive Hysteresis': '4.000',
'Assertion Events': '',
'Upper non-critical': '70.000',
'Event Message Control': 'Per-threshold',
'Upper non-recoverable': '85.000',
'Normal Maximum': '112.000',
'Maximum sensor range': 'Unspecified',
'Sensor Type (Analog)': 'Temperature',
'Readable Thresholds': 'unc ucr unr',
'Negative Hysteresis': 'Unspecified',
'Threshold Read Mask': 'unc ucr unr',
'Upper critical': '80.000',
'Sensor ID': 'PCI Riser 1 Temp (0x33)',
'Settable Thresholds': '',
'Minimum sensor range': 'Unspecified',
'Nominal Reading': '16.000'
},
}
}
}
}
NO_SENSOR_ID = {
'message_id': 'f22188ca-c068-47ce-a3e5-0e27ffe234c6',
'publisher_id': 'f23188ca-c068-47ce-a3e5-0e27ffe234c6',
'payload': {
'instance_uuid': 'f11251ax-c568-25ca-4582-0x27add644c6',
'timestamp': '20140223134852',
'node_uuid': 'f4982fd2-2f2b-4bb5-9aff-48aac801d1ad',
'event_type': 'hardware.ipmi.metrics.update',
'payload': {
'Temperature': {
'PCI Riser 1 Temp (0x33)': {
'Sensor Reading': '26 C',
},
}
}
}
}
NO_NODE_ID = {
'message_id': 'f22188ca-c068-47ce-a3e5-0e27ffe234c6',
'publisher_id': 'f23188ca-c068-47ce-a3e5-0e27ffe234c6',
'payload': {
'instance_uuid': 'f11251ax-c568-25ca-4582-0x27add644c6',
'timestamp': '20140223134852',
'event_type': 'hardware.ipmi.metrics.update',
'payload': {
'Temperature': {
'PCI Riser 1 Temp (0x33)': {
'Sensor Reading': '26 C',
'Sensor ID': 'PCI Riser 1 Temp (0x33)',
},
}
}
}
}
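# Illustrative lookup (not part of the original module): the simulated
# notification nests sensor readings under payload -> payload -> <sensor type>.
#
#     SENSOR_DATA['payload']['payload']['Temperature'][
#         'Ambient Temp (0x32)']['Sensor Reading']
#     # -> '25 (+/- 0) degrees C'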
| ChinaMassClouds/copenstack-server | openstack/src/ceilometer-2014.2.2/ceilometer/tests/ipmi/notifications/ipmi_test_data.py | Python | gpl-2.0 | 30,224 | 0 |
# -*- coding: utf-8 -*-
import os
import re
from django.conf import settings
from django.core import validators
from django.core.exceptions import ValidationError
from django.forms.util import ErrorList
from django.forms.fields import MultiValueField, FilePathField, \
FileField, CharField
from django.utils.translation import ugettext as _
from hybrid_filefield.widgets import FileSelectOrUploadWidget
class FileSelectOrUploadField(MultiValueField):
widget = FileSelectOrUploadWidget
default_error_messages = {
'optional_required': _('At least one value is required.'),
}
def __init__(self, upload_to='', path='', match='',
recursive=False, widget=None, initial=None,
optional=False, *args, **kwargs):
self.upload_to, self.path, self.match, \
self.recursive, self.initial, self.optional = \
upload_to, path, match, recursive, initial, optional
self.max_length = kwargs.pop('max_length', None)
        self.required = kwargs.get('required', True)
fields = (
FilePathField(
path=self.path,
match=self.match,
recursive=self.recursive,
initial=self.initial,
required=self.required,
),
FileField(
max_length=self.max_length,
initial=self.initial,
required=self.required,
),
)
widget = widget or self.widget
if isinstance(widget, type):
widget = widget()
self.widget = widget
super(FileSelectOrUploadField, self).__init__(
fields,
widget=self.widget,
*args, **kwargs
)
self.choices = [('', 'Use upload')] + fields[0].choices
self.widget.is_required = self.required
def _get_choices(self):
return self._choices
def _set_choices(self, value):
self._choices = self.widget.choices = list(value)
choices = property(_get_choices, _set_choices)
def clean(self, value):
clean_data = []
errors = ErrorList()
if value in validators.EMPTY_VALUES and self.required:
raise ValidationError(self.error_messages['required'])
for i, field in enumerate(self.fields):
try:
field_value = value[i]
except IndexError:
field_value = None
if field_value in validators.EMPTY_VALUES:
if (self.required and not self.optional):
raise ValidationError(self.error_messages['required'])
try:
clean_data.append(field.clean(field_value))
except ValidationError, e:
errors.extend(e.messages)
            if i == len(self.fields) - 1 and len(clean_data) == 0:
raise ValidationError(self.error_messages['optional_required'])
if errors:
raise ValidationError(errors)
return self.compress(clean_data)
def compress(self, data_list):
if len(data_list) > 1 and data_list[1] not in validators.EMPTY_VALUES:
return data_list[1]
elif len(data_list) > 0 and data_list[0] not in validators.EMPTY_VALUES:
return data_list[0]
return None
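# Illustrative usage (not part of the original module; the form, paths and
# pattern below are hypothetical):
#
#     from django import forms
#
#     class DocumentForm(forms.Form):
#         attachment = FileSelectOrUploadField(
#             upload_to='documents/',
#             path='/srv/media/documents',
#             match=r'.*\.pdf$',
#             optional=True,   # allow either selecting an existing file or uploading
#         )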
| danieljb/django-hybrid-filefield | hybrid_filefield/forms.py | Python | gpl-3.0 | 3,393 | 0.001768 |
from .position import position
class position_nudge(position):
"""
Nudge points
Useful to nudge labels away from the points
being labels.
Parameters
----------
x : float
Horizontal nudge
y : float
Vertical nudge
"""
def __init__(self, x=0, y=0):
self.params = {'x': x, 'y': y}
@classmethod
def compute_layer(cls, data, params, layout):
trans_x, trans_y = None, None
if params['x']:
def trans_x(x):
return x + params['x']
if params['y']:
def trans_y(y):
return y + params['y']
return cls.transform_position(data, trans_x, trans_y)
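# Illustrative usage (not part of the original module; `df` and its columns are
# assumed to exist):
#
#     from plotnine import ggplot, aes, geom_point, geom_text
#     (ggplot(df, aes('wt', 'mpg', label='name'))
#      + geom_point()
#      + geom_text(position=position_nudge(y=0.2)))   # nudge labels above points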
| has2k1/plotnine | plotnine/positions/position_nudge.py | Python | gpl-2.0 | 698 | 0 |
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'ThreadToGroup.visibility'
db.add_column('askbot_thread_groups', 'visibility',
self.gf('django.db.models.fields.SmallIntegerField')(default=1),
keep_default=False)
def backwards(self, orm):
# Deleting field 'ThreadToGroup.visibility'
db.delete_column('askbot_thread_groups', 'visibility')
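    # Illustrative usage (not part of the original migration): South applies
    # this change with `./manage.py migrate askbot`; passing an earlier
    # migration number to the same command reverts it via backwards().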
models = {
'askbot.activity': {
'Meta': {'object_name': 'Activity', 'db_table': "u'activity'"},
'active_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'activity_type': ('django.db.models.fields.SmallIntegerField', [], {}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_auditted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Post']", 'null': 'True'}),
'receiving_users': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'received_activity'", 'symmetrical': 'False', 'to': "orm['auth.User']"}),
'recipients': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'incoming_activity'", 'symmetrical': 'False', 'through': "orm['askbot.ActivityAuditStatus']", 'to': "orm['auth.User']"}),
'summary': ('django.db.models.fields.TextField', [], {'default': "''"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'askbot.activityauditstatus': {
'Meta': {'unique_together': "(('user', 'activity'),)", 'object_name': 'ActivityAuditStatus'},
'activity': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Activity']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'status': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'askbot.anonymousanswer': {
'Meta': {'object_name': 'AnonymousAnswer'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip_addr': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'anonymous_answers'", 'to': "orm['askbot.Post']"}),
'session_key': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '180'}),
'text': ('django.db.models.fields.TextField', [], {}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'askbot.anonymousquestion': {
'Meta': {'object_name': 'AnonymousQuestion'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip_addr': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
'is_anonymous': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'session_key': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '180'}),
'tagnames': ('django.db.models.fields.CharField', [], {'max_length': '125'}),
'text': ('django.db.models.fields.TextField', [], {}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'askbot.askwidget': {
'Meta': {'object_name': 'AskWidget'},
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Group']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'include_text_field': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'inner_style': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'outer_style': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Tag']", 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'askbot.award': {
'Meta': {'object_name': 'Award', 'db_table': "u'award'"},
'awarded_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'badge': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'award_badge'", 'to': "orm['askbot.BadgeData']"}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'notified': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'award_user'", 'to': "orm['auth.User']"})
},
'askbot.badgedata': {
'Meta': {'ordering': "('slug',)", 'object_name': 'BadgeData'},
'awarded_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'awarded_to': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'badges'", 'symmetrical': 'False', 'through': "orm['askbot.Award']", 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'})
},
'askbot.draftanswer': {
'Meta': {'object_name': 'DraftAnswer'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'draft_answers'", 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'text': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'thread': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'draft_answers'", 'to': "orm['askbot.Thread']"})
},
'askbot.draftquestion': {
'Meta': {'object_name': 'DraftQuestion'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'tagnames': ('django.db.models.fields.CharField', [], {'max_length': '125', 'null': 'True'}),
'text': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True'})
},
'askbot.emailfeedsetting': {
'Meta': {'unique_together': "(('subscriber', 'feed_type'),)", 'object_name': 'EmailFeedSetting'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'feed_type': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'frequency': ('django.db.models.fields.CharField', [], {'default': "'n'", 'max_length': '8'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'reported_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'subscriber': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'notification_subscriptions'", 'to': "orm['auth.User']"})
},
'askbot.favoritequestion': {
'Meta': {'object_name': 'FavoriteQuestion', 'db_table': "u'favorite_question'"},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'thread': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Thread']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'user_favorite_questions'", 'to': "orm['auth.User']"})
},
'askbot.group': {
'Meta': {'object_name': 'Group', '_ormbases': ['auth.Group']},
'group_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.Group']", 'unique': 'True', 'primary_key': 'True'}),
'logo_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True'}),
'moderate_answers_to_enquirers': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'moderate_email': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'openness': ('django.db.models.fields.SmallIntegerField', [], {'default': '2'}),
'preapproved_email_domains': ('django.db.models.fields.TextField', [], {'default': "''", 'null': 'True', 'blank': 'True'}),
'preapproved_emails': ('django.db.models.fields.TextField', [], {'default': "''", 'null': 'True', 'blank': 'True'})
},
'askbot.groupmembership': {
'Meta': {'object_name': 'GroupMembership', '_ormbases': ['auth.AuthUserGroups']},
'authusergroups_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.AuthUserGroups']", 'unique': 'True', 'primary_key': 'True'}),
'level': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'})
},
'askbot.markedtag': {
'Meta': {'object_name': 'MarkedTag'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'reason': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'user_selections'", 'to': "orm['askbot.Tag']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'tag_selections'", 'to': "orm['auth.User']"})
},
'askbot.post': {
'Meta': {'object_name': 'Post'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'approved': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'posts'", 'to': "orm['auth.User']"}),
'comment_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'deleted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'deleted_posts'", 'null': 'True', 'to': "orm['auth.User']"}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'group_posts'", 'symmetrical': 'False', 'through': "orm['askbot.PostToGroup']", 'to': "orm['askbot.Group']"}),
'html': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_anonymous': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_edited_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'last_edited_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'last_edited_posts'", 'null': 'True', 'to': "orm['auth.User']"}),
'locked': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'locked_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'locked_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'locked_posts'", 'null': 'True', 'to': "orm['auth.User']"}),
'offensive_flag_count': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'old_answer_id': ('django.db.models.fields.PositiveIntegerField', [], {'default': 'None', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'old_comment_id': ('django.db.models.fields.PositiveIntegerField', [], {'default': 'None', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'old_question_id': ('django.db.models.fields.PositiveIntegerField', [], {'default': 'None', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'comments'", 'null': 'True', 'to': "orm['askbot.Post']"}),
'post_type': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '180'}),
'text': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'thread': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'posts'", 'null': 'True', 'blank': 'True', 'to': "orm['askbot.Thread']"}),
'vote_down_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'vote_up_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'wikified_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
},
'askbot.postflagreason': {
'Meta': {'object_name': 'PostFlagReason'},
'added_at': ('django.db.models.fields.DateTimeField', [], {}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'details': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'post_reject_reasons'", 'to': "orm['askbot.Post']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
'askbot.postrevision': {
'Meta': {'ordering': "('-revision',)", 'unique_together': "(('post', 'revision'),)", 'object_name': 'PostRevision'},
'approved': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'approved_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'approved_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'postrevisions'", 'to': "orm['auth.User']"}),
'by_email': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'email_address': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_anonymous': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'post': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'revisions'", 'null': 'True', 'to': "orm['askbot.Post']"}),
'revised_at': ('django.db.models.fields.DateTimeField', [], {}),
'revision': ('django.db.models.fields.PositiveIntegerField', [], {}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '300', 'blank': 'True'}),
'tagnames': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '125', 'blank': 'True'}),
'text': ('django.db.models.fields.TextField', [], {}),
'title': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '300', 'blank': 'True'})
},
'askbot.posttogroup': {
'Meta': {'unique_together': "(('post', 'group'),)", 'object_name': 'PostToGroup', 'db_table': "'askbot_post_groups'"},
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'post': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Post']"})
},
'askbot.questionview': {
'Meta': {'object_name': 'QuestionView'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'viewed'", 'to': "orm['askbot.Post']"}),
'when': ('django.db.models.fields.DateTimeField', [], {}),
'who': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'question_views'", 'to': "orm['auth.User']"})
},
'askbot.questionwidget': {
'Meta': {'object_name': 'QuestionWidget'},
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Group']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'order_by': ('django.db.models.fields.CharField', [], {'default': "'-added_at'", 'max_length': '18'}),
'question_number': ('django.db.models.fields.PositiveIntegerField', [], {'default': '7'}),
'search_query': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'style': ('django.db.models.fields.TextField', [], {'default': '"\\n@import url(\'http://fonts.googleapis.com/css?family=Yanone+Kaffeesatz:300,400,700\');\\nbody {\\n overflow: hidden;\\n}\\n\\n#container {\\n width: 200px;\\n height: 350px;\\n}\\nul {\\n list-style: none;\\n padding: 5px;\\n margin: 5px;\\n}\\nli {\\n border-bottom: #CCC 1px solid;\\n padding-bottom: 5px;\\n padding-top: 5px;\\n}\\nli:last-child {\\n border: none;\\n}\\na {\\n text-decoration: none;\\n color: #464646;\\n font-family: \'Yanone Kaffeesatz\', sans-serif;\\n font-size: 15px;\\n}\\n"', 'blank': 'True'}),
'tagnames': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'askbot.replyaddress': {
'Meta': {'object_name': 'ReplyAddress'},
'address': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '25'}),
'allowed_from_email': ('django.db.models.fields.EmailField', [], {'max_length': '150'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'post': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'reply_addresses'", 'null': 'True', 'to': "orm['askbot.Post']"}),
'reply_action': ('django.db.models.fields.CharField', [], {'default': "'auto_answer_or_comment'", 'max_length': '32'}),
'response_post': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'edit_addresses'", 'null': 'True', 'to': "orm['askbot.Post']"}),
'used_at': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'askbot.repute': {
'Meta': {'object_name': 'Repute', 'db_table': "u'repute'"},
'comment': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'negative': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'positive': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Post']", 'null': 'True', 'blank': 'True'}),
'reputation': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'reputation_type': ('django.db.models.fields.SmallIntegerField', [], {}),
'reputed_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'askbot.tag': {
'Meta': {'ordering': "('-used_count', 'name')", 'object_name': 'Tag', 'db_table': "u'tag'"},
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'created_tags'", 'to': "orm['auth.User']"}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'deleted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'deleted_tags'", 'null': 'True', 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'status': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
'suggested_by': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'suggested_tags'", 'symmetrical': 'False', 'to': "orm['auth.User']"}),
'tag_wiki': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'described_tag'", 'unique': 'True', 'null': 'True', 'to': "orm['askbot.Post']"}),
'used_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'askbot.thread': {
'Meta': {'object_name': 'Thread'},
'accepted_answer': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['askbot.Post']"}),
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'answer_accepted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'answer_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'approved': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'close_reason': ('django.db.models.fields.SmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'closed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'closed_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'closed_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'favorited_by': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'unused_favorite_threads'", 'symmetrical': 'False', 'through': "orm['askbot.FavoriteQuestion']", 'to': "orm['auth.User']"}),
'favourite_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'followed_by': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'followed_threads'", 'symmetrical': 'False', 'to': "orm['auth.User']"}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'group_threads'", 'symmetrical': 'False', 'through': "orm['askbot.ThreadToGroup']", 'to': "orm['askbot.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_activity_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_activity_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'unused_last_active_in_threads'", 'to': "orm['auth.User']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'tagnames': ('django.db.models.fields.CharField', [], {'max_length': '125'}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'threads'", 'symmetrical': 'False', 'to': "orm['askbot.Tag']"}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'view_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'askbot.threadtogroup': {
'Meta': {'unique_together': "(('thread', 'group'),)", 'object_name': 'ThreadToGroup', 'db_table': "'askbot_thread_groups'"},
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'thread': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Thread']"}),
'visibility': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'})
},
'askbot.vote': {
'Meta': {'unique_together': "(('user', 'voted_post'),)", 'object_name': 'Vote', 'db_table': "u'vote'"},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'votes'", 'to': "orm['auth.User']"}),
'vote': ('django.db.models.fields.SmallIntegerField', [], {}),
'voted_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'voted_post': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'votes'", 'to': "orm['askbot.Post']"})
},
'auth.authusergroups': {
'Meta': {'unique_together': "(('group', 'user'),)", 'object_name': 'AuthUserGroups', 'db_table': "'auth_user_groups'", 'managed': 'False'},
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'about': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'avatar_type': ('django.db.models.fields.CharField', [], {'default': "'n'", 'max_length': '1'}),
'bronze': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'consecutive_days_visit_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'country': ('django_countries.fields.CountryField', [], {'max_length': '2', 'blank': 'True'}),
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_of_birth': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'display_tag_filter_strategy': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'email_isvalid': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'email_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'email_signature': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'email_tag_filter_strategy': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'gold': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'gravatar': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ignored_tags': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'interesting_tags': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_fake': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'new_response_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'questions_per_page': ('django.db.models.fields.SmallIntegerField', [], {'default': '10'}),
'real_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'reputation': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'seen_response_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'show_country': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'show_marked_tags': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'silver': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'w'", 'max_length': '2'}),
'subscribed_tags': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['askbot'] | PearsonIOKI/compose-forum | askbot/migrations/0146_auto__add_field_threadtogroup_visibility.py | Python | gpl-3.0 | 33,877 | 0.00797 |
"""
Copyright (C) 2011-2012 Brandon L. Reiss
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
Displays either day or nighttime traffic image processing in a mock-up UI
based on the HTC Desire smartphone.
"""
import numpy as np
import scipy
import scipy.ndimage as ndimg
from collections import deque
from copy import *
import PIL
import ImageOps
import pylab
import cv2
import os
import fnmatch
import sys
import pymorph
import night
import day
import argparse
class PhoneDemo(object):
""" Object to run the phone UI demo. """
TYPE_DAY = "DAY"
TYPE_NIGHT = "NIGHT"
HISTORY_FRAMES = 600
class DayProcessor(object):
""" Object used to process day sequences. """
GMM_K = 3
GMM_NUM_FRAMES = 25
GMM_W_INIT = 0.1
GMM_VAR_INIT = 20
GMM_MAHA_THRESH = 3
MASK_OVERLAY_ALPHA = 0.4
def __init__(self, rgb):
assert(rgb.dtype == 'uint8')
self._gmm = day.GaussianMixtureModelUV(self.GMM_K, rgb.shape,
self.GMM_NUM_FRAMES,
self.GMM_W_INIT,
self.GMM_VAR_INIT,
self.GMM_MAHA_THRESH)
self._ycbcr = np.zeros(rgb.shape, dtype='uint8')
self._mask = np.zeros(rgb.shape[:2], dtype='uint8')
self._red_mask = np.zeros(rgb.shape, dtype='uint8')
self._rgb_red_masked = np.zeros(rgb.shape, dtype='uint8')
self._process_count = 0
def next(self, rgb):
""" Process the next file and return the results. """
# Do GMM steps.
self._gmm.rgb2ycbcr(rgb, self._ycbcr)
self._gmm.segment_cl(self._mask)
self._gmm.update_cl(self._ycbcr)
# Save total pixels in foreground.
fg_pixel_count = np.sum(self._mask)
# Pull alpha and render red overlay
# (channels are reversed RGB = BGR).
self._red_mask[:,:,2] = self._mask * 255
self._rgb_red_masked[:,:] = \
(self.MASK_OVERLAY_ALPHA * self._red_mask) + \
((1. - self.MASK_OVERLAY_ALPHA) * rgb)
# Ignore the first GMM_NUM_FRAMES / 2 frames.
self._process_count = self._process_count + 1
if self._process_count > self.GMM_NUM_FRAMES / 2:
return fg_pixel_count, self._rgb_red_masked
else:
return 0, self._rgb_red_masked
class NightProcessor(object):
""" Object used to process day sequences. """
def __init__(self, rgb):
pass
def next(self, rgb):
""" Process the next file and return the results. """
def blackout_date_regions(image, blackout_rects):
""" Black out specified regions. """
for rect in blackout_rects:
image[rect[1]:rect[3], rect[0]:rect[2]] = 0
# Do bright object detection.
blackout_date_regions(rgb, night.BLACKOUT_RECTS)
steps = night.bright_object_detection(rgb)
# Return results (channels are reversed RGB = BGR).
label_img = pymorph.overlay(steps['luminance'].astype('uint8'),
blue=steps['detect_dilate'])
return steps['bright_blob_count'], label_img
def __init__(self):
# Initialize plotting parameters.
self._history_raw = deque()
self._history_filtered = deque()
self._max_sample = 0.001
self._ui = PhoneDisplay()
self._filter_exp = 0.1
self._sample_exp_filter = 0.
def run_sequence(self, type, filepath, seq_range=None, filter_exp=None):
""" Run a TYPE_DAY or TYPE_NIGHT sequence. """
QUIT_KEY_CODES = [ 27, 113, 81 ]
PAUSE_KEY_CODES = [ 32, 112, 80 ]
def pause():
""" Poll input until the pause key is pressed. """
while True:
key = cv2.waitKey(100)
if PAUSE_KEY_CODES.count(key) > 0:
break
def bound_queue_push(val, q, maxlen=None):
""" Push to bounded queue. """
q.append(val)
if maxlen is not None and len(q) > maxlen:
q.popleft()
assert(type == self.TYPE_DAY or type == self.TYPE_NIGHT)
# TODO(reissb) -- The history frames and filtering need to become
# parameterized in some way. The history frames is fixed by the
# camera framerate. The filtering is fixed by the required
# detection sensitivity.
if filter_exp is not None:
self._filter_exp = filter_exp
else:
self._filter_exp = 0.1
# Clear state.
self._ui.clear()
self._history_raw = deque()
self._history_filtered = deque()
self._max_sample = 0.001
self._sample_exp_filter = 0.
# Extract command-line parameters. This is the name of one file in the
# series.
path, filename = os.path.split(filepath)
file_name, file_ext = os.path.splitext(os.path.basename(filename))
series_name_end = file_name.rindex('_')
series_name = file_name[:series_name_end]
print "Processing image series {0} in path {1}.".format(series_name,
path)
files_in_path = os.listdir(path)
series_pattern = series_name + '_[0-9]*' + file_ext
print "Processing files matching pattern {0}.".format(series_pattern)
series_suffixes = [int(os.path.splitext(fn)[0].split('_')[-1]) \
for fn in files_in_path \
if fnmatch.fnmatch(fn, series_pattern)]
series_suffixes.sort()
num_files = len(series_suffixes)
print "Found {0} files in image series {1}.".format(num_files,
series_name)
# Check for limited range.
if seq_range is not None:
assert(seq_range[1] > seq_range[0] and seq_range[0] >= 0)
print "Filtering series range [{},{}).".format(seq_range[0],
seq_range[1])
series_suffixes = np.array(series_suffixes)
f = (series_suffixes >= seq_range[0]) * \
(series_suffixes < seq_range[1])
series_suffixes = np.sort(series_suffixes * f)
remove_count = len(series_suffixes) - np.sum(f)
series_suffixes = np.delete(series_suffixes, range(remove_count))
# Load first file and process.
series_filename = series_name + '_' + str(series_suffixes[0]) + \
file_ext
rgb = ndimg.imread(os.path.join(path, series_filename))
        # Initialize the processor.
type_processor = self.DayProcessor(rgb) if type is self.TYPE_DAY \
else self.NightProcessor(rgb)
# Process the files.
quit_flag = False
process_count = 0
history_n = int(self.HISTORY_FRAMES / \
(self._ui.history_frame_count - 1))
for suffix in series_suffixes:
# Process the next file.
series_filename = series_name + '_' + str(suffix) + file_ext
print "Processing file {0}.".format(series_filename)
rgb = ndimg.imread(os.path.join(path, series_filename))
sample_raw, display_img = type_processor.next(rgb)
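            # Exponential moving average of the raw sample: filter_exp in (0, 1]
            # sets how strongly the newest sample pulls the smoothed value.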
self._sample_exp_filter = \
((1. - self._filter_exp) * self._sample_exp_filter) + \
(self._filter_exp * sample_raw)
# Put new samples on queues.
bound_queue_push(sample_raw,
self._history_raw, self.HISTORY_FRAMES)
bound_queue_push(self._sample_exp_filter,
self._history_filtered, self.HISTORY_FRAMES)
# Update UI.
self._max_sample = max(self._max_sample,
self._sample_exp_filter * 1.1)
ybound = (0, self._max_sample)
plot_img = self.plot_history(self._history_raw,
self._history_filtered,
ybound)
self._ui.set_main_video_frame(display_img)
self._ui.set_plot(plot_img)
# Space history frames evenly over interval.
if 0 == (process_count % history_n):
self._ui.push_history_frame(display_img)
process_count = process_count + 1
# Show UI.
cv2.imshow("Phone Display", self._ui.ui_image)
key = cv2.waitKey(1)
# Catch escape or 'q' or 'Q':
if QUIT_KEY_CODES.count(key) > 0:
quit_flag = True
break
# Catch spacebar, 'p' or 'P':
if PAUSE_KEY_CODES.count(key) > 0:
pause()
# Cleanup GUI on complete.
if not quit_flag:
cv2.waitKey(-1)
cv2.destroyAllWindows()
@staticmethod
def plot_history(raw, filtered, ybound):
""" Make plot of raw and history and return as image. """
p = pylab.subplot('111')
p.clear()
        p.figure.set_size_inches(4, 3)
p.plot(raw, '.r')
p.plot(filtered, '-b')
p.axes.set_ybound(ybound)
p.figure.canvas.draw()
buf = np.fromstring(p.figure.canvas.tostring_rgb(), dtype='uint8')
h, w = p.figure.canvas.get_width_height()
buf.shape = (w, h, 3)
return buf
class PhoneDisplay(object):
# Dictionary of UI resources.
RESOURCES = {
# Name of the phone UI image.
"UI_BASE": "ui_base.jpg"
}
# Coordinates for the phone image display area.
UI_LAYOUT = {
"MARGIN": 15,
"DISPLAY_RECT": { "UpperLeft": (255, 59), "LowerRight": (1106, 596) },
"MAIN_VIDEO_SIZE": (360, 480),
"PLOT_SIZE": (240, 320),
"HISTORY_FRAME_COUNT": 5,
"CLEAR_COLOR": np.array([60, 85, 45])
}
def __init__(self):
""" Setup phone UI. """
# Load UI base resource and set slice of display area.
self._ui_base = ndimg.imread(self.RESOURCES["UI_BASE"])
up_lt = self.UI_LAYOUT["DISPLAY_RECT"]["UpperLeft"]
lw_rt = self.UI_LAYOUT["DISPLAY_RECT"]["LowerRight"]
self._ui_display_area = self._ui_base[up_lt[1]:lw_rt[1] + 1,
up_lt[0]:lw_rt[0] + 1]
self._ui_display_area[:,:] = self.UI_LAYOUT["CLEAR_COLOR"]
self._ui_display_area_size = self._ui_display_area.shape[:2]
self._ui_main_video_size = self.UI_LAYOUT["MAIN_VIDEO_SIZE"]
self._plot_size = self.UI_LAYOUT["PLOT_SIZE"]
margin = self.UI_LAYOUT["MARGIN"]
# Get main video frame area.
vid_frm_x1 = self._ui_display_area_size[1] - margin
vid_frm_x0 = vid_frm_x1 - self._ui_main_video_size[1]
vid_frm_y0 = margin
vid_frm_y1 = vid_frm_y0 + self._ui_main_video_size[0]
self._ui_main_video_frame = self._ui_display_area[vid_frm_y0:
vid_frm_y1,
vid_frm_x0:
vid_frm_x1]
# Get plot area.
plt_frm_x0 = margin
plt_frm_x1 = plt_frm_x0 + self._plot_size[1]
plt_frm_y0 = margin
plt_frm_y1 = plt_frm_y0 + self._plot_size[0]
self._ui_plot_frame = self._ui_display_area[plt_frm_y0: plt_frm_y1,
plt_frm_x0: plt_frm_x1]
# Compute history frame areas.
his_frm_count = self.UI_LAYOUT["HISTORY_FRAME_COUNT"]
his_frm_wid = int((self._ui_display_area_size[1] -
((his_frm_count + 1) * margin)) / his_frm_count)
his_frm_ht = int((3./4.) * his_frm_wid)
self._ui_history_frame_size = (his_frm_ht, his_frm_wid)
his_frm_y0 = (2 * margin) + self._ui_main_video_size[0]
his_frm_x0_fn = lambda n: margin + ((margin + his_frm_wid) * n)
his_frm_x1_fn = lambda n: (margin + his_frm_wid) * (n + 1)
self._ui_history_frames = map(
lambda n: self._ui_display_area[
his_frm_y0: his_frm_y0 + his_frm_ht,
his_frm_x0_fn(n):his_frm_x1_fn(n)],
range(self.UI_LAYOUT["HISTORY_FRAME_COUNT"]))
def clear_display_area(self, color=np.array([0, 0, 0])):
""" Clear UI base display area to given color. """
self._ui_display_area[:,:] = color
def set_main_video_frame(self, frame):
""" Set the main video frame in the UI layout. """
h, w = self._ui_main_video_size
img = np.array(ImageOps.fit(PIL.Image.fromarray(frame), (w, h)))
self._ui_main_video_frame[:,:] = img
def push_history_frame(self, frame):
""" Push a frame to the top of the history images. """
# Shift back.
for n in range(len(self._ui_history_frames) - 1):
self._ui_history_frames[n][:,:] = self._ui_history_frames[n+1][:,:]
# Update.
h, w = self._ui_history_frame_size
img = np.array(ImageOps.fit(PIL.Image.fromarray(frame), (w, h)))
self._ui_history_frames[-1][:,:] = img
def set_plot(self, plot):
""" Set the plot image in the UI layout. """
h, w = self._plot_size
img = np.array(ImageOps.fit(PIL.Image.fromarray(plot), (w, h)))
self._ui_plot_frame[:,:] = img
def clear(self):
""" Reset the UI. """
ui_base = ndimg.imread(self.RESOURCES["UI_BASE"])
self._ui_base[:,:,:] = ui_base
self._ui_display_area[:,:] = self.UI_LAYOUT["CLEAR_COLOR"]
def get_ui_image(self):
return self._ui_base
def get_history_frame_count(self):
return len(self._ui_history_frames)
ui_image = property(get_ui_image, doc="The main display image.")
history_frame_count = property(get_history_frame_count,
doc="Count of history frames.")
def main():
# Parse arguments for
# PhoneDemo.run_sequence(type, filepath, seq_range, filter_exp):
parser = argparse.ArgumentParser(description='Run a UI demo of ' +
'image-based traffic ' +
'analysis algorithms.')
parser.add_argument('SEQUENCE_TYPE', type=str, choices=('D', 'N'),
help='day or night image type')
parser.add_argument('SEQUENCE_IMAGE_PATH', type=str,
help='path to an image within sequence')
parser.add_argument('-r', '--range', default=None, nargs=2, type=int,
help='range of frames to process as in \'-r 0 100\'')
parser.add_argument('-e', '--filter_exp', default=None, type=float,
help='exponential filter strength')
args = parser.parse_args(sys.argv[1:])
demo = PhoneDemo()
demo.run_sequence(PhoneDemo.TYPE_DAY if 'D' == args.SEQUENCE_TYPE
else PhoneDemo.TYPE_NIGHT,
args.SEQUENCE_IMAGE_PATH, args.range, args.filter_exp)
if __name__ == "__main__":
main()
| blr246/traffic | phone_display_demo.py | Python | mit | 16,853 | 0.002492 |
from ..remote import RemoteModel
class NetworkNetworkExplorerSummariesSummaryGridRemote(RemoteModel):
"""
| ``id:`` none
| ``attribute type:`` string
| ``DeviceID:`` none
| ``attribute type:`` string
| ``DeviceIPNumeric:`` none
| ``attribute type:`` string
| ``DeviceIPDotted:`` none
| ``attribute type:`` string
| ``VirtualNetworkID:`` none
| ``attribute type:`` string
| ``Network:`` none
| ``attribute type:`` string
| ``DeviceName:`` none
| ``attribute type:`` string
| ``DeviceType:`` none
| ``attribute type:`` string
| ``ifIndex:`` none
| ``attribute type:`` string
| ``ifName:`` none
| ``attribute type:`` string
| ``VirtualNetworkMemberName:`` none
| ``attribute type:`` string
| ``ifType:`` none
| ``attribute type:`` string
| ``ifIPDotted:`` none
| ``attribute type:`` string
| ``ifIPNumeric:`` none
| ``attribute type:`` string
"""
properties = ("id",
"DeviceID",
"DeviceIPNumeric",
"DeviceIPDotted",
"VirtualNetworkID",
"Network",
"DeviceName",
"DeviceType",
"ifIndex",
"ifName",
"VirtualNetworkMemberName",
"ifType",
"ifIPDotted",
"ifIPNumeric",
)
| infobloxopen/infoblox-netmri | infoblox_netmri/api/remote/models/network_network_explorer_summaries_summary_grid_remote.py | Python | apache-2.0 | 1,486 | 0 |
from setuptools import setup
# Really only required so setup.cfg can pick up __version__
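# A plausible setup.cfg counterpart (assumed here, not shown in this file):
#   [metadata]
#   name = bruges
#   version = attr: bruges.__version__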
setup(
name="bruges",
) | agile-geoscience/bruges | setup.py | Python | apache-2.0 | 117 | 0.008547 |
#-*- coding: utf-8 -*-
"""OAuth 2.0 Token Generation"""
from base64 import b64encode
from django.http import HttpResponse
from django.contrib.auth import authenticate
from django.views.decorators.csrf import csrf_exempt
try:
    import simplejson as json
except ImportError:
    import json
from .exceptions import OAuth2Exception
from .consts import ACCESS_TOKEN_EXPIRATION, REFRESH_TOKEN_LENGTH, ACCESS_TOKEN_LENGTH
from .consts import AUTHENTICATION_METHOD, MAC, BEARER, MAC_KEY_LENGTH
from .consts import REFRESHABLE
from .lib.uri import normalize
from .models import Client, AccessRange, Code, AccessToken, TimestampGenerator
from .models import KeyGenerator
class AccessTokenException(OAuth2Exception):
"""Access Token exception base class."""
pass
class UnvalidatedRequest(OAuth2Exception):
"""The method requested requires a validated request to continue."""
pass
class InvalidRequest(AccessTokenException):
"""The request is missing a required parameter, includes an
unsupported parameter or parameter value, repeats a
parameter, includes multiple credentials, utilizes more
than one mechanism for authenticating the client, or is
otherwise malformed."""
error = 'invalid_request'
class InvalidClient(AccessTokenException):
"""Client authentication failed (e.g. unknown client, no
client credentials included, multiple client credentials
included, or unsupported credentials type)."""
error = 'invalid_client'
class UnauthorizedClient(AccessTokenException):
"""The client is not authorized to request an authorization
code using this method."""
error = 'unauthorized_client'
class InvalidGrant(AccessTokenException):
"""The provided authorization grant is invalid, expired,
revoked, does not match the redirection URI used in the
authorization request, or was issued to another client."""
error = 'invalid_grant'
class UnsupportedGrantType(AccessTokenException):
"""The authorization grant type is not supported by the
authorization server."""
error = 'unsupported_grant_type'
class InvalidScope(AccessTokenException):
"""The requested scope is invalid, unknown, malformed, or
exceeds the scope granted by the resource owner."""
error = 'invalid_scope'
@csrf_exempt
def handler(request):
"""Token access handler. Conveneince function that wraps the Handler()
callable.
**Args:**
* *request:* Django HttpRequest object.
"""
return TokenGenerator()(request)
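# Usage sketch (the URL pattern below is hypothetical; only the imported names
# come from this module). Route the token endpoint either to `handler` for the
# defaults, or to a configured TokenGenerator instance:
#
#     from oauth2app.token import handler, TokenGenerator
#     token_view = TokenGenerator(refreshable=True)  # optionally scope=<AccessRange iterable>
#     # e.g. url(r'^oauth2/token/?$', token_view) in urls.py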
class TokenGenerator(object):
"""Token access handler. Validates authorization codes, refresh tokens,
username/password pairs, and generates a JSON formatted authorization code.
**Args:**
* *request:* Django HttpRequest object.
**Kwargs:**
* *scope:* An iterable of oauth2app.models.AccessRange objects representing
the scope the token generator will grant. *Default None*
* *authentication_method:* Type of token to generate. Possible
values are: oauth2app.consts.MAC and oauth2app.consts.BEARER
*Default oauth2app.consts.BEARER*
* *refreshable:* Boolean value indicating whether issued tokens are
refreshable. *Default True*
"""
valid = False
code = None
client = None
access_token = None
user = None
error = None
request = None
def __init__(
self,
scope=None,
authentication_method=AUTHENTICATION_METHOD,
refreshable=REFRESHABLE):
self.refreshable = refreshable
if authentication_method not in [BEARER, MAC]:
raise OAuth2Exception("Possible values for authentication_method"
" are oauth2app.consts.MAC and oauth2app.consts.BEARER")
self.authentication_method = authentication_method
if scope is None:
self.authorized_scope = None
elif isinstance(scope, AccessRange):
self.authorized_scope = set([scope.key])
else:
self.authorized_scope = set([x.key for x in scope])
@csrf_exempt
def __call__(self, request):
"""Django view that handles the token endpoint. Returns a JSON formatted
authorization code.
**Args:**
* *request:* Django HttpRequest object.
"""
self.grant_type = request.REQUEST.get('grant_type')
self.client_id = request.REQUEST.get('client_id')
self.client_secret = request.POST.get('client_secret')
self.scope = request.REQUEST.get('scope')
if self.scope is not None:
self.scope = set(self.scope.split())
# authorization_code, see 4.1.3. Access Token Request
self.code_key = request.REQUEST.get('code')
self.redirect_uri = request.REQUEST.get('redirect_uri')
# refresh_token, see 6. Refreshing an Access Token
self.refresh_token = request.REQUEST.get('refresh_token')
# password, see 4.3.2. Access Token Request
self.email = request.REQUEST.get('email')
self.username = request.REQUEST.get('username')
self.password = request.REQUEST.get('password')
# Optional json callback
self.callback = request.REQUEST.get('callback')
self.request = request
try:
self.validate()
except AccessTokenException:
return self.error_response()
return self.grant_response()
def validate(self):
"""Validate the request. Raises an AccessTokenException if the
request fails authorization.
*Returns None*"""
try:
self._validate()
except AccessTokenException as e:
self.error = e
raise e
self.valid = True
def _validate(self):
"""Validate the request."""
# Check response type
if self.grant_type is None:
raise InvalidRequest('No grant_type provided.')
if self.grant_type not in [
"authorization_code",
"refresh_token",
"password",
"client_credentials"]:
raise UnsupportedGrantType('No grant type: %s' % self.grant_type)
if self.client_id is None:
raise InvalidRequest('No client_id')
try:
self.client = Client.objects.get(key=self.client_id)
except Client.DoesNotExist:
raise InvalidClient("client_id %s doesn't exist" % self.client_id)
# Scope
if self.scope is not None:
access_ranges = AccessRange.objects.filter(key__in=self.scope)
access_ranges = set(access_ranges.values_list('key', flat=True))
difference = access_ranges.symmetric_difference(self.scope)
if len(difference) != 0:
raise InvalidScope("Following access ranges doesn't exist: "
"%s" % ', '.join(difference))
if self.grant_type == "authorization_code":
self._validate_authorization_code()
elif self.grant_type == "refresh_token":
self._validate_refresh_token()
elif self.grant_type == "password":
self._validate_password()
elif self.grant_type == "client_credentials":
self._validate_client_credentials()
else:
raise UnsupportedGrantType('Unable to validate grant type.')
def _validate_access_credentials(self):
"""Validate the request's access credentials."""
if self.client_secret is None and "HTTP_AUTHORIZATION" in self.request.META:
authorization = self.request.META["HTTP_AUTHORIZATION"]
auth_type, auth_value = authorization.split()[0:2]
if auth_type.lower() == "basic":
credentials = "%s:%s" % (self.client.key, self.client.secret)
if auth_value != b64encode(credentials):
raise InvalidClient('Client authentication failed.')
else:
raise InvalidClient('Client authentication failed.')
elif self.client_secret != self.client.secret:
raise InvalidClient('Client authentication failed.')
def _validate_client_credentials(self):
"""Validate a client_credentials request."""
self._validate_access_credentials()
def _validate_authorization_code(self):
"""Validate an authorization_code request."""
if self.code_key is None:
raise InvalidRequest('No code_key provided')
self._validate_access_credentials()
try:
self.code = Code.objects.get(key=self.code_key)
except Code.DoesNotExist:
raise InvalidRequest('No such code: %s' % self.code_key)
now = TimestampGenerator()()
if self.code.expire < now:
raise InvalidGrant("Provided code is expired")
self.scope = set([x.key for x in self.code.scope.all()])
if self.redirect_uri is None:
raise InvalidRequest('No redirect_uri')
if normalize(self.redirect_uri) != normalize(self.code.redirect_uri):
raise InvalidRequest("redirect_uri doesn't match")
def _validate_password(self):
"""Validate a password request."""
if self.username is None and self.email is None:
raise InvalidRequest('No username')
if self.password is None:
raise InvalidRequest('No password')
if self.scope is not None:
access_ranges = AccessRange.objects.filter(key__in=self.scope)
access_ranges = set(access_ranges.values_list('key', flat=True))
difference = access_ranges.symmetric_difference(self.scope)
if len(difference) != 0:
raise InvalidScope("""Following access ranges do not
exist: %s""" % ', '.join(difference))
if self.authorized_scope is not None:
new_scope = self.scope - self.authorized_scope
if len(new_scope) > 0:
raise InvalidScope(
"Invalid scope request: %s" % ', '.join(new_scope))
if "HTTP_AUTHORIZATION" in self.request.META:
authorization = self.request.META["HTTP_AUTHORIZATION"]
auth_type, auth_value = authorization.split()[0:2]
if auth_type.lower() == "basic":
credentials = "%s:%s" % (self.client.key, self.client.secret)
if auth_value != b64encode(credentials):
raise InvalidClient('Client authentication failed.')
else:
raise InvalidClient('Client authentication failed.')
else:
raise InvalidClient('Client authentication failed.')
if self.username is not None:
user = authenticate(username=self.username, password=self.password)
else:
user = authenticate(email=self.email, password=self.password)
if user is not None:
if not user.is_active:
raise InvalidRequest('Inactive user.')
else:
raise InvalidRequest('User authentication failed.')
self.user = user
def _validate_refresh_token(self):
"""Validate a refresh token request."""
if self.refresh_token is None:
raise InvalidRequest('No refresh_token')
try:
self.access_token = AccessToken.objects.get(
refresh_token=self.refresh_token)
except AccessToken.DoesNotExist:
raise InvalidRequest(
'No such refresh token: %s' % self.refresh_token)
self._validate_access_credentials()
if not self.access_token.refreshable:
raise InvalidGrant("Access token is not refreshable.")
if self.scope is not None:
access_ranges = set([x.key for x in self.access_token.scope.all()])
new_scope = self.scope - access_ranges
if len(new_scope) > 0:
raise InvalidScope("Refresh request requested scopes beyond"
"initial grant: %s" % new_scope)
def error_response(self):
"""In the event of an error, return a Django HttpResponse
with the appropriate JSON encoded error parameters.
*Returns HttpResponse*"""
if self.error is not None:
e = self.error
else:
e = InvalidRequest("Access Denied.")
data = {'error': e.error, 'error_description': u'%s' % e.message}
json_data = json.dumps(data)
if self.callback is not None:
json_data = "%s(%s);" % (self.callback, json_data)
return HttpResponse(
content=json_data,
content_type='application/json')
else:
response = HttpResponse(
content=json_data,
content_type='application/json')
if isinstance(self.error, InvalidClient):
response.status_code = 401
else:
response.status_code = 400
return response
def grant_response(self):
"""Returns a JSON formatted authorization code."""
if not self.valid:
raise UnvalidatedRequest("This request is invalid or has not been"
" validated.")
if self.grant_type == "authorization_code":
access_token = self._get_authorization_code_token()
elif self.grant_type == "refresh_token":
access_token = self._get_refresh_token()
elif self.grant_type == "password":
access_token = self._get_password_token()
elif self.grant_type == "client_credentials":
access_token = self._get_client_credentials_token()
data = {
'access_token': access_token.token,
'expires_in': ACCESS_TOKEN_EXPIRATION}
if self.authentication_method == MAC:
data["token_type"] = "mac"
data["mac_key"] = access_token.mac_key
data["mac_algorithm"] = "hmac-sha-256"
elif self.authentication_method == BEARER:
data["token_type"] = "bearer"
if access_token.refreshable:
data['refresh_token'] = access_token.refresh_token
if self.scope is not None:
data['scope'] = ' '.join(self.scope)
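        # Illustrative bearer response body (token values are made up):
        #   {"access_token": "2YotnFZFEjr1zCsicMWpAA", "expires_in": 3600,
        #    "token_type": "bearer", "refresh_token": "tGzv3JOkF0XG5Qx2TlKWIA",
        #    "scope": "read write"}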
json_data = json.dumps(data)
if self.callback is not None:
json_data = "%s(%s);" % (self.callback, json_data)
response = HttpResponse(
content=json_data,
content_type='application/json')
response['Cache-Control'] = 'no-store'
return response
def _get_authorization_code_token(self):
"""Generate an access token after authorization_code authorization."""
access_token = AccessToken.objects.create(
user=self.code.user,
client=self.client,
refreshable=self.refreshable)
if self.authentication_method == MAC:
access_token.mac_key = KeyGenerator(MAC_KEY_LENGTH)()
access_ranges = AccessRange.objects.filter(key__in=self.scope) if self.scope else []
access_token.scope = access_ranges
access_token.save()
self.code.delete()
return access_token
def _get_password_token(self):
"""Generate an access token after password authorization."""
access_token = AccessToken.objects.create(
user=self.user,
client=self.client,
refreshable=self.refreshable)
if self.authentication_method == MAC:
access_token.mac_key = KeyGenerator(MAC_KEY_LENGTH)()
access_ranges = AccessRange.objects.filter(key__in=self.scope) if self.scope else []
access_token.scope = access_ranges
access_token.save()
return access_token
def _get_refresh_token(self):
"""Generate an access token after refresh authorization."""
self.access_token.token = KeyGenerator(ACCESS_TOKEN_LENGTH)()
self.access_token.refresh_token = KeyGenerator(REFRESH_TOKEN_LENGTH)()
self.access_token.expire = TimestampGenerator(ACCESS_TOKEN_EXPIRATION)()
access_ranges = AccessRange.objects.filter(key__in=self.scope) if self.scope else []
self.access_token.scope = access_ranges
self.access_token.save()
return self.access_token
def _get_client_credentials_token(self):
"""Generate an access token after client_credentials authorization."""
access_token = AccessToken.objects.create(
user=self.client.user,
client=self.client,
refreshable=self.refreshable)
if self.authentication_method == MAC:
access_token.mac_key = KeyGenerator(MAC_KEY_LENGTH)()
access_ranges = AccessRange.objects.filter(key__in=self.scope) if self.scope else []
        # Unlike the refresh flow, self.access_token is not set for
        # client_credentials, so operate on the newly created token.
        access_token.scope = access_ranges
        access_token.save()
        return access_token
| MaplePlan/djwp | oauth2app/token.py | Python | lgpl-3.0 | 16,845 | 0.00089 |
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Softplus bijector."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import ops
from tensorflow.python.ops import check_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn_ops
from tensorflow.python.ops.distributions import bijector
from tensorflow.python.ops.distributions import util as distribution_util
from tensorflow.python.util import deprecation
__all__ = [
"Softplus",
]
class Softplus(bijector.Bijector):
"""Bijector which computes `Y = g(X) = Log[1 + exp(X)]`.
The softplus `Bijector` has the following two useful properties:
* The domain is the positive real numbers
* `softplus(x) approx x`, for large `x`, so it does not overflow as easily as
the `Exp` `Bijector`.
The optional nonzero `hinge_softness` parameter changes the transition at
zero. With `hinge_softness = c`, the bijector is:
```f_c(x) := c * g(x / c) = c * Log[1 + exp(x / c)].```
For large `x >> 1`, `c * Log[1 + exp(x / c)] approx c * Log[exp(x / c)] = x`,
so the behavior for large `x` is the same as the standard softplus.
As `c > 0` approaches 0 from the right, `f_c(x)` becomes less and less soft,
approaching `max(0, x)`.
* `c = 1` is the default.
* `c > 0` but small means `f(x) approx ReLu(x) = max(0, x)`.
* `c < 0` flips sign and reflects around the `y-axis`: `f_{-c}(x) = -f_c(-x)`.
* `c = 0` results in a non-bijective transformation and triggers an exception.
Example Use:
```python
# Create the Y=g(X)=softplus(X) transform which works only on Tensors with 1
# batch ndim and 2 event ndims (i.e., vector of matrices).
softplus = Softplus()
x = [[[1., 2],
[3, 4]],
[[5, 6],
[7, 8]]]
log(1 + exp(x)) == softplus.forward(x)
log(exp(x) - 1) == softplus.inverse(x)
```
Note: log(.) and exp(.) are applied element-wise but the Jacobian is a
reduction over the event space.
"""
@distribution_util.AppendDocstring(
kwargs_dict={
"hinge_softness": (
"Nonzero floating point `Tensor`. Controls the softness of what "
"would otherwise be a kink at the origin. Default is 1.0")})
@deprecation.deprecated(
"2018-10-01",
"The TensorFlow Distributions library has moved to "
"TensorFlow Probability "
"(https://github.com/tensorflow/probability). You "
"should update all references to use `tfp.distributions` "
"instead of `tf.contrib.distributions`.",
warn_once=True)
def __init__(self,
hinge_softness=None,
validate_args=False,
name="softplus"):
with ops.name_scope(name, values=[hinge_softness]):
if hinge_softness is not None:
self._hinge_softness = ops.convert_to_tensor(
hinge_softness, name="hinge_softness")
else:
self._hinge_softness = None
if validate_args:
nonzero_check = check_ops.assert_none_equal(
ops.convert_to_tensor(
0, dtype=self.hinge_softness.dtype),
self.hinge_softness,
message="hinge_softness must be non-zero")
self._hinge_softness = control_flow_ops.with_dependencies(
[nonzero_check], self.hinge_softness)
super(Softplus, self).__init__(
forward_min_event_ndims=0,
validate_args=validate_args,
name=name)
def _forward(self, x):
if self.hinge_softness is None:
return nn_ops.softplus(x)
hinge_softness = math_ops.cast(self.hinge_softness, x.dtype)
return hinge_softness * nn_ops.softplus(x / hinge_softness)
def _inverse(self, y):
if self.hinge_softness is None:
return distribution_util.softplus_inverse(y)
hinge_softness = math_ops.cast(self.hinge_softness, y.dtype)
return hinge_softness * distribution_util.softplus_inverse(
y / hinge_softness)
def _inverse_log_det_jacobian(self, y):
# Could also do:
# ildj = math_ops.reduce_sum(y - distribution_util.softplus_inverse(y),
# axis=event_dims)
# but the following is more numerically stable. Ie,
# Y = Log[1 + exp{X}] ==> X = Log[exp{Y} - 1]
# ==> dX/dY = exp{Y} / (exp{Y} - 1)
# = 1 / (1 - exp{-Y}),
# which is the most stable for large Y > 0. For small Y, we use
# 1 - exp{-Y} approx Y.
if self.hinge_softness is not None:
y /= math_ops.cast(self.hinge_softness, y.dtype)
return -math_ops.log(-math_ops.expm1(-y))
def _forward_log_det_jacobian(self, x):
if self.hinge_softness is not None:
x /= math_ops.cast(self.hinge_softness, x.dtype)
return -nn_ops.softplus(-x)
@property
def hinge_softness(self):
return self._hinge_softness
| ghchinoy/tensorflow | tensorflow/contrib/distributions/python/ops/bijectors/softplus.py | Python | apache-2.0 | 5,563 | 0.003775 |
# snippet-comment:[These are tags for the AWS doc team's sample catalog. Do not remove.]
# snippet-sourcedescription:[Lambda rotation for AWS Secrets Manager - RDS PostgreSQL with separate Master secret]
# snippet-service:[secretsmanager]
# snippet-keyword:[rotation function]
# snippet-keyword:[python]
# snippet-keyword:[RDS PostgreSQL]
# snippet-keyword:[AWS Lambda]
# snippet-keyword:[AWS Secrets Manager]
# snippet-keyword:[Code Sample]
# snippet-sourcetype:[full-example]
# snippet-sourceauthor:[AWS]
# snippet-sourcedate:[2018-08-22]
# Copyright 2010-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: MIT-0
import boto3
import json
import logging
import os
import pg
import pgdb
logger = logging.getLogger()
logger.setLevel(logging.INFO)
def lambda_handler(event, context):
"""Secrets Manager RDS PostgreSQL Handler
This handler uses the master-user rotation scheme to rotate an RDS PostgreSQL user credential. During the first rotation, this
scheme logs into the database as the master user, creates a new user (appending _clone to the username), and grants the
    new user all of the permissions from the user being rotated. Once the secret is in this state, every subsequent rotation
simply creates a new secret with the AWSPREVIOUS user credentials, adds any missing permissions that are in the current
secret, changes that user's password, and then marks the latest secret as AWSCURRENT.
The Secret SecretString is expected to be a JSON string with the following format:
{
'engine': <required: must be set to 'postgres'>,
'host': <required: instance host name>,
'username': <required: username>,
'password': <required: password>,
'dbname': <optional: database name, default to 'postgres'>,
'port': <optional: if not specified, default port 5432 will be used>,
'masterarn': <required: the arn of the master secret which will be used to create users/change passwords>
}
Args:
event (dict): Lambda dictionary of event parameters. These keys must include the following:
- SecretId: The secret ARN or identifier
- ClientRequestToken: The ClientRequestToken of the secret version
- Step: The rotation step (one of createSecret, setSecret, testSecret, or finishSecret)
context (LambdaContext): The Lambda runtime information
Raises:
ResourceNotFoundException: If the secret with the specified arn and stage does not exist
ValueError: If the secret is not properly configured for rotation
KeyError: If the secret json does not contain the expected keys
"""
arn = event['SecretId']
token = event['ClientRequestToken']
step = event['Step']
# Setup the client
service_client = boto3.client('secretsmanager', endpoint_url=os.environ['SECRETS_MANAGER_ENDPOINT'])
# Make sure the version is staged correctly
metadata = service_client.describe_secret(SecretId=arn)
if "RotationEnabled" in metadata and not metadata['RotationEnabled']:
logger.error("Secret %s is not enabled for rotation" % arn)
raise ValueError("Secret %s is not enabled for rotation" % arn)
versions = metadata['VersionIdsToStages']
if token not in versions:
logger.error("Secret version %s has no stage for rotation of secret %s." % (token, arn))
raise ValueError("Secret version %s has no stage for rotation of secret %s." % (token, arn))
if "AWSCURRENT" in versions[token]:
logger.info("Secret version %s already set as AWSCURRENT for secret %s." % (token, arn))
return
elif "AWSPENDING" not in versions[token]:
logger.error("Secret version %s not set as AWSPENDING for rotation of secret %s." % (token, arn))
raise ValueError("Secret version %s not set as AWSPENDING for rotation of secret %s." % (token, arn))
# Call the appropriate step
if step == "createSecret":
create_secret(service_client, arn, token)
elif step == "setSecret":
set_secret(service_client, arn, token)
elif step == "testSecret":
test_secret(service_client, arn, token)
elif step == "finishSecret":
finish_secret(service_client, arn, token)
else:
logger.error("lambda_handler: Invalid step parameter %s for secret %s" % (step, arn))
raise ValueError("Invalid step parameter %s for secret %s" % (step, arn))
def create_secret(service_client, arn, token):
"""Generate a new secret
This method first checks for the existence of a secret for the passed in token. If one does not exist, it will generate a
new secret and put it with the passed in token.
Args:
service_client (client): The secrets manager service client
arn (string): The secret ARN or other identifier
token (string): The ClientRequestToken associated with the secret version
Raises:
ValueError: If the current secret is not valid JSON
KeyError: If the secret json does not contain the expected keys
"""
# Make sure the current secret exists
current_dict = get_secret_dict(service_client, arn, "AWSCURRENT")
# Now try to get the secret version, if that fails, put a new secret
try:
get_secret_dict(service_client, arn, "AWSPENDING", token)
logger.info("createSecret: Successfully retrieved secret for %s." % arn)
except service_client.exceptions.ResourceNotFoundException:
# Get the alternate username swapping between the original user and the user with _clone appended to it
current_dict['username'] = get_alt_username(current_dict['username'])
# Generate a random password
passwd = service_client.get_random_password(ExcludeCharacters='/@"\'\\')
current_dict['password'] = passwd['RandomPassword']
# Put the secret
service_client.put_secret_value(SecretId=arn, ClientRequestToken=token, SecretString=json.dumps(current_dict), VersionStages=['AWSPENDING'])
logger.info("createSecret: Successfully put secret for ARN %s and version %s." % (arn, token))
def set_secret(service_client, arn, token):
"""Set the pending secret in the database
This method tries to login to the database with the AWSPENDING secret and returns on success. If that fails, it
tries to login with the master credentials from the masterarn in the current secret. If this succeeds, it adds all
grants for AWSCURRENT user to the AWSPENDING user, creating the user and/or setting the password in the process.
Else, it throws a ValueError.
Args:
service_client (client): The secrets manager service client
arn (string): The secret ARN or other identifier
token (string): The ClientRequestToken associated with the secret version
Raises:
ResourceNotFoundException: If the secret with the specified arn and stage does not exist
ValueError: If the secret is not valid JSON or master credentials could not be used to login to DB
KeyError: If the secret json does not contain the expected keys
"""
# First try to login with the pending secret, if it succeeds, return
pending_dict = get_secret_dict(service_client, arn, "AWSPENDING", token)
conn = get_connection(pending_dict)
if conn:
conn.close()
logger.info("setSecret: AWSPENDING secret is already set as password in PostgreSQL DB for secret arn %s." % arn)
return
# Before we do anything with the secret, make sure the AWSCURRENT secret is valid by logging in to the db
current_dict = get_secret_dict(service_client, arn, "AWSCURRENT")
conn = get_connection(current_dict)
if not conn:
logger.error("setSecret: Unable to log into database using current credentials for secret %s" % arn)
raise ValueError("Unable to log into database using current credentials for secret %s" % arn)
conn.close()
# Now get the master arn from the current secret
master_arn = current_dict['masterarn']
master_dict = get_secret_dict(service_client, master_arn, "AWSCURRENT")
if current_dict['host'] != master_dict['host']:
logger.warn("setSecret: Master database host %s is not the same host as current %s" % (master_dict['host'], current_dict['host']))
# Now log into the database with the master credentials
conn = get_connection(master_dict)
if not conn:
logger.error("setSecret: Unable to log into database using credentials in master secret %s" % master_arn)
raise ValueError("Unable to log into database using credentials in master secret %s" % master_arn)
# Now set the password to the pending password
try:
with conn.cursor() as cur:
# Check if the user exists, if not create it and grant it all permissions from the current role
# If the user exists, just update the password
cur.execute("SELECT 1 FROM pg_roles where rolname = %s", (pending_dict['username'],))
if len(cur.fetchall()) == 0:
create_role = "CREATE ROLE %s" % pending_dict['username']
cur.execute(create_role + " WITH LOGIN PASSWORD %s", (pending_dict['password'],))
cur.execute("GRANT %s TO %s" % (current_dict['username'], pending_dict['username']))
else:
alter_role = "ALTER USER %s" % pending_dict['username']
cur.execute(alter_role + " WITH PASSWORD %s", (pending_dict['password'],))
conn.commit()
logger.info("setSecret: Successfully created user %s in PostgreSQL DB for secret arn %s." % (pending_dict['username'], arn))
finally:
conn.close()
def test_secret(service_client, arn, token):
"""Test the pending secret against the database
This method tries to log into the database with the secrets staged with AWSPENDING and runs
a permissions check to ensure the user has the correct permissions.
Args:
service_client (client): The secrets manager service client
arn (string): The secret ARN or other identifier
token (string): The ClientRequestToken associated with the secret version
Raises:
ResourceNotFoundException: If the secret with the specified arn and stage does not exist
ValueError: If the secret is not valid JSON or pending credentials could not be used to login to the database
KeyError: If the secret json does not contain the expected keys
"""
# Try to login with the pending secret, if it succeeds, return
conn = get_connection(get_secret_dict(service_client, arn, "AWSPENDING", token))
if conn:
# This is where the lambda will validate the user's permissions. Uncomment/modify the below lines to
# tailor these validations to your needs
try:
with conn.cursor() as cur:
cur.execute("SELECT NOW()")
conn.commit()
finally:
conn.close()
logger.info("testSecret: Successfully signed into PostgreSQL DB with AWSPENDING secret in %s." % arn)
return
else:
logger.error("testSecret: Unable to log into database with pending secret of secret ARN %s" % arn)
raise ValueError("Unable to log into database with pending secret of secret ARN %s" % arn)
def finish_secret(service_client, arn, token):
"""Finish the rotation by marking the pending secret as current
This method moves the secret from the AWSPENDING stage to the AWSCURRENT stage.
Args:
service_client (client): The secrets manager service client
arn (string): The secret ARN or other identifier
token (string): The ClientRequestToken associated with the secret version
Raises:
ResourceNotFoundException: If the secret with the specified arn does not exist
"""
# First describe the secret to get the current version
metadata = service_client.describe_secret(SecretId=arn)
current_version = None
for version in metadata["VersionIdsToStages"]:
if "AWSCURRENT" in metadata["VersionIdsToStages"][version]:
if version == token:
# The correct version is already marked as current, return
logger.info("finishSecret: Version %s already marked as AWSCURRENT for %s" % (version, arn))
return
current_version = version
break
# Finalize by staging the secret version current
service_client.update_secret_version_stage(SecretId=arn, VersionStage="AWSCURRENT", MoveToVersionId=token, RemoveFromVersionId=current_version)
logger.info("finishSecret: Successfully set AWSCURRENT stage to version %s for secret %s." % (version, arn))
def get_connection(secret_dict):
"""Gets a connection to PostgreSQL DB from a secret dictionary
This helper function tries to connect to the database grabbing connection info
from the secret dictionary. If successful, it returns the connection, else None
Args:
secret_dict (dict): The Secret Dictionary
Returns:
Connection: The pgdb.Connection object if successful. None otherwise
Raises:
KeyError: If the secret json does not contain the expected keys
"""
# Parse and validate the secret JSON string
port = int(secret_dict['port']) if 'port' in secret_dict else 5432
dbname = secret_dict['dbname'] if 'dbname' in secret_dict else "postgres"
# Try to obtain a connection to the db
try:
conn = pgdb.connect(host=secret_dict['host'], user=secret_dict['username'], password=secret_dict['password'], database=dbname, port=port, connect_timeout=5)
return conn
except pg.InternalError:
return None
def get_secret_dict(service_client, arn, stage, token=None):
"""Gets the secret dictionary corresponding for the secret arn, stage, and token
This helper function gets credentials for the arn and stage passed in and returns the dictionary by parsing the JSON string
Args:
service_client (client): The secrets manager service client
arn (string): The secret ARN or other identifier
        stage (string): The stage identifying the secret version
        token (string): The ClientRequestToken associated with the secret version, or None if no validation is desired
Returns:
SecretDictionary: Secret dictionary
Raises:
ResourceNotFoundException: If the secret with the specified arn and stage does not exist
ValueError: If the secret is not valid JSON
KeyError: If the secret json does not contain the expected keys
"""
required_fields = ['host', 'username', 'password']
# Only do VersionId validation against the stage if a token is passed in
if token:
secret = service_client.get_secret_value(SecretId=arn, VersionId=token, VersionStage=stage)
else:
secret = service_client.get_secret_value(SecretId=arn, VersionStage=stage)
plaintext = secret['SecretString']
secret_dict = json.loads(plaintext)
# Run validations against the secret
if 'engine' not in secret_dict or secret_dict['engine'] != 'postgres':
raise KeyError("Database engine must be set to 'postgres' in order to use this rotation lambda")
for field in required_fields:
if field not in secret_dict:
raise KeyError("%s key is missing from secret JSON" % field)
# Parse and return the secret JSON string
return secret_dict
def get_alt_username(current_username):
"""Gets the alternate username for the current_username passed in
This helper function gets the username for the alternate user based on the passed in current username.
Args:
current_username (client): The current username
Returns:
AlternateUsername: Alternate username
Raises:
ValueError: If the new username length would exceed the maximum allowed
"""
clone_suffix = "_clone"
if current_username.endswith(clone_suffix):
return current_username[:(len(clone_suffix) * -1)]
else:
new_username = current_username + clone_suffix
if len(new_username) > 63:
raise ValueError("Unable to clone user, username length with _clone appended would exceed 63 characters")
        return new_username
 | awsdocs/aws-doc-sdk-examples | lambda_functions/secretsmanager/RDSPostgreSQL-Multiuser.py | Python | apache-2.0 | 16,740 | 0.004421 |
#!/usr/bin/env python
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from PyQt4.QtXml import *
import sys, os
from ui_cheatsheet import Ui_CSDialog
class CSWindow ( QDialog , Ui_CSDialog):
settings = QSettings('Mte90','LearnHotkeys')
settings.setFallbacksEnabled(False)
theme_path = "./style"
theme_folder = theme_path+'/'
hotkeys_path = "./hotkeys"
hotkeys_folder = hotkeys_path+'/'
html_cs = ""
html_style = "<html>\n<head>\n<style>\n%s\n</style>\n</head>\n<body>\n"
html_thead = "\n<table><tr style='font-weight:bold'><td>Action</td><td>HotKey</td></tr>"
html_def = ""
def __init__ ( self, parent = None ):
QDialog.__init__( self, parent, Qt.CustomizeWindowHint )
self.ui = Ui_CSDialog()
self.ui.setupUi( self )
self.ui.saveButton.clicked.connect(self.saveHTML)
self.ui.closeButton.clicked.connect(self.accept)
for root, dirs, files in os.walk(self.theme_path):
files.sort()
for name in files:
filename = os.path.join(root, name)
self.ui.themeChooser.addItem(os.path.basename(filename))
if sys.version_info < (3, 0):
if self.ui.themeChooser.findText(self.settings.value('theme').toString()) != -1:
self.ui.themeChooser.setCurrentIndex(self.ui.themeChooser.findText(self.settings.value('theme').toString()))
self.saveConfig()
else:
self.settings.setValue("theme", 'soft-grey.css')
else:
if self.ui.themeChooser.findText(self.settings.value('theme')) != -1:
self.ui.themeChooser.setCurrentIndex(self.ui.themeChooser.findText(self.settings.value('theme')))
self.saveConfig()
else:
self.settings.setValue("theme", 'soft-grey.css')
self.ui.themeChooser.currentIndexChanged.connect(self.saveConfig)
self.loadHotkeys()
self.show()
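    # Illustrative hotkey-definition layout, inferred from the attributes and
    # elements read in loadHotkeys() below; the root tag name and the values
    # are made up, only the attribute/element names come from the parsing code:
    #
    #   <definition fileversion="1.0" software="ExampleApp" softwareversion="2.0"
    #               softwaresite="http://example.org" def="Default keymap">
    #       <hotkey><question>Open file</question><key>Ctrl+O</key></hotkey>
    #   </definition>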
def loadHotkeys(self):
if sys.version_info < (3, 0):
if self.settings.value('file_name_default').toString() != "":
fname = self.hotkeys_folder+self.settings.value('file_name_default').toString()
else:
if self.settings.value('file_name_default') != "":
fname = self.hotkeys_folder+self.settings.value('file_name_default')
dom = QDomDocument()
error = None
fh = None
try:
fh = QFile(fname)
if not fh.open(QIODevice.ReadOnly):
print(IOError, unicode(fh.errorString()))
if not dom.setContent(fh):
print(ValueError, "could not parse XML")
except (IOError, OSError, ValueError) as e:
error = "Failed to import: {0}".format(e)
finally:
if fh is not None:
fh.close()
if error is not None:
return False, error
root = dom.documentElement()
if not root.hasAttribute('fileversion'):
            QMessageBox.information(self.window(), "LearnHotkeys","The file %s is not a LearnHotkeys definition file." % self.settings.value('file_name_default').toString())
return False
self.html_def += root.attribute('software')+" - "+root.attribute('softwareversion')+" - "+root.attribute('def')+"<br>\n<a href='"+root.attribute('softwaresite')+"'>" \
+root.attribute('softwaresite')+"</a><br> CheatSheet version: "+root.attribute('fileversion')+"<br><br>"
child = root.firstChildElement('hotkey')
while not child.isNull():
self.html_cs += "\n<tr><td>%s</td><td>%s</td></tr>" % (child.firstChildElement('question').text(),child.firstChildElement('key').text())
child = child.nextSiblingElement('hotkey')
self.html_cs += "</table></body></html>"
if sys.version_info < (3, 0):
self.ui.csView.setHtml((self.html_style % self.get_file_content(self.theme_folder+self.settings.value('theme').toString()))+self.html_thead+self.html_cs)
else:
self.ui.csView.setHtml((self.html_style % self.get_file_content(self.theme_folder+self.settings.value('theme')))+self.html_thead+self.html_cs)
def saveHTML(self):
if sys.version_info < (3, 0):
filename = QFileDialog.getSaveFileName(self, 'Save HTML CheatSheet', self.settings.value('file_name_default').toString()[:-4]+'.html')
fname = open(filename, 'w')
html = (self.html_style% self.get_file_content(self.theme_folder+self.settings.value('theme').toString()))+self.html_def+self.html_thead+self.html_cs
else:
filename = QFileDialog.getSaveFileName(self, 'Save HTML CheatSheet', self.settings.value('file_name_default')[:-4]+'.html')
fname = open(filename, 'w')
html = (self.html_style% self.get_file_content(self.theme_folder+self.settings.value('theme')))+self.html_def+self.html_thead+self.html_cs
fname.write(html.toUtf8()+"\n")
fname.close()
def get_file_content(self,file):
f = open(file, 'r')
c = f.read()
f.close()
return c
def saveConfig(self):
self.settings.setValue("theme", self.ui.themeChooser.currentText())
if sys.version_info < (3, 0):
self.ui.csView.setHtml((self.html_style % self.get_file_content(self.theme_folder+self.settings.value('theme').toString()))+self.html_thead+self.html_cs)
else:
self.ui.csView.setHtml((self.html_style % self.get_file_content(self.theme_folder+self.settings.value('theme')))+self.html_thead+self.html_cs)
| Mte90/LearnHotkeys | cheatsheet.py | Python | gpl-3.0 | 5,629 | 0.007817 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# This file is part of the Shiboken Python Bindings Generator project.
#
# Copyright (C) 2009 Nokia Corporation and/or its subsidiary(-ies).
#
# Contact: PySide team <contact@pyside.org>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public License
# version 2.1 as published by the Free Software Foundation. Please
# review the following information to ensure the GNU Lesser General
# Public License version 2.1 requirements will be met:
# http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
'''Test cases for virtual methods.'''
import sys
import unittest
from sample import *
import warnings
class ExtendedVirtualMethods(VirtualMethods):
def __init__(self):
VirtualMethods.__init__(self)
self.virtual_method0_called = False
def virtualMethod0(self, pt, val, cpx, b):
self.virtual_method0_called = True
return VirtualMethods.virtualMethod0(self, pt, val, cpx, b) * -1.0
def strListToStdList(self, arg):
warnings.simplefilter('error')
# returning wrong type for test purposes.
return True
def recursionOnModifiedVirtual(self, arg):
# check if recursion is caused by injected code that calls C++.
return VirtualMethods.recursionOnModifiedVirtual(self, arg) + 10
class ExtendedVirtualDaughter(VirtualDaughter):
def __init__(self, name):
VirtualDaughter.__init__(self, name)
self.grand_daughter_name_called = False
def name(self):
self.grand_daughter_name_called = True
return VirtualDaughter.name(self).prepend('Extended')
class ExtendedExtendedVirtualDaughter(ExtendedVirtualDaughter):
def __init__(self, name):
ExtendedVirtualDaughter.__init__(self, name)
self.grand_grand_daughter_name_called = False
def name(self):
self.grand_grand_daughter_name_called = True
return ExtendedVirtualDaughter.name(self).prepend('Extended')
class VirtualMethodsTest(unittest.TestCase):
'''Test case for virtual methods'''
def setUp(self):
self.prefix_from_codeinjection = Str('Pimped')
def tearDown(self):
del self.prefix_from_codeinjection
def testReimplementedVirtualMethod0(self):
'''Test Python override of a virtual method with various different parameters is correctly called from C++.'''
vm = VirtualMethods()
evm = ExtendedVirtualMethods()
pt = Point(1.1, 2.2)
val = 4
cpx = complex(3.3, 4.4)
b = True
result0 = vm.callVirtualMethod0(pt, val, cpx, b)
result1 = evm.callVirtualMethod0(pt, val, cpx, b)
self.assertEqual(result0 * -1.0, result1)
def testRecursionOnModifiedVirtual(self):
evm = ExtendedVirtualMethods()
self.assertEqual(evm.recursionOnModifiedVirtual(''), 10)
self.assertEqual(evm.callRecursionOnModifiedVirtual(''), 10)
def testReimplementedVirtualMethodInheritedFromGrandParent(self):
'''Test Python override of a virtual method inherited from a grand parent.'''
original_name = 'Foo'
evd = ExtendedVirtualDaughter(original_name)
self.assertEqual(VirtualDaughter.name(evd), original_name)
self.assertEqual(VirtualMethods.name(evd), original_name)
self.assertFalse(evd.grand_daughter_name_called)
name = evd.callName()
self.assertTrue(evd.grand_daughter_name_called)
self.assertEqual(evd.name().prepend(self.prefix_from_codeinjection), name)
def testReimplementedVirtualMethodInheritedFromGrandGrandParent(self):
'''Test Python override of a virtual method inherited from a grand grand parent.'''
original_name = 'Foo'
eevd = ExtendedExtendedVirtualDaughter(original_name)
self.assertEqual(VirtualDaughter.name(eevd), original_name)
self.assertEqual(VirtualMethods.name(eevd), original_name)
self.assertFalse(eevd.grand_daughter_name_called)
self.assertFalse(eevd.grand_grand_daughter_name_called)
name = eevd.callName()
self.assertTrue(eevd.grand_daughter_name_called)
self.assertTrue(eevd.grand_grand_daughter_name_called)
self.assertEqual(eevd.name().prepend(self.prefix_from_codeinjection), name)
class PrettyErrorMessageTest(unittest.TestCase):
def testIt(self):
obj = ExtendedVirtualMethods()
self.assertRaises(RuntimeWarning, obj.callStrListToStdList, StrList())
if __name__ == '__main__':
unittest.main()
| codewarrior0/Shiboken | tests/samplebinding/virtualmethods_test.py | Python | gpl-2.0 | 5,040 | 0.002381 |
# coding=utf-8
# Copyright 2022 The Tensor2Tensor Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Base classes and utilities for image datasets."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import io
import os
import numpy as np
from tensor2tensor.data_generators import generator_utils
from tensor2tensor.data_generators import problem
from tensor2tensor.data_generators import text_encoder
from tensor2tensor.layers import common_layers
from tensor2tensor.layers import modalities
from tensor2tensor.utils import contrib
from tensor2tensor.utils import metrics
import tensorflow.compat.v1 as tf
def matplotlib_pyplot():
import matplotlib # pylint: disable=g-import-not-at-top
matplotlib.use("agg")
import matplotlib.pyplot as plt # pylint: disable=g-import-not-at-top
return plt
def image_to_tf_summary_value(image, tag):
"""Converts a NumPy image to a tf.Summary.Value object.
Args:
image: 3-D NumPy array.
tag: name for tf.Summary.Value for display in tensorboard.
Returns:
image_summary: A tf.Summary.Value object.
"""
curr_image = np.asarray(image, dtype=np.uint8)
height, width, n_channels = curr_image.shape
# If monochrome image, then reshape to [height, width]
if n_channels == 1:
curr_image = np.reshape(curr_image, [height, width])
s = io.BytesIO()
matplotlib_pyplot().imsave(s, curr_image, format="png")
img_sum = tf.Summary.Image(encoded_image_string=s.getvalue(),
height=height, width=width,
colorspace=n_channels)
return tf.Summary.Value(tag=tag, image=img_sum)
def convert_predictions_to_image_summaries(hook_args):
"""Optionally converts images from hooks_args to image summaries.
Args:
hook_args: DecodeHookArgs namedtuple
Returns:
    summaries: list of tf.Summary values if hook_args.decode_hparams.display_decoded_images is set; an empty list otherwise.
"""
decode_hparams = hook_args.decode_hparams
if not decode_hparams.display_decoded_images:
return []
predictions = hook_args.predictions[0]
# Display ten random inputs and outputs so that tensorboard does not hang.
all_summaries = []
rand_predictions = np.random.choice(predictions, size=10)
for ind, prediction in enumerate(rand_predictions):
output_summary = image_to_tf_summary_value(
prediction["outputs"], tag="%d_output" % ind)
input_summary = image_to_tf_summary_value(
prediction["inputs"], tag="%d_input" % ind)
all_summaries.append(input_summary)
all_summaries.append(output_summary)
return all_summaries
def resize_by_area(img, size):
"""image resize function used by quite a few image problems."""
return tf.to_int64(
tf.image.resize_images(img, [size, size], tf.image.ResizeMethod.AREA))
def make_multiscale(image, resolutions,
resize_method=tf.image.ResizeMethod.BICUBIC,
num_channels=3):
"""Returns list of scaled images, one for each resolution.
Args:
image: Tensor of shape [height, height, num_channels].
resolutions: List of heights that image's height is resized to.
resize_method: tf.image.ResizeMethod.
num_channels: Number of channels in image.
Returns:
List of Tensors, one for each resolution with shape given by
[resolutions[i], resolutions[i], num_channels].
"""
scaled_images = []
for height in resolutions:
scaled_image = tf.image.resize_images(
image,
size=[height, height], # assuming that height = width
method=resize_method)
scaled_image = tf.to_int64(scaled_image)
scaled_image.set_shape([height, height, num_channels])
scaled_images.append(scaled_image)
return scaled_images
def make_multiscale_dilated(image, resolutions, num_channels=3):
"""Returns list of scaled images, one for each resolution.
Resizes by skipping every nth pixel.
Args:
image: Tensor of shape [height, height, num_channels].
resolutions: List of heights that image's height is resized to. The function
assumes VALID padding, so the original image's height must be divisible
by each resolution's height to return the exact resolution size.
num_channels: Number of channels in image.
Returns:
List of Tensors, one for each resolution with shape given by
[resolutions[i], resolutions[i], num_channels] if resolutions properly
divide the original image's height; otherwise shape height and width is up
to valid skips.
"""
image_height = common_layers.shape_list(image)[0]
scaled_images = []
for height in resolutions:
dilation_rate = image_height // height # assuming height = width
scaled_image = image[::dilation_rate, ::dilation_rate]
scaled_image = tf.to_int64(scaled_image)
scaled_image.set_shape([None, None, num_channels])
scaled_images.append(scaled_image)
return scaled_images
class ImageProblem(problem.Problem):
"""Base class for problems with images."""
@property
def num_channels(self):
"""Number of color channels."""
return 3
@property
def vocab_size(self):
"""Number of pixel values."""
return 256
def example_reading_spec(self):
data_fields = {
"image/encoded": tf.FixedLenFeature((), tf.string),
"image/format": tf.FixedLenFeature((), tf.string),
}
data_items_to_decoders = {
"inputs":
contrib.slim().tfexample_decoder.Image(
image_key="image/encoded",
format_key="image/format",
channels=self.num_channels),
}
return data_fields, data_items_to_decoders
def preprocess_example(self, example, mode, hparams):
if not self._was_reversed:
example["inputs"] = tf.image.per_image_standardization(example["inputs"])
return example
def eval_metrics(self):
eval_metrics = [
metrics.Metrics.ACC, metrics.Metrics.ACC_TOP5,
metrics.Metrics.ACC_PER_SEQ, metrics.Metrics.NEG_LOG_PERPLEXITY
]
if self._was_reversed:
eval_metrics += [metrics.Metrics.IMAGE_SUMMARY]
return eval_metrics
@property
def decode_hooks(self):
return [convert_predictions_to_image_summaries]
class Image2ClassProblem(ImageProblem):
"""Base class for image classification problems."""
@property
def is_small(self):
raise NotImplementedError()
@property
def num_classes(self):
raise NotImplementedError()
@property
def train_shards(self):
raise NotImplementedError()
@property
def dev_shards(self):
return 1
@property
def class_labels(self):
return ["ID_%d" % i for i in range(self.num_classes)]
def feature_encoders(self, data_dir):
del data_dir
return {
"inputs": text_encoder.ImageEncoder(channels=self.num_channels),
"targets": text_encoder.ClassLabelEncoder(self.class_labels)
}
def generator(self, data_dir, tmp_dir, is_training):
raise NotImplementedError()
def example_reading_spec(self):
label_key = "image/class/label"
data_fields, data_items_to_decoders = (
super(Image2ClassProblem, self).example_reading_spec())
data_fields[label_key] = tf.FixedLenFeature((1,), tf.int64)
data_items_to_decoders["targets"] = contrib.slim().tfexample_decoder.Tensor(
label_key)
return data_fields, data_items_to_decoders
def hparams(self, defaults, unused_model_hparams):
p = defaults
p.modality = {"inputs": modalities.ModalityType.IMAGE,
"targets": modalities.ModalityType.CLASS_LABEL}
p.vocab_size = {"inputs": 256,
"targets": self.num_classes}
p.batch_size_multiplier = 4 if self.is_small else 256
p.loss_multiplier = 3.0 if self.is_small else 1.0
if self._was_reversed:
p.loss_multiplier = 1.0
p.input_space_id = problem.SpaceID.IMAGE
p.target_space_id = problem.SpaceID.IMAGE_LABEL
def generate_data(self, data_dir, tmp_dir, task_id=-1):
generator_utils.generate_dataset_and_shuffle(
self.generator(data_dir, tmp_dir, True),
self.training_filepaths(data_dir, self.train_shards, shuffled=False),
self.generator(data_dir, tmp_dir, False),
self.dev_filepaths(data_dir, self.dev_shards, shuffled=False))
def encode_images_as_png(images):
"""Yield images encoded as pngs."""
if tf.executing_eagerly():
for image in images:
yield tf.image.encode_png(image).numpy()
else:
(height, width, channels) = images[0].shape
with tf.Graph().as_default():
image_t = tf.placeholder(dtype=tf.uint8, shape=(height, width, channels))
encoded_image_t = tf.image.encode_png(image_t)
with tf.Session() as sess:
for image in images:
enc_string = sess.run(encoded_image_t, feed_dict={image_t: image})
yield enc_string
def image_generator(images, labels):
"""Generator for images that takes image and labels lists and creates pngs.
Args:
images: list of images given as [width x height x channels] numpy arrays.
labels: list of ints, same length as images.
Yields:
A dictionary representing the images with the following fields:
* image/encoded: the string encoding the image as PNG,
* image/format: the string "png" representing image format,
* image/class/label: an integer representing the label,
* image/height: an integer representing the height,
* image/width: an integer representing the width.
Every field is actually a singleton list of the corresponding type.
Raises:
ValueError: if images is an empty list.
"""
if not images:
raise ValueError("Must provide some images for the generator.")
width, height, _ = images[0].shape
for (enc_image, label) in zip(encode_images_as_png(images), labels):
yield {
"image/encoded": [enc_image],
"image/format": ["png"],
"image/class/label": [int(label)],
"image/height": [height],
"image/width": [width]
}
class Image2TextProblem(ImageProblem):
"""Base class for image-to-text problems."""
@property
def is_character_level(self):
raise NotImplementedError()
@property
def vocab_problem(self):
raise NotImplementedError() # Not needed if self.is_character_level.
@property
def target_space_id(self):
raise NotImplementedError()
@property
def train_shards(self):
raise NotImplementedError()
@property
def dev_shards(self):
raise NotImplementedError()
def generator(self, data_dir, tmp_dir, is_training):
raise NotImplementedError()
def example_reading_spec(self):
label_key = "image/class/label"
data_fields, data_items_to_decoders = (
super(Image2TextProblem, self).example_reading_spec())
data_fields[label_key] = tf.VarLenFeature(tf.int64)
data_items_to_decoders["targets"] = contrib.slim().tfexample_decoder.Tensor(
label_key)
return data_fields, data_items_to_decoders
def feature_encoders(self, data_dir):
if self.is_character_level:
encoder = text_encoder.ByteTextEncoder()
else:
vocab_filename = os.path.join(
data_dir, self.vocab_problem.vocab_filename)
encoder = text_encoder.SubwordTextEncoder(vocab_filename)
input_encoder = text_encoder.ImageEncoder(channels=self.num_channels)
return {"inputs": input_encoder, "targets": encoder}
def hparams(self, defaults, unused_model_hparams):
p = defaults
p.modality = {"inputs": modalities.ModalityType.IMAGE,
"targets": modalities.ModalityType.SYMBOL}
p.vocab_size = {"inputs": 256,
"targets": self._encoders["targets"].vocab_size}
p.batch_size_multiplier = 256
p.loss_multiplier = 1.0
p.input_space_id = problem.SpaceID.IMAGE
p.target_space_id = self.target_space_id
def generate_data(self, data_dir, tmp_dir, task_id=-1):
generator_utils.generate_dataset_and_shuffle(
self.generator(data_dir, tmp_dir, True),
self.training_filepaths(data_dir, self.train_shards, shuffled=False),
self.generator(data_dir, tmp_dir, False),
self.dev_filepaths(data_dir, self.dev_shards, shuffled=False))
def image_augmentation(images, do_colors=False, crop_size=None):
"""Image augmentation: cropping, flipping, and color transforms."""
if crop_size is None:
crop_size = [299, 299]
images = tf.random_crop(images, crop_size + [3])
images = tf.image.random_flip_left_right(images)
if do_colors: # More augmentation, but might be slow.
images = tf.image.random_brightness(images, max_delta=32. / 255.)
images = tf.image.random_saturation(images, lower=0.5, upper=1.5)
images = tf.image.random_hue(images, max_delta=0.2)
images = tf.image.random_contrast(images, lower=0.5, upper=1.5)
return images
def cifar_image_augmentation(images):
"""Image augmentation suitable for CIFAR-10/100.
As described in https://arxiv.org/pdf/1608.06993v3.pdf (page 5).
Args:
images: a Tensor.
Returns:
Tensor of the same shape as images.
"""
images = tf.image.resize_image_with_crop_or_pad(images, 40, 40)
images = tf.random_crop(images, [32, 32, 3])
images = tf.image.random_flip_left_right(images)
return images
def random_shift(image, wsr=0.1, hsr=0.1):
"""Apply random horizontal and vertical shift to images.
This is the default data-augmentation strategy used on CIFAR in Glow.
Args:
image: a 3-D Tensor
wsr: Width shift range, as a float fraction of the width.
hsr: Height shift range, as a float fraction of the width.
Returns:
images: images translated by the provided wsr and hsr.
"""
height, width, _ = common_layers.shape_list(image)
width_range, height_range = wsr*width, hsr*height
height_translations = tf.random_uniform((1,), -height_range, height_range)
width_translations = tf.random_uniform((1,), -width_range, width_range)
translations = tf.concat((height_translations, width_translations), axis=0)
return contrib.image().translate(image, translations=translations)
| tensorflow/tensor2tensor | tensor2tensor/data_generators/image_utils.py | Python | apache-2.0 | 14,495 | 0.007934 |
# coding: utf-8
# DJANGO IMPORTS
from django.template.loader import render_to_string
from django.forms.widgets import FileInput as DjangoFileInput
from django.forms.widgets import ClearableFileInput as DjangoClearableFileInput
from django.forms.widgets import CheckboxInput
from django.forms.fields import FilePathField
from django.utils.translation import ugettext, ugettext_lazy
from django.utils.safestring import mark_safe
# FILEBROWSER IMPORTS
from filebrowser.base import FileObject
from filebrowser.settings import ADMIN_THUMBNAIL
class FileInput(DjangoClearableFileInput):
initial_text = ugettext_lazy('Currently')
input_text = ugettext_lazy('Change')
clear_checkbox_label = ugettext_lazy('Clear')
template_with_initial = u'%(input)s %(preview)s'
def render(self, name, value, attrs=None):
substitutions = {
'initial_text': self.initial_text,
'input_text': self.input_text,
'clear_template': '',
'preview': '',
'clear_checkbox_label': self.clear_checkbox_label,
}
template = u'%(input)s'
substitutions['input'] = super(DjangoClearableFileInput, self).render(name, value, attrs)
if value and hasattr(value, "url"):
template = self.template_with_initial
preview_template = render_to_string('filebrowser/widgets/fileinput.html', {
'value': FileObject(value.name),
'ADMIN_THUMBNAIL': ADMIN_THUMBNAIL,
})
substitutions["preview"] = preview_template
return mark_safe(template % substitutions)
class ClearableFileInput(DjangoClearableFileInput):
"""
A FileField Widget that shows its current value if it has one.
If value is an Image, a thumbnail is shown.
"""
initial_text = ugettext_lazy('Currently')
input_text = ugettext_lazy('Change')
clear_checkbox_label = ugettext_lazy('Clear')
template_with_initial = u'%(clear_template)s<br />%(input)s %(preview)s'
template_with_clear = u'%(clear)s <label for="%(clear_checkbox_id)s">%(clear_checkbox_label)s</label>'
def render(self, name, value, attrs=None):
substitutions = {
'initial_text': self.initial_text,
'input_text': self.input_text,
'clear_template': '',
'preview': '',
'clear_checkbox_label': self.clear_checkbox_label,
}
template = u'%(input)s'
substitutions['input'] = super(DjangoClearableFileInput, self).render(name, value, attrs)
if value and hasattr(value, "url"):
template = self.template_with_initial
substitutions['initial'] = (u'<a target="_blank" href="%s">%s</a>' % (value.url, value))
if not self.is_required:
checkbox_name = self.clear_checkbox_name(name)
checkbox_id = self.clear_checkbox_id(checkbox_name)
substitutions['clear_checkbox_name'] = checkbox_name
substitutions['clear_checkbox_id'] = checkbox_id
substitutions['clear'] = CheckboxInput().render(checkbox_name, False, attrs={'id': checkbox_id})
substitutions['clear_template'] = self.template_with_clear % substitutions
if value and hasattr(value, "url"):
preview_template = render_to_string('filebrowser/widgets/clearablefileinput.html', {
'value': FileObject(value.name),
'ADMIN_THUMBNAIL': ADMIN_THUMBNAIL,
})
substitutions["preview"] = preview_template
return mark_safe(template % substitutions)
| yaroslavprogrammer/django-filebrowser-no-grappelli | filebrowser/widgets.py | Python | bsd-3-clause | 3,673 | 0.004901 |
#!/usr/bin/env python2.7
import datetime
import monthdelta
def parse_date(date_str):
return datetime.datetime.strptime(date_str, "%Y-%m-%d")
def unparse_date(date_obj):
return date_obj.strftime("%Y-%m-%d")
class Company(object):
def __init__(self, name):
self.name = name
self.flows = []
self.verbose = False
def __str__(self):
return self.name
def add_flow(self, flow):
self.flows.append(flow)
def cash_at_date_internal(self, start_date, end_date):
cash = 0
for flow in self.flows:
delta = flow.cashflow(start_date, end_date, (end_date - start_date).days)
cash += delta
if self.verbose:
print flow.name, round(delta, 2)
return round(cash, 2)
def cash_at_date(self, start, end):
start_date = parse_date(start)
end_date = parse_date(end)
return self.cash_at_date_internal(start_date, end_date)
def cash_monthly_summary(self, start, end):
start_date = parse_date(start)
cur_date = parse_date(start)
end_date = parse_date(end)
while cur_date <= end_date:
print cur_date, self.cash_at_date_internal(start_date, cur_date)
cur_date += monthdelta.MonthDelta(1)
if self.verbose:
print
# CashFlow objects fundamentally just provide a function that says how
# much cash has been spent by that source at each time
#
# The API is that one needs to define a function .cashflow(start, end, days)
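#
# A minimal custom flow only needs that one method, e.g. (illustrative,
# not used elsewhere in this script):
#
#   class OneOffGrant(CashFlow):
#       """Cash received once, at the start of the model."""
#       def __init__(self, name, amount):
#           super(OneOffGrant, self).__init__(name)
#           self.amount = amount
#       def cashflow(self, start, end, days):
#           return self.amount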
class CashFlow(object):
def __init__(self, name):
self.name = name
class FixedCost(CashFlow):
def __init__(self, name, amount):
super(FixedCost, self).__init__(name)
self.cost = -amount
def cashflow(self, start, end, days):
return self.cost
class ConstantCost(CashFlow):
def __init__(self, name, amount):
super(ConstantCost, self).__init__(name)
self.rate = -amount
def cashflow(self, start, end, days):
return self.rate * days / 365.
class PeriodicCost(CashFlow):
def __init__(self, name, amount, start, interval):
super(PeriodicCost, self).__init__(name)
self.amount = -amount
self.start = parse_date(start)
self.interval = interval
def cashflow(self, start, end, days):
cur = self.start
delta = 0
while (cur <= end):
if cur >= start:
delta += self.amount
cur += datetime.timedelta(days=self.interval)
return delta
class MonthlyCost(CashFlow):
def __init__(self, name, amount, start):
super(MonthlyCost, self).__init__(name)
self.amount = -amount
self.start = parse_date(start)
def cashflow(self, start, end, days):
cur = self.start
delta = 0
while (cur <= end):
if cur >= start:
delta += self.amount
cur += monthdelta.MonthDelta(1)
return delta
class TotalCost(CashFlow):
def __init__(self, name, *args):
self.name = name
self.flows = args
def cashflow(self, start, end, days):
return sum(cost.cashflow(start, end, days) for cost in self.flows)
class SemiMonthlyCost(TotalCost):
def __init__(self, name, amount, start1, start2 = None):
if start2 is None:
start2 = unparse_date(parse_date(start1) + datetime.timedelta(days=14))
super(SemiMonthlyCost, self).__init__(name,
MonthlyCost(name, amount, start1),
MonthlyCost(name, amount, start2)
)
class SemiMonthlyWagesNoTax(SemiMonthlyCost):
def __init__(self, name, wage, start):
super(SemiMonthlyWagesNoTax, self).__init__(name, self.compute_wage(wage), start)
def compute_wage(self, wage):
return wage / 24.
class SemiMonthlyWages(SemiMonthlyWagesNoTax):
def compute_wage(self, wage):
fica_tax = min(wage, 110100) * 0.062 + wage * 0.0145
unemp_tax = 450
return (wage + fica_tax + unemp_tax) / 24.
def __init__(self, name, wage, start):
super(SemiMonthlyWages, self).__init__(name, wage, start)
class DelayedCost(CashFlow):
def __init__(self, start, base_model):
super(DelayedCost, self).__init__("Delayed")
self.base_model = base_model
self.start = parse_date(start)
def cashflow(self, start, end, days):
start = max(start, self.start)
if start > end:
return 0
time_delta = (end-start).days
return self.base_model.cashflow(start, end, time_delta)
class BiweeklyWagesNoTax(PeriodicCost):
def __init__(self, name, wage, start):
super(BiweeklyWagesNoTax, self).__init__(name, self.compute_wage(wage), start, 14)
def compute_wage(self, wage):
# You would think this calculation would be (wage * 14 /
# 365.24), but you'd be wrong -- companies paying biweekly
# wages overpay by about 0.34% by doing the math this way
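        # Worked example (illustrative salary): a $52,000 wage pays
        # 52000 / 26 = $2,000.00 per period, versus 52000 * 14 / 365.24
        # ~= $1,993.2 -- roughly 0.34% more per paycheck.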
return wage / 26.
class BiweeklyWages(BiweeklyWagesNoTax):
def compute_wage(self, wage):
fica_tax = min(wage, 110100) * 0.062 + wage * 0.0145
unemp_tax = 450
# You would think this calculation would be (wage * 14 /
# 365.24), but you'd be wrong -- companies paying biweekly
# wages overpay by about 0.34% by doing the math this way
return (wage + fica_tax + unemp_tax) / 26.
def __init__(self, name, wage, start):
super(BiweeklyWages, self).__init__(name, wage, start)
if __name__ == "__main__":
# Tests
c = Company("Example Inc")
c.add_flow(FixedCost("Initial Cash", -500000))
c.add_flow(FixedCost("Incorporation", 500))
assert(c.cash_at_date("2012-01-01", "2012-03-01") == 500000 - 500)
c.add_flow(FixedCost("Incorporation", -500))
c.add_flow(ConstantCost("Office", 50000))
assert(c.cash_at_date("2012-01-01", "2012-01-02") == 500000 - round(50000*1/365., 2))
c.add_flow(ConstantCost("Office", -50000))
c.add_flow(PeriodicCost("Payroll", 4000, "2012-01-05", 14))
assert(c.cash_at_date("2012-01-01", "2012-01-02") == 500000)
assert(c.cash_at_date("2012-01-01", "2012-01-06") == 500000 - 4000)
c.add_flow(PeriodicCost("Payroll", -4000, "2012-01-05", 14))
c.add_flow(DelayedCost("2012-02-01", ConstantCost("Office", 50000)))
assert(c.cash_at_date("2012-01-01", "2012-01-05") == 500000)
assert(c.cash_at_date("2012-01-01", "2012-02-05") == 500000 - round(50000*4/365., 2))
c.add_flow(DelayedCost("2012-02-01", ConstantCost("Office", -50000)))
c.add_flow(DelayedCost("2012-02-01", FixedCost("Financing", 50000)))
assert(c.cash_at_date("2012-01-01", "2012-01-15") == 500000)
c.add_flow(DelayedCost("2012-02-01", FixedCost("Financing", -50000)))
c.add_flow(SemiMonthlyCost("Payroll", 4000, "2012-01-01"))
assert(c.cash_at_date("2012-01-01", "2012-01-01") == 500000 - 4000)
assert(c.cash_at_date("2012-01-01", "2012-01-14") == 500000 - 4000)
assert(c.cash_at_date("2012-01-01", "2012-01-15") == 500000 - 4000 * 2)
assert(c.cash_at_date("2012-01-01", "2012-01-31") == 500000 - 4000 * 2)
assert(c.cash_at_date("2012-01-01", "2012-02-01") == 500000 - 4000 * 3)
assert(c.cash_at_date("2012-01-01", "2012-02-15") == 500000 - 4000 * 4)
c.add_flow(SemiMonthlyCost("Payroll", -4000, "2012-01-01"))
c.add_flow(SemiMonthlyWages("Payroll", 4000, "2012-01-01"))
assert(c.cash_at_date("2012-01-01", "2012-02-15") == 499207.33)
c.add_flow(SemiMonthlyWages("Payroll", -4000, "2012-01-01"))
print c
c.cash_monthly_summary("2012-01-01", "2012-07-01")
| gkotian/zulip | tools/deprecated/finbot/money.py | Python | apache-2.0 | 7,737 | 0.003231 |
# Copyright (C) 2009 Jeremy S. Sanders
# Email: Jeremy Sanders <jeremy@jeremysanders.net>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
##############################################################################
"""A paint engine to produce EMF exports.
Requires: PyQt-x11-gpl-4.6-snapshot-20090906.tar.gz
sip-4.9-snapshot-20090906.tar.gz
pyemf
"""
import struct
import pyemf
from .. import qtall as qt
inch_mm = 25.4
scale = 100
def isStockObject(obj):
"""Is this a stock windows object."""
return (obj & 0x80000000) != 0
class _EXTCREATEPEN(pyemf._EMR._EXTCREATEPEN):
"""Extended pen creation record with custom line style."""
typedef = [
('i','handle',0),
('i','offBmi',0),
('i','cbBmi',0),
('i','offBits',0),
('i','cbBits',0),
('i','style'),
('i','penwidth'),
('i','brushstyle'),
('i','color'),
('i','brushhatch',0),
('i','numstyleentries')
]
def __init__(self, style=pyemf.PS_SOLID, width=1, color=0,
styleentries=[]):
"""Create pen.
styleentries is a list of dash and space lengths."""
pyemf._EMR._EXTCREATEPEN.__init__(self)
self.style = style
self.penwidth = width
self.color = pyemf._normalizeColor(color)
self.brushstyle = 0x0 # solid
if style & pyemf.PS_STYLE_MASK != pyemf.PS_USERSTYLE:
styleentries = []
self.numstyleentries = len(styleentries)
if styleentries:
self.unhandleddata = struct.pack(
"i"*self.numstyleentries, *styleentries)
def hasHandle(self):
return True
class EMFPaintEngine(qt.QPaintEngine):
"""Custom EMF paint engine."""
def __init__(self, width_in, height_in, dpi=75):
qt.QPaintEngine.__init__(
self,
qt.QPaintEngine.Antialiasing |
qt.QPaintEngine.PainterPaths |
qt.QPaintEngine.PrimitiveTransform |
qt.QPaintEngine.PaintOutsidePaintEvent |
qt.QPaintEngine.PatternBrush
)
self.width = width_in
self.height = height_in
self.dpi = dpi
def begin(self, paintdevice):
self.emf = pyemf.EMF(self.width, self.height, int(self.dpi*scale))
self.pen = self.emf.GetStockObject(pyemf.BLACK_PEN)
self.pencolor = (0, 0, 0)
self.brush = self.emf.GetStockObject(pyemf.NULL_BRUSH)
self.paintdevice = paintdevice
return True
def drawLines(self, lines):
"""Draw lines to emf output."""
for line in lines:
self.emf.Polyline(
[ (int(line.x1()*scale), int(line.y1()*scale)),
(int(line.x2()*scale), int(line.y2()*scale)) ] )
def drawPolygon(self, points, mode):
"""Draw polygon on output."""
# print "Polygon"
pts = [(int(p.x()*scale), int(p.y()*scale)) for p in points]
if mode == qt.QPaintEngine.PolylineMode:
self.emf.Polyline(pts)
else:
self.emf.SetPolyFillMode({
qt.QPaintEngine.WindingMode: pyemf.WINDING,
qt.QPaintEngine.OddEvenMode: pyemf.ALTERNATE,
qt.QPaintEngine.ConvexMode: pyemf.WINDING
            }[mode])
self.emf.Polygon(pts)
def drawEllipse(self, rect):
"""Draw an ellipse."""
# print "ellipse"
args = (
int(rect.left()*scale), int(rect.top()*scale),
int(rect.right()*scale), int(rect.bottom()*scale),
int(rect.left()*scale), int(rect.top()*scale),
int(rect.left()*scale), int(rect.top()*scale),
)
self.emf.Pie(*args)
self.emf.Arc(*args)
def drawPoints(self, points):
"""Draw points."""
# print "points"
for pt in points:
x, y = (pt.x()-0.5)*scale, (pt.y()-0.5)*scale
self.emf.Pie(
int(x), int(y),
int((pt.x()+0.5)*scale), int((pt.y()+0.5)*scale),
int(x), int(y), int(x), int(y) )
def drawPixmap(self, r, pixmap, sr):
"""Draw pixmap to display."""
# convert pixmap to BMP format
bytearr = qt.QByteArray()
buf = qt.QBuffer(bytearr)
buf.open(qt.QIODevice.WriteOnly)
pixmap.save(buf, "BMP")
# chop off bmp header to get DIB
bmp = bytes(buf.data())
dib = bmp[0xe:]
hdrsize, = struct.unpack('<i', bmp[0xe:0x12])
dataindex, = struct.unpack('<i', bmp[0xa:0xe])
datasize, = struct.unpack('<i', bmp[0x22:0x26])
epix = pyemf._EMR._STRETCHDIBITS()
epix.rclBounds_left = int(r.left()*scale)
epix.rclBounds_top = int(r.top()*scale)
epix.rclBounds_right = int(r.right()*scale)
epix.rclBounds_bottom = int(r.bottom()*scale)
epix.xDest = int(r.left()*scale)
epix.yDest = int(r.top()*scale)
epix.cxDest = int(r.width()*scale)
epix.cyDest = int(r.height()*scale)
epix.xSrc = int(sr.left())
epix.ySrc = int(sr.top())
epix.cxSrc = int(sr.width())
epix.cySrc = int(sr.height())
epix.dwRop = 0xcc0020 # SRCCOPY
offset = epix.format.minstructsize + 8
epix.offBmiSrc = offset
epix.cbBmiSrc = hdrsize
epix.offBitsSrc = offset + dataindex - 0xe
epix.cbBitsSrc = datasize
epix.iUsageSrc = 0x0 # DIB_RGB_COLORS
epix.unhandleddata = dib
self.emf._append(epix)
def _createPath(self, path):
"""Convert qt path to emf path"""
self.emf.BeginPath()
count = path.elementCount()
i = 0
#print "Start path"
while i < count:
e = path.elementAt(i)
if e.type == qt.QPainterPath.MoveToElement:
self.emf.MoveTo( int(e.x*scale), int(e.y*scale) )
#print "M", e.x*scale, e.y*scale
elif e.type == qt.QPainterPath.LineToElement:
self.emf.LineTo( int(e.x*scale), int(e.y*scale) )
#print "L", e.x*scale, e.y*scale
elif e.type == qt.QPainterPath.CurveToElement:
e1 = path.elementAt(i+1)
e2 = path.elementAt(i+2)
params = (
( int(e.x*scale), int(e.y*scale) ),
( int(e1.x*scale), int(e1.y*scale) ),
( int(e2.x*scale), int(e2.y*scale) ),
)
self.emf.PolyBezierTo(params)
#print "C", params
i += 2
else:
assert False
i += 1
ef = path.elementAt(0)
el = path.elementAt(count-1)
if ef.x == el.x and ef.y == el.y:
self.emf.CloseFigure()
#print "closing"
self.emf.EndPath()
def drawPath(self, path):
"""Draw a path on the output."""
# print "path"
self._createPath(path)
self.emf.StrokeAndFillPath()
def drawTextItem(self, pt, textitem):
"""Convert text to a path and draw it.
"""
# print "text", pt, textitem.text()
path = qt.QPainterPath()
path.addText(pt, textitem.font(), textitem.text())
fill = self.emf.CreateSolidBrush(self.pencolor)
self.emf.SelectObject(fill)
self._createPath(path)
self.emf.FillPath()
self.emf.SelectObject(self.brush)
self.emf.DeleteObject(fill)
def end(self):
return True
def saveFile(self, filename):
self.emf.save(filename)
def _updatePen(self, pen):
"""Update the pen to the currently selected one."""
# line style
style = {
qt.Qt.NoPen: pyemf.PS_NULL,
qt.Qt.SolidLine: pyemf.PS_SOLID,
qt.Qt.DashLine: pyemf.PS_DASH,
qt.Qt.DotLine: pyemf.PS_DOT,
qt.Qt.DashDotLine: pyemf.PS_DASHDOT,
qt.Qt.DashDotDotLine: pyemf.PS_DASHDOTDOT,
qt.Qt.CustomDashLine: pyemf.PS_USERSTYLE,
}[pen.style()]
if style != pyemf.PS_NULL:
# set cap style
style |= {
qt.Qt.FlatCap: pyemf.PS_ENDCAP_FLAT,
qt.Qt.SquareCap: pyemf.PS_ENDCAP_SQUARE,
qt.Qt.RoundCap: pyemf.PS_ENDCAP_ROUND,
}[pen.capStyle()]
# set join style
style |= {
qt.Qt.MiterJoin: pyemf.PS_JOIN_MITER,
qt.Qt.BevelJoin: pyemf.PS_JOIN_BEVEL,
qt.Qt.RoundJoin: pyemf.PS_JOIN_ROUND,
qt.Qt.SvgMiterJoin: pyemf.PS_JOIN_MITER,
}[pen.joinStyle()]
# use proper widths of lines
style |= pyemf.PS_GEOMETRIC
width = int(pen.widthF()*scale)
qc = pen.color()
color = (qc.red(), qc.green(), qc.blue())
self.pencolor = color
if pen.style() == qt.Qt.CustomDashLine:
# make an extended pen if we need a custom dash pattern
dash = [int(pen.widthF()*scale*f) for f in pen.dashPattern()]
newpen = self.emf._appendHandle( _EXTCREATEPEN(
style, width=width, color=color, styleentries=dash))
else:
# use a standard create pen
newpen = self.emf.CreatePen(style, width, color)
self.emf.SelectObject(newpen)
# delete old pen if it is not a stock object
if not isStockObject(self.pen):
self.emf.DeleteObject(self.pen)
self.pen = newpen
def _updateBrush(self, brush):
"""Update to selected brush."""
style = brush.style()
qc = brush.color()
color = (qc.red(), qc.green(), qc.blue())
# print "brush", color
if style == qt.Qt.SolidPattern:
newbrush = self.emf.CreateSolidBrush(color)
elif style == qt.Qt.NoBrush:
newbrush = self.emf.GetStockObject(pyemf.NULL_BRUSH)
else:
try:
hatch = {
qt.Qt.HorPattern: pyemf.HS_HORIZONTAL,
qt.Qt.VerPattern: pyemf.HS_VERTICAL,
qt.Qt.CrossPattern: pyemf.HS_CROSS,
qt.Qt.BDiagPattern: pyemf.HS_BDIAGONAL,
qt.Qt.FDiagPattern: pyemf.HS_FDIAGONAL,
qt.Qt.DiagCrossPattern: pyemf.HS_DIAGCROSS
}[brush.style()]
except KeyError:
newbrush = self.emf.CreateSolidBrush(color)
else:
newbrush = self.emf.CreateHatchBrush(hatch, color)
self.emf.SelectObject(newbrush)
if not isStockObject(self.brush):
self.emf.DeleteObject(self.brush)
self.brush = newbrush
def _updateClipPath(self, path, operation):
"""Update clipping path."""
# print "clip"
if operation != qt.Qt.NoClip:
self._createPath(path)
clipmode = {
qt.Qt.ReplaceClip: pyemf.RGN_COPY,
qt.Qt.IntersectClip: pyemf.RGN_AND,
}[operation]
else:
            # is this the only way to get rid of clipping?
self.emf.BeginPath()
self.emf.MoveTo(0,0)
w = int(self.width*self.dpi*scale)
h = int(self.height*self.dpi*scale)
self.emf.LineTo(w, 0)
self.emf.LineTo(w, h)
self.emf.LineTo(0, h)
self.emf.CloseFigure()
self.emf.EndPath()
clipmode = pyemf.RGN_COPY
self.emf.SelectClipPath(mode=clipmode)
def _updateTransform(self, m):
"""Update transformation."""
self.emf.SetWorldTransform(
m.m11(), m.m12(),
m.m21(), m.m22(),
m.dx()*scale, m.dy()*scale)
def updateState(self, state):
"""Examine what has changed in state and call apropriate function."""
ss = state.state()
if ss & qt.QPaintEngine.DirtyPen:
self._updatePen(state.pen())
if ss & qt.QPaintEngine.DirtyBrush:
self._updateBrush(state.brush())
if ss & qt.QPaintEngine.DirtyTransform:
self._updateTransform(state.transform())
if ss & qt.QPaintEngine.DirtyClipPath:
self._updateClipPath(state.clipPath(), state.clipOperation())
if ss & qt.QPaintEngine.DirtyClipRegion:
path = qt.QPainterPath()
path.addRegion(state.clipRegion())
self._updateClipPath(path, state.clipOperation())
def type(self):
return qt.QPaintEngine.PostScript
class EMFPaintDevice(qt.QPaintDevice):
"""Paint device for EMF paint engine."""
def __init__(self, width_in, height_in, dpi=75):
qt.QPaintDevice.__init__(self)
self.engine = EMFPaintEngine(width_in, height_in, dpi=dpi)
def paintEngine(self):
return self.engine
def metric(self, m):
"""Return the metrics of the painter."""
if m == qt.QPaintDevice.PdmWidth:
return int(self.engine.width * self.engine.dpi)
elif m == qt.QPaintDevice.PdmHeight:
return int(self.engine.height * self.engine.dpi)
elif m == qt.QPaintDevice.PdmWidthMM:
return int(self.engine.width * inch_mm)
elif m == qt.QPaintDevice.PdmHeightMM:
return int(self.engine.height * inch_mm)
elif m == qt.QPaintDevice.PdmNumColors:
return 2147483647
elif m == qt.QPaintDevice.PdmDepth:
return 24
elif m == qt.QPaintDevice.PdmDpiX:
return int(self.engine.dpi)
elif m == qt.QPaintDevice.PdmDpiY:
return int(self.engine.dpi)
elif m == qt.QPaintDevice.PdmPhysicalDpiX:
return int(self.engine.dpi)
elif m == qt.QPaintDevice.PdmPhysicalDpiY:
return int(self.engine.dpi)
elif m == qt.QPaintDevice.PdmDevicePixelRatio:
return 1
# Qt >= 5.6
elif m == getattr(qt.QPaintDevice, 'PdmDevicePixelRatioScaled', -1):
return 1
else:
# fall back
return qt.QPaintDevice.metric(self, m)
| veusz/veusz | veusz/document/emf_export.py | Python | gpl-2.0 | 14,778 | 0.002977 |
"""
Classes for querying the information in a test coverage report.
"""
from __future__ import unicode_literals
from abc import ABCMeta, abstractmethod
from collections import namedtuple, defaultdict
import re
import subprocess
import sys
import six
from diff_cover.git_path import GitPathTool
Violation = namedtuple('Violation', 'line, message')
class BaseViolationReporter(object):
"""
Query information from a coverage report.
"""
__metaclass__ = ABCMeta
def __init__(self, name):
"""
Provide a name for the coverage report, which will be included
in the generated diff report.
"""
self._name = name
@abstractmethod
def violations(self, src_path):
"""
Return a list of Violations recorded in `src_path`.
"""
pass
def measured_lines(self, src_path):
"""
Return a list of the lines in src_path that were measured
by this reporter.
Some reporters will always consider all lines in the file "measured".
As an optimization, such violation reporters
can return `None` to indicate that all lines are measured.
The diff reporter generator will then use all changed lines
provided by the diff.
"""
return None
def name(self):
"""
Retrieve the name of the report, which may be
included in the generated diff coverage report.
For example, `name()` could return the path to the coverage
report file or the type of reporter.
"""
return self._name
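# A concrete reporter only needs to implement violations(); an illustrative
# sketch (not part of diff-cover itself):
#
#   class TodoViolationReporter(BaseViolationReporter):
#       def violations(self, src_path):
#           with open(src_path) as src:
#               return [Violation(num, 'TODO left in code')
#                       for num, line in enumerate(src, start=1)
#                       if 'TODO' in line]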
class XmlCoverageReporter(BaseViolationReporter):
"""
Query information from a Cobertura XML coverage report.
"""
def __init__(self, xml_roots):
"""
Load the Cobertura XML coverage report represented
by the lxml.etree with root element `xml_root`.
"""
super(XmlCoverageReporter, self).__init__("XML")
self._xml_roots = xml_roots
# Create a dict to cache violations dict results
# Keys are source file paths, values are output of `violations()`
self._info_cache = defaultdict(list)
def _get_src_path_line_nodes(self, xml_document, src_path):
"""
Returns a list of nodes containing line information for `src_path`
in `xml_document`.
If file is not present in `xml_document`, return None
"""
# Remove git_root from src_path for searching the correct filename
# If cwd is `/home/user/work/diff-cover/diff_cover`
# and src_path is `diff_cover/violations_reporter.py`
# search for `violations_reporter.py`
src_rel_path = GitPathTool.relative_path(src_path)
# If cwd is `/home/user/work/diff-cover/diff_cover`
# and src_path is `other_package/some_file.py`
# search for `/home/user/work/diff-cover/other_package/some_file.py`
src_abs_path = GitPathTool.absolute_path(src_path)
xpath_template = ".//class[@filename='{0}']/lines/line"
xpath = None
src_node_xpath = ".//class[@filename='{0}']".format(src_rel_path)
if xml_document.find(src_node_xpath) is not None:
xpath = xpath_template.format(src_rel_path)
src_node_xpath = ".//class[@filename='{0}']".format(src_abs_path)
if xml_document.find(src_node_xpath) is not None:
xpath = xpath_template.format(src_abs_path)
if xpath is None:
return None
return xml_document.findall(xpath)
def _cache_file(self, src_path):
"""
Load the data from `self._xml_roots`
for `src_path`, if it hasn't been already.
"""
# If we have not yet loaded this source file
if src_path not in self._info_cache:
# We only want to keep violations that show up in each xml source.
# Thus, each time, we take the intersection. However, to do this
# we must treat the first time as a special case and just add all
# the violations from the first xml report.
violations = None
# A line is measured if it is measured in any of the reports, so
# we take set union each time and can just start with the empty set
measured = set()
# Loop through the files that contain the xml roots
for xml_document in self._xml_roots:
line_nodes = self._get_src_path_line_nodes(xml_document,
src_path)
if line_nodes is None:
continue
# First case, need to define violations initially
if violations is None:
violations = set(
Violation(int(line.get('number')), None)
for line in line_nodes
if int(line.get('hits', 0)) == 0)
# If we already have a violations set,
# take the intersection of the new
# violations set and its old self
else:
violations = violations & set(
Violation(int(line.get('number')), None)
for line in line_nodes
if int(line.get('hits', 0)) == 0
)
# Measured is the union of itself and the new measured
measured = measured | set(
int(line.get('number')) for line in line_nodes
)
# If we don't have any information about the source file,
# don't report any violations
if violations is None:
violations = set()
self._info_cache[src_path] = (violations, measured)
def violations(self, src_path):
"""
See base class comments.
"""
self._cache_file(src_path)
# Yield all lines not covered
return self._info_cache[src_path][0]
def measured_lines(self, src_path):
"""
See base class docstring.
"""
self._cache_file(src_path)
return self._info_cache[src_path][1]
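# --- Usage sketch (illustrative, not part of diff-cover itself) ---
# XmlCoverageReporter expects already-parsed XML roots (e.g. from lxml.etree or
# the stdlib ElementTree) and GitPathTool to have been initialised with the
# repository root; the set_cwd() call below is an assumption about that setup.
#
#   import xml.etree.ElementTree as ET
#   GitPathTool.set_cwd('/home/user/work/diff-cover/diff_cover')   # assumed setup call
#   roots = [ET.parse('coverage.xml').getroot()]
#   reporter = XmlCoverageReporter(roots)
#   uncovered = reporter.violations('diff_cover/violations_reporter.py')
#   measured = reporter.measured_lines('diff_cover/violations_reporter.py')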
class BaseQualityReporter(BaseViolationReporter):
"""
Abstract class to report code quality
information, using `COMMAND`
(provided by subclasses).
"""
COMMAND = ''
OPTIONS = []
# Encoding of the stdout from the command
# This is application-dependent
STDOUT_ENCODING = 'utf-8'
# A list of filetypes to run on.
EXTENSIONS = []
def __init__(self, name, input_reports, user_options=None):
"""
Create a new quality reporter.
`name` is an identifier for the reporter
(usually the name of the tool used to generate
the report).
        `input_reports` is a list of
file-like objects representing pre-generated
violation reports. The list can be empty.
If these are provided, the reporter will
use the pre-generated reports instead of invoking
the tool directly.
'user_options' is a string of options passed in.
This string contains options that are passed forward
to the reporter being used
"""
super(BaseQualityReporter, self).__init__(name)
self._info_cache = defaultdict(list)
self.user_options = user_options
# If we've been given input report files, use those
# to get the source information
if len(input_reports) > 0:
self.use_tool = False
self._load_reports(input_reports)
else:
self.use_tool = True
def violations(self, src_path):
"""
See base class comments.
"""
# If we've been given pre-generated pylint/pep8 reports,
# then we've already loaded everything we need into the cache.
# Otherwise, call pylint/pep8 ourselves
if self.use_tool:
if not any(src_path.endswith(ext) for ext in self.EXTENSIONS):
return []
if src_path not in self._info_cache:
output = self._run_command(src_path)
violations_dict = self._parse_output(output, src_path)
self._update_cache(violations_dict)
# Return the cached violation info
return self._info_cache[src_path]
def _load_reports(self, report_files):
"""
Load pre-generated pep8/pylint reports into
the cache.
`report_files` is a list of open file-like objects.
"""
for file_handle in report_files:
# Convert to unicode, replacing unreadable chars
contents = file_handle.read().decode(self.STDOUT_ENCODING,
'replace')
violations_dict = self._parse_output(contents)
self._update_cache(violations_dict)
def _update_cache(self, violations_dict):
"""
Append violations in `violations_dict` to the cache.
`violations_dict` must have the form:
{
SRC_PATH: [Violation, ]
}
"""
for src_path, violations in six.iteritems(violations_dict):
self._info_cache[src_path].extend(violations)
def _run_command(self, src_path):
"""
Run the quality command and return its output as a unicode string.
"""
# Encode the path using the filesystem encoding, determined at runtime
encoding = sys.getfilesystemencoding()
user_options = [self.user_options] if self.user_options is not None else []
command = [self.COMMAND] + self.OPTIONS + user_options + [src_path.encode(encoding)]
try:
process = subprocess.Popen(
command, stdout=subprocess.PIPE, stderr=subprocess.PIPE
)
stdout, stderr = process.communicate()
except OSError:
sys.stderr.write(" ".join([cmd.decode(encoding)
if isinstance(cmd, bytes) else cmd
for cmd in command]))
raise
if stderr:
raise QualityReporterError(stderr.decode(encoding))
return stdout.strip().decode(self.STDOUT_ENCODING, 'replace')
@abstractmethod
def _parse_output(self, output, src_path=None):
"""
Parse the output of this reporter
command into a dict of the form:
{
SRC_PATH: [Violation, ]
}
where `SRC_PATH` is the path to the source file
containing the violations, and the value is
a list of violations.
If `src_path` is provided, return information
just for that source.
"""
pass
class Pep8QualityReporter(BaseQualityReporter):
"""
Report PEP8 violations.
"""
COMMAND = 'pep8'
EXTENSIONS = ['py']
VIOLATION_REGEX = re.compile(r'^([^:]+):(\d+).*([EW]\d{3}.*)$')
def _parse_output(self, output, src_path=None):
"""
See base class docstring.
"""
violations_dict = defaultdict(list)
for line in output.split('\n'):
match = self.VIOLATION_REGEX.match(line)
# Ignore any line that isn't a violation
if match is not None:
pep8_src, line_number, message = match.groups()
# If we're looking for a particular source,
# filter out all other sources
if src_path is None or src_path == pep8_src:
violation = Violation(int(line_number), message)
violations_dict[pep8_src].append(violation)
return violations_dict
class PyflakesQualityReporter(BaseQualityReporter):
"""
Report Pyflakes violations.
"""
COMMAND = 'pyflakes'
EXTENSIONS = ['py']
# Match lines of the form:
# path/to/file.py:328: undefined name '_thing'
# path/to/file.py:418: 'random' imported but unused
VIOLATION_REGEX = re.compile(r'^([^:]+):(\d+): (.*)$')
def _parse_output(self, output, src_path=None):
"""
See base class docstring.
"""
violations_dict = defaultdict(list)
for line in output.split('\n'):
match = self.VIOLATION_REGEX.match(line)
# Ignore any line that isn't a violation
if match is not None:
pyflakes_src, line_number, message = match.groups()
# If we're looking for a particular source,
# filter out all other sources
if src_path is None or src_path == pyflakes_src:
violation = Violation(int(line_number), message)
violations_dict[pyflakes_src].append(violation)
return violations_dict
class PylintQualityReporter(BaseQualityReporter):
"""
Report Pylint violations.
"""
COMMAND = 'pylint'
MODERN_OPTIONS = ['--msg-template="{path}:{line}: [{msg_id}({symbol}), {obj}] {msg}"']
LEGACY_OPTIONS = ['-f', 'parseable', '--reports=no', '--include-ids=y']
OPTIONS = MODERN_OPTIONS
EXTENSIONS = ['py']
# Match lines of the form:
# path/to/file.py:123: [C0111] Missing docstring
# path/to/file.py:456: [C0111, Foo.bar] Missing docstring
VIOLATION_REGEX = re.compile(r'^([^:]+):(\d+): \[(\w+),? ?([^\]]*)] (.*)$')
def _run_command(self, src_path):
try:
return super(PylintQualityReporter, self)._run_command(src_path)
except QualityReporterError as report_error:
# Support earlier pylint version (< 1)
if "no such option: --msg-template" in report_error.message:
self.OPTIONS = self.LEGACY_OPTIONS
return super(PylintQualityReporter, self)._run_command(src_path)
else:
raise
def _parse_output(self, output, src_path=None):
"""
See base class docstring.
"""
violations_dict = defaultdict(list)
for line in output.split('\n'):
match = self.VIOLATION_REGEX.match(line)
# Ignore any line that isn't matched
# (for example, snippets from the source code)
if match is not None:
pylint_src_path, line_number, pylint_code, function_name, message = match.groups()
# If we're looking for a particular source file,
# ignore any other source files.
if src_path is None or src_path == pylint_src_path:
if function_name:
error_str = u"{0}: {1}: {2}".format(pylint_code, function_name, message)
else:
error_str = u"{0}: {1}".format(pylint_code, message)
violation = Violation(int(line_number), error_str)
violations_dict[pylint_src_path].append(violation)
return violations_dict
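# --- Parsing sketch (illustrative) ---
# Given pylint output in the legacy parseable format shown in the comments
# above, VIOLATION_REGEX turns
#   path/to/file.py:123: [C0111] Missing docstring
# into Violation(123, 'C0111: Missing docstring'), and
#   path/to/file.py:456: [C0111, Foo.bar] Missing docstring
# into Violation(456, 'C0111: Foo.bar: Missing docstring'), keyed by the
# source path in the returned dict.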
class QualityReporterError(Exception):
"""
A quality reporter command produced an error.
"""
def __init__(self, message):
self.message = message
| hugovk/diff-cover | diff_cover/violations_reporter.py | Python | agpl-3.0 | 15,153 | 0.000396 |
#!/usr/bin/env python
# vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:fdm=marker:ai
from __future__ import (unicode_literals, division, absolute_import,
print_function)
__license__ = 'GPL v3'
__copyright__ = '2012, Kovid Goyal <kovid at kovidgoyal.net>'
__docformat__ = 'restructuredtext en'
import traceback, re
from calibre.constants import iswindows
class DeviceDefaults(object):
def __init__(self):
self.rules = (
# Amazon devices
({'vendor':0x1949}, {
'format_map': ['azw3', 'mobi', 'azw',
'azw1', 'azw4', 'pdf'],
'send_to': ['documents', 'books', 'kindle'],
}
),
)
def __call__(self, device, driver):
if iswindows:
vid = pid = 0xffff
m = re.search(r'(?i)vid_([0-9a-fA-F]+)&pid_([0-9a-fA-F]+)', device)
if m is not None:
try:
vid, pid = int(m.group(1), 16), int(m.group(2), 16)
except:
traceback.print_exc()
else:
vid, pid = device.vendor_id, device.product_id
for rule in self.rules:
tests = rule[0]
matches = True
for k, v in tests.iteritems():
if k == 'vendor' and v != vid:
matches = False
break
if k == 'product' and v != pid:
matches = False
break
if matches:
return rule[1]
return {}
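# --- Usage sketch (illustrative only) ---
# On Windows the matcher is fed a PnP device string and the vendor/product ids
# are pulled out with the regex above; elsewhere it expects an object with
# vendor_id/product_id attributes. A Kindle (vendor id 0x1949) picks up the
# Amazon format_map/send_to defaults, anything else gets {}.
#
#   defaults = DeviceDefaults()
#   rules = defaults(r'usb#vid_1949&pid_0004#...', driver=None)   # device string is made up
#   # rules['format_map'] -> ['azw3', 'mobi', 'azw', 'azw1', 'azw4', 'pdf']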
| yeyanchao/calibre | src/calibre/devices/mtp/defaults.py | Python | gpl-3.0 | 1,628 | 0.006143 |
from electrum.i18n import _
fullname = 'Joinmarket coinjoins'
description = _(" ".join(["Ability to send payments as coinjoins with counterparties.",
"Paying minimal fees, you can immediately send your coins",
"with much better privacy. See https://github.com/joinmarket-org/joinmarket",
"for more details."]))
requires = [('jmclient','github.com/Joinmarket-Org/joinmarket-clientserver'),
('twisted', 'twistedmatrix.com')]
#TODO: setting it here results in Joinmarket never loading.
#It seems that Electrum will not load a plugin on startup if
#it has any setting here.
#requires_wallet_type = ['standard']
available_for = ['qt']
| AdamISZ/electrum-joinmarket-plugin | joinmarket/__init__.py | Python | gpl-3.0 | 721 | 0.011096 |
# Copyright 2014, 2015 SAP SE
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import struct
import hashlib
import hmac
from io import BytesIO
###
from pyhdb.protocol.segments import RequestSegment
from pyhdb.protocol.constants import message_types
from pyhdb.protocol.parts import Authentication, Fields
from pyhdb.protocol.message import RequestMessage
from pyhdb.compat import iter_range
CLIENT_PROOF_SIZE = 32
CLIENT_KEY_SIZE = 64
class AuthManager(object):
def __init__(self, connection, user, password):
self.connection = connection
self.user = user
self.password = password
self.method = b"SCRAMSHA256"
self.client_key = os.urandom(CLIENT_KEY_SIZE)
self.client_proof = None
def perform_handshake(self):
request = RequestMessage.new(
self.connection,
RequestSegment(
message_types.AUTHENTICATE,
Authentication(self.user, {self.method: self.client_key})
)
)
response = self.connection.send_request(request)
auth_part = response.segments[0].parts[0]
if self.method not in auth_part.methods:
raise Exception(
"Only unknown authentication methods available: %s" %
b",".join(auth_part.methods.keys())
)
salt, server_key = Fields.unpack_data(
BytesIO(auth_part.methods[self.method])
)
self.client_proof = self.calculate_client_proof([salt], server_key)
return Authentication(self.user, {'SCRAMSHA256': self.client_proof})
def calculate_client_proof(self, salts, server_key):
proof = b"\x00"
proof += struct.pack('b', len(salts))
for salt in salts:
proof += struct.pack('b', CLIENT_PROOF_SIZE)
proof += self.scramble_salt(salt, server_key)
return proof
def scramble_salt(self, salt, server_key):
msg = salt + server_key + self.client_key
key = hashlib.sha256(
hmac.new(
self.password.encode('cesu-8'), salt, hashlib.sha256
).digest()
).digest()
key_hash = hashlib.sha256(key).digest()
sig = hmac.new(
key_hash, msg, hashlib.sha256
).digest()
return self._xor(sig, key)
@staticmethod
def _xor(a, b):
a = bytearray(a)
b = bytearray(b)
result = bytearray(len(a))
for i in iter_range(len(a)):
result[i] += a[i] ^ b[i]
return bytes(result)
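# --- Handshake sketch (illustrative, simplified) ---
# The connection object drives this class during connect(): perform_handshake()
# sends the 64-byte random client key, receives (salt, server key) back from
# the server, and answers with the SCRAM-SHA256 client proof computed by
# scramble_salt(). The names below are assumptions about how pyhdb wires it up.
#
#   auth = AuthManager(connection, user='SYSTEM', password='secret')
#   final_auth_part = auth.perform_handshake()     # returns an Authentication part
#   # the connection then sends final_auth_part in its CONNECT request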
| ralhei/PyHDB | pyhdb/auth.py | Python | apache-2.0 | 3,055 | 0 |
# Copyright 2015 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import os
from oslo_concurrency import processutils as putils
from oslo_config import cfg
from taskflow.patterns import linear_flow as lf
from taskflow import task
from glance.i18n import _, _LW
LOG = logging.getLogger(__name__)
convert_task_opts = [
cfg.StrOpt('conversion_format',
choices=('qcow2', 'raw', 'vmdk'),
help=_("The format to which images will be automatically "
"converted. When using the RBD backend, this should be "
"set to 'raw'")),
]
CONF = cfg.CONF
# NOTE(flaper87): Registering under the taskflow_executor section
# for now. It seems a waste to have a whole section dedicated to a
# single task with a single option.
CONF.register_opts(convert_task_opts, group='taskflow_executor')
class _Convert(task.Task):
conversion_missing_warned = False
def __init__(self, task_id, task_type, image_repo):
self.task_id = task_id
self.task_type = task_type
self.image_repo = image_repo
super(_Convert, self).__init__(
name='%s-Convert-%s' % (task_type, task_id))
def execute(self, image_id, file_path):
# NOTE(flaper87): A format must be explicitly
# specified. There's no "sane" default for this
# because the dest format may work differently depending
# on the environment OpenStack is running in.
conversion_format = CONF.taskflow_executor.conversion_format
if conversion_format is None:
if not _Convert.conversion_missing_warned:
msg = (_LW('The conversion format is None, please add a value '
'for it in the config file for this task to '
'work: %s') %
self.task_id)
LOG.warn(msg)
_Convert.conversion_missing_warned = True
return
# TODO(flaper87): Check whether the image is in the desired
# format already. Probably using `qemu-img` just like the
# `Introspection` task.
dest_path = os.path.join(CONF.task.work_dir, "%s.converted" % image_id)
stdout, stderr = putils.trycmd('qemu-img', 'convert', '-O',
conversion_format, file_path, dest_path,
log_errors=putils.LOG_ALL_ERRORS)
if stderr:
raise RuntimeError(stderr)
os.rename(dest_path, file_path.split("file://")[-1])
return file_path
def revert(self, image_id, result=None, **kwargs):
# NOTE(flaper87): If result is None, it probably
# means this task failed. Otherwise, we would have
# a result from its execution.
if result is None:
return
fs_path = result.split("file://")[-1]
if os.path.exists(fs_path):
os.remove(fs_path)
def get_flow(**kwargs):
"""Return task flow for converting images to different formats.
:param task_id: Task ID.
:param task_type: Type of the task.
:param image_repo: Image repository used.
"""
task_id = kwargs.get('task_id')
task_type = kwargs.get('task_type')
image_repo = kwargs.get('image_repo')
return lf.Flow(task_type).add(
_Convert(task_id, task_type, image_repo),
)
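# --- Flow usage sketch (illustrative) ---
# The flow is normally wired into glance's taskflow executor; conceptually the
# _Convert task boils down to the qemu-img call below (paths illustrative):
#
#   flow = get_flow(task_id='t-1', task_type='import', image_repo=None)
#   # _Convert.execute(image_id, file_path) effectively runs
#   #   qemu-img convert -O <conversion_format> <file_path> <work_dir>/<image_id>.converted
#   # and then renames the converted file over the original.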
| dims/glance | glance/async/flows/convert.py | Python | apache-2.0 | 3,951 | 0 |
order = ['','K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y']
class Sizes(object):
_BASE = 1000.
def toSize(self, value, input='', output='K'):
"""
        Convert value from the `input` unit prefix to the `output` prefix
"""
input = order.index(input)
output = order.index(output)
factor = input - output
return value * (self._BASE ** factor)
def converToBestUnit(self, value, input=''):
        divider = len(str(int(self._BASE))) - 1
        output = (len(str(value)) - 2) / divider
output += order.index(input)
        if output >= len(order):
output = len(order) - 1
elif output < 0:
output = 0
output = order[output]
return self.toSize(value, input, output), output
class Bytes(Sizes):
_BASE = 1024.
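# --- Usage sketch (illustrative; this is Python 2-era code) ---
#   s = Sizes()
#   s.toSize(1500, input='', output='K')    # -> 1.5    (decimal, base 1000)
#   b = Bytes()
#   b.toSize(4096, input='', output='K')    # -> 4.0    (binary, base 1024)
#   b.toSize(3, input='G', output='M')      # -> 3072.0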
| Jumpscale/jumpscale6_core | lib/JumpScale/baselib/units/units.py | Python | bsd-2-clause | 787 | 0.005083 |
import os
import json
import datetime
from django.http import HttpResponse
from django.views.generic import TemplateView
from django.utils.safestring import mark_safe
from django.utils import timezone
from django.template.loader import render_to_string
from person.models import Person
from document.models import Dossier
from document.models import Submitter
from document.models import Kamervraag
from document.models import Kamerstuk
from document.views import TimelineKamervraagItem
from document.views import TimelineKamerstukItem
from government.models import Government
from website import settings
from stats.views import get_example_plot_html
class HomeView(TemplateView):
template_name = "website/index.html"
context_object_name = "homepage"
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
return context
class ContactView(TemplateView):
template_name = "website/contact.html"
context_object_name = "contact"
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context['contact_email'] = settings.CONTACT_EMAIL
return context
def create_timeline_date(date):
return {
'year': date.year,
'month': date.month,
'day': date.day
}
def get_dossier_timeline_json(request):
governments = Government.objects.all()
eras = []
for government in governments:
if government.date_dissolved:
end_date = government.date_dissolved
else:
end_date = timezone.now()
text = {
'headline': government.name,
'text': government.name
}
era = {
'start_date': create_timeline_date(government.date_formed),
'end_date': create_timeline_date(end_date),
'text': text
}
eras.append(era)
events = []
if 'dossier_pk' in request.GET:
dossier = Dossier.objects.get(id=request.GET['dossier_pk'])
for kamerstuk in dossier.kamerstukken:
text = {
'headline': kamerstuk.type_short,
'text': kamerstuk.type_long
}
event = {
'start_date': create_timeline_date(kamerstuk.document.date_published),
'text': text
}
events.append(event)
timeline_info = {
'events': events,
'eras': eras
}
timeline_json = json.dumps(timeline_info, sort_keys=True, indent=4)
# print(timeline_json)
return HttpResponse(timeline_json, content_type='application/json')
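# Example of the TimelineJS-style payload produced above (values illustrative):
# {
#     "eras": [{"start_date": {"year": 2012, "month": 11, "day": 5},
#               "end_date": {"year": 2017, "month": 10, "day": 26},
#               "text": {"headline": "Rutte II", "text": "Rutte II"}}],
#     "events": [{"start_date": {"year": 2015, "month": 3, "day": 2},
#                 "text": {"headline": "Motie", "text": "Motie ..."}}]
# }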
class PlotExampleView(TemplateView):
template_name = "website/plot_examples.html"
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context['plot_html'] = mark_safe(get_example_plot_html())
return context
class DatabaseDumpsView(TemplateView):
template_name = "website/database_dumps.html"
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
backup_files = self.get_files(settings.DBBACKUP_STORAGE_OPTIONS['location'])
context['backup_files'] = sorted(backup_files, key=lambda backup: backup['datetime_created'], reverse=True)
return context
@staticmethod
def get_files(path):
files = []
for (dirpath, dirnames, filenames) in os.walk(path):
for file in filenames:
if '.gitignore' in file or 'readme.txt' in file:
continue
filepath = os.path.join(dirpath, file)
size = os.path.getsize(filepath)
datetime_created = os.path.getctime(filepath)
files.append({
'file': file,
'size': int(size)/1024/1024,
'datetime_created': datetime.datetime.fromtimestamp(datetime_created)
})
return files
class CSVExportsView(TemplateView):
template_name = "website/csv_exports.html"
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
files = DatabaseDumpsView.get_files(settings.CSV_EXPORT_PATH)
context['files'] = sorted(files, key=lambda file: file['datetime_created'], reverse=True)
return context
class PersonTimelineView(TemplateView):
template_name = "website/items/person_timeline.html"
@staticmethod
def get_timeline_items(person, year=None):
if year:
year = int(year)
submitters = Submitter.objects.filter(person=person, document__date_published__range=[datetime.date(year=year, day=1, month=1), datetime.date(year=year, day=31, month=12)])
else:
submitters = Submitter.objects.filter(person=person)
submitter_ids = list(submitters.values_list('id', flat=True))
timeline_items = []
kamervragen = Kamervraag.objects.filter(document__submitter__in=submitter_ids).select_related('document', 'kamerantwoord')
for kamervraag in kamervragen:
timeline_items.append(TimelineKamervraagItem(kamervraag))
kamerstukken = Kamerstuk.objects.filter(document__submitter__in=submitter_ids).select_related('document')
for kamerstuk in kamerstukken:
timeline_items.append(TimelineKamerstukItem(kamerstuk))
timeline_items = sorted(timeline_items, key=lambda items: items.date, reverse=True)
return timeline_items
def get_context_data(self, slug, year, **kwargs):
year = int(year)
context = super().get_context_data(**kwargs)
person = Person.objects.get(slug=slug)
timeline_items = []
has_next = True
while len(timeline_items) == 0:
timeline_items = PersonTimelineView.get_timeline_items(person, year)
if timeline_items:
break
if year < 1996:
has_next = False
break
year -= 1
if year == datetime.date.today().year:
next_year = None
else:
next_year = year + 1
context['timeline_items'] = timeline_items
context['person'] = person
context['is_person_timeline'] = True
context['previous_year'] = year - 1
context['next_year'] = next_year
context['has_next'] = has_next
return context
def get_person_timeline_html(request):
person = Person.objects.get(id=request.GET['person_id'])
year = int(request.GET['year'])
timeline_items = PersonTimelineView.get_timeline_items(person, year)
if year == datetime.date.today().year:
next_year = None
else:
next_year = year + 1
html = render_to_string('website/items/person_timeline.html', {
'timeline_items': timeline_items,
'person': person,
'is_person_timeline': True,
'previous_year': year-1,
'year': next_year,
'has_next': True
})
response = json.dumps({'html': html})
return HttpResponse(response, content_type='application/json')
| openkamer/openkamer | website/views.py | Python | mit | 7,095 | 0.001409 |
# gunicorn configuration
bind = '0.0.0.0:8000'
workers = 3
# These log settings assume that gunicorn log config will be included in the django base.py logging configuration
accesslog = '-'
errorlog = '-'
access_log_format = '{"request": "%(r)s", "http_status_code": "%(s)s", "http_request_url": "%(U)s", "http_query_string": "%(q)s", "http_verb": "%(m)s", "http_version": "%(H)s", "http_referer": "%(f)s", "x_forwarded_for": "%({x-forwarded-for}i)s", "remote_address": "%(h)s", "request_usec": "%(D)s", "request_sec": "%(L)s"}'
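# Example invocation (module path assumed for illustration):
#   gunicorn -c hdt_monitor/settings/gunicorn_config.py hdt_monitor.wsgi:application
# Each access-log line is then a single JSON object built from the template
# above, e.g. {"request": "GET / HTTP/1.1", "http_status_code": "200", ...}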
| penzance/hdt_monitor | hdt_monitor/settings/gunicorn_config.py | Python | mit | 530 | 0.003774 |
import spidev
import time
import random
# All set commands set the state only, and so require a write command to be displayed.
class LPD6803_Chain(object):
def __init__(self, ics_in_chain=25, spi_address_hardware=0, spi_address_output=0):
# default to 25 ics in the chain, so it works with no params with the Adafruit RGB LED Pixels - http://www.adafruit.com/products/738
self.number_of_ics = ics_in_chain
self.spi = spidev.SpiDev()
self.spi.open(spi_address_hardware, spi_address_output)
self.ics = {}
for ic in range(self.number_of_ics):
self.ics[ic] = { 'R' : 0 , 'G' : 0, 'B' : 0}
#Write out the current zero'd state to the chain.
self.write()
def two_byte_pack(self, rgb_dict):
# take in our RGB values in the form of 1 int per component, and transform to 2 bytes in the structure of ...
# 1<5 bits Red><5 bits Green><5 Bits Blue>
return_bytes = bytearray(2)
# creating 16bits to allow us to bitshift values into place.
temp_16bit = 0b0000000000000000
# Set our most significant bit to on.
temp_16bit += 32768
# take 5 most significant bits from each component, by shifting 3 pos to the right. Then shift into their appropriate place.
temp_16bit |= (rgb_dict['R'] >>3) << 10 # Red into bits 11-15
temp_16bit |= (rgb_dict['G'] >>3) << 5 # Green into bits 6-10
temp_16bit |= (rgb_dict['B'] >>3) # Blue into bits 1-5
        # return_bytes is initialised as zeros; mask the bits we're interested in, then bitshift the values to access the bits we need.
return_bytes[0] = (temp_16bit & 0xFF00) >> 8
return_bytes[1] = (temp_16bit & 0x00FF) >> 0
return return_bytes
def write(self):
# Iterate through our IC states, and write out 2 bytes for each, representing 1<5 bits Red><5 bits Green><5 Bits Blue>
# pre charging our output bytes with 32bit start frame.
byte_list = []
# write out our 32bit start frame
self.spi.xfer2([0,0,0,0])
for ic in self.ics:
byte_pair = self.two_byte_pack(self.ics[ic])
byte_list.append(byte_pair[0])
byte_list.append(byte_pair[1])
self.spi.xfer2(byte_list)
# send out 'append pulse', one for each pixel.
append_pulses = []
for ic in self.ics:
append_pulses.append(0)
self.spi.xfer2(append_pulses)
def set(self):
# Alias of write
return self.write()
def print_ics(self):
print self.ics
def set_ic(self, ic_id, rgb_value=[]):
# Check we've been given a valid rgb_value.
if ic_id > self.number_of_ics -1:
raise Exception("Invalid ic_id : ic_id given is greater than the number number of ics in the chain.")
if len(rgb_value) < 3:
raise Exception("Invalid rgb_value : %s , for pin : %s, please pass a list containing three state values eg. [255,255,255]" % (rgb_value, ic_id))
try:
# Null op to ensure we've been given an integer.
int(ic_id)
self.ics[ic_id]= {'R' : rgb_value[0], 'G' : rgb_value[1], 'B' : rgb_value[2]}
except ValueError:
raise Exception("Pin number is not a valid integer.")
def set_rgb(self, rgb_value):
if len(rgb_value) != 3:
raise Exception("Invalid rgb_value: %s, please pass a list containing three state values eg. [255,255,255]" % rgb_value)
for ic in range(self.number_of_ics):
self.ics[ic] = {'R' : rgb_value[0], 'G' : rgb_value[1], 'B' : rgb_value[2]}
def all_on(self):
# !! NOTE !!
# This does not affect pin state
byte_list = []
# write out our 32bit start frame
self.spi.xfer2([0,0,0,0])
for ic in self.ics:
byte_pair = self.two_byte_pack({'R' : 255, 'G' : 255, 'B' : 255})
byte_list.append(byte_pair[0])
byte_list.append(byte_pair[1])
self.spi.xfer2(byte_list)
# send out 'append pulse', one for each pixel.
append_pulses = []
for ic in self.ics:
append_pulses.append(0)
self.spi.xfer2(append_pulses)
def all_off(self):
# !! NOTE !!
# This does not affect pin state
byte_list = []
# write out our 32bit start frame
self.spi.xfer2([0,0,0,0])
for ic in self.ics:
byte_pair = self.two_byte_pack({'R' : 0, 'G' : 0, 'B' : 0})
byte_list.append(byte_pair[0])
byte_list.append(byte_pair[1])
self.spi.xfer2(byte_list)
# send out 'append pulse', one for each pixel.
append_pulses = []
for ic in self.ics:
append_pulses.append(0)
self.spi.xfer2(append_pulses)
def set_white(self):
for ic in range(self.number_of_ics):
self.ics[ic] = {'R' : 255, 'G' : 255, 'B' : 255}
def set_red(self):
for ic in range(self.number_of_ics):
self.ics[ic] = {'R' : 255, 'G' : 0, 'B' : 0}
def set_green(self):
for ic in range(self.number_of_ics):
self.ics[ic] = {'R' : 0, 'G' : 255, 'B' : 0}
def set_blue(self):
for ic in range(self.number_of_ics):
self.ics[ic] = {'R' : 0, 'G' : 0, 'B' : 255}
def set_off(self):
for ic in range(self.number_of_ics):
self.ics[ic] = {'R' : 0, 'G' : 0, 'B' : 0}
def all_random(self):
byte_list = []
# write out our 32bit start frame
self.spi.xfer2([0,0,0,0])
for ic in range(self.number_of_ics):
byte_pair = self.two_byte_pack({'R' : random.randint(0,255), 'G' : random.randint(0,255), 'B' : random.randint(0,255)})
byte_list.append(byte_pair[0])
byte_list.append(byte_pair[1])
self.spi.xfer2(byte_list)
# send out 'append pulse', one for each pixel.
append_pulses = []
for ic in self.ics:
append_pulses.append(0)
self.spi.xfer2(append_pulses)
def cycle(self, delay=0.01):
inc_vals = {}
for ic in range(self.number_of_ics):
inc_vals[ic] = {'R' : True, 'G' : True, 'B' : True}
self.ics[ic]['R'] = random.randint(0,255)
self.ics[ic]['G'] = random.randint(0,255)
self.ics[ic]['B'] = random.randint(0,255)
for i in range(512):
for ic in range(self.number_of_ics):
for val in ['R','G','B']:
if self.ics[ic][val] >= 255:
inc_vals[ic] = False
elif self.ics[ic][val] <= 0:
inc_vals[ic] = True
if inc_vals[ic] == True :
self.ics[ic][val] = self.ics[ic][val] + 5
else :
self.ics[ic][val] = self.ics[ic][val] - 5
self.write()
time.sleep(delay)
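# --- Usage sketch (illustrative; assumes a 25-pixel strand on SPI bus 0) ---
#   chain = LPD6803_Chain(ics_in_chain=25)
#   chain.set_rgb([255, 0, 0])       # stage red on every pixel
#   chain.write()                    # push the staged state out over SPI
#   chain.set_ic(0, [0, 0, 255])     # stage pixel 0 to blue
#   chain.write()
#   chain.cycle(delay=0.02)          # built-in colour-cycling demo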
| rasathus/pigredients | pigredients/ics/lpd6803.py | Python | mit | 7,299 | 0.01932 |
# Copyright 2010, 2011, 2013 (C) Adam Greig
#
# This file is part of habitat.
#
# habitat is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# habitat is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with habitat. If not, see <http://www.gnu.org/licenses/>.
"""
Test the UKHAS protocol parser.
"""
from nose.tools import assert_raises
from copy import deepcopy
# Mocking the LoadableManager is a heck of a lot of effort. Not worth it.
from ...loadable_manager import LoadableManager
from ...parser import CantParse
from ...parser_modules.ukhas_parser import UKHASParser
# Provide the sensor functions to the parser
fake_sensors_config = {
"loadables": [
{"name": "sensors.base", "class": "habitat.sensors.base"},
{"name": "sensors.stdtelem", "class": "habitat.sensors.stdtelem"}
]
}
class FakeParser:
def __init__(self):
self.loadable_manager = LoadableManager(fake_sensors_config)
# A 'standard' config. Other configs can copy this and change parts.
base_config = {
"protocol": "UKHAS",
"checksum": "crc16-ccitt",
"fields": [
{
"name": "sentence_id",
"sensor": "base.ascii_int"
}, {
"name": "time",
"sensor": "stdtelem.time"
}, {
"name": "latitude",
"sensor": "stdtelem.coordinate",
"format": "dd.dddd"
}, {
"name": "longitude",
"sensor": "stdtelem.coordinate",
"format": "dd.dddd"
}, {
"name": "altitude",
"sensor": "base.ascii_int"
}, {
"name": "speed",
"sensor": "base.ascii_float"
}, {
"name": "custom_string",
"sensor": "base.string"
}
]
}
class TestUKHASParser:
"""UKHAS Parser"""
def setup(self):
self.p = UKHASParser(FakeParser())
def output_append_sentence(self, output, sentence):
"""Helper function to put a sentence in a pre-made output dictionary
for easy comparison with parser results."""
output_copy = deepcopy(output)
output_copy["_sentence"] = sentence
return output_copy
def test_pre_parse_rejects_bad_sentences(self):
# Each of these is a totally invalid stub that should just fail. The
# last one might be valid but has non-hexadecimal checksum characters.
bad_sentences = ["", "\n", "bad\n", "$$bad*\n", "bad*CC\n",
"bad*CCCC\n", "bad,bad,bad,bad\n", "$$bad*GH\n",
"$$bad,bad*GHIJ\n", "$$@invalid@,data*CCCC\n",
"$$good,data,\x01\n", "$$missing,newline*CCCC"]
for sentence in bad_sentences:
assert_raises(CantParse, self.p.pre_parse, sentence)
assert_raises(ValueError, self.p.parse, sentence, base_config)
def test_pre_parse_accepts_good_setences(self):
# Each of these short stubs should pass pre-parsing and return a
# callsign
good_sentences = ["$$good,data\n", "$$good,data*CC\n",
"$$good,data*CCCC\n",
"$$good,lots,of,1234,5678.90,data*CCCC\n"]
for sentence in good_sentences:
assert self.p.pre_parse(sentence) == "good"
def test_pre_parse_rejects_bad_callsigns(self):
bad_callsigns = ["abcdef@123", "ABC\xFA", "$$", "almost good"]
callsign_template = "$${0},data*CC\n"
for callsign in bad_callsigns:
sentence = callsign_template.format(callsign)
assert_raises(CantParse, self.p.pre_parse, sentence)
def test_pre_parse_accepts_good_callsigns(self):
good_callsigns = ["good", "g0_0d", "G0--0D", "abcde/f", "ABCDEF",
"012345", "abcDEF123"]
callsign_template = "$${0},data*CC\n"
for callsign in good_callsigns:
sentence = callsign_template.format(callsign)
assert self.p.pre_parse(sentence) == callsign
def test_pre_parse_rejects_bad_checksums(self):
bad_checksums = ["abcg", "123G", "$$", "*ABC", "defG", "123\xFA"]
checksum_template = "$$good,data*{0}\n"
for checksum in bad_checksums:
sentence = checksum_template.format(checksum)
assert_raises(CantParse, self.p.pre_parse, sentence)
def test_pre_parse_accepts_good_checksums(self):
good_checksums = ["abcd", "ABCD", "abCD", "ab12", "AB12", "aB12", "ab",
"aB", "AB", "a0", "A0"]
checksum_template = "$$good,data*{0}\n"
for checksum in good_checksums:
sentence = checksum_template.format(checksum)
assert self.p.pre_parse(sentence) == "good"
def test_parse_rejects_invalid_configs(self):
# A valid sentence for testing the configs with
sentence = "$$habitat,1,00:00:00,0.0,0.0,0,0.0,hab\n"
# A configuration with no checksum
config_checksum_none = deepcopy(base_config)
config_checksum_none["checksum"] = "none"
# A configuration without a protocol key (should fail)
config_no_protocol = deepcopy(config_checksum_none)
del config_no_protocol["protocol"]
assert_raises(ValueError, self.p.parse, sentence, config_no_protocol)
# A configuration without a checksum key (should fail)
config_no_checksum = deepcopy(config_checksum_none)
del config_no_checksum["checksum"]
assert_raises(ValueError, self.p.parse, sentence, config_no_checksum)
# A configuration without a fields dictionary (should fail)
config_no_fields = deepcopy(config_checksum_none)
del config_no_fields["fields"]
assert_raises(ValueError, self.p.parse, sentence, config_no_fields)
# A configuration with an empty fields dictionary (should fail)
config_empty_fields = deepcopy(config_checksum_none)
config_empty_fields["fields"] = {}
assert_raises(ValueError, self.p.parse, sentence, config_empty_fields)
# A configuration where a field has no name (should fail)
config_field_without_name = deepcopy(config_checksum_none)
del config_field_without_name["fields"][0]["name"]
assert_raises(ValueError, self.p.parse, sentence,
config_field_without_name)
# A configuration where a field has no sensor (should fail)
config_field_without_sensor = deepcopy(config_checksum_none)
del config_field_without_sensor["fields"][0]["sensor"]
assert_raises(ValueError, self.p.parse, sentence,
config_field_without_sensor)
# A configuration where a coordinate field lacks a format (should fail)
config_field_without_format = deepcopy(config_checksum_none)
del config_field_without_format["fields"][2]["format"]
assert_raises(ValueError, self.p.parse, sentence,
config_field_without_format)
# A configuration with an invalid checksum (should fail)
config_checksum_invalid = deepcopy(config_checksum_none)
config_checksum_invalid = "invalid"
assert_raises(ValueError, self.p.parse, sentence,
config_checksum_invalid)
# A configuration with an invalid protocol key (should fail)
config_invalid_protocol = deepcopy(config_checksum_none)
config_invalid_protocol["protocol"] = "invalid"
assert_raises(ValueError, self.p.parse, sentence,
config_invalid_protocol)
# A configuration with an invalid field sensor (should fail)
config_field_sensor_invalid = deepcopy(config_checksum_none)
config_field_sensor_invalid["fields"][0]["sensor"] = "invalid"
assert_raises(ValueError, self.p.parse, sentence,
config_field_sensor_invalid)
# A configuration with an invalid coordinate format (should fail)
config_format_invalid = deepcopy(config_checksum_none)
config_format_invalid["fields"][2]["format"] = "invalid"
assert_raises(ValueError, self.p.parse, sentence,
config_format_invalid)
# Configurations with an invalid field names (should fail)
config_name_invalid = deepcopy(config_checksum_none)
config_name_invalid["fields"][0]["name"] = "_notallowed"
assert_raises(ValueError, self.p.parse, sentence, config_name_invalid)
config_name_invalid["fields"][0]["name"] = "payload"
assert_raises(ValueError, self.p.parse, sentence, config_name_invalid)
# A configuration with a duplicate field name (should fail)
config_duplicate_name = deepcopy(config_checksum_none)
config_duplicate_name["fields"][1]["name"] = \
config_duplicate_name["fields"][0]["name"]
assert_raises(ValueError, self.p.parse, sentence, config_duplicate_name)
def test_parse_parses_correct_checksums(self):
# Correct parser output for the checksum test sentences
output_checksum_test = {
"payload": "habitat", "sentence_id": 1,
"time": "00:00:00",
"latitude": 0.0, "longitude": 0.0, "altitude": 0,
"speed": 0.0, "custom_string": "hab"}
# A configuration with no checksum
config_checksum_none = deepcopy(base_config)
config_checksum_none["checksum"] = "none"
sentence_no_checksum = "$$habitat,1,00:00:00,0.0,0.0,0,0.0,hab\n"
assert (
self.p.parse(sentence_no_checksum, config_checksum_none)
== self.output_append_sentence(output_checksum_test,
sentence_no_checksum))
# A configuration with a CRC16-CCITT checksum
config_checksum_crc16_ccitt = deepcopy(base_config)
config_checksum_crc16_ccitt["checksum"] = "crc16-ccitt"
sentence_crc16_ccitt = "$$habitat,1,00:00:00,0.0,0.0,0,0.0,hab*EE5E\n"
assert (
self.p.parse(sentence_crc16_ccitt, config_checksum_crc16_ccitt)
== self.output_append_sentence(output_checksum_test,
sentence_crc16_ccitt))
# A configuration with an XOR checksum
config_checksum_xor = deepcopy(base_config)
config_checksum_xor["checksum"] = "xor"
sentence_xor = "$$habitat,1,00:00:00,0.0,0.0,0,0.0,hab*0b\n"
assert (
self.p.parse(sentence_xor, config_checksum_xor)
== self.output_append_sentence(output_checksum_test,
sentence_xor))
# A configuration with a Fletcher-16 checksum
config_checksum_fletcher_16 = deepcopy(base_config)
config_checksum_fletcher_16["checksum"] = "fletcher-16"
sentence_fletcher_16 = "$$habitat,1,00:00:00,0.0,0.0,0,0.0,hab*e3a6\n"
assert (
self.p.parse(sentence_fletcher_16, config_checksum_fletcher_16)
== self.output_append_sentence(output_checksum_test,
sentence_fletcher_16))
# A configuration with a Fletcher-16 checksum, mod 256 (legacy)
config_checksum_fletcher_16_256 = deepcopy(base_config)
config_checksum_fletcher_16_256["checksum"] = "fletcher-16-256"
sentence_fletcher_16_256 = \
"$$habitat,1,00:00:00,0.0,0.0,0,0.0,hab*DBF5\n"
assert (
self.p.parse(sentence_fletcher_16_256,
config_checksum_fletcher_16_256)
== self.output_append_sentence(output_checksum_test,
sentence_fletcher_16_256))
def test_parse_rejects_incorrect_checksums(self):
sentence_no_checksum = "$$habitat,1,00:00:00,0.0,0.0,0,0.0,hab\n"
# A configuration with a CRC16-CCITT checksum
config_checksum_crc16_ccitt = deepcopy(base_config)
config_checksum_crc16_ccitt["checksum"] = "crc16-ccitt"
sentence_bad_crc16_ccitt = \
"$$habitat,1,00:00:00,0.0,0.0,0,0.0,hab*abcd\n"
assert_raises(ValueError, self.p.parse, sentence_bad_crc16_ccitt,
config_checksum_crc16_ccitt)
assert_raises(ValueError, self.p.parse, sentence_no_checksum,
config_checksum_crc16_ccitt)
# A configuration with an XOR checksum
config_checksum_xor = deepcopy(base_config)
config_checksum_xor["checksum"] = "xor"
sentence_bad_xor = "$$habitat,1,00:00:00,0.0,0.0,0,0.0,hab*aa\n"
assert_raises(ValueError, self.p.parse, sentence_bad_xor,
config_checksum_xor)
assert_raises(ValueError, self.p.parse, sentence_no_checksum,
config_checksum_crc16_ccitt)
# A configuration with a Fletcher-16 checksum
config_checksum_fletcher_16 = deepcopy(base_config)
config_checksum_fletcher_16["checksum"] = "fletcher-16"
sentence_bad_fletcher_16 = \
"$$habitat,1,00:00:00,0.0,0.0,0,0.0,hab*abcd\n"
assert_raises(ValueError, self.p.parse, sentence_bad_fletcher_16,
config_checksum_fletcher_16)
assert_raises(ValueError, self.p.parse, sentence_no_checksum,
config_checksum_fletcher_16)
# A configuration with a Fletcher-16 checksum, mod 256 (legacy)
config_checksum_fletcher_16_256 = deepcopy(base_config)
config_checksum_fletcher_16_256["checksum"] = "fletcher-16-256"
sentence_bad_fletcher_16_256 = \
"$$habitat,1,00:00:00,0.0,0.0,0,0.0,hab*dcba\n"
assert_raises(ValueError, self.p.parse, sentence_bad_fletcher_16_256,
config_checksum_fletcher_16_256)
assert_raises(ValueError, self.p.parse, sentence_no_checksum,
config_checksum_fletcher_16_256)
def test_parse_rejects_invalid_values(self):
# A configuration with no checksum
config = deepcopy(base_config)
config["checksum"] = "none"
sentence_bad_int = "$$habitat,a,00:00:00,0.0,0.0,0,0.0,hab\n"
assert_raises(ValueError, self.p.parse, sentence_bad_int, config)
sentence_bad_time = "$$habitat,1,aa:bb:cc,0.0,0.0,0,0.0,hab\n"
assert_raises(ValueError, self.p.parse, sentence_bad_time, config)
sentence_bad_time_2 = "$$habitat,1,123,0.0,0.0,0,0.0,hab\n"
assert_raises(ValueError, self.p.parse, sentence_bad_time_2, config)
sentence_bad_float = "$$habitat,1,00:00:00,abc,0.0,0,0.0,hab\n"
assert_raises(ValueError, self.p.parse, sentence_bad_float, config)
def test_parse_rejects_bad_minutes(self):
# A configuration with coordinates in degrees and minutes
config_minutes = deepcopy(base_config)
config_minutes["fields"][2]["format"] = "ddmm.mm"
config_minutes["fields"][3]["format"] = "ddmm.mm"
config_minutes["checksum"] = "none"
sentence_bad_minutes = \
"$$habitat,1,00:00:00,087.123,0000.00,0,0.0,hab\n"
assert_raises(ValueError, self.p.parse, sentence_bad_minutes,
config_minutes)
def test_parse_parses_good_sentences(self):
# Several examples of valid sentences, where the coordinates are
# variously mangled (in minutes, or with funny padding, random 0s and
# spaces
# A configuration with coordinates in degrees and minutes
config_minutes = deepcopy(base_config)
config_minutes["fields"][2]["format"] = "ddmm.mm"
config_minutes["fields"][3]["format"] = "ddmm.mm"
config_minutes["checksum"] = "none"
# Correct parser output for (most) of the good sentences
output_good = {
"payload": "habitat", "sentence_id": 123,
"time": "12:45:06",
"latitude": -35.1032, "longitude": 138.8568,
"altitude": 4285, "speed": 3.6, "custom_string": "hab"}
sentence_good_1 = \
"$$habitat,123,12:45:06,-35.1032,138.8568,4285,3.6,hab*5681\n"
assert(self.p.parse(sentence_good_1, base_config)
== self.output_append_sentence(output_good, sentence_good_1))
sentence_good_3 = \
"$$habitat,123,12:45:06,-3506.192,13851.408,4285,3.6,hab*6139\n"
assert(self.p.parse(sentence_good_3, config_minutes)
== self.output_append_sentence(output_good, sentence_good_3))
sentence_good_4 = \
"$$habitat,123,12:45:06, -35.1032,138.8568,4285,3.6,hab*96A2\n"
assert(self.p.parse(sentence_good_4, base_config)
== self.output_append_sentence(output_good, sentence_good_4))
sentence_good_5 = \
"$$habitat,123,12:45:06,-035.1032,138.8568,4285,3.6,hab*C5CA\n"
assert(self.p.parse(sentence_good_5, base_config)
== self.output_append_sentence(output_good, sentence_good_5))
sentence_good_6 = \
"$$habitat,123,12:45:06,035.1032,0138.8568,4285,3.6,hab*D856\n"
# Correct parser output for sentence_good_6 (positive latitude)
output_good_6 = deepcopy(output_good)
output_good_6["latitude"] = 35.1032
assert(self.p.parse(sentence_good_6, base_config)
== self.output_append_sentence(output_good_6, sentence_good_6))
def test_parse_handles_shorter_sentences(self):
# A sentence with less fields than the config suggests, but otherwise
# valid
sentence_short = "$$habitat,123,12:45:06,-35.1032,138.8568,4285*5260\n"
assert_raises(ValueError, self.p.parse, sentence_short, base_config)
def test_parse_handles_longer_sentences(self):
# A sentence with more fields than the config suggests, but otherwise
# valid
sentence_long = \
"$$habitat,123,12:45:06,-35.1032,138.8568,4285,3.6,hab,123," \
"4.56,seven*3253\n"
assert_raises(ValueError, self.p.parse, sentence_long, base_config)
def test_parser_rejects_sentence_with_no_newline(self):
# sentence from test_parse_handles_shorter_sentences with no \n:
bad_sentence = "$$habitat,123,12:45:06,-35.1032,138.8568,4285*5260"
assert_raises(ValueError, self.p.parse, bad_sentence, base_config)
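# --- Checksum helper sketch (illustrative, not used by the tests above) ---
# The CRC16-CCITT variant the UKHAS protocol uses (poly 0x1021, init 0xFFFF,
# no reflection) is applied to the text between "$$" and "*"; the helper below
# is a straightforward reference implementation and is expected to reproduce
# checksums such as the EE5E used in the sentences above.
def _crc16_ccitt_sketch(data):
    # data: ASCII byte string covering the characters between "$$" and "*"
    crc = 0xFFFF
    for byte in bytearray(data):
        crc ^= byte << 8
        for _ in range(8):
            if crc & 0x8000:
                crc = ((crc << 1) ^ 0x1021) & 0xFFFF
            else:
                crc = (crc << 1) & 0xFFFF
    return '%04X' % crc
# e.g. _crc16_ccitt_sketch(b"habitat,1,00:00:00,0.0,0.0,0,0.0,hab") is expected to give "EE5E"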
| ukhas/habitat | habitat/tests/test_parser_modules/test_ukhas_parser.py | Python | gpl-3.0 | 18,589 | 0.000592 |
from __future__ import division
from operator import add, mul
import cProfile
def memo(f):
cache = {}
def _f(*args):
try:
return cache[args]
except KeyError:
result = cache[args] = f(*args)
return result
except TypeError:
return f(*args)
return _f
def mod(a, b):
"""
Type :: (Int, Int) -> Int
    Return a modulo b, making sure the result is a positive number
    when b is greater than zero
"""
return (a % b + b) % b
def gcd(a, b):
"""
Type :: (Int, Int) -> Int
Return :: Greatest Common divisor
"""
while b is not 0:
a, b = b, a % b
return a
def exgcd(a, b):
"""
Type :: (Int, Int) -> (Int, Int, Int)
Return :: (g, x, y), g is gcd of a and b and
x * a + y * b = g
"""
if b is 0:
return (a, 1, 0)
else:
g, x, y = exgcd(b, a % b)
return (g, y, x - (a // b) * y)
@memo
def modinv(a, m):
"""
Type :: (Int, Int) -> Int
Return :: Return module inverse of a * x = 1 (mod m)
"""
if gcd(a, m) is not 1: raise Exception("Not coprime")
_, x, y = exgcd(a, m)
return (m + x % m) % m
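# Quick check (illustrative): modinv(3, 11) == 4, and 3 * 4 % 11 == 1.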
def sieve(m):
"""
Type :: Int -> [Int]
    Generate prime numbers up to m and return them as a list
"""
ret, judge = [], [True] * (m + 1)
judge[0] = judge[1] = False
ret.append(2)
for i in xrange(4, m + 1, 2): judge[i] = False
for i in xrange(3, m + 1, 2):
if judge[i]:
ret.append(i)
for j in xrange(i * i, m + 1, i): judge[j] = False
return ret
MAXN = 1000
primes = sieve(MAXN)
primes_set = set(primes)
def factor(n):
"""
Type :: Int -> [(Int, Int)]
    Return the prime factorization of n as a list of (prime, exponent) pairs
>>> factor(12)
[(2, 2), (3, 1)]
>>> factor(10007)
[(10007, 1)]
>>> factor(0)
Traceback (most recent call last):
...
Exception: Should be nonzero number
"""
if n is 0: raise Exception("Should be nonzero number")
ret, i = [], 0
while n is not 1 and i < len(primes):
if n % primes[i] == 0:
c = 0
while n % primes[i] == 0:
c += 1
n //= primes[i]
ret.append((primes[i], c))
i += 1
if n is not 1: ret.append((n, 1))
return ret
def euler_phi(n):
"""
Type :: Int -> Int
    Calculate Euler's totient of n using its prime factorization
>>> euler_phi(12)
4
>>> euler_phi(17)
16
>>> euler_phi(33)
20
"""
facts = factor(n)
return reduce(lambda acc, x: acc * (x[0] - 1) // x[0], facts, n)
def euler_phi2(n):
"""
Type :: Int -> [Int]
    Generate Euler's totient for every integer up to n and return the results
    as a list (index i holds phi(i))
>>> euler_phi2(20) == [0] + [euler_phi(i) for i in range(1, 21)]
True
>>> euler_phi2(100) == [0] + [euler_phi(i) for i in range(1, 101)]
True
>>> euler_phi2(1000) == [0] + [euler_phi(i) for i in range(1, 1001)]
True
"""
ret = [i for i in range(n + 1)]
for i in range(2, n + 1):
if ret[i] == i:
for j in range(i, n + 1, i): ret[j] = ret[j] // i * (i - 1)
return ret
def gen_fact_mod_prime(p):
"""
Type :: Int -> [Int]
    Generate i! (mod p) for 0 <= i < p; p should be a prime number
>>> gen_fact_mod_prime(3)
[1, 1, 2]
>>> gen_fact_mod_prime(7)
[1, 1, 2, 6, 3, 1, 6]
"""
ret = [1] * p
for i in range(2, p): ret[i] = ret[i - 1] * i % p
return ret
def fact_mod(n, p, facts):
"""
Type :: (Int, Int, [Int]) -> (Int, Int)
    Suppose n! = a * p^e where p does not divide a; the function returns (a mod p, e).
    facts is i! (mod p) for 0 <= i < p; used by the Lucas-style comb_mod below
>>> facts = gen_fact_mod_prime(7)
>>> fact_mod(5, 7, facts)
(1, 0)
>>> fact_mod(15, 7, facts)
(2, 2)
"""
if (n == 0): return (1, 0)
(a, e) = fact_mod(n // p, p, facts)
e += n // p
if (n // p % 2 != 0): return (a * (p - facts[n % p]) % p, e)
return (a * facts[n % p] % p, e)
def comb_mod(n, k, p):
"""
Type :: (Int, Int, Int) -> Int
Return C(n, k) mod p, p is a prime number.
>>> comb_mod(5, 3, 7)
3
>>> comb_mod(6, 2, 7)
1
"""
if n < 0 or k < 0 or n < k: return 0
facts = gen_fact_mod_prime(p)
a1, e1 = fact_mod(n, p, facts)
a2, e2 = fact_mod(k, p, facts)
a3, e3 = fact_mod(n - k, p, facts)
if (e1 > e2 + e3):
return 0
else:
return a1 * modinv(a2 * a3 % p, p) % p
def chinese_remainder_theory_for2(x, a, y, b):
"""
Type :: (Int, Int, Int, Int) -> Int
    Return z such that z = a (mod x) and z = b (mod y). Here z is unique modulo
    M = lcm(x, y); returns (z, M). On failure, (0, -1) is returned
"""
g, s, t = exgcd(x, y)
if (a % g) != (b % g):
return (0, -1)
else:
return (mod(s * b * x + t * a * y, x * y) // g, x * y // g)
def chinese_remainder_theory(xs, ass):
"""
Type :: ([Int], [Int]) -> Int
    Return : (z, M) such that z = ass[i] (mod xs[i]) for 0 <= i < n
    Require : the moduli xs[i] to be pairwise relatively coprime
>>> chinese_remainder_theory([3, 5, 7], [2,3,2])
(23, 105)
"""
ret = (ass[0], xs[0])
for i in xrange(1, len(xs)):
ret = chinese_remainder_theory_for2(ret[1], ret[0], xs[i], ass[i])
if ret[1] == -1: break
return ret
def comb_mod2(n, r, m, pa, facts1):
"""
    Type :: (Int, Int, Int, (Int, Int), [Int]) -> Int
    Return C(n, r) mod m, where m = p^a and pa = (p, a); handles very large n
"""
p, a = pa
def n_fact_fact(n):
if n is 0 or n is 1:
return 1
elif n < m:
return facts1[n] * n_fact_fact(n // p) % m
else:
a = facts1[m - 1]
b = facts1[n % m]
c = n_fact_fact(n // p)
# print 'n = %d a = %d b = %d c = %d' % (n, a, b, c)
return pow(a, n // m, m) * b * c % m
def get_power(n, p):
ret = 0
while n > 0:
ret += n // p
n //= p
return ret
b = get_power(n, p) - get_power(r, p) - get_power(n - r, p)
if b >= a: return 0
m1 = n_fact_fact(n)
m2 = n_fact_fact(r)
m3 = n_fact_fact(n - r)
return (p ** b) * m1 * modinv_table[(m2, m)] * modinv_table[(m3, m)] % m
def solve(n, r, fs1, fs2, xss):
xs = [27, 11, 13, 37]
ass = [comb_mod2(n, r, xs[i], xss[i], fs1[i]) for i in xrange(4)]
return chinese_remainder_theory(xs, ass)
def init(xs):
ret1, ret2 = [], []
for i in xrange(len(xs)):
p, a = xs[i]
m = p ** a
t1, t2 = [1],[1]
for i in xrange(1, p):
t2.append(t2[-1] * i % p)
for i in xrange(1, m):
if gcd(i, m) == 1:
t1.append(t1[-1] * i % m)
else:
t1.append(t1[-1])
ret1.append(t1)
ret2.append(t2)
return ret1, ret2
modinv_table = {}
modulo = 142857
for x in {27, 11, 13, 37}:
for y in xrange(1, x):
if gcd(x, y) == 1:
modinv_table[(y, x)] = modinv(y, x)
def main():
n = int(raw_input())
xss = [(3,3), (11,1), (13, 1), (37, 1)]
facts1, facts2 = init(xss)
for _ in xrange(n):
n, r = map(int, raw_input().strip().split())
print solve(n, r, facts1, facts2, xss)[0]
if __name__ == '__main__':
main()
# cProfile.run('main()')
| m00nlight/hackerrank | algorithm/Number-Theory/nCr/main.py | Python | gpl-2.0 | 7,426 | 0.005252 |
#
# Copyright 2015 IBM Corp.
#
# All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from heat.common import exception
from heat.common.i18n import _
from heat.engine import attributes
from heat.engine import constraints
from heat.engine import properties
from heat.engine.resources.openstack.neutron import neutron
from heat.engine import support
from heat.engine import translation
class Listener(neutron.NeutronResource):
"""A resource for managing LBaaS v2 Listeners.
This resource creates and manages Neutron LBaaS v2 Listeners,
which represent a listening endpoint for the vip.
"""
support_status = support.SupportStatus(version='6.0.0')
required_service_extension = 'lbaasv2'
PROPERTIES = (
PROTOCOL_PORT, PROTOCOL, LOADBALANCER, NAME,
ADMIN_STATE_UP, DESCRIPTION, DEFAULT_TLS_CONTAINER_REF,
SNI_CONTAINER_REFS, CONNECTION_LIMIT, TENANT_ID
) = (
'protocol_port', 'protocol', 'loadbalancer', 'name',
'admin_state_up', 'description', 'default_tls_container_ref',
'sni_container_refs', 'connection_limit', 'tenant_id'
)
PROTOCOLS = (
TCP, HTTP, HTTPS, TERMINATED_HTTPS,
) = (
'TCP', 'HTTP', 'HTTPS', 'TERMINATED_HTTPS',
)
ATTRIBUTES = (
LOADBALANCERS_ATTR, DEFAULT_POOL_ID_ATTR
) = (
'loadbalancers', 'default_pool_id'
)
properties_schema = {
PROTOCOL_PORT: properties.Schema(
properties.Schema.INTEGER,
_('TCP or UDP port on which to listen for client traffic.'),
required=True,
constraints=[
constraints.Range(1, 65535),
]
),
PROTOCOL: properties.Schema(
properties.Schema.STRING,
_('Protocol on which to listen for the client traffic.'),
required=True,
constraints=[
constraints.AllowedValues(PROTOCOLS),
]
),
LOADBALANCER: properties.Schema(
properties.Schema.STRING,
_('ID or name of the load balancer with which listener '
'is associated.'),
required=True,
constraints=[
constraints.CustomConstraint('neutron.lbaas.loadbalancer')
]
),
NAME: properties.Schema(
properties.Schema.STRING,
_('Name of this listener.'),
update_allowed=True
),
ADMIN_STATE_UP: properties.Schema(
properties.Schema.BOOLEAN,
_('The administrative state of this listener.'),
update_allowed=True,
default=True
),
DESCRIPTION: properties.Schema(
properties.Schema.STRING,
_('Description of this listener.'),
update_allowed=True,
default=''
),
DEFAULT_TLS_CONTAINER_REF: properties.Schema(
properties.Schema.STRING,
_('Default TLS container reference to retrieve TLS '
'information.'),
update_allowed=True
),
SNI_CONTAINER_REFS: properties.Schema(
properties.Schema.LIST,
_('List of TLS container references for SNI.'),
update_allowed=True
),
CONNECTION_LIMIT: properties.Schema(
properties.Schema.INTEGER,
_('The maximum number of connections permitted for this '
'load balancer. Defaults to -1, which is infinite.'),
update_allowed=True,
default=-1,
constraints=[
constraints.Range(min=-1),
]
),
TENANT_ID: properties.Schema(
properties.Schema.STRING,
_('The ID of the tenant who owns the listener.')
),
}
attributes_schema = {
LOADBALANCERS_ATTR: attributes.Schema(
_('ID of the load balancer this listener is associated to.'),
type=attributes.Schema.LIST
),
DEFAULT_POOL_ID_ATTR: attributes.Schema(
_('ID of the default pool this listener is associated to.'),
type=attributes.Schema.STRING
)
}
def translation_rules(self, props):
return [
translation.TranslationRule(
props,
translation.TranslationRule.RESOLVE,
[self.LOADBALANCER],
client_plugin=self.client_plugin(),
finder='find_resourceid_by_name_or_id',
entity='loadbalancer'
),
]
def validate(self):
res = super(Listener, self).validate()
if res:
return res
if self.properties[self.PROTOCOL] == self.TERMINATED_HTTPS:
if self.properties[self.DEFAULT_TLS_CONTAINER_REF] is None:
msg = (_('Property %(ref)s required when protocol is '
'%(term)s.') % {'ref': self.DEFAULT_TLS_CONTAINER_REF,
'term': self.TERMINATED_HTTPS})
raise exception.StackValidationFailed(message=msg)
def _check_lb_status(self):
lb_id = self.properties[self.LOADBALANCER]
return self.client_plugin().check_lb_status(lb_id)
def handle_create(self):
properties = self.prepare_properties(
self.properties,
self.physical_resource_name())
properties['loadbalancer_id'] = properties.pop(self.LOADBALANCER)
return properties
def check_create_complete(self, properties):
if self.resource_id is None:
try:
listener = self.client().create_listener(
{'listener': properties})['listener']
self.resource_id_set(listener['id'])
except Exception as ex:
if self.client_plugin().is_invalid(ex):
return False
raise
return self._check_lb_status()
def _show_resource(self):
return self.client().show_listener(
self.resource_id)['listener']
def handle_update(self, json_snippet, tmpl_diff, prop_diff):
self._update_called = False
return prop_diff
def check_update_complete(self, prop_diff):
if not prop_diff:
return True
if not self._update_called:
try:
self.client().update_listener(self.resource_id,
{'listener': prop_diff})
self._update_called = True
except Exception as ex:
if self.client_plugin().is_invalid(ex):
return False
raise
return self._check_lb_status()
def handle_delete(self):
self._delete_called = False
def check_delete_complete(self, data):
if self.resource_id is None:
return True
if not self._delete_called:
try:
self.client().delete_listener(self.resource_id)
self._delete_called = True
except Exception as ex:
if self.client_plugin().is_invalid(ex):
return False
elif self.client_plugin().is_not_found(ex):
return True
raise
return self._check_lb_status()
def resource_mapping():
return {
'OS::Neutron::LBaaS::Listener': Listener,
}
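# Illustrative HOT template snippet (not part of this module; the resource type and
# property names follow the schema above, everything else is an assumption):
#
#   resources:
#     listener:
#       type: OS::Neutron::LBaaS::Listener
#       properties:
#         loadbalancer: { get_resource: lb }
#         protocol: HTTP
#         protocol_port: 80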
| cwolferh/heat-scratch | heat/engine/resources/openstack/neutron/lbaas/listener.py | Python | apache-2.0 | 7,961 | 0 |
# Copyright 2016, Cossack Labs Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
import asyncio
import collections
import collections.abc
import contextlib
import http
import json
import logging
import os
import os.path
import random
import re
import shutil
import signal
import socket
import ssl
import stat
import subprocess
import tempfile
import traceback
import unittest
from base64 import b64decode, b64encode
from distutils.dir_util import copy_tree
from tempfile import NamedTemporaryFile
from urllib.parse import urlparse
from urllib.request import urlopen
import asyncpg
import grpc
import mysql.connector
import psycopg2
import psycopg2.errors
import psycopg2.extras
import pymysql
import redis
import requests
import semver
import sqlalchemy as sa
import sys
import time
import yaml
from ddt import ddt, data
from hvac import Client
from prometheus_client.parser import text_string_to_metric_families
from sqlalchemy.dialects import mysql as mysql_dialect
from sqlalchemy.dialects import postgresql as postgresql_dialect
from sqlalchemy.dialects.postgresql import BYTEA
from sqlalchemy.exc import DatabaseError
import api_pb2
import api_pb2_grpc
import utils
from random_utils import random_bytes, random_email, random_int32, random_int64, random_str
from utils import (read_storage_public_key, read_storage_private_key,
read_zone_public_key, read_zone_private_key,
read_poison_public_key, read_poison_private_key,
destroy_server_storage_key,
decrypt_acrastruct, deserialize_and_decrypt_acrastruct,
load_random_data_config, get_random_data_files,
clean_test_data, safe_string, prepare_encryptor_config,
get_encryptor_config, abs_path, get_test_encryptor_config, send_signal_by_process_name,
load_yaml_config, dump_yaml_config, BINARY_OUTPUT_FOLDER)
# add our wrapper to the path until it is published to PyPI
sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)), 'wrappers/python'))
from acrawriter import create_acrastruct
# emit Python logs with the same time format as the Go components
format = u"%(asctime)s - %(message)s"
handler = logging.StreamHandler(stream=sys.stderr)
handler.setFormatter(logging.Formatter(fmt=format, datefmt="%Y-%m-%dT%H:%M:%S%z"))
handler.setLevel(logging.DEBUG)
logger = logging.getLogger()
logger.addHandler(handler)
logger.setLevel(logging.DEBUG)
DB_HOST = os.environ.get('TEST_DB_HOST', 'localhost')
DB_NAME = os.environ.get('TEST_DB_NAME', 'postgres')
DB_PORT = os.environ.get('TEST_DB_PORT', 5432)
TEST_TLS_CA = abs_path(os.environ.get('TEST_TLS_CA', 'tests/ssl/ca/ca.crt'))
TEST_TLS_SERVER_CERT = abs_path(os.environ.get('TEST_TLS_SERVER_CERT', os.path.join(os.path.dirname(__file__), 'ssl/acra-server/acra-server.crt')))
TEST_TLS_SERVER_KEY = abs_path(os.environ.get('TEST_TLS_SERVER_KEY', os.path.join(os.path.dirname(__file__), 'ssl/acra-server/acra-server.key')))
# keys are copied to tests/* with permissions changed to 0400 because the keys in docker/ssl/ are
# readable by group/other, and some DB drivers refuse to use keys with such broad permissions
TEST_TLS_CLIENT_CERT = abs_path(os.environ.get('TEST_TLS_CLIENT_CERT', os.path.join(os.path.dirname(__file__), 'ssl/acra-writer/acra-writer.crt')))
TEST_TLS_CLIENT_KEY = abs_path(os.environ.get('TEST_TLS_CLIENT_KEY', os.path.join(os.path.dirname(__file__), 'ssl/acra-writer/acra-writer.key')))
TEST_TLS_CLIENT_2_CERT = abs_path(os.environ.get('TEST_TLS_CLIENT_2_CERT', os.path.join(os.path.dirname(__file__), 'ssl/acra-writer-2/acra-writer-2.crt')))
TEST_TLS_CLIENT_2_KEY = abs_path(os.environ.get('TEST_TLS_CLIENT_2_KEY', os.path.join(os.path.dirname(__file__), 'ssl/acra-writer-2/acra-writer-2.key')))
TEST_TLS_OCSP_CA = abs_path(os.environ.get('TEST_TLS_OCSP_CA', os.path.join(os.path.dirname(__file__), 'ssl/ca/ca.crt')))
TEST_TLS_OCSP_CERT = abs_path(os.environ.get('TEST_TLS_OCSP_CERT', os.path.join(os.path.dirname(__file__), 'ssl/ocsp-responder/ocsp-responder.crt')))
TEST_TLS_OCSP_KEY = abs_path(os.environ.get('TEST_TLS_OCSP_KEY', os.path.join(os.path.dirname(__file__), 'ssl/ocsp-responder/ocsp-responder.key')))
TEST_TLS_OCSP_INDEX = abs_path(os.environ.get('TEST_TLS_OCSP_INDEX', os.path.join(os.path.dirname(__file__), 'ssl/ca/index.txt')))
TEST_TLS_CRL_PATH = abs_path(os.environ.get('TEST_TLS_CRL_PATH', os.path.join(os.path.dirname(__file__), 'ssl/ca')))
TEST_WITH_TLS = os.environ.get('TEST_TLS', 'off').lower() == 'on'
OCSP_SERVER_PORT = int(os.environ.get('TEST_OCSP_SERVER_PORT', 8888))
CRL_HTTP_SERVER_PORT = int(os.environ.get('TEST_HTTP_SERVER_PORT', 8889))
TEST_WITH_TRACING = os.environ.get('TEST_TRACE', 'off').lower() == 'on'
TEST_WITH_REDIS = os.environ.get('TEST_REDIS', 'off').lower() == 'on'
TEST_TRACE_TO_JAEGER = os.environ.get('TEST_TRACE_JAEGER', 'off').lower() == 'on'
TEST_RANDOM_DATA_CONFIG = load_random_data_config()
TEST_RANDOM_DATA_FILES = get_random_data_files()
NoClientCert, RequestClientCert, RequireAnyClientCert, VerifyClientCertIfGiven, RequireAndVerifyClientCert = range(5)
if TEST_WITH_TLS:
ACRA_TLS_AUTH = RequireAndVerifyClientCert # verify if provided https://golang.org/pkg/crypto/tls/#ClientAuthType
else:
ACRA_TLS_AUTH = VerifyClientCertIfGiven
# 200 bytes is the encryption overhead (chosen manually)
# multiplied by 2 because we test an AcraStruct nested inside another AcraStruct
COLUMN_DATA_SIZE = (TEST_RANDOM_DATA_CONFIG['data_max_size'] + 200) * 2
metadata = sa.MetaData()
test_table = sa.Table('test', metadata,
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('data', sa.LargeBinary(length=COLUMN_DATA_SIZE)),
sa.Column('raw_data', sa.Text),
sa.Column('nullable_column', sa.Text, nullable=True),
sa.Column('empty', sa.LargeBinary(length=COLUMN_DATA_SIZE), nullable=False, default=b''),
)
acrarollback_output_table = sa.Table('acrarollback_output', metadata,
sa.Column('data', sa.LargeBinary),
)
# keys of the JSON object returned by the acra-addzone tool
ZONE_ID = 'id'
ZONE_PUBLIC_KEY = 'public_key'
zones = []
poison_record = None
poison_record_acrablock = None
master_key = None
KEYS_FOLDER = None
ACRA_MASTER_KEY_VAR_NAME = 'ACRA_MASTER_KEY'
MASTER_KEY_PATH = '/tmp/acra-test-master.key'
TEST_WITH_VAULT = os.environ.get('TEST_WITH_VAULT', 'off').lower() == 'on'
TEST_SSL_VAULT = os.environ.get('TEST_SSL_VAULT', 'off').lower() == 'on'
TEST_VAULT_TLS_CA = abs_path(os.environ.get('TEST_VAULT_TLS_CA', 'tests/ssl/ca/ca.crt'))
VAULT_KV_ENGINE_VERSION=os.environ.get('VAULT_KV_ENGINE_VERSION', 'v1')
CRYPTO_ENVELOPE_HEADER = b'%%%'
# TLS_CERT_CLIENT_ID_* hold the two different ClientIDs used in tests; initialized in setUpModule
TLS_CERT_CLIENT_ID_1 = None
TLS_CERT_CLIENT_ID_2 = None
TLS_CLIENT_ID_SOURCE_DN = 'distinguished_name'
TLS_CLIENT_ID_SOURCE_SERIAL = 'serial_number'
POISON_KEY_PATH = '.poison_key/poison_key'
STATEMENT_TIMEOUT = 5 * 1000 # 5 sec
SETUP_SQL_COMMAND_TIMEOUT = 0.1
# how long to wait for a forked process to respond
FORK_TIMEOUT = 2
# seconds to sleep after a failed poll of a forked process
FORK_FAIL_SLEEP = 0.1
CONNECTION_FAIL_SLEEP = 0.1
SOCKET_CONNECT_TIMEOUT = 3
KILL_WAIT_TIMEOUT = 2
CONNECT_TRY_COUNT = 3
SQL_EXECUTE_TRY_COUNT = 5
# http://docs.python-requests.org/en/master/user/advanced/#timeouts
# use only for requests.* methods
REQUEST_TIMEOUT = (5, 5) # connect_timeout, read_timeout
PG_UNIX_HOST = '/tmp'
DB_USER = os.environ.get('TEST_DB_USER', 'postgres')
DB_USER_PASSWORD = os.environ.get('TEST_DB_USER_PASSWORD', 'postgres')
SSLMODE = os.environ.get('TEST_SSL_MODE', 'require' if TEST_WITH_TLS else 'disable')
TEST_MYSQL = utils.get_bool_env('TEST_MYSQL', default=False)
TEST_MARIADB = utils.get_bool_env('TEST_MARIADB', default=False)
if TEST_MYSQL or TEST_MARIADB:
TEST_POSTGRESQL = False
DB_DRIVER = "mysql+pymysql"
TEST_MYSQL = True
connect_args = {
'user': DB_USER, 'password': DB_USER_PASSWORD,
'read_timeout': SOCKET_CONNECT_TIMEOUT,
'write_timeout': SOCKET_CONNECT_TIMEOUT,
}
pymysql_tls_args = {}
if TEST_WITH_TLS:
pymysql_tls_args.update(
ssl={
"ca": TEST_TLS_CA,
"cert": TEST_TLS_CLIENT_CERT,
"key": TEST_TLS_CLIENT_KEY,
'check_hostname': False,
}
)
connect_args.update(pymysql_tls_args)
db_dialect = mysql_dialect.dialect()
if TEST_MARIADB:
TEST_MARIADB = True
else:
TEST_POSTGRESQL = True
DB_DRIVER = "postgresql"
connect_args = {
'connect_timeout': SOCKET_CONNECT_TIMEOUT,
'user': DB_USER, 'password': DB_USER_PASSWORD,
"options": "-c statement_timeout={}".format(STATEMENT_TIMEOUT),
'sslmode': 'disable',
'application_name': 'acra-tests'
}
asyncpg_connect_args = {
'timeout': SOCKET_CONNECT_TIMEOUT,
'statement_cache_size': 0,
'command_timeout': STATEMENT_TIMEOUT,
}
db_dialect = postgresql_dialect.dialect()
if TEST_WITH_TLS:
connect_args.update({
# for psycopg2 key names took from
# https://www.postgresql.org/docs/current/libpq-connect.html#LIBPQ-CONNECT-SSLCERT
'sslcert': TEST_TLS_CLIENT_CERT,
'sslkey': TEST_TLS_CLIENT_KEY,
'sslrootcert': TEST_TLS_CA,
'sslmode': 'require',
})
def get_tls_connection_args(client_key, client_cert, for_mysql=TEST_MYSQL):
if for_mysql:
connect_args = {
'user': DB_USER, 'password': DB_USER_PASSWORD,
'read_timeout': SOCKET_CONNECT_TIMEOUT,
'write_timeout': SOCKET_CONNECT_TIMEOUT,
}
pymysql_tls_args = {}
pymysql_tls_args.update(
ssl={
"ca": TEST_TLS_CA,
"cert": client_cert,
"key": client_key,
'check_hostname': False,
}
)
connect_args.update(pymysql_tls_args)
else:
connect_args = {
'connect_timeout': SOCKET_CONNECT_TIMEOUT,
'user': DB_USER, 'password': DB_USER_PASSWORD,
"options": "-c statement_timeout={}".format(STATEMENT_TIMEOUT),
'sslmode': 'disable',
'application_name': 'acra-tests'
}
connect_args.update({
# for psycopg2 key names took from
# https://www.postgresql.org/docs/current/libpq-connect.html#LIBPQ-CONNECT-SSLCERT
'sslcert': client_cert,
'sslkey': client_key,
'sslrootcert': TEST_TLS_CA,
'sslmode': 'require',
})
return connect_args
def get_tls_connection_args_without_certificate(for_mysql=TEST_MYSQL):
if for_mysql:
connect_args = {
'user': DB_USER, 'password': DB_USER_PASSWORD,
'read_timeout': SOCKET_CONNECT_TIMEOUT,
'write_timeout': SOCKET_CONNECT_TIMEOUT,
}
pymysql_tls_args = {}
pymysql_tls_args.update(
ssl={
"ca": TEST_TLS_CA,
'check_hostname': False,
}
)
connect_args.update(pymysql_tls_args)
else:
connect_args = {
'connect_timeout': SOCKET_CONNECT_TIMEOUT,
'user': DB_USER, 'password': DB_USER_PASSWORD,
"options": "-c statement_timeout={}".format(STATEMENT_TIMEOUT),
'sslmode': 'disable',
'application_name': 'acra-tests'
}
connect_args.update({
# for psycopg2 key names took from
# https://www.postgresql.org/docs/current/libpq-connect.html#LIBPQ-CONNECT-SSLCERT
'sslrootcert': TEST_TLS_CA,
'sslmode': 'require',
})
return connect_args
def get_random_id():
return random.randint(1, 100000)
def get_pregenerated_random_data():
data_file = random.choice(TEST_RANDOM_DATA_FILES)
with open(data_file, 'r') as f:
return f.read()
def create_acrastruct_with_client_id(data, client_id):
server_public1 = read_storage_public_key(client_id, KEYS_FOLDER.name)
if isinstance(data, str):
data = data.encode('utf-8')
acra_struct = create_acrastruct(data, server_public1)
return acra_struct
def stop_process(process):
"""stop process if exists by terminate and kill at end to be sure
that process will not alive as zombi-process"""
if not isinstance(process, collections.abc.Iterable):
process = [process]
# send signal to each. they can handle it asynchronously
for p in process:
try:
logger.info("terminate pid {}".format(p.pid))
p.terminate()
except:
traceback.print_exc()
# synchronously wait termination or kill
for p in process:
try:
            # poll() returns None if the process has not terminated yet; wait a bit for it
if p.poll() is None:
p.wait(timeout=KILL_WAIT_TIMEOUT)
except:
traceback.print_exc()
try:
logger.info("kill pid {}".format(p.pid))
p.kill()
logger.info("killed pid {}".format(p.pid))
except:
traceback.print_exc()
def get_connect_args(port=5432, sslmode=None, **kwargs):
args = connect_args.copy()
args['port'] = int(port)
if TEST_POSTGRESQL:
args['sslmode'] = sslmode if sslmode else SSLMODE
args.update(kwargs)
return args
KEYSTORE_VERSION = os.environ.get('TEST_KEYSTORE', 'v1')
def get_master_key():
"""Returns master key value (base64-encoded)."""
global master_key
if not master_key:
master_key = os.environ.get(ACRA_MASTER_KEY_VAR_NAME)
if not master_key:
subprocess.check_output([
os.path.join(BINARY_OUTPUT_FOLDER, 'acra-keymaker'), '--keystore={}'.format(KEYSTORE_VERSION),
'--generate_master_key={}'.format(MASTER_KEY_PATH)])
with open(MASTER_KEY_PATH, 'rb') as f:
master_key = b64encode(f.read()).decode('ascii')
return master_key
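# Illustrative only: when running the tests manually the key can be pre-set, e.g.
#   export ACRA_MASTER_KEY="$(base64 < /tmp/acra-test-master.key)"
# otherwise get_master_key() generates it via acra-keymaker on first use as shown above.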
def get_poison_record():
"""generate one poison record for speed up tests and don't create subprocess
for new records"""
global poison_record
if not poison_record:
poison_record = b64decode(subprocess.check_output([
os.path.join(BINARY_OUTPUT_FOLDER, 'acra-poisonrecordmaker'), '--keys_dir={}'.format(KEYS_FOLDER.name),
],
timeout=PROCESS_CALL_TIMEOUT))
return poison_record
def get_poison_record_with_acrablock():
"""generate one poison record with acrablock for speed up tests and don't create subprocess
for new records"""
global poison_record_acrablock
if not poison_record_acrablock:
poison_record_acrablock = b64decode(subprocess.check_output([
os.path.join(BINARY_OUTPUT_FOLDER, 'acra-poisonrecordmaker'), '--keys_dir={}'.format(KEYS_FOLDER.name), '--type=acrablock',
],
timeout=PROCESS_CALL_TIMEOUT))
return poison_record_acrablock
def create_client_keypair(name, only_storage=False, keys_dir=None, extra_kwargs: dict=None):
if not keys_dir:
keys_dir = KEYS_FOLDER.name
args = [os.path.join(BINARY_OUTPUT_FOLDER, 'acra-keymaker'), '-client_id={}'.format(name),
'-keys_output_dir={}'.format(keys_dir),
'--keys_public_output_dir={}'.format(keys_dir),
'--keystore={}'.format(KEYSTORE_VERSION)]
if only_storage:
args.append('--generate_acrawriter_keys')
if extra_kwargs:
for key, value in extra_kwargs.items():
param = '-{0}={1}'.format(key, value)
args.append(param)
return subprocess.call(args, cwd=os.getcwd(), timeout=PROCESS_CALL_TIMEOUT)
def create_client_keypair_from_certificate(tls_cert, extractor=TLS_CLIENT_ID_SOURCE_DN, only_storage=False, keys_dir=None, extra_kwargs: dict=None):
if not keys_dir:
keys_dir = KEYS_FOLDER.name
args = [os.path.join(BINARY_OUTPUT_FOLDER, 'acra-keymaker'), '--client_id=',
'--tls_cert={}'.format(tls_cert),
'--tls_identifier_extractor_type={}'.format(extractor),
'-keys_output_dir={}'.format(keys_dir),
'--keys_public_output_dir={}'.format(keys_dir),
'--keystore={}'.format(KEYSTORE_VERSION)]
if only_storage:
args.append('--generate_acrawriter_keys')
if extra_kwargs:
for key, value in extra_kwargs.items():
param = '-{0}={1}'.format(key, value)
args.append(param)
return subprocess.call(args, cwd=os.getcwd(), timeout=PROCESS_CALL_TIMEOUT)
WAIT_CONNECTION_ERROR_MESSAGE = "can't wait connection"
def wait_connection(port, count=1000, sleep=0.001):
"""try connect to 127.0.0.1:port and close connection
if can't then sleep on and try again (<count> times)
if <count> times is failed than raise Exception
"""
while count:
try:
connection = socket.create_connection(('127.0.0.1', port), timeout=SOCKET_CONNECT_TIMEOUT)
connection.close()
return
except ConnectionRefusedError:
pass
count -= 1
time.sleep(sleep)
raise Exception(WAIT_CONNECTION_ERROR_MESSAGE)
def wait_command_success(command, count=10, sleep=0.200):
"""try executing `command` using `os.system()`
if exit code != 0 then sleep on and try again (<count> times)
if <count> times is failed than raise Exception
"""
while count:
ret = os.system(command)
if ret == 0:
return
count -= 1
time.sleep(sleep)
raise Exception(f"can't wait command success: {command}")
def wait_unix_socket(socket_path, count=1000, sleep=0.005):
last_exc = Exception("can't wait unix socket")
while count:
connection = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
try:
connection.settimeout(SOCKET_CONNECT_TIMEOUT)
connection.connect(socket_path)
return
except Exception as exc:
last_exc = exc
finally:
connection.close()
count -= 1
time.sleep(sleep)
raise last_exc
def get_db_host():
"""use unix socket for postgresql and tcp with localhost for mysql"""
if TEST_POSTGRESQL and not TEST_WITH_TLS:
return PG_UNIX_HOST
else:
return DB_HOST
def get_engine_connection_string(connection_string, dbname):
addr = urlparse(connection_string)
port = addr.port
if connection_string.startswith('tcp'):
return get_postgresql_tcp_connection_string(port, dbname)
else:
port = re.search(r'\.s\.PGSQL\.(\d+)', addr.path)
if port:
port = port.group(1)
return get_postgresql_unix_connection_string(port, dbname)
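# Examples (illustrative, assuming the PostgreSQL driver): 'tcp://localhost:10003'
# maps to 'postgresql://localhost:10003/<dbname>', while a unix socket string like
# 'unix:///tmp/.s.PGSQL.5432' maps to 'postgresql:///<dbname>?host=/tmp&port=5432'.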
def get_postgresql_unix_connection_string(port, dbname):
return '{}:///{}?host={}&port={}'.format(DB_DRIVER, dbname, PG_UNIX_HOST, port)
def get_postgresql_tcp_connection_string(port, dbname):
return '{}://localhost:{}/{}'.format(DB_DRIVER, port, dbname)
def get_tcp_connection_string(port):
return 'tcp://localhost:{}'.format(port)
def socket_path_from_connection_string(connection_string):
if '://' in connection_string:
return connection_string.split('://')[1]
else:
return connection_string
def acra_api_connection_string(port):
return "tcp://localhost:{}".format(port)
def get_ocsp_server_connection_string(port=None):
if not port:
port = OCSP_SERVER_PORT
return 'http://127.0.0.1:{}'.format(port)
def get_crl_http_server_connection_string(port=None):
if not port:
port = CRL_HTTP_SERVER_PORT
return 'http://127.0.0.1:{}'.format(port)
def fork(func):
process = func()
count = 0
step = FORK_TIMEOUT / FORK_FAIL_SLEEP
while count <= FORK_TIMEOUT:
if process.poll() is None:
logging.info("forked %s [%s]", process.args[0], process.pid)
return process
count += step
time.sleep(FORK_FAIL_SLEEP)
stop_process(process)
raise Exception("Can't fork")
def fork_ocsp_server(port: int, check_connection: bool=True):
logging.info("fork OpenSSL OCSP server with port {}".format(port))
ocsp_server_connection = get_ocsp_server_connection_string(port)
args = {
'port': port,
'index': TEST_TLS_OCSP_INDEX,
'rsigner': TEST_TLS_OCSP_CERT,
'rkey': TEST_TLS_OCSP_KEY,
'CA': TEST_TLS_CA,
'ignore_err': None,
}
cli_args = sorted([f'-{k}={v}' if v is not None else f'-{k}' for k, v in args.items()])
print('openssl ocsp args: {}'.format(' '.join(cli_args)))
process = fork(lambda: subprocess.Popen(['openssl', 'ocsp'] + cli_args))
check_cmd = f"openssl ocsp -CAfile {TEST_TLS_CA} -issuer {TEST_TLS_CA} -cert {TEST_TLS_CLIENT_CERT} -url {ocsp_server_connection}"
if check_connection:
print('check OCSP server connection {}'.format(ocsp_server_connection))
try:
wait_command_success(check_cmd)
except:
stop_process(process)
raise
logging.info("fork openssl ocsp finished [pid={}]".format(process.pid))
return process
def fork_crl_http_server(port: int, check_connection: bool=True):
logging.info("fork HTTP server with port {}".format(port))
http_server_connection = get_crl_http_server_connection_string(port)
    # use the cwd= parameter of Popen instead of --directory to support Python 3.6, whose http.server does not accept --directory
cli_args = ['--bind', '127.0.0.1', str(port)]
print('python HTTP server args: {}'.format(' '.join(cli_args)))
process = fork(lambda: subprocess.Popen(['python3', '-m', 'http.server'] + cli_args, cwd=TEST_TLS_CRL_PATH))
if check_connection:
print('check HTTP server connection {}'.format(http_server_connection))
try:
wait_connection(port)
except:
stop_process(process)
raise
logging.info("fork HTTP server finished [pid={}]".format(process.pid))
return process
class ProcessStub(object):
pid = 'stub'
def kill(self, *args, **kwargs):
pass
def wait(self, *args, **kwargs):
pass
def terminate(self, *args, **kwargs):
pass
def poll(self, *args, **kwargs):
pass
# declare these globals as ProcessStub by default so that tearDownModule can stop them
# via stop_process without extra checks
OCSP_SERVER = ProcessStub()
CRL_HTTP_SERVER = ProcessStub()
def fork_certificate_validation_services():
global OCSP_SERVER, CRL_HTTP_SERVER
if TEST_WITH_TLS:
OCSP_SERVER = fork_ocsp_server(OCSP_SERVER_PORT)
CRL_HTTP_SERVER = fork_crl_http_server(CRL_HTTP_SERVER_PORT)
def kill_certificate_validation_services():
if TEST_WITH_TLS:
processes = [OCSP_SERVER, CRL_HTTP_SERVER]
stop_process(processes)
DEFAULT_VERSION = '1.8.0'
DEFAULT_BUILD_ARGS = []
ACRAROLLBACK_MIN_VERSION = "1.8.0"
Binary = collections.namedtuple(
'Binary', ['name', 'from_version', 'build_args'])
BINARIES = [
Binary(name='acra-server', from_version=DEFAULT_VERSION,
build_args=DEFAULT_BUILD_ARGS),
Binary(name='acra-backup', from_version=DEFAULT_VERSION,
build_args=DEFAULT_BUILD_ARGS),
Binary(name='acra-tokens', from_version=DEFAULT_VERSION,
build_args=DEFAULT_BUILD_ARGS),
Binary(name='acra-addzone', from_version=DEFAULT_VERSION,
build_args=DEFAULT_BUILD_ARGS),
Binary(name='acra-keymaker', from_version=DEFAULT_VERSION,
build_args=DEFAULT_BUILD_ARGS),
Binary(name='acra-keys', from_version=DEFAULT_VERSION,
build_args=DEFAULT_BUILD_ARGS),
Binary(name='acra-poisonrecordmaker', from_version=DEFAULT_VERSION,
build_args=DEFAULT_BUILD_ARGS),
Binary(name='acra-rollback', from_version=ACRAROLLBACK_MIN_VERSION,
build_args=DEFAULT_BUILD_ARGS),
Binary(name='acra-translator', from_version=DEFAULT_VERSION,
build_args=DEFAULT_BUILD_ARGS),
Binary(name='acra-rotate', from_version=DEFAULT_VERSION,
build_args=DEFAULT_BUILD_ARGS),
]
BUILD_TAGS = os.environ.get("TEST_BUILD_TAGS", '')
def build_binaries():
"""Build Acra CE binaries for testing."""
builds = [
(binary.from_version, ['go', 'build', '-o={}'.format(os.path.join(BINARY_OUTPUT_FOLDER, binary.name)), '-tags={}'.format(BUILD_TAGS)] +
binary.build_args +
['github.com/cossacklabs/acra/cmd/{}'.format(binary.name)])
for binary in BINARIES
]
go_version = get_go_version()
GREATER, EQUAL, LESS = (1, 0, -1)
for version, build in builds:
if semver.VersionInfo.parse(go_version).compare(version) == LESS:
continue
# try to build 3 times with timeout
build_count = 3
for i in range(build_count):
try:
subprocess.check_call(build, cwd=os.getcwd(), timeout=PROCESS_CALL_TIMEOUT)
break
except (AssertionError, subprocess.TimeoutExpired):
if i == (build_count-1):
raise
continue
def clean_binaries():
for i in BINARIES:
try:
os.remove(os.path.join(BINARY_OUTPUT_FOLDER, i.name))
except:
pass
def clean_misc():
pass
PROCESS_CALL_TIMEOUT = 120
def get_go_version():
output = subprocess.check_output(['go', 'version'])
# example: go1.7.2 or go1.7
version = re.search(r'go([\d.]+)', output.decode('utf-8')).group(1)
# convert to 3 part semver format
if version.count('.') < 2:
version = '{}.0'.format(version)
return version
def drop_tables():
engine_raw = sa.create_engine(
'{}://{}:{}/{}'.format(DB_DRIVER, DB_HOST, DB_PORT, DB_NAME),
connect_args=connect_args)
metadata.drop_all(engine_raw)
# Set this to False to not rebuild binaries on setup.
CLEAN_BINARIES = utils.get_bool_env('TEST_CLEAN_BINARIES', default=True)
# Set this to False to skip building binaries entirely.
BUILD_BINARIES = True
def setUpModule():
global zones
global KEYS_FOLDER
global TLS_CERT_CLIENT_ID_1
global TLS_CERT_CLIENT_ID_2
clean_misc()
KEYS_FOLDER = tempfile.TemporaryDirectory()
if CLEAN_BINARIES:
clean_binaries()
if BUILD_BINARIES:
build_binaries()
    # must be set before any key generator call or fork of acra/proxy servers
os.environ.setdefault(ACRA_MASTER_KEY_VAR_NAME, get_master_key())
    # first, keypairs used without zones are generated from the client TLS certificates
assert create_client_keypair_from_certificate(TEST_TLS_CLIENT_CERT) == 0
assert create_client_keypair_from_certificate(TEST_TLS_CLIENT_2_CERT) == 0
TLS_CERT_CLIENT_ID_1 = extract_client_id_from_cert(TEST_TLS_CLIENT_CERT)
TLS_CERT_CLIENT_ID_2 = extract_client_id_from_cert(TEST_TLS_CLIENT_2_CERT)
# add two zones
zones.append(json.loads(subprocess.check_output(
[os.path.join(BINARY_OUTPUT_FOLDER, 'acra-addzone'), '--keys_output_dir={}'.format(KEYS_FOLDER.name)],
cwd=os.getcwd(), timeout=PROCESS_CALL_TIMEOUT).decode('utf-8')))
zones.append(json.loads(subprocess.check_output(
[os.path.join(BINARY_OUTPUT_FOLDER, 'acra-addzone'), '--keys_output_dir={}'.format(KEYS_FOLDER.name)],
cwd=os.getcwd(), timeout=PROCESS_CALL_TIMEOUT).decode('utf-8')))
socket.setdefaulttimeout(SOCKET_CONNECT_TIMEOUT)
drop_tables()
fork_certificate_validation_services()
def extract_client_id_from_cert(tls_cert, extractor=TLS_CLIENT_ID_SOURCE_DN):
res = json.loads(subprocess.check_output([
os.path.join(BINARY_OUTPUT_FOLDER, 'acra-keys'),
'extract-client-id',
'--tls_identifier_extractor_type={}'.format(extractor),
'--tls_cert={}'.format(tls_cert),
'--print_json'
],
cwd=os.getcwd(), timeout=PROCESS_CALL_TIMEOUT).decode('utf-8'))
return res['client_id']
def tearDownModule():
if CLEAN_BINARIES:
clean_binaries()
clean_misc()
KEYS_FOLDER.cleanup()
    # use list.clear() instead of `zones = []` to avoid rebinding the name to a new object,
    # so that other test modules keep seeing the same list
zones.clear()
clean_test_data()
for path in [MASTER_KEY_PATH]:
try:
os.remove(path)
except:
pass
drop_tables()
kill_certificate_validation_services()
if sys.version_info[1] > 6:
ConnectionArgs = collections.namedtuple(
"ConnectionArgs",
field_names=["user", "password", "host", "port", "dbname",
"ssl_ca", "ssl_key", "ssl_cert", "format"],
# 'format' is optional, other fields are required.
defaults=[None])
else:
class ConnectionArgs:
def __init__(self, user=None, password=None, host=None, port=None, dbname=None,
ssl_ca=None, ssl_key=None, ssl_cert=None, format=None):
self.user = user
self.password = password
self.host = host
self.port = port
self.dbname = dbname
self.ssl_ca = ssl_ca
self.ssl_key = ssl_key
self.ssl_cert = ssl_cert
self.format = format
class QueryExecutor(object):
def __init__(self, connection_args):
self.connection_args = connection_args
def execute(self, query, args=None):
raise NotImplementedError
def execute_prepared_statement(self, query, args=None):
raise NotImplementedError
class PyMysqlExecutor(QueryExecutor):
def execute(self, query, args=None):
if args:
self.fail("<args> param for executor {} not supported now".format(self.__class__))
with contextlib.closing(pymysql.connect(
host=self.connection_args.host, port=self.connection_args.port,
user=self.connection_args.user,
password=self.connection_args.password,
db=self.connection_args.dbname,
cursorclass=pymysql.cursors.DictCursor,
**pymysql_tls_args)) as connection:
with connection.cursor() as cursor:
cursor.execute(query, args)
return cursor.fetchall()
def execute_prepared_statement(self, query, args=None):
if args:
self.fail("<args> param for executor {} not supported now".format(self.__class__))
with contextlib.closing(pymysql.connect(
host=self.connection_args.host, port=self.connection_args.port,
user=self.connection_args.user,
password=self.connection_args.password,
db=self.connection_args.dbname,
cursorclass=pymysql.cursors.DictCursor,
**pymysql_tls_args)) as connection:
with connection.cursor() as cursor:
cursor.execute("PREPARE test_statement FROM {}".format(str(sa.literal(query).compile(dialect=db_dialect, compile_kwargs={"literal_binds": True}))))
cursor.execute('EXECUTE test_statement')
return cursor.fetchall()
class MysqlExecutor(QueryExecutor):
def _result_to_dict(self, description, data):
"""convert list of tuples of rows to list of dicts"""
columns_name = [i[0] for i in description]
result = []
for row in data:
row_data = {column_name: value
for column_name, value in zip(columns_name, row)}
result.append(row_data)
return result
def execute(self, query, args=None):
if args is None:
args = []
with contextlib.closing(mysql.connector.Connect(
use_unicode=False, raw=True, charset='ascii',
host=self.connection_args.host, port=self.connection_args.port,
user=self.connection_args.user,
password=self.connection_args.password,
database=self.connection_args.dbname,
ssl_ca=self.connection_args.ssl_ca,
ssl_cert=self.connection_args.ssl_cert,
ssl_key=self.connection_args.ssl_key,
ssl_disabled=not TEST_WITH_TLS)) as connection:
with contextlib.closing(connection.cursor()) as cursor:
cursor.execute(query, args)
data = cursor.fetchall()
result = self._result_to_dict(cursor.description, data)
return result
def execute_prepared_statement(self, query, args=None):
if args is None:
args = []
with contextlib.closing(mysql.connector.Connect(
use_unicode=False, charset='ascii',
host=self.connection_args.host, port=self.connection_args.port,
user=self.connection_args.user,
password=self.connection_args.password,
database=self.connection_args.dbname,
ssl_ca=self.connection_args.ssl_ca,
ssl_cert=self.connection_args.ssl_cert,
ssl_key=self.connection_args.ssl_key,
ssl_disabled=not TEST_WITH_TLS)) as connection:
with contextlib.closing(connection.cursor(prepared=True)) as cursor:
cursor.execute(query, args)
data = cursor.fetchall()
result = self._result_to_dict(cursor.description, data)
return result
def execute_prepared_statement_no_result(self, query, args=None):
if args is None:
args = []
with contextlib.closing(mysql.connector.Connect(
use_unicode=False, charset='ascii',
host=self.connection_args.host, port=self.connection_args.port,
user=self.connection_args.user,
password=self.connection_args.password,
database=self.connection_args.dbname,
ssl_ca=self.connection_args.ssl_ca,
ssl_cert=self.connection_args.ssl_cert,
ssl_key=self.connection_args.ssl_key,
ssl_disabled=not TEST_WITH_TLS)) as connection:
with contextlib.closing(connection.cursor(prepared=True)) as cursor:
cursor.execute(query, args)
connection.commit()
class AsyncpgExecutor(QueryExecutor):
TextFormat = 'text'
BinaryFormat = 'binary'
def _connect(self, loop):
ssl_context = ssl.create_default_context(cafile=self.connection_args.ssl_ca)
ssl_context.load_cert_chain(self.connection_args.ssl_cert, self.connection_args.ssl_key)
ssl_context.check_hostname = True
return loop.run_until_complete(
asyncpg.connect(
host=self.connection_args.host, port=self.connection_args.port,
user=self.connection_args.user, password=self.connection_args.password,
database=self.connection_args.dbname, ssl=ssl_context,
**asyncpg_connect_args))
def _set_text_format(self, conn):
"""Force text format to numeric types."""
loop = asyncio.get_event_loop()
for pg_type in ['int2', 'int4', 'int8']:
loop.run_until_complete(
conn.set_type_codec(pg_type,
schema='pg_catalog',
encoder=str,
decoder=int,
format='text')
)
for pg_type in ['float4', 'float8']:
loop.run_until_complete(
conn.set_type_codec(pg_type,
schema='pg_catalog',
encoder=str,
decoder=float,
format='text')
)
def execute_prepared_statement(self, query, args=None):
if not args:
args = []
loop = asyncio.get_event_loop()
conn = self._connect(loop)
if self.connection_args.format == self.TextFormat:
self._set_text_format(conn)
try:
stmt = loop.run_until_complete(
conn.prepare(query, timeout=STATEMENT_TIMEOUT))
result = loop.run_until_complete(
stmt.fetch(*args, timeout=STATEMENT_TIMEOUT))
return result
finally:
conn.terminate()
def execute(self, query, args=None):
if not args:
args = []
loop = asyncio.get_event_loop()
conn = self._connect(loop)
if self.connection_args.format == self.TextFormat:
self._set_text_format(conn)
try:
result = loop.run_until_complete(
conn.fetch(query, *args, timeout=STATEMENT_TIMEOUT))
return result
finally:
loop.run_until_complete(conn.close(timeout=STATEMENT_TIMEOUT))
class Psycopg2Executor(QueryExecutor):
def execute(self, query, args=None):
if args:
self.fail("<args> param for executor {} not supported now".format(self.__class__))
connection_args = get_connect_args(self.connection_args.port)
with psycopg2.connect(
host=self.connection_args.host,
dbname=self.connection_args.dbname, **connection_args) as connection:
with connection.cursor(
cursor_factory=psycopg2.extras.DictCursor) as cursor:
cursor.execute(query, args)
data = cursor.fetchall()
utils.memoryview_rows_to_bytes(data)
return data
def execute_prepared_statement(self, query, args=None):
if args:
self.fail("<args> param for executor {} not supported now".format(self.__class__))
kwargs = get_connect_args(self.connection_args.port)
with psycopg2.connect(
host=self.connection_args.host,
dbname=self.connection_args.dbname, **kwargs) as connection:
with connection.cursor(
cursor_factory=psycopg2.extras.DictCursor) as cursor:
cursor.execute("prepare test_statement as {}".format(query))
cursor.execute("execute test_statement")
data = cursor.fetchall()
utils.memoryview_rows_to_bytes(data)
return data
class KeyMakerTest(unittest.TestCase):
def test_key_length(self):
key_size = 32
def random_keys(size):
if KEYSTORE_VERSION == 'v1':
# Keystore v1 uses simple binary data for keys
value = os.urandom(size)
elif KEYSTORE_VERSION == 'v2':
# Keystore v2 uses more complex JSON format
encryption = os.urandom(size)
signature = os.urandom(size)
keys = {
'encryption': b64encode(encryption).decode('ascii'),
'signature': b64encode(signature).decode('ascii'),
}
value = json.dumps(keys).encode('ascii')
else:
self.fail("keystore version not supported")
return {ACRA_MASTER_KEY_VAR_NAME: b64encode(value)}
with tempfile.TemporaryDirectory() as folder:
with self.assertRaises(subprocess.CalledProcessError) as exc:
subprocess.check_output(
[os.path.join(BINARY_OUTPUT_FOLDER, 'acra-keymaker'), '--keystore={}'.format(KEYSTORE_VERSION),
'--keys_output_dir={}'.format(folder),
'--keys_public_output_dir={}'.format(folder)],
env=random_keys(key_size - 1))
with tempfile.TemporaryDirectory() as folder:
subprocess.check_output(
[os.path.join(BINARY_OUTPUT_FOLDER, 'acra-keymaker'), '--keystore={}'.format(KEYSTORE_VERSION),
'--keys_output_dir={}'.format(folder),
'--keys_public_output_dir={}'.format(folder)],
env=random_keys(key_size))
with tempfile.TemporaryDirectory() as folder:
subprocess.check_output(
[os.path.join(BINARY_OUTPUT_FOLDER, 'acra-keymaker'), '--keystore={}'.format(KEYSTORE_VERSION),
'--keys_output_dir={}'.format(folder),
'--keys_public_output_dir={}'.format(folder)],
env=random_keys(key_size * 2))
def test_gen_keys_with_empty_client_id(self):
        # these keys do not require a client_id for generation
with tempfile.TemporaryDirectory() as folder:
subprocess.check_output(
[os.path.join(BINARY_OUTPUT_FOLDER, 'acra-keymaker'), '--keystore={}'.format(KEYSTORE_VERSION),
'--keys_output_dir={}'.format(folder),
"--client_id=''",
'--generate_poisonrecord_keys',
'--generate_log_key',
'--keys_public_output_dir={}'.format(folder)])
            # check that keymaker does not fail when the output directory does not exist yet
subprocess.check_output(
[os.path.join(BINARY_OUTPUT_FOLDER, 'acra-keymaker'),
'--client_id=',
'--tls_cert={}'.format(TEST_TLS_CLIENT_CERT),
'--keystore={}'.format(KEYSTORE_VERSION),
'--generate_symmetric_storage_key',
'--keys_output_dir={}'.format('/tmp/.testkeys')])
shutil.rmtree('/tmp/.testkeys')
class PrometheusMixin(object):
_prometheus_addresses_field_name = 'prometheus_addresses'
LOG_METRICS = os.environ.get('TEST_LOG_METRICS', False)
def get_prometheus_address(self, port):
addr = 'tcp://localhost:{}'.format(port)
if not hasattr(self, self._prometheus_addresses_field_name):
self.prometheus_addresses = []
self.prometheus_addresses.append(addr)
return addr
def clear_prometheus_addresses(self):
setattr(self, self._prometheus_addresses_field_name, [])
def _get_metrics_url(self, address):
addr = urlparse(address)
return 'http://{}/metrics'.format(addr.netloc)
def log_prometheus_metrics(self):
if not self.LOG_METRICS:
return
for address in getattr(self, self._prometheus_addresses_field_name, []):
response = requests.get(self._get_metrics_url(address))
if response.status_code == 200:
logging.info(response.text)
else:
logging.error(
"Can't fetch prometheus metrics from address: %s",
[address])
class TLSAuthenticationByDistinguishedNameMixin(object):
def get_acraserver_connection_string(self, port=None):
"""unix socket connection string to allow connect directory to acra by db driver"""
if not port:
port = self.ACRASERVER_PORT
return get_tcp_connection_string(port)
def get_identifier_extractor_type(self):
return TLS_CLIENT_ID_SOURCE_DN
class TLSAuthenticationBySerialNumberMixin(TLSAuthenticationByDistinguishedNameMixin):
def get_identifier_extractor_type(self):
return TLS_CLIENT_ID_SOURCE_SERIAL
class VaultClient:
version_options = {
'v1': dict(version=1),
'v2': dict(version=2),
}
def __init__(self, verify=None):
self.url = os.environ.get('VAULT_ADDRESS', 'http://localhost:8201')
self.token = os.environ.get('VAULT_CLIENT_TOKEN', 'root_token')
self.vault_client = Client(url=self.url, token=self.token, verify=verify)
def get_vault_url(self):
return self.url
def get_vault_token(self):
return self.token
def enable_kv_secret_engine(self, mount_path=None):
self.vault_client.sys.enable_secrets_engine(
backend_type='kv',
path=mount_path,
options=self.version_options[VAULT_KV_ENGINE_VERSION],
)
time.sleep(2)
def disable_kv_secret_engine(self, mount_path=None):
self.vault_client.sys.disable_secrets_engine(path=mount_path)
def put_master_key_by_version(self, path, version, mount_point=None):
self.master_key = get_master_key()
master_secret = {
'acra_master_key': self.master_key
}
kv_secret_engine = None
if version == "v1":
kv_secret_engine = self.vault_client.secrets.kv.v1
elif version == "v2":
kv_secret_engine = self.vault_client.secrets.kv.v2
kv_secret_engine.create_or_update_secret(
path=path,
secret=master_secret,
mount_point=mount_point,
)
def get_vault_cli_args(self, mount_path=None, secret_path=None):
args = {
'vault_connection_api_string': self.vault_client.url,
'vault_secrets_path': '{0}/{1}'.format(mount_path, secret_path)
}
if TEST_SSL_VAULT:
args['vault_tls_transport_enable'] = True
args['vault_tls_ca_path'] = TEST_VAULT_TLS_CA
return args
class BaseTestCase(PrometheusMixin, unittest.TestCase):
DEBUG_LOG = os.environ.get('DEBUG_LOG', True)
    # for debugging with a manually run acra-server
EXTERNAL_ACRA = False
ACRASERVER_PORT = int(os.environ.get('TEST_ACRASERVER_PORT', 10003))
ACRASERVER_PROMETHEUS_PORT = int(os.environ.get('TEST_ACRASERVER_PROMETHEUS_PORT', 11004))
ACRA_BYTEA = 'pgsql_hex_bytea'
DB_BYTEA = 'hex'
WHOLECELL_MODE = False
ZONE = False
TEST_DATA_LOG = False
acra = ProcessStub()
def checkSkip(self):
if not TEST_WITH_TLS:
self.skipTest("running tests with TLS")
def wait_acraserver_connection(self, connection_string: str, *args, **kwargs):
if connection_string.startswith('unix'):
return wait_unix_socket(
socket_path_from_connection_string(connection_string),
*args, **kwargs)
else:
return wait_connection(connection_string.split(':')[-1])
def get_acraserver_connection_string(self, port=None):
if not port:
port = self.ACRASERVER_PORT
return get_tcp_connection_string(port)
def get_acraserver_api_connection_string(self, port=None):
if not port:
port = self.ACRASERVER_PORT + 1
elif port == self.ACRASERVER_PORT:
port = port + 1
return acra_api_connection_string(port)
def get_acraserver_bin_path(self):
return os.path.join(BINARY_OUTPUT_FOLDER, 'acra-server')
def with_tls(self):
return TEST_WITH_TLS
def _fork_acra(self, acra_kwargs, popen_kwargs):
logging.info("fork acra")
connection_string = self.get_acraserver_connection_string(
acra_kwargs.get('incoming_connection_port', self.ACRASERVER_PORT))
api_connection_string = self.get_acraserver_api_connection_string(
acra_kwargs.get('incoming_connection_api_port')
)
for path in [socket_path_from_connection_string(connection_string), socket_path_from_connection_string(api_connection_string)]:
try:
os.remove(path)
except:
pass
args = {
'db_host': DB_HOST,
'db_port': DB_PORT,
'logging_format': 'cef',
            # in tests we don't need to wait for connections to close
'incoming_connection_close_timeout': 0,
self.ACRA_BYTEA: 'true',
'tls_ocsp_from_cert': 'ignore',
'tls_crl_from_cert': 'ignore',
'incoming_connection_string': connection_string,
'incoming_connection_api_string': api_connection_string,
'acrastruct_wholecell_enable': 'true' if self.WHOLECELL_MODE else 'false',
'acrastruct_injectedcell_enable': 'false' if self.WHOLECELL_MODE else 'true',
'd': 'true' if self.DEBUG_LOG else 'false',
'zonemode_enable': 'true' if self.ZONE else 'false',
            'http_api_enable': 'true',
'keystore_cache_on_start_enable': 'false',
'keys_dir': KEYS_FOLDER.name,
}
        # keystore v2 does not support caching, disable it for now
if KEYSTORE_VERSION == 'v2':
args['keystore_cache_size'] = -1
if TEST_WITH_TRACING:
args['tracing_log_enable'] = 'true'
if TEST_TRACE_TO_JAEGER:
args['tracing_jaeger_enable'] = 'true'
if self.LOG_METRICS:
args['incoming_connection_prometheus_metrics_string'] = self.get_prometheus_address(
self.ACRASERVER_PROMETHEUS_PORT)
if self.with_tls():
args['tls_key'] = TEST_TLS_SERVER_KEY
args['tls_cert'] = TEST_TLS_SERVER_CERT
args['tls_ca'] = TEST_TLS_CA
args['tls_auth'] = ACRA_TLS_AUTH
args['tls_ocsp_url'] = 'http://localhost:{}'.format(OCSP_SERVER_PORT)
args['tls_ocsp_from_cert'] = 'use'
args['tls_crl_url'] = 'http://localhost:{}/crl.pem'.format(CRL_HTTP_SERVER_PORT)
args['tls_crl_from_cert'] = 'use'
else:
# Explicitly disable certificate validation by default since otherwise we may end up
# in a situation when some certificate contains OCSP or CRL URI while corresponding
# services were not started by this script (because TLS testing was disabled)
args['tls_ocsp_from_cert'] = 'ignore'
args['tls_crl_from_cert'] = 'ignore'
if TEST_MYSQL:
args['mysql_enable'] = 'true'
args['postgresql_enable'] = 'false'
args.update(acra_kwargs)
if not popen_kwargs:
popen_kwargs = {}
cli_args = sorted(['--{}={}'.format(k, v) for k, v in args.items() if v is not None])
print("acra-server args: {}".format(' '.join(cli_args)))
process = fork(lambda: subprocess.Popen([self.get_acraserver_bin_path()] + cli_args,
**popen_kwargs))
try:
self.wait_acraserver_connection(connection_string)
except:
stop_process(process)
raise
logging.info("fork acra finished [pid={}]".format(process.pid))
return process
def fork_acra(self, popen_kwargs: dict=None, **acra_kwargs: dict):
return self._fork_acra(acra_kwargs, popen_kwargs)
def fork_translator(self, translator_kwargs, popen_kwargs=None):
logging.info("fork acra-translator")
from utils import load_default_config
default_config = load_default_config("acra-translator")
default_args = {
'incoming_connection_close_timeout': 0,
'keys_dir': KEYS_FOLDER.name,
'logging_format': 'cef',
'keystore_cache_on_start_enable': 'false',
}
default_config.update(default_args)
default_config.update(translator_kwargs)
if not popen_kwargs:
popen_kwargs = {}
if self.DEBUG_LOG:
default_config['d'] = 1
        # keystore v2 does not support caching, disable it for now
if KEYSTORE_VERSION == 'v2':
default_config['keystore_cache_size'] = -1
if TEST_WITH_TRACING:
default_config['tracing_log_enable'] = 1
if TEST_TRACE_TO_JAEGER:
default_config['tracing_jaeger_enable'] = 1
cli_args = ['--{}={}'.format(k, v) for k, v in default_config.items()]
translator = fork(lambda: subprocess.Popen([os.path.join(BINARY_OUTPUT_FOLDER, 'acra-translator')] + cli_args, **popen_kwargs))
try:
if default_config['incoming_connection_grpc_string']:
wait_connection(urlparse(default_config['incoming_connection_grpc_string']).port)
if default_config['incoming_connection_http_string']:
wait_connection(urlparse(default_config['incoming_connection_http_string']).port)
except:
stop_process(translator)
raise
return translator
def setUp(self):
self.checkSkip()
try:
if not self.EXTERNAL_ACRA:
self.acra = self.fork_acra()
base_args = get_connect_args(port=self.ACRASERVER_PORT, sslmode='require')
tls_args_1 = base_args.copy()
tls_args_1.update(get_tls_connection_args(TEST_TLS_CLIENT_KEY, TEST_TLS_CLIENT_CERT))
connect_str = get_engine_connection_string(
self.get_acraserver_connection_string(self.ACRASERVER_PORT), DB_NAME)
self.engine1 = sa.create_engine(connect_str, connect_args=tls_args_1)
tls_args_2 = base_args.copy()
tls_args_2.update(get_tls_connection_args(TEST_TLS_CLIENT_2_KEY, TEST_TLS_CLIENT_2_CERT))
self.engine2 = sa.create_engine(
get_engine_connection_string(
self.get_acraserver_connection_string(self.ACRASERVER_PORT), DB_NAME), connect_args=tls_args_2)
self.engine_raw = sa.create_engine(
'{}://{}:{}/{}'.format(DB_DRIVER, DB_HOST, DB_PORT, DB_NAME),
connect_args=connect_args)
self.engines = [self.engine1, self.engine2, self.engine_raw]
metadata.create_all(self.engine_raw)
self.engine_raw.execute('delete from test;')
for engine in self.engines:
count = 0
                # retry with a sleep if acra is not up yet
while True:
try:
if TEST_MYSQL:
engine.execute("select 1;")
else:
engine.execute(
"UPDATE pg_settings SET setting = '{}' "
"WHERE name = 'bytea_output'".format(self.DB_BYTEA))
break
except Exception as e:
time.sleep(SETUP_SQL_COMMAND_TIMEOUT)
count += 1
if count == SQL_EXECUTE_TRY_COUNT:
raise
except:
self.tearDown()
raise
def tearDown(self):
try:
self.log_prometheus_metrics()
self.clear_prometheus_addresses()
except:
pass
try:
self.engine_raw.execute('delete from test;')
except:
pass
for engine in getattr(self, 'engines', []):
engine.dispose()
stop_process([getattr(self, 'acra', ProcessStub())])
send_signal_by_process_name('acra-server', signal.SIGKILL)
def log(self, data, expected=b'<no expected value>',
storage_client_id=None, zone_id=None,
poison_key=False):
"""this function for printing data which used in test and for
reproducing error with them if any error detected"""
if not self.TEST_DATA_LOG:
return
def key_name():
if storage_client_id:
return 'client storage, id={}'.format(storage_client_id)
elif zone_id:
return 'zone storage, id={}'.format(zone_id)
elif poison_key:
return 'poison record key'
else:
return 'unknown'
log_entry = {
'master_key': get_master_key(),
'key_name': key_name(),
'data': b64encode(data).decode('ascii'),
'expected': b64encode(expected).decode('ascii'),
}
if storage_client_id:
public_key = read_storage_public_key(storage_client_id, KEYS_FOLDER.name)
private_key = read_storage_private_key(KEYS_FOLDER.name, storage_client_id)
log_entry['public_key'] = b64encode(public_key).decode('ascii')
log_entry['private_key'] = b64encode(private_key).decode('ascii')
if zone_id:
            public_key = read_zone_public_key(zone_id, KEYS_FOLDER.name)
            private_key = read_zone_private_key(KEYS_FOLDER.name, zone_id)
log_entry['zone_public'] = b64encode(public_key).decode('ascii')
log_entry['zone_private'] = b64encode(private_key).decode('ascii')
log_entry['zone_id'] = zone_id
if poison_key:
public_key = read_poison_public_key(KEYS_FOLDER.name)
private_key = read_poison_private_key(KEYS_FOLDER.name)
log_entry['public_key'] = b64encode(public_key).decode('ascii')
log_entry['private_key'] = b64encode(private_key).decode('ascii')
log_entry['poison_record'] = b64encode(get_poison_record()).decode('ascii')
logging.debug("test log: {}".format(json.dumps(log_entry)))
class AcraCatchLogsMixin(object):
def __init__(self, *args, **kwargs):
self.log_files = {}
super(AcraCatchLogsMixin, self).__init__(*args, **kwargs)
def read_log(self, process):
with open(self.log_files[process].name, 'r', errors='replace',
encoding='utf-8') as f:
log = f.read()
print(log.encode(encoding='utf-8', errors='replace'))
return log
def fork_acra(self, popen_kwargs: dict=None, **acra_kwargs: dict):
log_file = tempfile.NamedTemporaryFile('w+', encoding='utf-8')
popen_args = {
'stderr': subprocess.STDOUT,
'stdout': log_file,
'close_fds': True,
'bufsize': 0,
}
process = super(AcraCatchLogsMixin, self).fork_acra(
popen_args, **acra_kwargs
)
assert process
        # register the process so we don't forget to close all descriptors
self.log_files[process] = log_file
return process
def tearDown(self, *args, **kwargs):
super(AcraCatchLogsMixin, self).tearDown(*args, **kwargs)
for process, log_file in self.log_files.items():
log_file.close()
try:
os.remove(log_file.name)
except:
pass
stop_process(process)
class AcraTranslatorMixin(object):
def get_identifier_extractor_type(self):
return TLS_CLIENT_ID_SOURCE_DN
def get_http_schema(self):
return 'https'
def get_http_default_kwargs(self):
return {
'timeout': REQUEST_TIMEOUT,
'verify': TEST_TLS_CA,
# https://requests.readthedocs.io/en/master/user/advanced/#client-side-certificates
# first crt, second key
'cert': (TEST_TLS_CLIENT_CERT, TEST_TLS_CLIENT_KEY),
}
def http_decrypt_request(self, port, client_id, zone_id, acrastruct):
api_url = '{}://localhost:{}/v1/decrypt'.format(self.get_http_schema(), port)
if zone_id:
api_url = '{}?zone_id={}'.format(api_url, zone_id)
kwargs = self.get_http_default_kwargs()
kwargs['data'] = acrastruct
with requests.post(api_url, **kwargs) as response:
return response.content
def http_encrypt_request(self, port, client_id, zone_id, data):
api_url = '{}://localhost:{}/v1/encrypt'.format(self.get_http_schema(), port)
if zone_id:
api_url = '{}?zone_id={}'.format(api_url, zone_id)
kwargs = self.get_http_default_kwargs()
kwargs['data'] = data
with requests.post(api_url, **kwargs) as response:
return response.content
def get_grpc_channel(self, port):
'''setup grpc to use tls client authentication'''
with open(TEST_TLS_CA, 'rb') as ca_file, open(TEST_TLS_CLIENT_KEY, 'rb') as key_file, open(TEST_TLS_CLIENT_CERT, 'rb') as cert_file:
ca_bytes = ca_file.read()
key_bytes = key_file.read()
cert_bytes = cert_file.read()
tls_credentials = grpc.ssl_channel_credentials(ca_bytes, key_bytes, cert_bytes)
return grpc.secure_channel('localhost:{}'.format(port), tls_credentials)
def grpc_encrypt_request(self, port, client_id, zone_id, data):
with self.get_grpc_channel(port) as channel:
stub = api_pb2_grpc.WriterStub(channel)
try:
if zone_id:
response = stub.Encrypt(api_pb2.EncryptRequest(
zone_id=zone_id.encode('ascii'), data=data,
client_id=client_id.encode('ascii')),
timeout=SOCKET_CONNECT_TIMEOUT)
else:
response = stub.Encrypt(api_pb2.EncryptRequest(
client_id=client_id.encode('ascii'), data=data),
timeout=SOCKET_CONNECT_TIMEOUT)
except grpc.RpcError as exc:
logging.info(exc)
return b''
return response.acrastruct
def grpc_decrypt_request(self, port, client_id, zone_id, acrastruct, raise_exception_on_failure=False):
with self.get_grpc_channel(port) as channel:
stub = api_pb2_grpc.ReaderStub(channel)
try:
if zone_id:
response = stub.Decrypt(api_pb2.DecryptRequest(
zone_id=zone_id.encode('ascii'), acrastruct=acrastruct,
client_id=client_id.encode('ascii')),
timeout=SOCKET_CONNECT_TIMEOUT)
else:
response = stub.Decrypt(api_pb2.DecryptRequest(
client_id=client_id.encode('ascii'), acrastruct=acrastruct),
timeout=SOCKET_CONNECT_TIMEOUT)
except grpc.RpcError as exc:
logging.info(exc)
if raise_exception_on_failure:
raise
return b''
return response.data
class HexFormatTest(BaseTestCase):
def testClientIDRead(self):
"""test decrypting with correct clientID and not decrypting with
incorrect clientID or using direct connection to db"""
client_id = TLS_CERT_CLIENT_ID_1
server_public1 = read_storage_public_key(client_id, KEYS_FOLDER.name)
data = get_pregenerated_random_data()
acra_struct = create_acrastruct(
data.encode('ascii'), server_public1)
row_id = get_random_id()
self.log(storage_client_id=client_id,
data=acra_struct, expected=data.encode('ascii'))
self.engine1.execute(
test_table.insert(),
{'id': row_id, 'data': acra_struct, 'raw_data': data})
result = self.engine1.execute(
sa.select([test_table])
.where(test_table.c.id == row_id))
row = result.fetchone()
self.assertEqual(row['data'], row['raw_data'].encode('utf-8'))
self.assertEqual(row['empty'], b'')
result = self.engine2.execute(
sa.select([test_table])
.where(test_table.c.id == row_id))
row = result.fetchone()
self.assertNotEqual(row['data'].decode('ascii', errors='ignore'),
row['raw_data'])
self.assertEqual(row['empty'], b'')
result = self.engine_raw.execute(
sa.select([test_table])
.where(test_table.c.id == row_id))
row = result.fetchone()
self.assertNotEqual(row['data'].decode('ascii', errors='ignore'),
row['raw_data'])
self.assertEqual(row['empty'], b'')
def testReadAcrastructInAcrastruct(self):
"""test correct decrypting acrastruct when acrastruct concatenated to
partial another acrastruct"""
client_id = TLS_CERT_CLIENT_ID_1
server_public1 = read_storage_public_key(client_id, KEYS_FOLDER.name)
incorrect_data = get_pregenerated_random_data()
correct_data = get_pregenerated_random_data()
suffix_data = get_pregenerated_random_data()[:10]
fake_offset = (3+45+84) - 4
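        # the constants presumably mirror the AcraStruct header layout (begin tag plus
        # key blocks); subtracting 4 leaves a deliberately truncated, undecryptable prefix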
fake_acra_struct = create_acrastruct(
incorrect_data.encode('ascii'), server_public1)[:fake_offset]
inner_acra_struct = create_acrastruct(
correct_data.encode('ascii'), server_public1)
data = fake_acra_struct + inner_acra_struct + suffix_data.encode('ascii')
correct_data = correct_data + suffix_data
row_id = get_random_id()
self.log(storage_client_id=client_id,
data=data,
expected=fake_acra_struct+correct_data.encode('ascii'))
self.engine1.execute(
test_table.insert(),
{'id': row_id, 'data': data, 'raw_data': correct_data})
result = self.engine1.execute(
sa.select([test_table])
.where(test_table.c.id == row_id))
row = result.fetchone()
try:
self.assertEqual(row['data'][fake_offset:],
row['raw_data'].encode('utf-8'))
self.assertEqual(row['data'][:fake_offset], fake_acra_struct[:fake_offset])
except:
print('incorrect data: {}\ncorrect data: {}\ndata: {}\n data len: {}'.format(
incorrect_data, correct_data, row['data'], len(row['data'])))
raise
self.assertEqual(row['empty'], b'')
result = self.engine2.execute(
sa.select([test_table])
.where(test_table.c.id == row_id))
row = result.fetchone()
self.assertNotEqual(row['data'][fake_offset:].decode('ascii', errors='ignore'),
row['raw_data'])
self.assertEqual(row['empty'], b'')
result = self.engine_raw.execute(
sa.select([test_table])
.where(test_table.c.id == row_id))
row = result.fetchone()
self.assertNotEqual(row['data'][fake_offset:].decode('ascii', errors='ignore'),
row['raw_data'])
self.assertEqual(row['empty'], b'')
class BaseBinaryPostgreSQLTestCase(BaseTestCase):
"""Setup test fixture for testing PostgreSQL extended protocol."""
def checkSkip(self):
super().checkSkip()
if not TEST_POSTGRESQL:
self.skipTest("test only PostgreSQL")
FORMAT = AsyncpgExecutor.BinaryFormat
def setUp(self):
super().setUp()
def executor_with_ssl(ssl_key, ssl_cert):
args = ConnectionArgs(
host=get_db_host(), port=self.ACRASERVER_PORT, dbname=DB_NAME,
user=DB_USER, password=DB_USER_PASSWORD,
ssl_ca=TEST_TLS_CA,
ssl_key=ssl_key,
ssl_cert=ssl_cert,
format=self.FORMAT,
)
return AsyncpgExecutor(args)
self.executor1 = executor_with_ssl(TEST_TLS_CLIENT_KEY, TEST_TLS_CLIENT_CERT)
self.executor2 = executor_with_ssl(TEST_TLS_CLIENT_2_KEY, TEST_TLS_CLIENT_2_CERT)
def compileQuery(self, query, parameters={}, literal_binds=False):
"""
Compile SQLAlchemy query and parameter dictionary
into SQL text and parameter list for the executor.
"""
# Ask SQLAlchemy to compile the query in database-agnostic SQL.
# After that manually replace placeholders in text. Unfortunately,
# passing "dialect=postgresql_dialect" does not seem to work :(
compile_kwargs = {"literal_binds": literal_binds}
query = str(query.compile(compile_kwargs=compile_kwargs))
values = []
for placeholder, value in parameters.items():
# SQLAlchemy default dialect has placeholders of form ":name".
# PostgreSQL syntax is "$n", with 1-based sequential parameters.
saPlaceholder = ':' + placeholder
pgPlaceholder = '$' + str(len(values) + 1)
# Replace and keep values only for those placeholders which
# are actually used in the query.
if saPlaceholder in query:
values.append(value)
query = query.replace(saPlaceholder, pgPlaceholder)
return query, values
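    # Illustrative sketch of compileQuery (not executed here; the bind name 'id' is an assumption):
    #   query = sa.select([test_table]).where(test_table.c.id == sa.bindparam('id'))
    #   text, values = self.compileQuery(query, {'id': 1})
    #   # text   -> 'SELECT ... FROM test WHERE test.id = $1'
    #   # values -> [1]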
def compileBulkInsertQuery(self, query, parameters={}, literal_binds=False):
"""
        Compile SQLAlchemy bulk insert query and its list of parameter dictionaries into SQL text and a parameter tuple for the executor.
        Regexp parsing is used to get the correct order of insert parameters; values are stored in a tuple in the same order.
"""
compile_kwargs = {"literal_binds": literal_binds}
query = str(query.compile(compile_kwargs=compile_kwargs))
values = []
# example of the insert string:
# INSERT INTO test_table (id, zone_id, nullable_column, empty) VALUES (:id, :zone_id, :nullable_column, :empty)
pattern_string = r'(INSERT INTO) (\S+).*\((.*?)\).*(VALUES).*\((.*?)\)(.*\;?)'
res = re.findall(pattern_string, query, re.IGNORECASE | re.DOTALL)
if len(res) > 0:
# regexp matching result should look like this:
# `id, zone_id, nullable_column, empty`
intos = str(res[0][2])
count = 1
for idx, params in enumerate(parameters):
# each value in bulk insert has unique suffix like ':id_m0'
suffix = '_m'+str(idx)
# so we need to split it by comma value to iterate over
for into_value in intos.split(', '):
values.append(params[into_value])
query = query.replace(':' + into_value + suffix, '$' + str(count))
count += 1
return query, tuple(values)
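    # compileBulkInsertQuery sketch (illustrative, not executed): a bulk insert compiled as
    # "... VALUES (:id_m0, :data_m0), (:id_m1, :data_m1)" with
    # parameters=[{'id': 1, 'data': b'a'}, {'id': 2, 'data': b'b'}]
    # is expected to become "... VALUES ($1, $2), ($3, $4)" with values == (1, b'a', 2, b'b').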
def compileInsertQuery(self, query, parameters={}, literal_binds=False):
"""
        Compile SQLAlchemy insert query and parameter dictionary into SQL text and a parameter tuple for the executor.
        Regexp parsing is used to get the correct order of insert parameters; values are stored in a tuple in the same order.
"""
compile_kwargs = {"literal_binds": literal_binds}
query = str(query.compile(compile_kwargs=compile_kwargs))
values = []
# example of the insert string:
# INSERT INTO test_table (id, zone_id, nullable_column, empty) VALUES (:id, :zone_id, :nullable_column, :empty)
pattern_string = r'(INSERT INTO) (\S+).*\((.*?)\).*(VALUES).*\((.*?)\)(.*\;?)'
res = re.findall(pattern_string, query, re.IGNORECASE | re.DOTALL)
if len(res) > 0:
# regexp matching result should look like this:
# `id, zone_id, nullable_column, empty`
intos = str(res[0][2])
count = 1
# so we need to split it by comma value to iterate over
for into_value in intos.split(', '):
values.append(parameters[into_value])
query = query.replace(':' + into_value, '$' + str(count))
count += 1
return query, tuple(values)
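    # compileInsertQuery sketch (illustrative, not executed):
    # "INSERT INTO t (id, data) VALUES (:id, :data)" with parameters={'id': 1, 'data': b'x'}
    # is expected to become "INSERT INTO t (id, data) VALUES ($1, $2)" with values == (1, b'x').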
class BaseBinaryMySQLTestCase(BaseTestCase):
"""Setup test fixture for testing MySQL extended protocol."""
def checkSkip(self):
super().checkSkip()
if not TEST_MYSQL:
self.skipTest("test only MySQL")
def setUp(self):
super().setUp()
def executor_with_ssl(ssl_key, ssl_cert):
args = ConnectionArgs(
host=get_db_host(), port=self.ACRASERVER_PORT, dbname=DB_NAME,
user=DB_USER, password=DB_USER_PASSWORD,
ssl_ca=TEST_TLS_CA,
ssl_key=ssl_key,
ssl_cert=ssl_cert,
)
return MysqlExecutor(args)
self.executor1 = executor_with_ssl(TEST_TLS_CLIENT_KEY, TEST_TLS_CLIENT_CERT)
self.executor2 = executor_with_ssl(TEST_TLS_CLIENT_2_KEY, TEST_TLS_CLIENT_2_CERT)
def compileInsertQuery(self, query, parameters={}, literal_binds=False):
"""
        Compile SQLAlchemy insert query and parameter dictionary into SQL text and a parameter tuple for the executor.
        Regexp parsing is used to get the correct order of insert parameters; values are stored in a tuple in the same order.
"""
compile_kwargs = {"literal_binds": literal_binds}
query = str(query.compile(compile_kwargs=compile_kwargs))
values = []
# example of the insert string:
# INSERT INTO test_table (id, zone_id, nullable_column, empty) VALUES (:id, :zone_id, :nullable_column, :empty)
pattern_string = r'(INSERT INTO) (\S+).*\((.*?)\).*(VALUES).*\((.*?)\)(.*\;?)'
res = re.findall(pattern_string, query, re.IGNORECASE | re.DOTALL)
if len(res) > 0:
# regexp matching result should look like this:
# `id, zone_id, nullable_column, empty`
intos = str(res[0][2])
# so we need to split it by comma value to iterate over
for into_value in intos.split(', '):
values.append(parameters[into_value])
query = query.replace(':' + into_value, '?')
return query, tuple(values)
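    # compileInsertQuery sketch (illustrative, not executed): the MySQL executor uses positional '?'
    # placeholders, so "INSERT INTO t (id, data) VALUES (:id, :data)" with parameters={'id': 1, 'data': b'x'}
    # is expected to become "INSERT INTO t (id, data) VALUES (?, ?)" with values == (1, b'x').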
def compileBulkInsertQuery(self, query, parameters={}, literal_binds=False):
"""
        Compile SQLAlchemy bulk insert query and its list of parameter dictionaries into SQL text and a parameter tuple for the executor.
        Regexp parsing is used to get the correct order of insert parameters; values are stored in a tuple in the same order.
"""
compile_kwargs = {"literal_binds": literal_binds}
query = str(query.compile(compile_kwargs=compile_kwargs))
values = []
# example of the insert string:
# INSERT INTO test_table (id, zone_id, nullable_column, empty) VALUES (:id, :zone_id, :nullable_column, :empty)
pattern_string = r'(INSERT INTO) (\S+).*\((.*?)\).*(VALUES).*\((.*?)\)(.*\;?)'
res = re.findall(pattern_string, query, re.IGNORECASE | re.DOTALL)
if len(res) > 0:
# regexp matching result should look like this:
# `id, zone_id, nullable_column, empty`
intos = str(res[0][2])
for idx, params in enumerate(parameters):
# each value in bulk insert contains unique suffix like ':id_m0'
suffix = '_m'+str(idx)
# so we need to split it by comma value to iterate over
for into_value in intos.split(', '):
values.append(params[into_value])
query = query.replace(':' + into_value + suffix, '?')
return query, tuple(values)
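    # compileBulkInsertQuery sketch (illustrative, not executed): bulk parameters carry '_mN' suffixes,
    # so ":id_m0, :data_m0, :id_m1, :data_m1" all become '?' and values are flattened in row order,
    # e.g. (1, b'a', 2, b'b') for two rows.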
def compileQuery(self, query, parameters={}, literal_binds=False):
"""
        Compile SQLAlchemy query and parameter dictionary into SQL text and a parameter tuple for the executor.
        Regexp parsing is used to get the correct order of parameters; values are stored in a tuple in the same order.
"""
compile_kwargs = {"literal_binds": literal_binds}
query = str(query.compile(compile_kwargs=compile_kwargs))
values = []
# parse all parameters like `:id` in the query
pattern_string = r'(:\w+)'
res = re.findall(pattern_string, query, re.IGNORECASE | re.DOTALL)
if len(res) > 0:
for placeholder in res:
                # keys in the parameters map have no ':' prefix, so trim it from the placeholder first
key = placeholder.lstrip(':')
values.append(parameters[key])
query = query.replace(placeholder, '?')
return query, tuple(values)
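    # compileQuery sketch (illustrative, not executed):
    # "SELECT ... WHERE id = :id AND empty = :empty" with parameters={'id': 1, 'empty': b''}
    # is expected to become "SELECT ... WHERE id = ? AND empty = ?" with values == (1, b''),
    # ordered by placeholder appearance in the query text.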
class BaseCensorTest(BaseTestCase):
CENSOR_CONFIG_FILE = 'default.yaml'
def fork_acra(self, popen_kwargs: dict=None, **acra_kwargs: dict):
acra_kwargs['acracensor_config_file'] = self.CENSOR_CONFIG_FILE
return self._fork_acra(acra_kwargs, popen_kwargs)
class FailedRunProcessMixin(object):
def getOutputFromProcess(self, args):
logger.info("run command '{}'".format(' '.join(args)))
process = subprocess.Popen(args, stderr=subprocess.PIPE)
        try:
            # 5 seconds is enough to start the binary and let it stop with an error
            _, stderr = process.communicate(timeout=5)
        finally:
            process.kill()
logger.debug(stderr)
return stderr.decode('utf-8')
def assertProcessHasNotMessage(self, args, status_code, expectedMessage):
logger.info("run command '{}'".format(' '.join(args)))
process = subprocess.Popen(args, stderr=subprocess.PIPE, cwd=os.getcwd())
try:
_, stderr = process.communicate(timeout=1)
logger.debug(stderr)
self.assertEqual(process.returncode, status_code)
            self.assertNotIn(expectedMessage.lower(), stderr.decode('utf-8').lower(),
                             "output contains a message that should not be present")
finally:
process.kill()
class TestCensorVersionChecks(BaseCensorTest, FailedRunProcessMixin):
def setUp(self):
# doesn't need to start acra-server and connections
pass
def tearDown(self):
# doesn't need to stop acra-server and connections
pass
def checkErrorMessage(self, configFile, expectedMessage):
args = [self.get_acraserver_bin_path(),
'--acracensor_config_file={}'.format(configFile),
# required param
'--db_host={}'.format(DB_HOST)
]
stderr = self.getOutputFromProcess(args)
self.assertIn(expectedMessage.lower(), stderr.lower())
def testWithoutVersion(self):
expectedMessage = 'level=error msg="can\'t setup censor" code=561 error="acra-censor\'s config is outdated"'
self.checkErrorMessage(abs_path('tests/acra-censor_configs/without_version.yaml'), expectedMessage)
def testNewerVersion(self):
expectedMessage = "acra-censor's config is outdated"
self.checkErrorMessage(abs_path('tests/acra-censor_configs/new_version.yaml'), expectedMessage)
def testIncorrectFormat(self):
expectedMessage = 'level=error msg="can\'t setup censor" code=561 error="strconv.parseuint: parsing'
self.checkErrorMessage(abs_path('tests/acra-censor_configs/incorrect_version_format.yaml'), expectedMessage)
class CensorBlacklistTest(BaseCensorTest):
CENSOR_CONFIG_FILE = abs_path('tests/acra-censor_configs/acra-censor_blacklist.yaml')
def testBlacklist(self):
connection_args = ConnectionArgs(host=get_db_host(), port=self.ACRASERVER_PORT,
user=DB_USER, password=DB_USER_PASSWORD,
dbname=DB_NAME, ssl_ca=TEST_TLS_CA,
ssl_key=TEST_TLS_CLIENT_KEY,
ssl_cert=TEST_TLS_CLIENT_CERT)
if TEST_MYSQL:
expectedException = (pymysql.err.OperationalError,
mysql.connector.errors.DatabaseError)
expectedExceptionInPreparedStatement = mysql.connector.errors.DatabaseError
executors = [PyMysqlExecutor(connection_args),
MysqlExecutor(connection_args)]
if TEST_POSTGRESQL:
expectedException = (psycopg2.ProgrammingError,
asyncpg.exceptions.SyntaxOrAccessError)
expectedExceptionInPreparedStatement = asyncpg.exceptions.SyntaxOrAccessError
executors = [Psycopg2Executor(connection_args),
AsyncpgExecutor(connection_args)]
testQueries = ["select * from test", # should be denied by query
"select * from acrarollback_output", # should be denied by table
"select data from test where id=1", # should be denied by pattern
"insert into test(id, data, empty) values(1, DEFAULT, '')"] # should be denied by pattern
for executor in executors:
for testQuery in testQueries:
with self.assertRaises(expectedException):
executor.execute(testQuery)
try:
executor.execute_prepared_statement(testQuery)
except psycopg2.ProgrammingError as e:
self.assertTrue(str(e) == "no results to fetch")
except expectedExceptionInPreparedStatement:
return
class CensorWhitelistTest(BaseCensorTest):
CENSOR_CONFIG_FILE = abs_path('tests/acra-censor_configs/acra-censor_whitelist.yaml')
def testWhitelist(self):
connection_args = ConnectionArgs(host=get_db_host(), port=self.ACRASERVER_PORT,
user=DB_USER, password=DB_USER_PASSWORD,
dbname=DB_NAME, ssl_ca=TEST_TLS_CA,
ssl_key=TEST_TLS_CLIENT_KEY,
ssl_cert=TEST_TLS_CLIENT_CERT)
if TEST_MYSQL:
expectedException = (pymysql.err.OperationalError,
mysql.connector.errors.DatabaseError)
expectedExceptionInPreparedStatement = mysql.connector.errors.DatabaseError
executors = [PyMysqlExecutor(connection_args),
MysqlExecutor(connection_args)]
if TEST_POSTGRESQL:
expectedException = (psycopg2.ProgrammingError,
asyncpg.exceptions.SyntaxOrAccessError)
expectedExceptionInPreparedStatement = asyncpg.exceptions.SyntaxOrAccessError
executors = [Psycopg2Executor(connection_args),
AsyncpgExecutor(connection_args)]
# all those queries should be denied because no matching allow rules specified
testQueries = ["select * from acrarollback_output",
"insert into test(id, data, empty) values(1, DEFAULT, '')"]
for executor in executors:
for testQuery in testQueries:
with self.assertRaises(expectedException):
executor.execute(testQuery)
try:
executor.execute_prepared_statement(testQuery)
except psycopg2.ProgrammingError as e:
self.assertTrue(str(e) == "no results to fetch")
except expectedExceptionInPreparedStatement:
return
class ZoneHexFormatTest(BaseTestCase):
ZONE = True
def testRead(self):
data = get_pregenerated_random_data()
zone_public = b64decode(zones[0][ZONE_PUBLIC_KEY].encode('ascii'))
acra_struct = create_acrastruct(
data.encode('ascii'), zone_public,
context=zones[0][ZONE_ID].encode('ascii'))
row_id = get_random_id()
self.log(zone_id=zones[0][ZONE_ID],
data=acra_struct, expected=data.encode('ascii'))
self.engine1.execute(
test_table.insert(),
{'id': row_id, 'data': acra_struct, 'raw_data': data})
zone = zones[0][ZONE_ID].encode('ascii')
result = self.engine1.execute(
sa.select([sa.cast(zone, BYTEA), test_table])
.where(test_table.c.id == row_id))
row = result.fetchone()
self.assertEqual(row['data'], row['raw_data'].encode('utf-8'))
self.assertEqual(row['empty'], b'')
        # reading without the zone: via another acra-server, via the same acra-server, and directly without acra-server
for engine in self.engines:
result = engine.execute(
sa.select([test_table])
.where(test_table.c.id == row_id))
row = result.fetchone()
self.assertNotEqual(row['data'].decode('ascii', errors='ignore'), row['raw_data'])
self.assertEqual(row['empty'], b'')
def testReadAcrastructInAcrastruct(self):
incorrect_data = get_pregenerated_random_data()
correct_data = get_pregenerated_random_data()
suffix_data = get_pregenerated_random_data()[:10]
zone_public = b64decode(zones[0][ZONE_PUBLIC_KEY].encode('ascii'))
fake_offset = (3+45+84) - 1
fake_acra_struct = create_acrastruct(
incorrect_data.encode('ascii'), zone_public, context=zones[0][ZONE_ID].encode('ascii'))[:fake_offset]
inner_acra_struct = create_acrastruct(
correct_data.encode('ascii'), zone_public, context=zones[0][ZONE_ID].encode('ascii'))
data = fake_acra_struct + inner_acra_struct + suffix_data.encode('ascii')
correct_data = correct_data + suffix_data
self.log(zone_id=zones[0][ZONE_ID],
data=data,
expected=fake_acra_struct+correct_data.encode('ascii'))
row_id = get_random_id()
self.engine1.execute(
test_table.insert(),
{'id': row_id, 'data': data, 'raw_data': correct_data})
zone = zones[0][ZONE_ID].encode('ascii')
result = self.engine1.execute(
sa.select([sa.cast(zone, BYTEA), test_table])
.where(test_table.c.id == row_id))
row = result.fetchone()
self.assertEqual(row['data'][fake_offset:],
safe_string(row['raw_data']).encode('utf-8'))
self.assertEqual(row['data'][:fake_offset], fake_acra_struct[:fake_offset])
self.assertEqual(row['empty'], b'')
result = self.engine2.execute(
sa.select([test_table])
.where(test_table.c.id == row_id))
row = result.fetchone()
self.assertNotEqual(len(row['data'][fake_offset:]), len(row['raw_data'][fake_offset:]))
self.assertEqual(row['empty'], b'')
result = self.engine_raw.execute(
sa.select([test_table])
.where(test_table.c.id == row_id))
row = result.fetchone()
self.assertNotEqual(row['data'][fake_offset:].decode('ascii', errors='ignore'),
row['raw_data'])
self.assertEqual(row['empty'], b'')
class TestEnableCachedOnStartupTest(HexFormatTest):
def checkSkip(self):
super().checkSkip()
if KEYSTORE_VERSION == 'v2':
self.skipTest("test only for keystore Version v1")
def setUp(self):
self.cached_dir = tempfile.TemporaryDirectory()
# fill temp dir with all keys
copy_tree(KEYS_FOLDER.name, self.cached_dir.name)
super().setUp()
def fork_acra(self, popen_kwargs: dict=None, **acra_kwargs: dict):
acra_kwargs['keystore_cache_on_start_enable'] = 'true'
acra_kwargs['keys_dir'] = self.cached_dir.name
return super(TestEnableCachedOnStartupTest, self).fork_acra(
popen_kwargs, **acra_kwargs)
def testReadAcrastructInAcrastruct(self):
self.cached_dir.cleanup()
super().testReadAcrastructInAcrastruct()
def testClientIDRead(self):
self.cached_dir.cleanup()
super().testClientIDRead()
class TestEnableCachedOnStartupServerV2ErrorExit(BaseTestCase):
def checkSkip(self):
if KEYSTORE_VERSION == 'v1':
self.skipTest("test only for keystore Version v2")
def setUp(self):
self.log_file = tempfile.NamedTemporaryFile('w+', encoding='utf-8')
def testRun(self):
self.checkSkip()
acra_kwargs = {
'log_to_file': self.log_file.name,
'keystore_cache_on_start_enable': 'true',
}
try:
self.fork_acra(**acra_kwargs)
except Exception as exc:
self.assertEqual(str(exc), WAIT_CONNECTION_ERROR_MESSAGE)
with open(self.log_file.name, 'r') as f:
log = f.read()
self.assertIn("Can't cache on start with disabled cache", log)
self.tearDown()
class TestEnableCachedOnStartupTranslatorSV2ErrorExit(AcraTranslatorMixin, BaseTestCase):
def checkSkip(self):
if KEYSTORE_VERSION == 'v1':
self.skipTest("test only for keystore Version v2")
def setUp(self):
self.log_file = tempfile.NamedTemporaryFile('w+', encoding='utf-8')
def testRun(self):
translator_kwargs = {
'log_to_file': self.log_file.name,
'keystore_cache_on_start_enable': 'true',
}
with ProcessContextManager(self.fork_translator(translator_kwargs)):
with self.assertRaises(Exception):
with open(self.log_file.name, 'r') as f:
log = f.read()
self.assertIn("Can't cache on start with disabled cache", log)
self.tearDown()
class TestDisableCachedOnStartupTest(HexFormatTest):
def setUp(self):
self.non_cached_dir = tempfile.TemporaryDirectory()
# fill temp dir with all keys
copy_tree(KEYS_FOLDER.name, self.non_cached_dir.name)
super().setUp()
def fork_acra(self, popen_kwargs: dict=None, **acra_kwargs: dict):
# keystore_cache_on_start_enable is false by default in super().fork_acra()
acra_kwargs['keys_dir'] = self.non_cached_dir.name
return super(TestDisableCachedOnStartupTest, self).fork_acra(
popen_kwargs, **acra_kwargs)
def testReadAcrastructInAcrastruct(self):
self.non_cached_dir.cleanup()
with self.assertRaises(Exception):
super().testReadAcrastructInAcrastruct()
def testClientIDRead(self):
self.non_cached_dir.cleanup()
with self.assertRaises(Exception):
super().testClientIDRead()
class EscapeFormatTest(HexFormatTest):
ACRA_BYTEA = 'pgsql_escape_bytea'
DB_BYTEA = 'escape'
def checkSkip(self):
if TEST_MYSQL:
self.skipTest("useful only for postgresql")
elif not TEST_WITH_TLS:
self.skipTest("running tests only with TLS")
class ZoneEscapeFormatTest(ZoneHexFormatTest):
ACRA_BYTEA = 'pgsql_escape_bytea'
DB_BYTEA = 'escape'
class TestConnectionClosing(BaseTestCase):
class mysql_closing(contextlib.closing):
"""
        Extended contextlib.closing that adds a close() method calling close()
        on the wrapped object.
        Needed to wrap pymysql connections with our own __enter__/__exit__
        implementation that returns the connection instead of a cursor (as
        pymysql.Connection.__enter__() does).
"""
def close(self):
logger.info('mysql_closing.close()')
self.thing.close()
def setUp(self):
self.checkSkip()
try:
if not self.EXTERNAL_ACRA:
self.acra = self.fork_acra(
tls_ocsp_from_cert='ignore',
tls_crl_from_cert='ignore',
tls_ocsp_url='',
tls_crl_url='',
)
except:
self.tearDown()
raise
def get_connection(self):
count = CONNECT_TRY_COUNT
while True:
try:
if TEST_MYSQL:
return TestConnectionClosing.mysql_closing(
pymysql.connect(**get_connect_args(port=self.ACRASERVER_PORT)))
else:
return TestConnectionClosing.mysql_closing(psycopg2.connect(
host=get_db_host(), **get_connect_args(port=self.ACRASERVER_PORT)))
except:
count -= 1
if count == 0:
raise
time.sleep(CONNECTION_FAIL_SLEEP)
def tearDown(self):
procs = []
if not self.EXTERNAL_ACRA and hasattr(self, 'acra'):
procs.append(self.acra)
stop_process(procs)
send_signal_by_process_name('acra-server', signal.SIGKILL)
def getActiveConnectionCount(self, cursor):
if TEST_MYSQL:
query = "SHOW STATUS WHERE `variable_name` = 'Threads_connected';"
cursor.execute(query)
return int(cursor.fetchone()[1])
else:
cursor.execute('select count(*) from pg_stat_activity;')
return int(cursor.fetchone()[0])
def getConnectionLimit(self, connection=None):
created_connection = False
if connection is None:
connection = self.get_connection()
created_connection = True
if TEST_MYSQL:
query = "SHOW VARIABLES WHERE `variable_name` = 'max_connections';"
with connection.cursor() as cursor:
cursor.execute(query)
return int(cursor.fetchone()[1])
else:
with TestConnectionClosing.mysql_closing(connection.cursor()) as cursor:
try:
cursor.execute('select setting from pg_settings where name=\'max_connections\';')
pg_max_connections = int(cursor.fetchone()[0])
cursor.execute('select rolconnlimit from pg_roles where rolname = current_user;')
pg_rolconnlimit = int(cursor.fetchone()[0])
cursor.close()
if pg_rolconnlimit <= 0:
return pg_max_connections
return min(pg_max_connections, pg_rolconnlimit)
except:
if created_connection:
connection.close()
raise
def check_count(self, cursor, expected):
        # give postgresql some time to close the connections;
        # because the performance of the machine running the tests is not constant,
        # we retry several times; in the best case no sleep is needed
timeout = 3
step = 0.1
iterations = timeout / step
for i in range(int(iterations)):
try:
self.assertEqual(self.getActiveConnectionCount(cursor), expected)
break
except AssertionError:
if i == (iterations - 1):
raise
                # brief wait for connections to close; value chosen empirically
time.sleep(step)
def checkConnectionLimit(self, connection_limit):
connections = []
try:
exception = None
try:
for i in range(connection_limit):
connections.append(self.get_connection())
except Exception as exc:
exception = exc
self.assertIsNotNone(exception)
is_correct_exception_message = False
if TEST_MYSQL:
exception_type = pymysql.err.OperationalError
correct_messages = [
'Too many connections'
]
for message in correct_messages:
if exception.args[0] in [1203, 1040] and message in exception.args[1]:
is_correct_exception_message = True
break
else:
exception_type = psycopg2.OperationalError
                # the exception doesn't have any related error code, only text messages
correct_messages = [
'FATAL: too many connections for role',
'FATAL: sorry, too many clients already',
'FATAL: remaining connection slots are reserved for non-replication superuser connections'
]
for message in correct_messages:
if message in exception.args[0]:
is_correct_exception_message = True
break
self.assertIsInstance(exception, exception_type)
self.assertTrue(is_correct_exception_message)
except:
for connection in connections:
connection.close()
raise
return connections
def testClosingConnectionsWithDB(self):
with self.get_connection() as connection:
connection.autocommit = True
with TestConnectionClosing.mysql_closing(connection.cursor()) as cursor:
current_connection_count = self.getActiveConnectionCount(cursor)
with self.get_connection():
self.assertEqual(self.getActiveConnectionCount(cursor),
current_connection_count+1)
connection_limit = self.getConnectionLimit(connection)
created_connections = self.checkConnectionLimit(
connection_limit)
for conn in created_connections:
conn.close()
self.check_count(cursor, current_connection_count)
# try create new connection
with self.get_connection():
self.check_count(cursor, current_connection_count + 1)
self.check_count(cursor, current_connection_count)
class BasePoisonRecordTest(AcraCatchLogsMixin, AcraTranslatorMixin, BaseTestCase):
SHUTDOWN = True
TEST_DATA_LOG = True
DETECT_POISON_RECORDS = True
def get_poison_record_data(self):
return get_poison_record()
def setUp(self):
super(BasePoisonRecordTest, self).setUp()
try:
self.log(poison_key=True, data=get_poison_record())
except:
self.tearDown()
raise
def fork_acra(self, popen_kwargs: dict=None, **acra_kwargs: dict):
args = {
'poison_shutdown_enable': 'true' if self.SHUTDOWN else 'false',
'poison_detect_enable': 'true' if self.DETECT_POISON_RECORDS else 'false',
            # use text format to simplify checking some error messages in logs,
            # e.g. code=XXX instead of '|XXX|' in CEF format
'logging_format': 'text',
}
if hasattr(self, 'poisonscript'):
args['poison_run_script_file'] = self.poisonscript
acra_kwargs.update(args)
return super(BasePoisonRecordTest, self).fork_acra(popen_kwargs, **acra_kwargs)
def fork_translator(self, translator_kwargs, popen_kwargs=None):
args = {
'poison_shutdown_enable': 'true' if self.SHUTDOWN else 'false',
'poison_detect_enable': 'true' if self.DETECT_POISON_RECORDS else 'false',
            # use text format to simplify checking some error messages in logs,
            # e.g. code=XXX instead of '|XXX|' in CEF format
'logging_format': 'text',
}
if hasattr(self, 'poisonscript'):
args['poison_run_script_file'] = self.poisonscript
translator_kwargs.update(args)
return super(BasePoisonRecordTest, self).fork_translator(translator_kwargs, popen_kwargs)
def get_base_translator_args(self):
return {
'tls_ocsp_from_cert': 'ignore',
'tls_crl_from_cert': 'ignore',
'tls_key': abs_path(TEST_TLS_SERVER_KEY),
'tls_cert': abs_path(TEST_TLS_SERVER_CERT),
'tls_ca': TEST_TLS_CA,
'tls_identifier_extractor_type': self.get_identifier_extractor_type(),
'acratranslator_client_id_from_connection_enable': 'true',
}
class TestPoisonRecordShutdown(BasePoisonRecordTest):
SHUTDOWN = True
def testShutdown(self):
"""fetch data from table by specifying row id
this method works with ZoneMode ON and OFF because in both cases acra-server should find poison record
on data decryption failure
"""
row_id = get_random_id()
data = self.get_poison_record_data()
self.engine1.execute(
test_table.insert(),
{'id': row_id, 'data': data, 'raw_data': 'poison_record'})
with self.assertRaises(DatabaseError):
result = self.engine1.execute(
sa.select([test_table])
.where(test_table.c.id == row_id))
row = result.fetchone()
if row['data'] == data:
self.fail("unexpected response")
log = self.read_log(self.acra)
self.assertIn('code=101', log)
self.assertIn('Detected poison record, exit', log)
self.assertNotIn('executed code after os.Exit', log)
def testShutdown2(self):
"""check working poison record callback on full select
this method works with ZoneMode ON and OFF because in both cases acra-server should find poison record
on data decryption failure
"""
row_id = get_random_id()
data = self.get_poison_record_data()
self.engine1.execute(
test_table.insert(),
{'id': row_id, 'data': data, 'raw_data': 'poison_record'})
with self.assertRaises(DatabaseError):
result = self.engine1.execute(
sa.select([test_table]))
rows = result.fetchall()
for row in rows:
if row['id'] == row_id and row['data'] == data:
self.fail("unexpected response")
log = self.read_log(self.acra)
self.assertIn('code=101', log)
self.assertIn('Detected poison record, exit', log)
self.assertNotIn('executed code after os.Exit', log)
def testShutdown3(self):
"""check working poison record callback on full select inside another data
this method works with ZoneMode ON and OFF because in both cases acra-server should find poison record
on data decryption failure
"""
row_id = get_random_id()
poison_record = get_poison_record()
begin_tag = poison_record[:4]
# test with extra long begin tag
data = os.urandom(100) + begin_tag + poison_record + os.urandom(100)
self.engine1.execute(
test_table.insert(),
{'id': row_id, 'data': data, 'raw_data': 'poison_record'})
with self.assertRaises(DatabaseError):
result = self.engine1.execute(
sa.select([test_table]))
rows = result.fetchall()
for row in rows:
if row['id'] == row_id and row['data'] == data:
self.fail("unexpected response")
log = self.read_log(self.acra)
self.assertIn('code=101', log)
self.assertIn('Detected poison record, exit', log)
self.assertNotIn('executed code after os.Exit', log)
def testShutdownWithExplicitZone(self):
"""check callback with select by id and specify zone id in select query
This method works with ZoneMode ON and OFF because in both cases acra-server should find poison record
on data decryption failure. Plus in ZoneMode OFF acra-server will ignore ZoneID
"""
row_id = get_random_id()
self.engine1.execute(
test_table.insert(),
{'id': row_id, 'data': self.get_poison_record_data(), 'raw_data': 'poison_record'})
with self.assertRaises(DatabaseError):
zone = zones[0][ZONE_ID].encode('ascii')
result = self.engine1.execute(
sa.select([sa.cast(zone, BYTEA), test_table])
.where(test_table.c.id == row_id))
print(result.fetchall())
log = self.read_log(self.acra)
self.assertIn('code=101', log)
self.assertIn('Detected poison record, exit', log)
self.assertNotIn('executed code after os.Exit', log)
def testShutdownTranslatorHTTP(self):
"""check poison record decryption via acra-translator using HTTP v1 API
This method works with ZoneMode ON and OFF because in both cases acra-translator should match poison record
on data decryption failure
"""
http_port = 3356
http_connection_string = 'tcp://127.0.0.1:{}'.format(http_port)
translator_kwargs = self.get_base_translator_args()
translator_kwargs.update({
'incoming_connection_http_string': http_connection_string,
})
data = self.get_poison_record_data()
with ProcessContextManager(self.fork_translator(translator_kwargs)):
with self.assertRaises(requests.exceptions.ConnectionError) as exc:
response = self.http_decrypt_request(http_port, TLS_CERT_CLIENT_ID_1, None, data)
self.assertEqual(exc.exception.args[0].args[0], 'Connection aborted.')
# check that port not listening anymore
with self.assertRaises(Exception) as exc:
wait_connection(http_port, count=1, sleep=0)
self.assertEqual(exc.exception.args[0], WAIT_CONNECTION_ERROR_MESSAGE)
def testShutdownTranslatorgRPC(self):
"""check poison record decryption via acra-translator using gRPC API
This method works with ZoneMode ON and OFF because in both cases acra-translator should match poison record
on data decryption failure
"""
grpc_port = 3357
grpc_connection_string = 'tcp://127.0.0.1:{}'.format(grpc_port)
translator_kwargs = self.get_base_translator_args()
translator_kwargs.update({
'incoming_connection_grpc_string': grpc_connection_string,
})
data = self.get_poison_record_data()
with ProcessContextManager(self.fork_translator(translator_kwargs)):
with self.assertRaises(grpc.RpcError) as exc:
response = self.grpc_decrypt_request(grpc_port, TLS_CERT_CLIENT_ID_1, None, data,
raise_exception_on_failure=True)
self.assertEqual(exc.exception.code(), grpc.StatusCode.UNAVAILABLE)
# check that port not listening anymore
with self.assertRaises(Exception) as exc:
wait_connection(grpc_port, count=1, sleep=0)
self.assertEqual(exc.exception.args[0], WAIT_CONNECTION_ERROR_MESSAGE)
class TestPoisonRecordShutdownWithAcraBlock(TestPoisonRecordShutdown):
def get_poison_record_data(self):
return get_poison_record_with_acrablock()
class TestPoisonRecordOffStatus(BasePoisonRecordTest):
SHUTDOWN = True
DETECT_POISON_RECORDS = False
def testShutdown(self):
"""case with select by specifying row id, checks that acra-server doesn't initialize poison record detection
and any callbacks, and returns data as is on decryption failure even if it's valid poison record
Works with ZoneMode On/OFF
"""
row_id = get_random_id()
data = self.get_poison_record_data()
self.engine1.execute(
test_table.insert(),
{'id': row_id, 'data': data, 'raw_data': 'poison_record'})
result = self.engine1.execute(
sa.select([test_table])
.where(test_table.c.id == row_id))
row = result.fetchone()
# AcraServer must return data as is
if row['data'] != data:
self.fail("unexpected response")
log = self.read_log(self.acra)
self.assertNotIn('Recognized poison record', log)
self.assertNotIn('Turned on poison record detection', log)
self.assertNotIn('code=101', log)
def testShutdown2(self):
"""case with select full table, checks that acra-server doesn't initialize poison record detection
and any callbacks, and returns data as is on decryption failure even if it's valid poison record
Works with ZoneMode On/OFF
"""
row_id = get_random_id()
data = self.get_poison_record_data()
self.engine1.execute(
test_table.insert(),
{'id': row_id, 'data': data, 'raw_data': 'poison_record'})
result = self.engine1.execute(
sa.select([test_table]))
rows = result.fetchall()
for row in rows:
# AcraServer must return data as is
if row['id'] == row_id and row['data'] != data:
self.fail("unexpected response")
log = self.read_log(self.acra)
self.assertNotIn('Recognized poison record', log)
self.assertNotIn('Turned on poison record detection', log)
self.assertNotIn('code=101', log)
def testShutdown3(self):
"""case with select full table and inlined poison record, checks that acra-server doesn't initialize poison
record detection and any callbacks, and returns data as is on decryption failure even if it's valid poison
record
Works with ZoneMode On/OFF
"""
row_id = get_random_id()
poison_record = self.get_poison_record_data()
begin_tag = poison_record[:4]
# test with extra long begin tag
data = os.urandom(100) + begin_tag + poison_record + os.urandom(100)
self.engine1.execute(
test_table.insert(),
{'id': row_id, 'data': data, 'raw_data': 'poison_record'})
result = self.engine1.execute(
sa.select([test_table]))
rows = result.fetchall()
for row in rows:
# AcraServer must return data as is
if row['id'] == row_id and row['data'] != data:
self.fail("unexpected response")
log = self.read_log(self.acra)
self.assertNotIn('Recognized poison record', log)
self.assertNotIn('Turned on poison record detection', log)
self.assertNotIn('code=101', log)
def testShutdownWithExplicitZone(self):
"""case with explicitly specified ZoneID in SELECT query, checks that acra-server doesn't initialize poison
record detection and any callbacks, and returns data as is on decryption failure even if it's valid poison
record
Works with ZoneMode On/OFF
"""
        row_id = get_random_id()
        poison_record = self.get_poison_record_data()
        self.engine1.execute(
            test_table.insert(),
            {'id': row_id, 'data': poison_record, 'raw_data': 'poison_record'})
zone = zones[0][ZONE_ID].encode('ascii')
result = self.engine1.execute(
sa.select([sa.cast(zone, BYTEA), test_table])
.where(test_table.c.id == row_id))
rows = result.fetchall()
        # AcraServer must return data as is: iterate over the already fetched rows
        # (the result proxy is exhausted by fetchall() above)
        for selected_zone, _, data, raw_data, _, _ in rows:
            self.assertEqual(selected_zone, zone)
            self.assertEqual(data, poison_record)
log = self.read_log(self.acra)
self.assertNotIn('Recognized poison record', log)
self.assertNotIn('Turned on poison record detection', log)
self.assertNotIn('code=101', log)
def testShutdownTranslatorHTTP(self):
"""check poison record ignoring via acra-translator using HTTP v1 API, omitting initialization poison
record detection and any callbacks, returning data as is on decryption failure even if it's valid poison
record
Works with ZoneMode On/OFF
"""
http_port = 3356
http_connection_string = 'tcp://127.0.0.1:{}'.format(http_port)
with tempfile.NamedTemporaryFile('w+', encoding='utf-8') as log_file:
translator_kwargs = self.get_base_translator_args()
translator_kwargs.update({
'incoming_connection_http_string': http_connection_string,
'log_to_file': log_file.name,
})
data = self.get_poison_record_data()
with ProcessContextManager(self.fork_translator(translator_kwargs)) as translator:
response = self.http_decrypt_request(http_port, TLS_CERT_CLIENT_ID_1, None, data)
self.assertEqual(response, b"Can't decrypt AcraStruct")
with open(log_file.name, 'r') as f:
log = f.read()
self.assertNotIn('Recognized poison record', log)
self.assertNotIn('Turned on poison record detection', log)
self.assertNotIn('code=101', log)
def testShutdownTranslatorgRPC(self):
"""check poison record ignoring via acra-translator using gRPC API, omitting initialization poison
record detection and any callbacks, returning data as is on decryption failure even if it's valid poison
record
Works with ZoneMode On/OFF
"""
grpc_port = 3357
grpc_connection_string = 'tcp://127.0.0.1:{}'.format(grpc_port)
with tempfile.NamedTemporaryFile('w+', encoding='utf-8') as log_file:
translator_kwargs = self.get_base_translator_args()
translator_kwargs.update({
'incoming_connection_grpc_string': grpc_connection_string,
'log_to_file': log_file.name,
})
data = self.get_poison_record_data()
with ProcessContextManager(self.fork_translator(translator_kwargs)):
with self.assertRaises(grpc.RpcError) as exc:
response = self.grpc_decrypt_request(grpc_port, TLS_CERT_CLIENT_ID_1, None, data,
raise_exception_on_failure=True)
self.assertEqual(exc.exception.code(), grpc.StatusCode.UNKNOWN)
self.assertEqual(exc.exception.details(), "can't decrypt data")
with open(log_file.name, 'r') as f:
log = f.read()
self.assertNotIn('Recognized poison record', log)
self.assertNotIn('Turned on poison record detection', log)
self.assertNotIn('code=101', log)
class TestPoisonRecordOffStatusWithAcraBlock(TestPoisonRecordOffStatus):
def get_poison_record_data(self):
return get_poison_record_with_acrablock()
class TestShutdownPoisonRecordWithZone(TestPoisonRecordShutdown):
ZONE = True
WHOLECELL_MODE = False
SHUTDOWN = True
class TestShutdownPoisonRecordWithZoneAcraBlock(TestShutdownPoisonRecordWithZone):
def get_poison_record_data(self):
return get_poison_record_with_acrablock()
class TestShutdownPoisonRecordWithZoneOffStatus(TestPoisonRecordOffStatus):
ZONE = True
WHOLECELL_MODE = False
SHUTDOWN = True
DETECT_POISON_RECORDS = False
class TestShutdownPoisonRecordWithZoneOffStatusWithAcraBlock(TestShutdownPoisonRecordWithZoneOffStatus):
def get_poison_record_data(self):
return get_poison_record_with_acrablock()
class TestNoCheckPoisonRecord(BasePoisonRecordTest):
WHOLECELL_MODE = False
SHUTDOWN = False
DEBUG_LOG = True
DETECT_POISON_RECORDS = False
def testNoDetect(self):
row_id = get_random_id()
poison_record = get_poison_record()
self.engine1.execute(
test_table.insert(),
{'id': row_id, 'data': poison_record, 'raw_data': 'poison_record'})
result = self.engine1.execute(test_table.select())
result.fetchall()
log = self.read_log(self.acra)
self.assertNotIn('Recognized poison record', log)
self.assertNotIn('Turned on poison record detection', log)
self.assertNotIn('code=101', log)
result = self.engine1.execute(
sa.select([test_table]))
for _, data, raw_data, _, _ in result:
self.assertEqual(poison_record, data)
class TestNoCheckPoisonRecordWithZone(TestNoCheckPoisonRecord):
ZONE = True
class TestCheckLogPoisonRecord(BasePoisonRecordTest):
SHUTDOWN = True
DEBUG_LOG = True
TEST_DATA_LOG = True
def setUp(self):
self.poison_script_file = NamedTemporaryFile('w')
# u+rwx
os.chmod(self.poison_script_file.name, stat.S_IRWXU)
self.poison_script = self.poison_script_file.name
super(TestCheckLogPoisonRecord, self).setUp()
def tearDown(self):
self.poison_script_file.close()
super(TestCheckLogPoisonRecord, self).tearDown()
def testDetect(self):
row_id = get_random_id()
self.engine1.execute(
test_table.insert(),
{'id': row_id, 'data': get_poison_record(), 'raw_data': 'poison_record'})
with self.assertRaises(DatabaseError):
self.engine1.execute(test_table.select())
log = self.read_log(self.acra)
self.assertIn('Recognized poison record', log)
self.assertIn('Turned on poison record detection', log)
self.assertIn('code=101', log)
class TestKeyStorageClearing(BaseTestCase):
def setUp(self):
self.checkSkip()
try:
self.init_key_stores()
if not self.EXTERNAL_ACRA:
self.acra = self.fork_acra(
zonemode_enable='true',
http_api_enable='true',
tls_ocsp_from_cert='ignore',
tls_crl_from_cert='ignore',
tls_ocsp_url='',
tls_crl_url='',
keys_dir=self.server_keys_dir)
args = get_connect_args(port=self.ACRASERVER_PORT, sslmode='require')
args.update(get_tls_connection_args(TEST_TLS_CLIENT_KEY, TEST_TLS_CLIENT_CERT))
self.engine1 = sa.create_engine(
get_engine_connection_string(
self.get_acraserver_connection_string(),
DB_NAME),
connect_args=args)
self.engine_raw = sa.create_engine(
'{}://{}:{}/{}'.format(DB_DRIVER, DB_HOST, DB_PORT, DB_NAME),
                connect_args=get_connect_args(DB_PORT))
self.engines = [self.engine1, self.engine_raw]
metadata.create_all(self.engine_raw)
self.engine_raw.execute('delete from test;')
except:
self.tearDown()
raise
def tearDown(self):
try:
self.engine_raw.execute('delete from test;')
except:
pass
for engine in getattr(self, 'engines', []):
engine.dispose()
processes = []
if not self.EXTERNAL_ACRA and hasattr(self, 'acra'):
processes.append(self.acra)
stop_process(processes)
send_signal_by_process_name('acra-server', signal.SIGKILL)
self.server_keystore.cleanup()
def init_key_stores(self):
self.server_keystore = tempfile.TemporaryDirectory()
self.server_keys_dir = os.path.join(self.server_keystore.name, '.acrakeys')
create_client_keypair_from_certificate(tls_cert=TEST_TLS_CLIENT_CERT, keys_dir=self.server_keys_dir, only_storage=True)
def test_clearing(self):
# execute any query for loading key by acra
result = self.engine1.execute(sa.select([1]).limit(1))
result.fetchone()
with urlopen('http://localhost:{}/resetKeyStorage'.format(self.ACRASERVER_PORT+1)) as response:
self.assertEqual(response.status, 200)
class HashiCorpVaultMasterKeyLoaderMixin:
DEFAULT_MOUNT_PATH = 'test_kv'
secret_path = 'foo'
def setUp(self):
if not TEST_WITH_VAULT:
self.skipTest("test with HashiCorp Vault ACRA_MASTER_KEY loader")
if TEST_SSL_VAULT:
self.vault_client = VaultClient(verify=TEST_VAULT_TLS_CA)
else:
self.vault_client = VaultClient()
self.vault_client.enable_kv_secret_engine(mount_path=self.DEFAULT_MOUNT_PATH)
self.vault_client.put_master_key_by_version(self.secret_path, VAULT_KV_ENGINE_VERSION, mount_point=self.DEFAULT_MOUNT_PATH)
super().setUp()
def fork_acra(self, popen_kwargs: dict = None, **acra_kwargs: dict):
args = self.vault_client.get_vault_cli_args(self.DEFAULT_MOUNT_PATH, self.secret_path)
acra_kwargs.update(args)
return self._fork_acra(acra_kwargs, popen_kwargs)
def fork_translator(self, translator_kwargs, popen_kwargs=None):
args = self.vault_client.get_vault_cli_args(self.DEFAULT_MOUNT_PATH,self.secret_path)
translator_kwargs.update(args)
return super().fork_translator(translator_kwargs, popen_kwargs)
def read_rotation_public_key(self, extra_kwargs: dict = None):
args = self.vault_client.get_vault_cli_args(self.DEFAULT_MOUNT_PATH,self.secret_path)
return super().read_rotation_public_key(extra_kwargs=args)
def create_keypair(self, extra_kwargs: dict = None):
args = self.vault_client.get_vault_cli_args(self.DEFAULT_MOUNT_PATH,self.secret_path)
return super().create_keypair(extra_kwargs=args)
def tearDown(self):
super().tearDown()
self.vault_client.disable_kv_secret_engine(mount_path=self.DEFAULT_MOUNT_PATH)
class TestKeyStoreMigration(BaseTestCase):
"""Test "acra-keys migrate" utility."""
# We need to test different keystore formats so we can't touch
# the global KEYS_FOLDER. We need to launch service instances
# with particular keystore configuration. Ignore the usual
# setup and teardown routines that start Acra services.
def setUp(self):
self.checkSkip()
self.test_dir = tempfile.TemporaryDirectory()
self.engine_raw = sa.create_engine(
'{}://{}:{}/{}'.format(DB_DRIVER, DB_HOST, DB_PORT, DB_NAME),
connect_args=get_connect_args(DB_PORT))
metadata.create_all(self.engine_raw)
self.engine_raw.execute(test_table.delete())
self.master_keys = {}
def tearDown(self):
self.engine_raw.execute(test_table.delete())
self.engine_raw.dispose()
self.test_dir.cleanup()
# Instead, use these methods according to individual test needs.
def get_master_key(self, version):
"""Returns master key value for given version (base64-encoded)."""
if version not in self.master_keys:
temp_file = os.path.join(self.test_dir.name, 'master.key')
subprocess.check_output([
os.path.join(BINARY_OUTPUT_FOLDER, 'acra-keymaker'), '--keystore={}'.format(version),
'--generate_master_key={}'.format(temp_file)])
with open(temp_file, 'rb') as f:
master_key = b64encode(f.read()).decode('ascii')
self.master_keys[version] = master_key
os.remove(temp_file)
return self.master_keys[version]
def create_key_store(self, version):
"""Create new keystore of given version."""
# Start with service transport keys and client storage keys.
self.client_id = TLS_CERT_CLIENT_ID_1
subprocess.check_call([
os.path.join(BINARY_OUTPUT_FOLDER, 'acra-keymaker'),
'--generate_acrawriter_keys',
'--client_id={}'.format(self.client_id),
'--keys_output_dir={}'.format(self.current_key_store_path()),
'--keys_public_output_dir={}'.format(self.current_key_store_path()),
'--keystore={}'.format(version),
],
env={ACRA_MASTER_KEY_VAR_NAME: self.get_master_key(version)},
timeout=PROCESS_CALL_TIMEOUT)
# Then add some zones that we're going to test with.
zone_output = subprocess.check_output([
os.path.join(BINARY_OUTPUT_FOLDER, 'acra-addzone'),
'--keys_output_dir={}'.format(self.current_key_store_path()),
],
env={ACRA_MASTER_KEY_VAR_NAME: self.get_master_key(version)},
timeout=PROCESS_CALL_TIMEOUT)
zone_config = json.loads(zone_output.decode('utf-8'))
self.zone_id = zone_config[ZONE_ID]
# Keep the current version around, we'll need it for migration.
self.keystore_version = version
def migrate_key_store(self, new_version):
"""Migrate keystore from current to given new version."""
# Run the migration tool. New keystore is in a new directory.
subprocess.check_call([
os.path.join(BINARY_OUTPUT_FOLDER, 'acra-keys'), 'migrate',
'--src_keys_dir={}'.format(self.current_key_store_path()),
'--src_keys_dir_public={}'.format(self.current_key_store_path()),
'--src_keystore={}'.format(self.keystore_version),
'--dst_keys_dir={}'.format(self.new_key_store_path()),
'--dst_keys_dir_public={}'.format(self.new_key_store_path()),
'--dst_keystore={}'.format(new_version),
],
env={'SRC_ACRA_MASTER_KEY': self.get_master_key(self.keystore_version),
'DST_ACRA_MASTER_KEY': self.get_master_key(new_version)},
timeout=PROCESS_CALL_TIMEOUT)
# Finalize the migration, replacing old keystore with the new one.
# We assume the services to be not running at this moment.
os.rename(self.current_key_store_path(), self.old_key_store_path())
os.rename(self.new_key_store_path(), self.current_key_store_path())
self.keystore_version = new_version
def change_key_store_path(self):
"""Change the absolute path of the keystore directory."""
# Swap the whole testing directory for a new one.
old_key_store_path = self.current_key_store_path()
old_test_dir = self.test_dir
new_test_dir = tempfile.TemporaryDirectory()
self.test_dir = new_test_dir
new_key_store_path = self.current_key_store_path()
# Move the keystore to the new location.
os.rename(old_key_store_path, new_key_store_path)
# Remove the old, now unneeded directory.
old_test_dir.cleanup()
def start_services(self, zone_mode=False):
"""Start Acra services required for testing."""
master_key = self.get_master_key(self.keystore_version)
master_key_env = {ACRA_MASTER_KEY_VAR_NAME: master_key}
self.acra_server = self.fork_acra(
zonemode_enable='true' if zone_mode else 'false',
keys_dir=self.current_key_store_path(),
tls_ocsp_from_cert='ignore',
tls_crl_from_cert='ignore',
tls_ocsp_url='',
tls_crl_url='',
keystore_cache_size=-1,
popen_kwargs={'env': master_key_env})
args = get_connect_args(port=self.ACRASERVER_PORT, sslmode='require')
args.update(get_tls_connection_args(TEST_TLS_CLIENT_KEY, TEST_TLS_CLIENT_CERT))
self.engine = sa.create_engine(
get_engine_connection_string(
self.get_acraserver_connection_string(),
DB_NAME),
connect_args=args)
# Remember whether we're running in zone mode. We need to know this
# to store and retrieve the data correctly.
self.zone_mode = zone_mode
def stop_services(self):
"""Gracefully stop Acra services being tested."""
self.engine.dispose()
stop_process(self.acra_server)
@contextlib.contextmanager
def running_services(self, **kwargs):
self.start_services(**kwargs)
try:
yield
finally:
self.stop_services()
def insert_as_client(self, data):
"""Encrypt and insert data via AcraServer."""
# It's too bothersome to thread through the master key setting.
# Set it here and reset it back after reading the public key.
new_master_key = self.get_master_key(self.keystore_version)
old_master_key = os.environ[ACRA_MASTER_KEY_VAR_NAME]
os.environ[ACRA_MASTER_KEY_VAR_NAME] = new_master_key
# Encryption depends on whether we're using zones or not.
if self.zone_mode:
acra_struct = create_acrastruct(
data.encode('ascii'),
read_zone_public_key(
self.zone_id,
self.current_key_store_path()),
context=self.zone_id.encode('ascii'))
else:
acra_struct = create_acrastruct(
data.encode('ascii'),
read_storage_public_key(
self.client_id,
self.current_key_store_path()))
os.environ[ACRA_MASTER_KEY_VAR_NAME] = old_master_key
row_id = get_random_id()
self.engine.execute(test_table.insert(), {
'id': row_id, 'data': acra_struct, 'raw_data': data,
})
return row_id
def select_as_client(self, row_id):
"""Select decrypted data via AcraServer."""
# If we're using zones, zone ID should precede the encrypted data.
if self.zone_mode:
cols = [sa.cast(self.zone_id.encode('ascii'), BYTEA),
test_table.c.data, test_table.c.raw_data]
else:
cols = [test_table.c.data, test_table.c.raw_data]
rows = self.engine.execute(
sa.select(cols).where(test_table.c.id == row_id))
return rows.first()
def select_directly(self, row_id):
"""Select raw data directly from database."""
rows = self.engine_raw.execute(
sa.select([test_table.c.data]).where(test_table.c.id == row_id))
return rows.first()
def current_key_store_path(self):
return os.path.join(self.test_dir.name, '.acrakeys')
def new_key_store_path(self):
return os.path.join(self.test_dir.name, '.acrakeys.new')
def old_key_store_path(self):
return os.path.join(self.test_dir.name, '.acrakeys.old')
# Now we can proceed with the tests...
def test_migrate_v1_to_v2(self):
"""Verify v1 -> v2 keystore migration."""
data_1 = get_pregenerated_random_data()
data_2 = get_pregenerated_random_data()
self.create_key_store('v1')
# Try saving some data with default zone
with self.running_services():
row_id_1 = self.insert_as_client(data_1)
# Check that we're able to put and get data via AcraServer.
selected = self.select_as_client(row_id_1)
self.assertEquals(selected['data'], data_1.encode('ascii'))
self.assertEquals(selected['raw_data'], data_1)
# Get encrypted data. It should really be encrypted.
encrypted_1 = self.select_directly(row_id_1)
self.assertNotEquals(encrypted_1['data'], data_1.encode('ascii'))
# Now do the same with a specific zone
with self.running_services(zone_mode=True):
row_id_1_zoned = self.insert_as_client(data_1)
# Check that we're able to put and get data via AcraServer.
selected = self.select_as_client(row_id_1_zoned)
self.assertEquals(selected['data'], data_1.encode('ascii'))
self.assertEquals(selected['raw_data'], data_1)
# Get encrypted data. It should really be encrypted.
encrypted_1_zoned = self.select_directly(row_id_1_zoned)
self.assertNotEquals(encrypted_1_zoned['data'], data_1.encode('ascii'))
# Also, it should be different from the default-zoned data.
self.assertNotEquals(encrypted_1_zoned['data'], encrypted_1['data'])
self.migrate_key_store('v2')
# After we have migrated the keys, check the setup again.
with self.running_services():
# Old data should still be there, accessible via AcraServer.
selected = self.select_as_client(row_id_1)
self.assertEquals(selected['data'], data_1.encode('ascii'))
self.assertEquals(selected['raw_data'], data_1)
# Key migration does not change encrypted data.
encrypted_1_migrated = self.select_directly(row_id_1)
self.assertEquals(encrypted_1_migrated['data'],
encrypted_1['data'])
# We're able to put some new data into the table and get it back.
row_id_2 = self.insert_as_client(data_2)
selected = self.select_as_client(row_id_2)
self.assertEquals(selected['data'], data_2.encode('ascii'))
self.assertEquals(selected['raw_data'], data_2)
# And again, this time with zones.
with self.running_services(zone_mode=True):
# Old data should still be there, accessible via AcraServer.
selected = self.select_as_client(row_id_1_zoned)
self.assertEquals(selected['data'], data_1.encode('ascii'))
self.assertEquals(selected['raw_data'], data_1)
# Key migration does not change encrypted data.
encrypted_1_zoned_migrated = self.select_directly(row_id_1_zoned)
self.assertEquals(encrypted_1_zoned_migrated['data'],
encrypted_1_zoned['data'])
# We're able to put some new data into the table and get it back.
row_id_2_zoned = self.insert_as_client(data_2)
selected = self.select_as_client(row_id_2_zoned)
self.assertEquals(selected['data'], data_2.encode('ascii'))
self.assertEquals(selected['raw_data'], data_2)
def test_moved_key_store(self):
"""Verify that keystore can be moved to a different absolute path."""
self.create_key_store(KEYSTORE_VERSION)
# Save some data, do a sanity check.
data = get_pregenerated_random_data()
with self.running_services():
row_id = self.insert_as_client(data)
selected = self.select_as_client(row_id)
self.assertEquals(selected['data'], data.encode('ascii'))
# Move the keystore to a different (still temporary) location.
self.change_key_store_path()
# Check that keystore path is not included into encryption context.
# We should still be able to access the data with the same keystore
# but located at different path.
with self.running_services():
selected = self.select_as_client(row_id)
self.assertEquals(selected['data'], data.encode('ascii'))
class RedisMixin:
TEST_REDIS_KEYS_DB = 0
TEST_REDIS_TOKEN_DB = 1
def checkSkip(self):
super().checkSkip()
if not TEST_WITH_REDIS:
self.skipTest("test only with Redis")
elif not TEST_WITH_TLS:
self.skipTest("running tests only with TLS")
def setUp(self):
self.redis_keys_client = redis.Redis(host='localhost', port=6379, db=self.TEST_REDIS_KEYS_DB)
self.redis_tokens_client = redis.Redis(host='localhost', port=6379, db=self.TEST_REDIS_TOKEN_DB)
super().setUp()
def tearDown(self):
self.redis_keys_client.flushall()
self.redis_tokens_client.flushall()
super().tearDown()
class TestAcraKeysWithZoneIDGeneration(unittest.TestCase):
def setUp(self):
self.master_key = get_master_key()
self.zone_dir = tempfile.TemporaryDirectory()
def test_rotate_symmetric_zone_key(self):
zone = json.loads(subprocess.check_output(
[os.path.join(BINARY_OUTPUT_FOLDER, 'acra-addzone'), '--keys_output_dir={}'.format(self.zone_dir.name)],
cwd=os.getcwd(), timeout=PROCESS_CALL_TIMEOUT).decode('utf-8'))
subprocess.check_call([
os.path.join(BINARY_OUTPUT_FOLDER, 'acra-keys'),
'generate',
'--zone_symmetric_key',
'--keys_dir={}'.format(self.zone_dir.name),
'--keys_dir_public={}'.format(self.zone_dir.name),
'--zone_id={}'.format(zone['id'])
],
env={ACRA_MASTER_KEY_VAR_NAME: self.master_key},
timeout=PROCESS_CALL_TIMEOUT)
path = '{}/{}_zone_sym.old'.format(self.zone_dir.name, zone['id'])
self.assertTrue(len(os.listdir(path)) != 0)
class TestAcraKeysWithClientIDGeneration(unittest.TestCase):
def setUp(self):
self.master_key = get_master_key()
self.dir_with_distinguished_name_client_id = tempfile.TemporaryDirectory()
self.dir_with_serial_number_client_id = tempfile.TemporaryDirectory()
self.create_key_store_with_client_id_from_cert(TLS_CLIENT_ID_SOURCE_DN, self.dir_with_distinguished_name_client_id.name)
self.create_key_store_with_client_id_from_cert(TLS_CLIENT_ID_SOURCE_SERIAL, self.dir_with_serial_number_client_id.name)
def test_generate_client_id_from_distinguished_name(self):
readKey = self.read_key_by_client_id(TLS_CLIENT_ID_SOURCE_DN, self.dir_with_distinguished_name_client_id.name)
self.assertTrue(readKey)
def test_non_client_id_keys_generation(self):
subprocess.check_call([
os.path.join(BINARY_OUTPUT_FOLDER, 'acra-keys'),
'generate',
'--audit_log_symmetric_key',
'--poison_record_keys',
'--keys_dir={}'.format(self.dir_with_distinguished_name_client_id.name),
'--keys_dir_public={}'.format(self.dir_with_distinguished_name_client_id.name),
'--keystore={}'.format(KEYSTORE_VERSION),
],
env={ACRA_MASTER_KEY_VAR_NAME: self.master_key},
timeout=PROCESS_CALL_TIMEOUT)
def test_keys_generation_without_client_id(self):
with self.assertRaises(subprocess.CalledProcessError) as exc:
subprocess.check_output([
os.path.join(BINARY_OUTPUT_FOLDER, 'acra-keys'),
'generate',
'--keys_dir={}'.format(self.dir_with_distinguished_name_client_id.name),
'--keys_dir_public={}'.format(self.dir_with_distinguished_name_client_id.name),
'--keystore={}'.format(KEYSTORE_VERSION),
],
env={ACRA_MASTER_KEY_VAR_NAME: self.master_key},
stderr=subprocess.STDOUT)
self.assertIn("--client_id or --tls_cert is required to generate keys".lower(), exc.exception.output.decode('utf8').lower())
self.assertEqual(exc.exception.returncode, 1)
with self.assertRaises(subprocess.CalledProcessError) as exc:
subprocess.check_output([
os.path.join(BINARY_OUTPUT_FOLDER, 'acra-keys'),
'generate',
"--client_id='test'",
'--keys_dir={}'.format(self.dir_with_distinguished_name_client_id.name),
'--keys_dir_public={}'.format(self.dir_with_distinguished_name_client_id.name),
'--keystore={}'.format(KEYSTORE_VERSION),
],
env={ACRA_MASTER_KEY_VAR_NAME: self.master_key},
stderr=subprocess.STDOUT)
self.assertIn("Invalid client ID".lower(), exc.exception.output.decode('utf8').lower())
self.assertEqual(exc.exception.returncode, 1)
def test_read_keys_symmetric(self):
subprocess.check_call([
os.path.join(BINARY_OUTPUT_FOLDER, 'acra-keys'),
'generate',
'--client_id={}'.format("testclientid"),
'--client_storage_symmetric_key',
'--keys_dir={}'.format(self.dir_with_distinguished_name_client_id.name),
'--keys_dir_public={}'.format(self.dir_with_distinguished_name_client_id.name),
'--keystore={}'.format(KEYSTORE_VERSION),
],
env={ACRA_MASTER_KEY_VAR_NAME: self.master_key},
timeout=PROCESS_CALL_TIMEOUT)
subprocess.check_call([
os.path.join(BINARY_OUTPUT_FOLDER, 'acra-keys'),
'read',
'--keys_dir={}'.format(self.dir_with_distinguished_name_client_id.name),
'--keys_dir_public={}'.format(self.dir_with_distinguished_name_client_id.name),
'client/testclientid/symmetric'
],
env={ACRA_MASTER_KEY_VAR_NAME: self.master_key},
timeout=PROCESS_CALL_TIMEOUT)
def test_read_keys_symmetric_zone(self):
zone = json.loads(subprocess.check_output(
[os.path.join(BINARY_OUTPUT_FOLDER, 'acra-addzone'), '--keys_output_dir={}'.format(self.dir_with_distinguished_name_client_id.name)],
cwd=os.getcwd(), timeout=PROCESS_CALL_TIMEOUT).decode('utf-8'))
subprocess.check_call([
os.path.join(BINARY_OUTPUT_FOLDER, 'acra-keys'),
'read',
'--keys_dir={}'.format(self.dir_with_distinguished_name_client_id.name),
'--keys_dir_public={}'.format(self.dir_with_distinguished_name_client_id.name),
'zone/{}/symmetric'.format(zone['id'])
],
env={ACRA_MASTER_KEY_VAR_NAME: self.master_key},
timeout=PROCESS_CALL_TIMEOUT)
def test_generate_client_id_from_serial_number(self):
readKey = self.read_key_by_client_id(TLS_CLIENT_ID_SOURCE_SERIAL, self.dir_with_serial_number_client_id.name)
self.assertTrue(readKey)
def read_key_by_client_id(self, extractor, dir_name):
cmd_output = json.loads(subprocess.check_output([
os.path.join(BINARY_OUTPUT_FOLDER, 'acra-keys'),
'extract-client-id',
'--tls_identifier_extractor_type={}'.format(extractor),
'--tls_cert={}'.format(TEST_TLS_SERVER_CERT),
'--print_json'
],
cwd=os.getcwd(), timeout=PROCESS_CALL_TIMEOUT).decode('utf-8'))
client_id = cmd_output['client_id']
readKey = subprocess.check_output([
os.path.join(BINARY_OUTPUT_FOLDER, 'acra-keys'),
'read',
'--keys_dir={}'.format(dir_name),
'--keys_dir_public={}'.format(dir_name),
'--public',
'client/{}/storage'.format(client_id),
],
cwd=os.getcwd(), timeout=PROCESS_CALL_TIMEOUT)
return readKey
def create_key_store_with_client_id_from_cert(self, extractor, dir_name):
"""Create new keystore of given version using acra-keys tool."""
subprocess.check_call([
os.path.join(BINARY_OUTPUT_FOLDER, 'acra-keys'),
'generate',
'--tls_cert={}'.format(TEST_TLS_SERVER_CERT),
'--tls_identifier_extractor_type={}'.format(extractor),
'--keys_dir={}'.format(dir_name),
'--keys_dir_public={}'.format(dir_name),
'--keystore={}'.format(KEYSTORE_VERSION),
],
env={ACRA_MASTER_KEY_VAR_NAME: self.master_key},
timeout=PROCESS_CALL_TIMEOUT)
class TestAcraKeysWithRedis(RedisMixin, unittest.TestCase):
def setUp(self):
self.checkSkip()
super().setUp()
def checkSkip(self):
if not TEST_WITH_REDIS:
self.skipTest("test only with Redis")
def test_read_command_keystore(self):
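        # Generate keys with acra-keymaker, which is expected to store them in Redis
        # (--redis_host_port), then read the public, symmetric and private keys back
        # with 'acra-keys read' over the same Redis instance.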
master_key = get_master_key()
client_id = 'keypair1'
subprocess.check_call(
[os.path.join(BINARY_OUTPUT_FOLDER, 'acra-keymaker'),
'--client_id={}'.format(client_id),
'--generate_acrawriter_keys',
'--generate_symmetric_storage_key',
'--redis_host_port=localhost:6379',
'--keystore={}'.format(KEYSTORE_VERSION)
],
env={ACRA_MASTER_KEY_VAR_NAME: master_key},
timeout=PROCESS_CALL_TIMEOUT)
subprocess.check_call([
os.path.join(BINARY_OUTPUT_FOLDER, 'acra-keys'),
'read',
'--public',
'--redis_host_port=localhost:6379',
'client/keypair1/storage'
],
env={ACRA_MASTER_KEY_VAR_NAME: master_key},
timeout=PROCESS_CALL_TIMEOUT)
subprocess.check_call([
os.path.join(BINARY_OUTPUT_FOLDER, 'acra-keys'),
'read',
'--redis_host_port=localhost:6379',
'client/keypair1/symmetric'
],
env={ACRA_MASTER_KEY_VAR_NAME: master_key},
timeout=PROCESS_CALL_TIMEOUT)
subprocess.check_call([
os.path.join(BINARY_OUTPUT_FOLDER, 'acra-keys'),
'read',
'--private',
'--redis_host_port=localhost:6379',
'client/keypair1/storage'
],
env={ACRA_MASTER_KEY_VAR_NAME: master_key},
timeout=PROCESS_CALL_TIMEOUT)
class TestPostgreSQLParseQueryErrorSkipExit(AcraCatchLogsMixin, BaseTestCase):
"""By default AcraServer skip any errors connected SQL parse queries failures.
It can be changed by --sql_parse_error_exit=true cmd param."""
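    # The tests below use a CTE query ('WITH ... AS (...) SELECT ...') as an example
    # of a statement that AcraServer's SQL parser cannot handle.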
def checkSkip(self):
if not TEST_POSTGRESQL:
self.skipTest("Only for postgresql")
super().checkSkip()
def executePreparedStatement(self, query):
return AsyncpgExecutor(ConnectionArgs(
host=get_db_host(), port=self.ACRASERVER_PORT, dbname=DB_NAME,
user=DB_USER, password=DB_USER_PASSWORD,
format=AsyncpgExecutor.BinaryFormat,
ssl_ca=TEST_TLS_CA,
ssl_key=TEST_TLS_CLIENT_KEY,
ssl_cert=TEST_TLS_CLIENT_CERT
)).execute_prepared_statement(query=query)
def read_public_key(self, extra_kwargs: dict = None):
return read_storage_public_key(TLS_CERT_CLIENT_ID_1, KEYS_FOLDER.name, extra_kwargs=extra_kwargs)
def insert_random_data(self):
row_id = get_random_id()
data = get_pregenerated_random_data()
public_key = self.read_public_key()
acra_struct = create_acrastruct(data.encode('ascii'), public_key)
self.engine1.execute(
test_table.insert(),
{'id': row_id, 'data': acra_struct, 'raw_data': data})
return row_id, data
def test_skip_error(self):
# First, let's put some test data into the table.
row_id_1, raw_data_1 = self.insert_random_data()
query = 'WITH test_with AS (SELECT 1) SELECT * FROM test'
        # The request should succeed: the parse error is only logged and skipped,
        # and (as asserted below) the returned data is still decrypted.
result = self.executePreparedStatement(query=query)
row = result[0]
self.assertEqual(row['id'], row_id_1)
self.assertEqual(row['data'], raw_data_1.encode('utf-8'))
self.assertEqual(row['empty'], b'')
self.assertIn("ignoring error of non parsed sql statement", self.read_log(self.acra))
class TestPostgreSQLParseQueryErrorExit(AcraCatchLogsMixin, BaseTestCase):
def checkSkip(self):
if not TEST_POSTGRESQL:
self.skipTest("Only for postgresql")
super().checkSkip()
def fork_acra(self, popen_kwargs: dict = None, **acra_kwargs: dict):
acra_kwargs['sql_parse_on_error_exit_enable'] = 'true'
return super(TestPostgreSQLParseQueryErrorExit, self).fork_acra(popen_kwargs, **acra_kwargs)
def executePreparedStatement(self, query):
return AsyncpgExecutor(ConnectionArgs(
host=get_db_host(), port=self.ACRASERVER_PORT, dbname=DB_NAME,
user=DB_USER, password=DB_USER_PASSWORD,
ssl_ca=TEST_TLS_CA,
ssl_key=TEST_TLS_CLIENT_KEY,
ssl_cert=TEST_TLS_CLIENT_CERT
)).execute_prepared_statement(query=query)
def test_exit_on_parse_error(self):
query = 'WITH test_with AS (SELECT 1) SELECT * FROM test'
        try:
            self.executePreparedStatement(query=query)
        except asyncpg.exceptions.ConnectionDoesNotExistError:
            self.assertIn("Can't parse SQL from Parse packet", self.read_log(self.acra))
class TestKeyRotation(BaseTestCase):
"""Verify key rotation without data reencryption."""
# TODO(ilammy, 2020-03-13): test with rotated zone keys as well
# That is, as soon as it is possible to rotate them (T1581)
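    # "Rotation" here means generating a new storage keypair for the same client ID;
    # old keys stay in the keystore, so AcraServer is expected to keep decrypting
    # data encrypted with the previous public key without reencrypting it.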
def read_rotation_public_key(self, extra_kwargs: dict = None):
return read_storage_public_key(TLS_CERT_CLIENT_ID_1, KEYS_FOLDER.name, extra_kwargs=extra_kwargs)
def create_keypair(self, extra_kwargs: dict = None):
create_client_keypair(TLS_CERT_CLIENT_ID_1, only_storage=True, extra_kwargs=extra_kwargs)
def test_read_after_rotation(self):
"""Verify that AcraServer can decrypt data with old keys."""
def insert_random_data():
row_id = get_random_id()
data = get_pregenerated_random_data()
public_key = self.read_rotation_public_key()
acra_struct = create_acrastruct(data.encode('ascii'), public_key)
self.engine1.execute(
test_table.insert(),
{'id': row_id, 'data': acra_struct, 'raw_data': data})
return row_id, data
# First, let's put some test data into the table.
row_id_1, raw_data_1 = insert_random_data()
# After that rotate the storage key for the client,
# but don't touch the encrypted data.
self.create_keypair()
# Insert some more data encrypted with the new key.
row_id_2, raw_data_2 = insert_random_data()
# It should return expected decrypted data.
result = self.engine1.execute(
sa.select([test_table])
.where(test_table.c.id == row_id_1))
row = result.fetchone()
self.assertEqual(row['data'], raw_data_1.encode('utf-8'))
self.assertEqual(row['empty'], b'')
result = self.engine1.execute(
sa.select([test_table])
.where(test_table.c.id == row_id_2))
row = result.fetchone()
self.assertEqual(row['data'], raw_data_2.encode('utf-8'))
self.assertEqual(row['empty'], b'')
class TestAcraRollback(BaseTestCase):
DATA_COUNT = 5
def checkSkip(self):
super(TestAcraRollback, self).checkSkip()
go_version = get_go_version()
GREATER, EQUAL, LESS = (1, 0, -1)
if semver.compare(go_version, ACRAROLLBACK_MIN_VERSION) == LESS:
self.skipTest("not supported go version")
def setUp(self):
self.checkSkip()
self.engine_raw = sa.create_engine(
'{}://{}:{}/{}'.format(DB_DRIVER, DB_HOST, DB_PORT,
DB_NAME),
connect_args=connect_args)
metadata.create_all(self.engine_raw)
self.output_filename = 'acra-rollback_output.txt'
acrarollback_output_table.create(self.engine_raw, checkfirst=True)
if TEST_WITH_TLS:
self.sslmode='require'
else:
self.sslmode='disable'
if TEST_MYSQL:
# https://github.com/go-sql-driver/mysql/
connection_string = "{user}:{password}@tcp({host}:{port})/{dbname}".format(
user=DB_USER, password=DB_USER_PASSWORD, dbname=DB_NAME,
port=DB_PORT, host=DB_HOST
)
# https://github.com/ziutek/mymysql
# connection_string = "tcp:{host}:{port}*{dbname}/{user}/{password}".format(
# user=DB_USER, password=DB_USER_PASSWORD, dbname=DB_NAME,
# port=DB_PORT, host=DB_HOST
# )
else:
connection_string = (
'dbname={dbname} user={user} '
'sslmode={sslmode} password={password} host={host} '
'port={port}').format(
sslmode=self.sslmode, dbname=DB_NAME,
user=DB_USER, port=DB_PORT,
password=DB_USER_PASSWORD, host=DB_HOST
)
if TEST_MYSQL:
self.placeholder = "?"
DB_ARGS = ['--mysql_enable']
else:
self.placeholder = "$1"
DB_ARGS = ['--postgresql_enable']
self.default_acrarollback_args = [
'--client_id=keypair1',
'--connection_string={}'.format(connection_string),
'--output_file={}'.format(self.output_filename),
'--keys_dir={}'.format(KEYS_FOLDER.name),
] + DB_ARGS
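        # The resulting invocation is roughly (a sketch; values substituted from the test env):
        #   acra-rollback --client_id=keypair1 --connection_string=<dsn> \
        #       --output_file=acra-rollback_output.txt --keys_dir=<keys dir> \
        #       --postgresql_enable | --mysql_enable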
def tearDown(self):
try:
self.engine_raw.execute(acrarollback_output_table.delete())
self.engine_raw.execute(test_table.delete())
except Exception as exc:
print(exc)
self.engine_raw.dispose()
if os.path.exists(self.output_filename):
os.remove(self.output_filename)
def run_acrarollback(self, extra_args):
args = [os.path.join(BINARY_OUTPUT_FOLDER, 'acra-rollback')] + self.default_acrarollback_args + extra_args
try:
subprocess.check_call(
args, cwd=os.getcwd(), timeout=PROCESS_CALL_TIMEOUT)
except subprocess.CalledProcessError as exc:
if exc.stderr:
print(exc.stderr, file=sys.stderr)
else:
print(exc.stdout, file=sys.stdout)
raise
def test_without_zone_to_file(self):
server_public1 = read_storage_public_key(TLS_CERT_CLIENT_ID_1, KEYS_FOLDER.name)
rows = []
for _ in range(self.DATA_COUNT):
data = get_pregenerated_random_data()
row = {
'raw_data': data,
'data': create_acrastruct(data.encode('ascii'), server_public1),
'id': get_random_id()
}
rows.append(row)
self.engine_raw.execute(test_table.insert(), rows)
args = [
'--select=select data from {};'.format(test_table.name),
'--insert=insert into {} values({});'.format(
acrarollback_output_table.name, self.placeholder)
]
self.run_acrarollback(args)
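        # acra-rollback is expected to decrypt the selected AcraStructs and write one
        # INSERT statement per row (using the --insert template above) into the output file.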
        # execute the generated SQL file line by line
with open(self.output_filename, 'r') as f:
for line in f:
self.engine_raw.execute(line)
source_data = set([i['raw_data'].encode('ascii') for i in rows])
result = self.engine_raw.execute(acrarollback_output_table.select())
result = result.fetchall()
for data in result:
self.assertIn(data[0], source_data)
def test_with_zone_to_file(self):
zone_public = b64decode(zones[0][ZONE_PUBLIC_KEY].encode('ascii'))
rows = []
for _ in range(self.DATA_COUNT):
data = get_pregenerated_random_data()
row = {
'raw_data': data,
'data': create_acrastruct(
data.encode('ascii'), zone_public,
context=zones[0][ZONE_ID].encode('ascii')),
'id': get_random_id()
}
rows.append(row)
self.engine_raw.execute(test_table.insert(), rows)
if TEST_MYSQL:
select_query = '--select=select \'{id}\', data from {table};'.format(
id=zones[0][ZONE_ID], table=test_table.name)
else:
select_query = '--select=select \'{id}\'::bytea, data from {table};'.format(
id=zones[0][ZONE_ID], table=test_table.name)
args = [
select_query,
'--zonemode_enable=true',
'--insert=insert into {} values({});'.format(
acrarollback_output_table.name, self.placeholder)
]
self.run_acrarollback(args)
        # execute the generated SQL file line by line
with open(self.output_filename, 'r') as f:
for line in f:
self.engine_raw.execute(line)
source_data = set([i['raw_data'].encode('ascii') for i in rows])
result = self.engine_raw.execute(acrarollback_output_table.select())
result = result.fetchall()
for data in result:
self.assertIn(data[0], source_data)
def test_without_zone_execute(self):
server_public1 = read_storage_public_key(TLS_CERT_CLIENT_ID_1, KEYS_FOLDER.name)
rows = []
for _ in range(self.DATA_COUNT):
data = get_pregenerated_random_data()
row = {
'raw_data': data,
'data': create_acrastruct(data.encode('ascii'), server_public1),
'id': get_random_id()
}
rows.append(row)
self.engine_raw.execute(test_table.insert(), rows)
args = [
'--execute=true',
'--select=select data from {};'.format(test_table.name),
'--insert=insert into {} values({});'.format(
acrarollback_output_table.name, self.placeholder)
]
self.run_acrarollback(args)
source_data = set([i['raw_data'].encode('ascii') for i in rows])
result = self.engine_raw.execute(acrarollback_output_table.select())
result = result.fetchall()
for data in result:
self.assertIn(data[0], source_data)
def test_with_zone_execute(self):
zone_public = b64decode(zones[0][ZONE_PUBLIC_KEY].encode('ascii'))
rows = []
for _ in range(self.DATA_COUNT):
data = get_pregenerated_random_data()
row = {
'raw_data': data,
'data': create_acrastruct(
data.encode('ascii'), zone_public,
context=zones[0][ZONE_ID].encode('ascii')),
'id': get_random_id()
}
rows.append(row)
self.engine_raw.execute(test_table.insert(), rows)
if TEST_MYSQL:
select_query = '--select=select \'{id}\', data from {table};'.format(
id=zones[0][ZONE_ID], table=test_table.name)
else:
select_query = '--select=select \'{id}\'::bytea, data from {table};'.format(
id=zones[0][ZONE_ID], table=test_table.name)
args = [
'--execute=true',
select_query,
'--zonemode_enable=true',
'--insert=insert into {} values({});'.format(
acrarollback_output_table.name, self.placeholder)
]
self.run_acrarollback(args)
source_data = set([i['raw_data'].encode('ascii') for i in rows])
result = self.engine_raw.execute(acrarollback_output_table.select())
result = result.fetchall()
for data in result:
self.assertIn(data[0], source_data)
def test_without_placeholder(self):
args = [os.path.join(BINARY_OUTPUT_FOLDER, 'acra-rollback'),
'--execute=true',
'--select=select data from {};'.format(test_table.name),
'--insert=query without placeholders;',
'--postgresql_enable',
'--keys_dir={}'.format(KEYS_FOLDER.name),
]
log_file = tempfile.NamedTemporaryFile('w+', encoding='utf-8')
popen_args = {
'stderr': subprocess.PIPE,
'stdout': subprocess.PIPE,
'close_fds': True
}
process = subprocess.Popen(args, **popen_args)
_, err = process.communicate(timeout=5)
stop_process(process)
self.assertIn(b"SQL INSERT statement doesn't contain any placeholders", err)
def test_with_rotated_keys(self):
# TODO(ilammy, 2020-03-13): test with rotated zone keys as well
# That is, as soon as it is possible to rotate them (T1581)
def insert_random_data():
rows = []
public_key = read_storage_public_key(TLS_CERT_CLIENT_ID_1, KEYS_FOLDER.name)
for _ in range(self.DATA_COUNT):
data = get_pregenerated_random_data()
row = {
'raw_data': data,
'data': create_acrastruct(data.encode('ascii'), public_key),
'id': get_random_id()
}
rows.append(row)
self.engine_raw.execute(test_table.insert(), rows)
return rows
# Insert some encrypted test data into the table
rows = insert_random_data()
# Rotate storage keys for 'keypair1'
create_client_keypair('keypair1', only_storage=True)
# Insert some more data encrypted with new key
rows = rows + insert_random_data()
# Run acra-rollback for the test table
self.run_acrarollback([
'--select=select data from {};'.format(test_table.name),
'--insert=insert into {} values({});'.format(
acrarollback_output_table.name, self.placeholder)
])
# Rollback should successfully use previous keys to decrypt data
source_data = set([i['raw_data'].encode('ascii') for i in rows])
result = self.engine_raw.execute(acrarollback_output_table.select())
result = result.fetchall()
for data in result:
self.assertIn(data[0], source_data)
class TestAcraKeyMakers(unittest.TestCase):
def test_only_alpha_client_id(self):
# call with directory separator in key name
self.assertEqual(create_client_keypair(POISON_KEY_PATH), 1)
class SSLPostgresqlMixin(AcraCatchLogsMixin):
ACRASERVER2_PORT = BaseTestCase.ACRASERVER_PORT + 1000
ACRASERVER2_PROMETHEUS_PORT = BaseTestCase.ACRASERVER_PROMETHEUS_PORT + 1000
DEBUG_LOG = True
def with_tls(self):
return False
def get_acraserver_connection_string(self, port=None):
return get_tcp_connection_string(port if port else self.ACRASERVER_PORT)
def wait_acraserver_connection(self, *args, **kwargs):
wait_connection(self.ACRASERVER_PORT)
def checkSkip(self):
if not (TEST_WITH_TLS and TEST_POSTGRESQL):
self.skipTest("running tests without TLS")
def get_ssl_engine(self):
return sa.create_engine(
get_postgresql_tcp_connection_string(self.ACRASERVER2_PORT, DB_NAME),
connect_args=get_connect_args(port=self.ACRASERVER2_PORT, sslmode='require'))
def testConnectionCloseOnTls(self):
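        # acra2 is started without TLS settings (see setUp), so the TLS handshake must
        # fail and the connection is expected to be closed with
        # 'tls: no certificates configured' in the log.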
engine = self.get_ssl_engine()
try:
with self.assertRaises(sa.exc.OperationalError):
with engine.connect():
pass
self.log_files[self.acra2].flush()
self.assertIn('tls: no certificates configured', self.read_log(self.acra2))
finally:
engine.dispose()
    def setUp(self):
        """Connect directly to AcraServer: use sslmode=require for connections and the
        TCP protocol on the Acra side, because PostgreSQL supports TLS only over TCP.
        """
        self.checkSkip()
try:
if not self.EXTERNAL_ACRA:
self.acra = self.fork_acra(
tls_key=abs_path(TEST_TLS_SERVER_KEY),
tls_cert=abs_path(TEST_TLS_SERVER_CERT),
tls_ca=TEST_TLS_CA,
client_id=TLS_CERT_CLIENT_ID_1)
# create second acra without settings for tls to check that
# connection will be closed on tls handshake
self.acra2 = self.fork_acra(
client_id=TLS_CERT_CLIENT_ID_1,
incoming_connection_api_string=self.get_acraserver_api_connection_string(port=self.ACRASERVER2_PORT+5),
incoming_connection_port=self.ACRASERVER2_PORT,
incoming_connection_prometheus_metrics_string=self.get_prometheus_address(self.ACRASERVER2_PROMETHEUS_PORT))
self.engine1 = sa.create_engine(
get_postgresql_tcp_connection_string(self.ACRASERVER_PORT, DB_NAME), connect_args=get_connect_args(port=self.ACRASERVER_PORT))
self.engine_raw = sa.create_engine(
'{}://{}:{}/{}'.format(DB_DRIVER, DB_HOST, DB_PORT, DB_NAME),
connect_args=get_connect_args(DB_PORT))
            # test cases from HexFormatTest expect two engines with different client_ids,
            # but here one engine plus the raw connection is enough
self.engine2 = self.engine_raw
self.engines = [self.engine1, self.engine_raw]
metadata.create_all(self.engine_raw)
self.engine_raw.execute('delete from test;')
for engine in self.engines:
count = 0
                # retry with a sleep in case acra is not up yet
while True:
try:
engine.execute(
"UPDATE pg_settings SET setting = '{}' "
"WHERE name = 'bytea_output'".format(self.DB_BYTEA))
break
except Exception:
time.sleep(SETUP_SQL_COMMAND_TIMEOUT)
count += 1
if count == SQL_EXECUTE_TRY_COUNT:
raise
except:
self.tearDown()
raise
def tearDown(self):
super(SSLPostgresqlMixin, self).tearDown()
try:
self.engine_raw.execute('delete from test;')
except:
traceback.print_exc()
try:
for engine in getattr(self, 'engines', []):
engine.dispose()
except:
traceback.print_exc()
if not self.EXTERNAL_ACRA:
for process in [getattr(self, attr)
for attr in ['acra', 'acra2']
if hasattr(self, attr)]:
stop_process(process)
class SSLPostgresqlConnectionTest(SSLPostgresqlMixin, HexFormatTest):
pass
class SSLPostgresqlConnectionWithZoneTest(SSLPostgresqlMixin, ZoneHexFormatTest):
pass
class SSLMysqlMixin(SSLPostgresqlMixin):
def checkSkip(self):
if not (TEST_WITH_TLS and TEST_MYSQL):
self.skipTest("running tests without TLS")
def get_ssl_engine(self):
return sa.create_engine(
get_postgresql_tcp_connection_string(self.ACRASERVER2_PORT, DB_NAME),
connect_args=get_connect_args(
port=self.ACRASERVER2_PORT, ssl=self.driver_to_acraserver_ssl_settings))
    def setUp(self):
        """Connect directly to AcraServer: use SSL for the connections and the TCP
        protocol on the Acra side.
        """
        self.checkSkip()
try:
if not self.EXTERNAL_ACRA:
self.acra = self.fork_acra(
tls_key=abs_path(TEST_TLS_SERVER_KEY),
tls_cert=abs_path(TEST_TLS_SERVER_CERT),
tls_ca=TEST_TLS_CA,
tls_auth=ACRA_TLS_AUTH,
#tls_db_sni="127.0.0.1",
client_id=TLS_CERT_CLIENT_ID_1)
# create second acra without settings for tls to check that
# connection will be closed on tls handshake
self.acra2 = self.fork_acra(
client_id=TLS_CERT_CLIENT_ID_1,
incoming_connection_port=self.ACRASERVER2_PORT,
incoming_connection_api_string=self.get_acraserver_api_connection_string(port=self.ACRASERVER2_PORT+5),
incoming_connection_prometheus_metrics_string=self.get_prometheus_address(
self.ACRASERVER2_PROMETHEUS_PORT))
self.driver_to_acraserver_ssl_settings = {
'ca': TEST_TLS_CA,
'cert': TEST_TLS_CLIENT_CERT,
'key': TEST_TLS_CLIENT_KEY,
'check_hostname': False
}
self.engine_raw = sa.create_engine(
'{}://{}:{}/{}'.format(DB_DRIVER, DB_HOST,
DB_PORT, DB_NAME),
                # don't provide any client certificates to the driver that connects
                # directly to mysql, to avoid certificate verification by the mysql server
connect_args=get_connect_args(DB_PORT, ssl={'ca': None}))
self.engine1 = sa.create_engine(
get_postgresql_tcp_connection_string(self.ACRASERVER_PORT, DB_NAME),
connect_args=get_connect_args(
port=self.ACRASERVER_PORT, ssl=self.driver_to_acraserver_ssl_settings))
                # test cases from HexFormatTest expect two engines with different
                # client_ids, but here one engine plus the raw connection is enough
self.engine2 = self.engine_raw
self.engines = [self.engine1, self.engine_raw]
metadata.create_all(self.engine_raw)
self.engine_raw.execute('delete from test;')
for engine in self.engines:
count = 0
                # retry with a sleep in case acra is not up yet
while True:
try:
engine.execute("select 1")
break
except Exception:
time.sleep(SETUP_SQL_COMMAND_TIMEOUT)
count += 1
if count == SQL_EXECUTE_TRY_COUNT:
raise
except:
self.tearDown()
raise
class SSLMysqlConnectionTest(SSLMysqlMixin, HexFormatTest):
pass
class SSLMysqlConnectionWithZoneTest(SSLMysqlMixin, ZoneHexFormatTest):
pass
class BasePrepareStatementMixin:
def checkSkip(self):
return
def executePreparedStatement(self, query):
raise NotImplementedError
def testClientRead(self):
"""test decrypting with correct client_id and not decrypting with
incorrect client_id or using direct connection to db"""
client_id = TLS_CERT_CLIENT_ID_1
server_public1 = read_storage_public_key(client_id, KEYS_FOLDER.name)
data = get_pregenerated_random_data()
acra_struct = create_acrastruct(
data.encode('ascii'), server_public1)
row_id = get_random_id()
self.log(storage_client_id=client_id,
data=acra_struct, expected=data.encode('ascii'))
self.engine1.execute(
test_table.insert(),
{'id': row_id, 'data': acra_struct, 'raw_data': data})
query = sa.select([test_table]).where(test_table.c.id == row_id).compile(compile_kwargs={"literal_binds": True}).string
row = self.executePreparedStatement(query)[0]
self.assertEqual(row['data'], safe_string(row['raw_data']).encode('utf-8'))
self.assertEqual(row['empty'], b'')
result = self.engine2.execute(
sa.select([test_table])
.where(test_table.c.id == row_id))
row = result.fetchone()
self.assertNotEqual(row['data'].decode('ascii', errors='ignore'),
row['raw_data'])
self.assertEqual(row['empty'], b'')
result = self.engine_raw.execute(
sa.select([test_table])
.where(test_table.c.id == row_id))
row = result.fetchone()
self.assertNotEqual(row['data'].decode('ascii', errors='ignore'),
row['raw_data'])
self.assertEqual(row['empty'], b'')
def testReadAcrastructInAcrastruct(self):
"""test correct decrypting acrastruct when acrastruct concatenated to
partial another acrastruct"""
client_id = TLS_CERT_CLIENT_ID_1
server_public1 = read_storage_public_key(client_id, KEYS_FOLDER.name)
incorrect_data = get_pregenerated_random_data()
correct_data = get_pregenerated_random_data()
suffix_data = get_pregenerated_random_data()[:10]
fake_offset = (3+45+84) - 4
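        # fake_offset is the length of a deliberately truncated AcraStruct prefix;
        # the truncated "outer" AcraStruct can never be decrypted on its own and is
        # expected to pass through unchanged.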
fake_acra_struct = create_acrastruct(
incorrect_data.encode('ascii'), server_public1)[:fake_offset]
inner_acra_struct = create_acrastruct(
correct_data.encode('ascii'), server_public1)
data = fake_acra_struct + inner_acra_struct + suffix_data.encode('ascii')
correct_data = correct_data + suffix_data
row_id = get_random_id()
self.log(storage_client_id=client_id,
data=data,
expected=fake_acra_struct+correct_data.encode('ascii'))
self.engine1.execute(
test_table.insert(),
{'id': row_id, 'data': data, 'raw_data': correct_data})
query = (sa.select([test_table])
.where(test_table.c.id == row_id)
.compile(compile_kwargs={"literal_binds": True}).string)
row = self.executePreparedStatement(query)[0]
try:
self.assertEqual(row['data'][fake_offset:],
safe_string(row['raw_data']).encode('utf-8'))
self.assertEqual(row['data'][:fake_offset], fake_acra_struct[:fake_offset])
self.assertEqual(row['empty'], b'')
except:
print('incorrect data: {}\ncorrect data: {}\ndata: {}\n data len: {}'.format(
incorrect_data, correct_data, row['data'], len(row['data'])))
raise
result = self.engine2.execute(
sa.select([test_table])
.where(test_table.c.id == row_id))
row = result.fetchone()
self.assertNotEqual(row['data'][fake_offset:].decode('ascii', errors='ignore'),
row['raw_data'])
self.assertEqual(row['empty'], b'')
result = self.engine_raw.execute(
sa.select([test_table])
.where(test_table.c.id == row_id))
row = result.fetchone()
self.assertNotEqual(row['data'][fake_offset:].decode('ascii', errors='ignore'),
row['raw_data'])
self.assertEqual(row['empty'], b'')
class TestMysqlTextPreparedStatement(BasePrepareStatementMixin, BaseTestCase):
def checkSkip(self):
if not TEST_MYSQL:
self.skipTest("run test only for mysql")
elif not TEST_WITH_TLS:
self.skipTest("running tests only with TLS")
def executePreparedStatement(self, query):
return PyMysqlExecutor(
ConnectionArgs(host=get_db_host(), port=self.ACRASERVER_PORT,
user=DB_USER, password=DB_USER_PASSWORD,
dbname=DB_NAME, ssl_ca=TEST_TLS_CA,
ssl_key=TEST_TLS_CLIENT_KEY,
ssl_cert=TEST_TLS_CLIENT_CERT)
).execute_prepared_statement(query)
class TestMysqlTextPreparedStatementWholeCell(TestMysqlTextPreparedStatement):
WHOLECELL_MODE = True
class TestMysqlBinaryPreparedStatement(BasePrepareStatementMixin, BaseTestCase):
def checkSkip(self):
if not TEST_MYSQL:
self.skipTest("run test only for mysql")
elif not TEST_WITH_TLS:
self.skipTest("running tests only with TLS")
def executePreparedStatement(self, query, args=None):
return MysqlExecutor(
ConnectionArgs(host=get_db_host(), port=self.ACRASERVER_PORT,
user=DB_USER, password=DB_USER_PASSWORD,
dbname=DB_NAME, ssl_ca=TEST_TLS_CA,
ssl_key=TEST_TLS_CLIENT_KEY,
ssl_cert=TEST_TLS_CLIENT_CERT)
).execute_prepared_statement(query, args=args)
class TestMysqlBinaryPreparedStatementWholeCell(TestMysqlBinaryPreparedStatement):
WHOLECELL_MODE = True
class TestPostgresqlTextPreparedStatement(BasePrepareStatementMixin, BaseTestCase):
def checkSkip(self):
if not TEST_POSTGRESQL:
self.skipTest("run test only for postgresql")
elif not TEST_WITH_TLS:
self.skipTest("running tests only with TLS")
def executePreparedStatement(self, query, args=None):
if not args:
args = []
return Psycopg2Executor(ConnectionArgs(host=get_db_host(), port=self.ACRASERVER_PORT,
user=DB_USER, password=DB_USER_PASSWORD,
dbname=DB_NAME, ssl_ca=TEST_TLS_CA,
ssl_key=TEST_TLS_CLIENT_KEY,
ssl_cert=TEST_TLS_CLIENT_CERT)
).execute_prepared_statement(query, args)
class TestPostgresqlTextPreparedStatementWholeCell(TestPostgresqlTextPreparedStatement):
WHOLECELL_MODE = True
class TestPostgresqlBinaryPreparedStatement(BaseBinaryPostgreSQLTestCase, BasePrepareStatementMixin):
def executePreparedStatement(self, query):
return self.executor1.execute_prepared_statement(query)
class TestPostgresqlBinaryPreparedStatementWholeCell(TestPostgresqlBinaryPreparedStatement):
WHOLECELL_MODE = True
class ProcessContextManager(object):
"""wrap subprocess.Popen result to use as context manager that call
stop_process on __exit__
"""
def __init__(self, process):
self.process = process
def __enter__(self):
return self.process
def __exit__(self, exc_type, exc_val, exc_tb):
stop_process(self.process)
class TestClientIDDecryptionWithVaultMasterKeyLoader(HashiCorpVaultMasterKeyLoaderMixin, HexFormatTest):
pass
class TestZoneIDDecryptionWithVaultMasterKeyLoader(HashiCorpVaultMasterKeyLoaderMixin, ZoneHexFormatTest):
pass
class AcraTranslatorTest(AcraTranslatorMixin, BaseTestCase):
def apiEncryptionTest(self, request_func, use_http=False, use_grpc=False):
        # exactly one transport must be selected
        self.assertTrue(use_http or use_grpc)
        # enabling both at once is not acceptable
        self.assertFalse(use_http and use_grpc)
translator_port = 3456
key_folder = tempfile.TemporaryDirectory()
try:
client_id = extract_client_id_from_cert(tls_cert=TEST_TLS_CLIENT_CERT, extractor=self.get_identifier_extractor_type())
self.assertEqual(create_client_keypair_from_certificate(tls_cert=TEST_TLS_CLIENT_CERT,
extractor=self.get_identifier_extractor_type(), keys_dir=key_folder.name), 0)
data = get_pregenerated_random_data().encode('ascii')
client_id_private_key = read_storage_private_key(key_folder.name, client_id)
connection_string = 'tcp://127.0.0.1:{}'.format(translator_port)
translator_kwargs = {
'incoming_connection_http_string': connection_string if use_http else '',
                # disable grpc when it is not used to avoid connection checks against it
'incoming_connection_grpc_string': connection_string if use_grpc else '',
'tls_key': abs_path(TEST_TLS_SERVER_KEY),
'tls_cert': abs_path(TEST_TLS_SERVER_CERT),
'tls_ca': TEST_TLS_CA,
'keys_dir': key_folder.name,
'tls_identifier_extractor_type': self.get_identifier_extractor_type(),
'acratranslator_client_id_from_connection_enable': 'true',
'tls_ocsp_from_cert': 'ignore',
'tls_crl_from_cert': 'ignore',
}
incorrect_client_id = TLS_CERT_CLIENT_ID_2
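            # With acratranslator_client_id_from_connection_enable=true the translator derives
            # the client ID from the TLS certificate, so the incorrect_client_id passed to
            # request_func is expected to be ignored and the cert-derived keys are used instead.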
with ProcessContextManager(self.fork_translator(translator_kwargs)):
response = request_func(translator_port, incorrect_client_id, None, data)
decrypted = deserialize_and_decrypt_acrastruct(response, client_id_private_key, client_id)
self.assertEqual(data, decrypted)
finally:
shutil.rmtree(key_folder.name)
def apiDecryptionTest(self, request_func, use_http=False, use_grpc=False):
        # exactly one transport must be selected
        self.assertTrue(use_http or use_grpc)
        # enabling both at once is not acceptable
        self.assertFalse(use_http and use_grpc)
translator_port = 3456
key_folder = tempfile.TemporaryDirectory()
try:
client_id = extract_client_id_from_cert(tls_cert=TEST_TLS_CLIENT_CERT, extractor=self.get_identifier_extractor_type())
self.assertEqual(create_client_keypair_from_certificate(tls_cert=TEST_TLS_CLIENT_CERT,
extractor=self.get_identifier_extractor_type(), keys_dir=key_folder.name), 0)
data = get_pregenerated_random_data().encode('ascii')
encryption_key = read_storage_public_key(client_id, keys_dir=key_folder.name)
acrastruct = create_acrastruct(data, encryption_key)
connection_string = 'tcp://127.0.0.1:{}'.format(translator_port)
translator_kwargs = {
'incoming_connection_http_string': connection_string if use_http else '',
                # disable grpc when it is not used to avoid connection checks against it
'incoming_connection_grpc_string': connection_string if use_grpc else '',
'tls_key': abs_path(TEST_TLS_SERVER_KEY),
'tls_cert': abs_path(TEST_TLS_SERVER_CERT),
'tls_ca': TEST_TLS_CA,
'keys_dir': key_folder.name,
'tls_identifier_extractor_type': self.get_identifier_extractor_type(),
'acratranslator_client_id_from_connection_enable': 'true',
'tls_ocsp_from_cert': 'ignore',
'tls_crl_from_cert': 'ignore',
}
incorrect_client_id = TLS_CERT_CLIENT_ID_2
with ProcessContextManager(self.fork_translator(translator_kwargs)):
response = request_func(translator_port, incorrect_client_id, None, acrastruct)
self.assertEqual(data, response)
finally:
shutil.rmtree(key_folder.name)
def testHTTPSApiResponses(self):
translator_port = 3456
data = get_pregenerated_random_data().encode('ascii')
encryption_key = read_storage_public_key(
TLS_CERT_CLIENT_ID_1, keys_dir=KEYS_FOLDER.name)
acrastruct = create_acrastruct(data, encryption_key)
connection_string = 'tcp://127.0.0.1:{}'.format(translator_port)
translator_kwargs = {
'incoming_connection_http_string': connection_string,
'tls_key': abs_path(TEST_TLS_SERVER_KEY),
'tls_cert': abs_path(TEST_TLS_SERVER_CERT),
'tls_ca': TEST_TLS_CA,
'tls_identifier_extractor_type': TLS_CLIENT_ID_SOURCE_DN,
'acratranslator_client_id_from_connection_enable': 'true',
'tls_ocsp_from_cert': 'ignore',
'tls_crl_from_cert': 'ignore',
}
api_url = 'https://localhost:{}/v1/decrypt'.format(translator_port)
with ProcessContextManager(self.fork_translator(translator_kwargs)):
cert = (TEST_TLS_CLIENT_CERT, TEST_TLS_CLIENT_KEY)
# test incorrect HTTP method
response = requests.get(api_url, data=acrastruct, cert=cert, verify=TEST_TLS_CA,
timeout=REQUEST_TIMEOUT)
self.assertEqual(
response.status_code, http.HTTPStatus.METHOD_NOT_ALLOWED)
self.assertIn('405 method not allowed'.lower(),
response.text.lower())
self.assertEqual(response.headers['Content-Type'], 'text/plain')
# test without api version
without_version_api_url = api_url.replace('v1/', '')
response = requests.post(
without_version_api_url, data=acrastruct, cert=cert, verify=TEST_TLS_CA,
timeout=REQUEST_TIMEOUT)
self.assertEqual(response.status_code, http.HTTPStatus.NOT_FOUND)
self.assertIn('404 Page Not Found'.lower(), response.text.lower())
self.assertEqual(response.headers['Content-Type'], 'text/plain')
# incorrect version
without_version_api_url = api_url.replace('v1/', 'v3/')
response = requests.post(
without_version_api_url, data=acrastruct, cert=cert, verify=TEST_TLS_CA,
timeout=REQUEST_TIMEOUT)
self.assertEqual(response.status_code,
http.HTTPStatus.NOT_FOUND)
self.assertIn('404 Page Not Found'.lower(), response.text.lower())
self.assertEqual(response.headers['Content-Type'], 'text/plain')
# incorrect url
incorrect_url = 'https://localhost:{}/v1/someurl'.format(translator_port)
response = requests.post(
incorrect_url, data=acrastruct, cert=cert, verify=TEST_TLS_CA, timeout=REQUEST_TIMEOUT)
self.assertEqual(
response.status_code, http.HTTPStatus.NOT_FOUND)
self.assertEqual('404 Page Not Found'.lower(), response.text.lower())
self.assertEqual(response.headers['Content-Type'], 'text/plain')
# without acrastruct (http body), pass empty byte array as data
response = requests.post(api_url, data=b'', cert=cert, verify=TEST_TLS_CA,
timeout=REQUEST_TIMEOUT)
self.assertEqual(response.status_code,
http.HTTPStatus.UNPROCESSABLE_ENTITY)
self.assertIn("Can't decrypt AcraStruct".lower(),
response.text.lower())
self.assertEqual(response.headers['Content-Type'], 'text/plain; charset=utf-8')
# test with correct acrastruct
response = requests.post(api_url, data=acrastruct, cert=cert, verify=TEST_TLS_CA,
timeout=REQUEST_TIMEOUT)
self.assertEqual(data, response.content)
self.assertEqual(response.status_code, http.HTTPStatus.OK)
self.assertEqual(response.headers['Content-Type'],
'application/octet-stream')
def testGRPCApi(self):
self.apiDecryptionTest(self.grpc_decrypt_request, use_grpc=True)
self.apiEncryptionTest(self.grpc_encrypt_request, use_grpc=True)
def testHTTPApi(self):
self.apiDecryptionTest(self.http_decrypt_request, use_http=True)
self.apiEncryptionTest(self.http_encrypt_request, use_http=True)
class TestTranslatorDisableCachedOnStartup(AcraTranslatorMixin, BaseTestCase):
def checkSkip(self):
super().checkSkip()
if KEYSTORE_VERSION == 'v2':
self.skipTest("test only for keystore Version v1")
def setUp(self):
self.cached_dir = tempfile.TemporaryDirectory()
# fill temp dir with all keys
copy_tree(KEYS_FOLDER.name, self.cached_dir.name)
super().setUp()
def fork_translator(self, translator_kwargs, popen_kwargs=None):
args = {
'keystore_cache_on_start_enable': 'false',
'keys_dir': self.cached_dir.name
}
translator_kwargs.update(args)
return super().fork_translator(translator_kwargs, popen_kwargs)
def testApiEncryptionDisableCacheWithoutKeysDir(self):
translator_port = 3456
client_id = extract_client_id_from_cert(tls_cert=TEST_TLS_CLIENT_CERT, extractor=self.get_identifier_extractor_type())
self.assertEqual(create_client_keypair_from_certificate(tls_cert=TEST_TLS_CLIENT_CERT,
extractor=self.get_identifier_extractor_type(), keys_dir=self.cached_dir.name), 0)
data = get_pregenerated_random_data().encode('ascii')
client_id_private_key = read_storage_private_key(self.cached_dir.name, client_id)
connection_string = 'tcp://127.0.0.1:{}'.format(translator_port)
translator_kwargs = {
'incoming_connection_http_string': connection_string,
'tls_key': abs_path(TEST_TLS_SERVER_KEY),
'tls_cert': abs_path(TEST_TLS_SERVER_CERT),
'tls_ca': TEST_TLS_CA,
'keys_dir': self.cached_dir.name,
'tls_identifier_extractor_type': self.get_identifier_extractor_type(),
'acratranslator_client_id_from_connection_enable': 'true',
'tls_ocsp_from_cert': 'ignore',
'tls_crl_from_cert': 'ignore',
}
incorrect_client_id = TLS_CERT_CLIENT_ID_2
with ProcessContextManager(self.fork_translator(translator_kwargs)):
self.cached_dir.cleanup()
response = self.http_encrypt_request(translator_port, incorrect_client_id, None, data)
            # we can't encrypt data because the translator doesn't have access to the
            # encryption key when keystore caching is disabled
self.assertEqual(response, b"Can't encrypt data")
with self.assertRaises(ValueError):
deserialize_and_decrypt_acrastruct(response, client_id_private_key, client_id)
class TestTranslatorEnableCachedOnStartup(AcraTranslatorMixin, BaseTestCase):
def checkSkip(self):
super().checkSkip()
if KEYSTORE_VERSION == 'v2':
self.skipTest("test only for keystore Version v1")
def setUp(self):
self.cached_dir = tempfile.TemporaryDirectory()
# fill temp dir with all keys
copy_tree(KEYS_FOLDER.name, self.cached_dir.name)
super().setUp()
def fork_translator(self, translator_kwargs, popen_kwargs=None):
args = {
'keystore_cache_on_start_enable': 'true',
'keys_dir': self.cached_dir.name
}
translator_kwargs.update(args)
return super().fork_translator(translator_kwargs, popen_kwargs)
def testApiEncryptionEnabledCacheWithoutKeysDir(self):
translator_port = 3456
client_id = extract_client_id_from_cert(tls_cert=TEST_TLS_CLIENT_CERT, extractor=self.get_identifier_extractor_type())
self.assertEqual(create_client_keypair_from_certificate(tls_cert=TEST_TLS_CLIENT_CERT,
extractor=self.get_identifier_extractor_type(), keys_dir=self.cached_dir.name), 0)
data = get_pregenerated_random_data().encode('ascii')
client_id_private_key = read_storage_private_key(self.cached_dir.name, client_id)
connection_string = 'tcp://127.0.0.1:{}'.format(translator_port)
translator_kwargs = {
'incoming_connection_http_string': connection_string,
'tls_key': abs_path(TEST_TLS_SERVER_KEY),
'tls_cert': abs_path(TEST_TLS_SERVER_CERT),
'tls_ca': TEST_TLS_CA,
'keys_dir': self.cached_dir.name,
'tls_identifier_extractor_type': self.get_identifier_extractor_type(),
'acratranslator_client_id_from_connection_enable': 'true',
'tls_ocsp_from_cert': 'ignore',
'tls_crl_from_cert': 'ignore',
}
incorrect_client_id = TLS_CERT_CLIENT_ID_2
with ProcessContextManager(self.fork_translator(translator_kwargs)):
self.cached_dir.cleanup()
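            # The keys directory is removed while the translator is running, but all keys
            # were cached at startup (keystore_cache_on_start_enable=true), so encryption
            # and local decryption below are still expected to succeed.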
response = self.http_encrypt_request(translator_port, incorrect_client_id, None, data)
decrypted = deserialize_and_decrypt_acrastruct(response, client_id_private_key, client_id)
self.assertEqual(data, decrypted)
class TestAcraTranslatorWithVaultMasterKeyLoaderByDistinguishedName(HashiCorpVaultMasterKeyLoaderMixin,
TLSAuthenticationByDistinguishedNameMixin, AcraTranslatorTest):
pass
class TestAcraTranslatorWithVaultMasterKeyLoaderBySerialNumber(HashiCorpVaultMasterKeyLoaderMixin,
TLSAuthenticationBySerialNumberMixin, AcraTranslatorTest):
pass
class TestAcraTranslatorClientIDFromTLSByDistinguishedName(TLSAuthenticationByDistinguishedNameMixin, AcraTranslatorTest):
pass
class TestAcraTranslatorClientIDFromTLSByDistinguishedNameVaultMasterKeyLoader(HashiCorpVaultMasterKeyLoaderMixin, TestAcraTranslatorClientIDFromTLSByDistinguishedName):
pass
class TestKeyRotationWithVaultMasterKeyLoader(HashiCorpVaultMasterKeyLoaderMixin, TestKeyRotation):
pass
class TestAcraTranslatorClientIDFromTLSBySerialNumber(TLSAuthenticationBySerialNumberMixin, TestAcraTranslatorClientIDFromTLSByDistinguishedName):
pass
class TestAcraTranslatorClientIDFromTLSBySerialNumberVaultMasterKeyLoader(HashiCorpVaultMasterKeyLoaderMixin, TLSAuthenticationBySerialNumberMixin, TestAcraTranslatorClientIDFromTLSByDistinguishedName):
pass
class TestAcraRotateWithZone(BaseTestCase):
ZONE = True
def fork_acra(self, popen_kwargs: dict=None, **acra_kwargs: dict):
acra_kwargs['keystore_cache_size'] = -1 # no cache
return super(TestAcraRotateWithZone, self).fork_acra(
popen_kwargs, **acra_kwargs)
def read_public_key(self, key_id, keys_folder):
return read_zone_public_key(key_id, keys_folder)
def isSamePublicKeys(self, keys_folder, keys_data):
"""check is equal zone public key on filesystem and from zone_data"""
for key_id, public_key in keys_data.items():
current_public = self.read_public_key(key_id, keys_folder)
if b64decode(public_key) != current_public:
return False
return True
def testFileRotation(self):
"""
generate some zones, create AcraStructs with them and save to files
call acra-rotate and check that public keys of zones different,
AcraStructs different and decrypted AcraStructs (raw data) the same"""
TestData = collections.namedtuple("TestData", ["acrastruct", "data"])
zone_map = collections.defaultdict(list)
        # how many acrastructs to generate per zone
zone_file_count = 3
# count of different zones
zone_id_count = 3
filename_template = '{dir}/{id}_{num}.acrastruct'
zones_before_rotate = {}
# generated acrastructs to compare with rotated
acrastructs = {}
with tempfile.TemporaryDirectory() as keys_folder, \
tempfile.TemporaryDirectory() as data_folder:
            # generate zones in a separate folder and
            # create acrastructs with these zones
for i in range(zone_id_count):
zone_data = json.loads(
subprocess.check_output(
[os.path.join(BINARY_OUTPUT_FOLDER, 'acra-addzone'),
'--keys_output_dir={}'.format(keys_folder)],
cwd=os.getcwd(),
timeout=PROCESS_CALL_TIMEOUT).decode('utf-8'))
public_key = b64decode(zone_data[ZONE_PUBLIC_KEY])
zone_id = zone_data[ZONE_ID]
zones_before_rotate[zone_id] = zone_data[ZONE_PUBLIC_KEY]
for i in range(zone_file_count):
data = get_pregenerated_random_data().encode('ascii')
acrastruct = create_acrastruct(
data, public_key, zone_id.encode("ascii"))
filename = filename_template.format(
dir=data_folder, id=zone_id, num=i)
acrastructs[filename] = TestData(
acrastruct=acrastruct, data=data)
with open(filename, 'wb') as f:
f.write(acrastruct)
zone_map[zone_id].append(filename)
# keys of json objects that will be in output
PUBLIC_KEY = 'new_public_key'
FILES = 'file_paths'
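            # acra-rotate consumes a JSON map {zone_id: [acrastruct file paths]} via
            # --file_map_config and prints a JSON report per zone containing the new
            # public key and the processed file paths (keys defined above).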
# True must be first because code below depends on it
for dryRun in (True, False):
with contextlib.closing(tempfile.NamedTemporaryFile(
'w', delete=False)) as zone_map_file:
json.dump(zone_map, zone_map_file)
zone_map_file.close()
result = subprocess.check_output(
[os.path.join(BINARY_OUTPUT_FOLDER, 'acra-rotate'), '--keys_dir={}'.format(keys_folder),
'--file_map_config={}'.format(zone_map_file.name),
'--dry-run={}'.format(1 if dryRun else 0)])
if not isinstance(result, str):
result = result.decode('utf-8')
result = json.loads(result)
if dryRun:
                    # keys on the filesystem should not have changed
self.assertTrue(
self.isSamePublicKeys(
keys_folder, zones_before_rotate))
else:
                    # keys on the filesystem must have changed
self.assertFalse(
self.isSamePublicKeys(
keys_folder, zones_before_rotate))
for zone_id in result:
self.assertIn(zone_id, zones_before_rotate)
# new public key in output must be different from
# previous
self.assertNotEqual(
result[zone_id][PUBLIC_KEY],
zones_before_rotate[zone_id])
                    # check that all files were processed and are present in the result
self.assertEqual(
zone_map[zone_id], # already sorted by loop index
sorted(result[zone_id][FILES]))
# compare rotated acrastructs
for path in result[zone_id][FILES]:
with open(path, 'rb') as acrastruct_file:
rotated_acrastruct = acrastruct_file.read()
zone_private = read_zone_private_key(keys_folder, zone_id)
if dryRun:
decrypted_rotated = decrypt_acrastruct(
rotated_acrastruct, zone_private,
zone_id=zone_id.encode('ascii'))
self.assertEqual(
rotated_acrastruct,
acrastructs[path].acrastruct)
else:
decrypted_rotated = deserialize_and_decrypt_acrastruct(
rotated_acrastruct, zone_private,
zone_id=zone_id.encode('ascii'))
self.assertNotEqual(
rotated_acrastruct,
acrastructs[path].acrastruct)
# data should be unchanged
self.assertEqual(
decrypted_rotated, acrastructs[path].data)
def testDatabaseRotation(self):
# TODO(ilammy, 2020-03-13): test with rotated zone keys
# That is, as soon as it is possible to rotate them (T1581)
def load_zones_from_folder(keys_folder, zone_ids):
"""load zone public keys from filesystem"""
output = {}
for id in zone_ids:
output[id] = b64encode(self.read_public_key(id, keys_folder))
return output
rotate_test_table = sa.Table(
'rotate_zone_test',
metadata,
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('zone_id', sa.LargeBinary(length=COLUMN_DATA_SIZE)),
sa.Column('data', sa.LargeBinary(length=COLUMN_DATA_SIZE)),
sa.Column('raw_data', sa.Text),
)
metadata.create_all(self.engine_raw)
self.engine_raw.execute(sa.delete(rotate_test_table))
zones = []
zone_count = 5
data_per_zone_count = 2
for i in range(zone_count):
zones.append(
json.loads(subprocess.check_output(
[os.path.join(BINARY_OUTPUT_FOLDER, 'acra-addzone'),
'--keys_output_dir={}'.format(KEYS_FOLDER.name)],
cwd=os.getcwd(),
timeout=PROCESS_CALL_TIMEOUT).decode('utf-8')))
zone_ids = [data[ZONE_ID] for data in zones]
data_before_rotate = {}
for zone in zones:
for _ in range(data_per_zone_count):
data = get_pregenerated_random_data()
zone_public = b64decode(zone[ZONE_PUBLIC_KEY].encode('ascii'))
acra_struct = create_acrastruct(
data.encode('ascii'), zone_public,
context=zone[ZONE_ID].encode('ascii'))
row_id = get_random_id()
data_before_rotate[row_id] = acra_struct
self.engine_raw.execute(
rotate_test_table.insert(),
{'id': row_id, 'data': acra_struct, 'raw_data': data,
'zone_id': zone[ZONE_ID].encode('ascii')})
if TEST_MYSQL:
# test:test@tcp(127.0.0.1:3306)/test
connection_string = "{user}:{password}@tcp({host}:{port})/{db_name}".format(
user=DB_USER, password=DB_USER_PASSWORD, host=DB_HOST,
port=DB_PORT, db_name=DB_NAME)
mode_arg = '--mysql_enable'
elif TEST_POSTGRESQL:
if TEST_WITH_TLS:
sslmode = "require"
else:
sslmode = "disable"
connection_string = "postgres://{user}:{password}@{db_host}:{db_port}/{db_name}?sslmode={sslmode}".format(
sslmode=sslmode, user=DB_USER, password=DB_USER_PASSWORD,
db_host=DB_HOST, db_port=DB_PORT, db_name=DB_NAME)
mode_arg = '--postgresql_enable'
else:
self.fail("unsupported settings of tested db")
for dry_run in (True, False):
if TEST_MYSQL:
sql_update = "update {} set data=? where id=?;".format(rotate_test_table.name)
sql_select = 'select id, zone_id, data from {} order by id;'.format(rotate_test_table.name)
elif TEST_POSTGRESQL:
sql_update = "update {} set data=$1 where id=$2;".format(rotate_test_table.name)
sql_select = 'select id, zone_id::bytea, data from {} order by id;'.format(rotate_test_table.name)
else:
self.fail("unsupported settings of tested db")
default_args = [
os.path.join(BINARY_OUTPUT_FOLDER, 'acra-rotate'),
'--keys_dir={}'.format(KEYS_FOLDER.name),
'--db_connection_string={}'.format(connection_string),
'--dry-run={}'.format(1 if dry_run else 0),
mode_arg
]
zone_map = load_zones_from_folder(KEYS_FOLDER.name, zone_ids)
# use extra arg in select and update
subprocess.check_output(
default_args + [
'--sql_select={}'.format(sql_select),
'--sql_update={}'.format(sql_update)
]
)
if dry_run:
self.assertTrue(
self.isSamePublicKeys(KEYS_FOLDER.name, zone_map))
else:
self.assertFalse(
self.isSamePublicKeys(KEYS_FOLDER.name, zone_map))
result = self.engine1.execute(sa.select([rotate_test_table]))
self.check_decrypted_data(result)
result = self.engine_raw.execute(sa.select([rotate_test_table]))
self.check_rotation(result, data_before_rotate, dry_run)
some_id = list(data_before_rotate.keys())[0]
            # choose an arbitrary id to operate on a specific row
if TEST_MYSQL:
sql_update = "update {} set data=? where id={{}};".format(rotate_test_table.name)
sql_select = 'select zone_id, data from {} where id={};'.format(rotate_test_table.name, some_id)
elif TEST_POSTGRESQL:
sql_update = "update {} set data=$1 where id={{}};".format(rotate_test_table.name)
sql_select = 'select zone_id::bytea, data from {} where id={};'.format(rotate_test_table.name, some_id)
else:
self.fail("unsupported settings of tested db")
sql_update = sql_update.format(some_id)
zone_map = load_zones_from_folder(KEYS_FOLDER.name, zone_ids)
# rotate with select without extra arg
subprocess.check_output(
default_args + [
'--sql_select={}'.format(sql_select),
'--sql_update={}'.format(sql_update)
]
)
if dry_run:
self.assertTrue(
self.isSamePublicKeys(KEYS_FOLDER.name, zone_map))
else:
self.assertFalse(
self.isSamePublicKeys(KEYS_FOLDER.name, zone_map))
result = self.engine1.execute(
sa.select([rotate_test_table],
whereclause=rotate_test_table.c.id==some_id))
self.check_decrypted_data(result)
# check that after rotation we can read actual data
result = self.engine_raw.execute(
sa.select([rotate_test_table],
whereclause=rotate_test_table.c.id==some_id))
self.check_rotation(result, data_before_rotate, dry_run)
def check_decrypted_data(self, result):
data = result.fetchall()
self.assertTrue(data)
for row in data:
# check that data was not changed
self.assertEqual(row['data'], row['raw_data'].encode('utf-8'))
def check_rotation(self, result, data_before_rotate, dry_run):
data = result.fetchall()
self.assertTrue(data)
for row in data:
# check that after rotation encrypted data != raw data
self.assertNotEqual(row['data'], row['raw_data'].encode('utf-8'))
if dry_run:
# check that data was not changed
self.assertEqual(row['data'], data_before_rotate[row['id']])
else:
# check that data was changed
self.assertNotEqual(row['data'], data_before_rotate[row['id']])
# update with new data to check on next stage
data_before_rotate[row['id']] = row['data']
@ddt
class TestAcraRotate(TestAcraRotateWithZone):
ZONE = False
def read_public_key(self, key_id, keys_folder):
return read_storage_public_key(key_id, keys_folder)
def testFileRotation(self):
"""
create AcraStructs with them and save to files
call acra-rotate and check that public keys for client_ids different,
AcraStructs different and decrypted AcraStructs (raw data) the same"""
TestData = collections.namedtuple("TestData", ["acrastruct", "data"])
filename_template = '{dir}/{id}_{num}.acrastruct'
key_before_rotate = {}
client_id = 'keypair1'
keys_map = collections.defaultdict(list)
keys_file_count = 3
# generated acrastructs to compare with rotated
acrastructs = {}
with tempfile.TemporaryDirectory() as keys_folder, \
tempfile.TemporaryDirectory() as data_folder:
# generate keys in separate folder
subprocess.check_output(
[os.path.join(BINARY_OUTPUT_FOLDER, 'acra-keymaker'),
'--client_id={}'.format(client_id),
'--keys_output_dir={}'.format(keys_folder),
'--keys_public_output_dir={}'.format(keys_folder),
'--keystore={}'.format(KEYSTORE_VERSION)],
cwd=os.getcwd(),
timeout=PROCESS_CALL_TIMEOUT).decode('utf-8')
# create acrastructs with this client_id
key_before_rotate = {client_id: b64encode(self.read_public_key(client_id, keys_folder))}
for i in range(keys_file_count):
data = get_pregenerated_random_data().encode('ascii')
acrastruct = create_acrastruct(data, b64decode(key_before_rotate[client_id]))
filename = filename_template.format(
dir=data_folder, id=client_id, num=i)
acrastructs[filename] = TestData(acrastruct=acrastruct, data=data)
with open(filename, 'wb') as f:
f.write(acrastruct)
keys_map[client_id].append(filename)
# keys of json objects that will be in output
PUBLIC_KEY = 'new_public_key'
FILES = 'file_paths'
# True must be first because code below depends on it
for dryRun in (True, False):
with contextlib.closing(tempfile.NamedTemporaryFile(
'w', delete=False)) as keys_map_file:
json.dump(keys_map, keys_map_file)
keys_map_file.close()
result = subprocess.check_output(
[os.path.join(BINARY_OUTPUT_FOLDER, 'acra-rotate'), '--keys_dir={}'.format(keys_folder),
'--file_map_config={}'.format(keys_map_file.name),
'--dry-run={}'.format(1 if dryRun else 0),
'--zonemode_enable=false'])
if not isinstance(result, str):
result = result.decode('utf-8')
result = json.loads(result)
if dryRun:
                    # keys on the filesystem should not have changed
self.assertTrue(
self.isSamePublicKeys(
keys_folder, key_before_rotate))
else:
                    # keys on the filesystem must have changed
self.assertFalse(
self.isSamePublicKeys(
keys_folder, key_before_rotate))
for key_id in result:
self.assertIn(key_id, key_before_rotate)
# new public key in output must be different from
# previous
self.assertNotEqual(
result[key_id][PUBLIC_KEY],
key_before_rotate[key_id])
                    # check that all files were processed and are present in the result
self.assertEqual(
keys_map[key_id], # already sorted by loop index
sorted(result[key_id][FILES]))
# compare rotated acrastructs
for path in result[key_id][FILES]:
with open(path, 'rb') as acrastruct_file:
rotated_acrastruct = acrastruct_file.read()
client_id_private = read_storage_private_key(keys_folder, key_id)
if dryRun:
decrypted_rotated = decrypt_acrastruct(
rotated_acrastruct, client_id_private)
self.assertEqual(
rotated_acrastruct,
acrastructs[path].acrastruct)
else:
decrypted_rotated = deserialize_and_decrypt_acrastruct(
rotated_acrastruct, client_id_private)
self.assertNotEqual(
rotated_acrastruct,
acrastructs[path].acrastruct)
# data should be unchanged
self.assertEqual(
decrypted_rotated, acrastructs[path].data)
# Skip inherited non-decorated test
def testDatabaseRotation(self):
pass
@data(False, True)
def testDatabaseRotation2(self, rotate_storage_keys):
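        # Parametrized by @data: with rotate_storage_keys=True the client storage keypair
        # is additionally rotated via create_client_keypair before running acra-rotate;
        # with False only acra-rotate itself rotates the keys.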
def load_keys_from_folder(keys_folder, ids):
"""load public keys from filesystem"""
output = {}
for id in ids:
output[id] = b64encode(self.read_public_key(id, keys_folder))
return output
rotate_test_table = sa.Table(
'rotate_client_id_test',
metadata,
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('key_id', sa.LargeBinary(length=COLUMN_DATA_SIZE)),
sa.Column('data', sa.LargeBinary(length=COLUMN_DATA_SIZE)),
sa.Column('raw_data', sa.Text),
keep_existing=True,
)
metadata.create_all(self.engine_raw)
self.engine_raw.execute(sa.delete(rotate_test_table))
data_before_rotate = {}
data = get_pregenerated_random_data()
client_id = TLS_CERT_CLIENT_ID_1
acra_struct = create_acrastruct_with_client_id(data.encode('ascii'), client_id)
row_id = get_random_id()
data_before_rotate[row_id] = acra_struct
self.engine_raw.execute(
rotate_test_table.insert(),
{'id': row_id, 'data': acra_struct, 'raw_data': data,
'key_id': client_id.encode('ascii')})
if rotate_storage_keys:
create_client_keypair(client_id, only_storage=True)
if TEST_MYSQL:
# test:test@tcp(127.0.0.1:3306)/test
connection_string = "{user}:{password}@tcp({host}:{port})/{db_name}".format(
user=DB_USER, password=DB_USER_PASSWORD, host=DB_HOST,
port=DB_PORT, db_name=DB_NAME)
mode_arg = '--mysql_enable'
elif TEST_POSTGRESQL:
if TEST_WITH_TLS:
sslmode = "require"
else:
sslmode = "disable"
connection_string = "postgres://{user}:{password}@{db_host}:{db_port}/{db_name}?sslmode={sslmode}".format(
sslmode=sslmode, user=DB_USER, password=DB_USER_PASSWORD,
db_host=DB_HOST, db_port=DB_PORT, db_name=DB_NAME)
mode_arg = '--postgresql_enable'
else:
self.fail("unsupported settings of tested db")
for dry_run in (True, False):
if TEST_MYSQL:
sql_update = "update {} set data=? where id=?;".format(rotate_test_table.name)
sql_select = "select id, '{}', data from {} order by id;".format(client_id, rotate_test_table.name)
elif TEST_POSTGRESQL:
sql_update = "update {} set data=$1 where id=$2;".format(rotate_test_table.name)
sql_select = "select id, '{}'::bytea, data from {} order by id;".format(client_id, rotate_test_table.name)
else:
self.fail("unsupported settings of tested db")
default_args = [
os.path.join(BINARY_OUTPUT_FOLDER, 'acra-rotate'),
'--keys_dir={}'.format(KEYS_FOLDER.name),
'--db_connection_string={}'.format(connection_string),
'--dry-run={}'.format(1 if dry_run else 0),
'--zonemode_enable=false',
mode_arg
]
keys_map = load_keys_from_folder(KEYS_FOLDER.name, [client_id])
try:
# use extra arg in select and update
subprocess.check_output(
default_args + [
"--sql_select={}".format(sql_select),
'--sql_update={}'.format(sql_update),
]
)
except subprocess.CalledProcessError as exc:
print(exc.output)
raise
if dry_run:
self.assertTrue(
self.isSamePublicKeys(KEYS_FOLDER.name, keys_map))
else:
self.assertFalse(
self.isSamePublicKeys(KEYS_FOLDER.name, keys_map))
result = self.engine1.execute(sa.select([rotate_test_table]))
self.check_decrypted_data(result)
result = self.engine_raw.execute(sa.select([rotate_test_table]))
self.check_rotation(result, data_before_rotate, dry_run)
some_id = list(data_before_rotate.keys())[0]
            # choose an arbitrary id to operate on a specific row
if TEST_MYSQL:
sql_update = "update {} set data=? where id={{}};".format(rotate_test_table.name)
sql_select = "select '{}', data from {} where id={};".format(client_id, rotate_test_table.name, some_id)
elif TEST_POSTGRESQL:
sql_update = "update {} set data=$1 where id={{}};".format(rotate_test_table.name)
sql_select = "select '{}'::bytea, data from {} where id={};".format(client_id, rotate_test_table.name, some_id)
else:
self.fail("unsupported settings of tested db")
sql_update = sql_update.format(some_id)
keys_map = load_keys_from_folder(KEYS_FOLDER.name, [client_id])
# rotate with select without extra arg
subprocess.check_output(
default_args + [
"--sql_select={}".format(sql_select),
'--sql_update={}'.format(sql_update)
]
)
if dry_run:
self.assertTrue(
self.isSamePublicKeys(KEYS_FOLDER.name, keys_map))
else:
self.assertFalse(
self.isSamePublicKeys(KEYS_FOLDER.name, keys_map))
result = self.engine1.execute(
sa.select([rotate_test_table],
whereclause=rotate_test_table.c.id==some_id))
self.check_decrypted_data(result)
# check that after rotation we can read actual data
result = self.engine_raw.execute(
sa.select([rotate_test_table],
whereclause=rotate_test_table.c.id==some_id))
self.check_rotation(result, data_before_rotate, dry_run)
class TestPrometheusMetrics(AcraTranslatorMixin, BaseTestCase):
LOG_METRICS = True
    # a small value greater than zero, used to compare against processing-time metric values
MIN_EXECUTION_TIME = 0.0000001
def checkMetrics(self, url, labels=None):
"""
        check that the output of the prometheus exporter contains all expected labels and that metric values are not less than the expected minimums
"""
exporter_metrics = [
'go_memstats',
'go_threads',
'go_info',
'go_goroutines',
'go_gc_duration_seconds',
'process_',
'promhttp_',
]
        # skip metrics that come from the default Go/process/promhttp collectors
def skip(need_skip):
for label in exporter_metrics:
if need_skip.startswith(label):
return True
return False
labels = labels if labels else {}
response = requests.get(url)
self.assertEqual(response.status_code, http.HTTPStatus.OK)
# check that all labels were exported
for label in labels.keys():
self.assertIn(label, response.text)
        # check that metrics have at least the expected minimal values
for family in text_string_to_metric_families(response.text):
if skip(family.name):
continue
for sample in family.samples:
try:
self.assertGreaterEqual(sample.value, labels[sample.name]['min_value'],
'{} - {}'.format(sample.name, sample.value))
except KeyError:
                    # the python prometheus client appends _total to sample names that have
                    # type <counter> and do not already have the _total suffix
if not sample.name.endswith('_total'):
raise
name = sample.name[:-len('_total')]
self.assertGreaterEqual(sample.value, labels[name]['min_value'],
'{} - {}'.format(name, sample.value))
def testAcraServer(self):
# run some queries to set some values for counters
HexFormatTest.testClientIDRead(self)
labels = {
# TEST_TLS_CLIENT_CERT + TEST_TLS_CLIENT_2_CERT
'acraserver_connections_total': {'min_value': 2},
'acraserver_connections_processing_seconds_bucket': {'min_value': 0},
'acraserver_connections_processing_seconds_sum': {'min_value': TestPrometheusMetrics.MIN_EXECUTION_TIME},
'acraserver_connections_processing_seconds_count': {'min_value': 1},
'acraserver_response_processing_seconds_sum': {'min_value': TestPrometheusMetrics.MIN_EXECUTION_TIME},
'acraserver_response_processing_seconds_bucket': {'min_value': 0},
'acraserver_response_processing_seconds_count': {'min_value': 1},
'acraserver_request_processing_seconds_sum': {'min_value': TestPrometheusMetrics.MIN_EXECUTION_TIME},
'acraserver_request_processing_seconds_count': {'min_value': 1},
'acraserver_request_processing_seconds_bucket': {'min_value': 0},
'acra_acrastruct_decryptions_total': {'min_value': 1},
'acraserver_version_major': {'min_value': 0},
'acraserver_version_minor': {'min_value': 0},
'acraserver_version_patch': {'min_value': 0},
'acraserver_build_info': {'min_value': 1},
}
self.checkMetrics('http://localhost:{}/metrics'.format(
self.ACRASERVER_PROMETHEUS_PORT), labels)
def testAcraTranslator(self):
labels = {
'acratranslator_connections_total': {'min_value': 1},
            # sometimes request processing is so fast that the value is not rounded up to 1 and the tests become flaky,
            # so only check that the output contains these metrics
'acratranslator_connections_processing_seconds_bucket': {'min_value': 0},
'acratranslator_connections_processing_seconds_sum': {'min_value': 0},
'acratranslator_connections_processing_seconds_count': {'min_value': 0},
'acratranslator_request_processing_seconds_bucket': {'min_value': 0},
'acratranslator_request_processing_seconds_sum': {'min_value': TestPrometheusMetrics.MIN_EXECUTION_TIME},
'acratranslator_request_processing_seconds_count': {'min_value': 1},
'acratranslator_version_major': {'min_value': 0},
'acratranslator_version_minor': {'min_value': 0},
'acratranslator_version_patch': {'min_value': 0},
'acra_acrastruct_decryptions_total': {'min_value': 1},
'acratranslator_build_info': {'min_value': 1},
}
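        # start acra-translator with the HTTP API and prometheus exporter enabled,
        # decrypt one acrastruct over HTTPS and then verify the exported metrics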
translator_port = 3456
metrics_port = translator_port+1
data = get_pregenerated_random_data().encode('ascii')
client_id = TLS_CERT_CLIENT_ID_1
encryption_key = read_storage_public_key(
client_id, keys_dir=KEYS_FOLDER.name)
acrastruct = create_acrastruct(data, encryption_key)
prometheus_metrics_address = 'tcp://localhost:{}'.format(metrics_port)
connection_string = 'tcp://127.0.0.1:{}'.format(translator_port)
base_translator_kwargs = {
'incoming_connection_http_string': connection_string,
'incoming_connection_prometheus_metrics_string': prometheus_metrics_address,
'tls_key': abs_path(TEST_TLS_SERVER_KEY),
'tls_cert': abs_path(TEST_TLS_SERVER_CERT),
'tls_ca': TEST_TLS_CA,
'tls_identifier_extractor_type': TLS_CLIENT_ID_SOURCE_DN,
'acratranslator_client_id_from_connection_enable': 'true',
'tls_ocsp_from_cert': 'ignore',
'tls_crl_from_cert': 'ignore',
}
metrics_url = 'http://localhost:{}/metrics'.format(metrics_port)
api_url = 'https://localhost:{}/v1/decrypt'.format(translator_port)
with ProcessContextManager(self.fork_translator(base_translator_kwargs)):
# test with correct acrastruct
cert = (TEST_TLS_CLIENT_CERT, TEST_TLS_CLIENT_KEY)
response = requests.post(api_url, data=acrastruct, cert=cert, verify=TEST_TLS_CA,
timeout=REQUEST_TIMEOUT)
self.assertEqual(response.status_code, http.HTTPStatus.OK)
self.checkMetrics(metrics_url, labels)
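        # repeat the same check with the gRPC API instead of the HTTP API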
grpc_translator_kwargs = {
'incoming_connection_grpc_string': connection_string,
'incoming_connection_http_string': '',
'incoming_connection_prometheus_metrics_string': prometheus_metrics_address,
}
base_translator_kwargs.update(grpc_translator_kwargs)
with ProcessContextManager(self.fork_translator(base_translator_kwargs)):
AcraTranslatorTest.grpc_decrypt_request(
self, translator_port, client_id, None, acrastruct)
self.checkMetrics(metrics_url, labels)
class TestTransparentEncryption(BaseTestCase):
WHOLECELL_MODE = True
encryptor_table = sa.Table('test_transparent_encryption', metadata,
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('specified_client_id',
sa.LargeBinary(length=COLUMN_DATA_SIZE)),
sa.Column('default_client_id',
sa.LargeBinary(length=COLUMN_DATA_SIZE)),
sa.Column('number', sa.Integer),
sa.Column('zone_id', sa.LargeBinary(length=COLUMN_DATA_SIZE)),
sa.Column('raw_data', sa.LargeBinary(length=COLUMN_DATA_SIZE)),
sa.Column('nullable', sa.Text, nullable=True),
sa.Column('empty', sa.LargeBinary(length=COLUMN_DATA_SIZE), nullable=False, default=b''),
)
ENCRYPTOR_CONFIG = get_encryptor_config('tests/encryptor_config.yaml')
def setUp(self):
self.prepare_encryptor_config(client_id=TLS_CERT_CLIENT_ID_1)
super(TestTransparentEncryption, self).setUp()
def prepare_encryptor_config(self, client_id=None):
prepare_encryptor_config(zone_id=zones[0][ZONE_ID], config_path=self.ENCRYPTOR_CONFIG, client_id=client_id)
def tearDown(self):
self.engine_raw.execute(self.encryptor_table.delete())
super(TestTransparentEncryption, self).tearDown()
try:
os.remove(get_test_encryptor_config(self.ENCRYPTOR_CONFIG))
except FileNotFoundError:
pass
def fork_acra(self, popen_kwargs: dict=None, **acra_kwargs: dict):
acra_kwargs['encryptor_config_file'] = get_test_encryptor_config(
self.ENCRYPTOR_CONFIG)
return super(TestTransparentEncryption, self).fork_acra(
popen_kwargs, **acra_kwargs)
def get_context_data(self):
context = {
'id': get_random_id(),
'default_client_id': get_pregenerated_random_data().encode('ascii'),
'number': get_random_id(),
'zone_id': get_pregenerated_random_data().encode('ascii'),
'specified_client_id': get_pregenerated_random_data().encode('ascii'),
'raw_data': get_pregenerated_random_data().encode('ascii'),
'zone': zones[0],
'empty': b'',
}
return context
def checkDefaultIdEncryption(self, id, default_client_id,
specified_client_id, number, zone_id, zone, raw_data,
*args, **kwargs):
result = self.engine2.execute(
sa.select([self.encryptor_table])
.where(self.encryptor_table.c.id == id))
row = result.fetchone()
self.assertIsNotNone(row)
# should be decrypted
self.assertEqual(row['default_client_id'], default_client_id)
# should be as is
self.assertEqual(row['number'], number)
self.assertEqual(row['raw_data'], raw_data)
# other data should be encrypted
self.assertNotEqual(row['specified_client_id'], specified_client_id)
self.assertNotEqual(row['zone_id'], zone_id)
self.assertEqual(row['empty'], b'')
def checkSpecifiedIdEncryption(
self, id, default_client_id, specified_client_id, zone_id,
zone, raw_data, *args, **kwargs):
        # fetch using another client_id that will be authenticated as TEST_TLS_CLIENT_2_CERT
result = self.engine1.execute(
sa.select([self.encryptor_table])
.where(self.encryptor_table.c.id == id))
row = result.fetchone()
self.assertIsNotNone(row)
# should be decrypted
self.assertEqual(row['specified_client_id'], specified_client_id)
# should be as is
self.assertEqual(row['raw_data'], raw_data)
# other data should be encrypted
self.assertNotEqual(row['default_client_id'], default_client_id)
self.assertNotEqual(row['zone_id'], zone_id)
self.assertEqual(row['empty'], b'')
def insertRow(self, data):
        # send through acra-server, which authenticates this connection as client_id=keypair2
self.engine2.execute(self.encryptor_table.insert(), data)
def check_all_decryptions(self, **context):
self.checkDefaultIdEncryption(**context)
self.checkSpecifiedIdEncryption(**context)
def testEncryptedInsert(self):
context = self.get_context_data()
self.insertRow(context)
self.check_all_decryptions(**context)
encrypted_data = self.fetch_raw_data(context)
        # update with the already encrypted acrastructs; AcraServer should not
        # re-encrypt them
data_fields = ['default_client_id', 'specified_client_id', 'zone_id',
'raw_data', 'empty']
data = {k: encrypted_data[k] for k in data_fields}
data['id'] = context['id']
self.update_data(data)
data = self.fetch_raw_data(context)
for field in data_fields:
            # check that the acrastructs are the same
self.assertEqual(data[field], encrypted_data[field])
# generate new data
new_context = self.get_context_data()
# use same id and integer
new_context['id'] = context['id']
new_context['number'] = context['number']
        # update with unencrypted raw data
self.update_data(new_context)
        # check that the data was re-encrypted
new_data = self.fetch_raw_data(new_context)
for field in ['default_client_id', 'specified_client_id', 'zone_id']:
# not equal with previously encrypted
self.assertNotEqual(new_data[field], encrypted_data[field])
# not equal with raw data
self.assertNotEqual(new_data[field], new_context[field])
        # check that we can still decrypt after re-encryption
self.check_all_decryptions(**new_context)
def update_data(self, context):
self.engine2.execute(
sa.update(self.encryptor_table)
.where(self.encryptor_table.c.id == context['id'])
.values(default_client_id=context['default_client_id'],
specified_client_id=context['specified_client_id'],
zone_id=context['zone_id'],
raw_data=context['raw_data'])
)
def fetch_raw_data(self, context):
result = self.engine_raw.execute(
sa.select([self.encryptor_table.c.default_client_id,
self.encryptor_table.c.specified_client_id,
sa.cast(context['zone'][ZONE_ID].encode('ascii'), BYTEA),
self.encryptor_table.c.number,
self.encryptor_table.c.zone_id,
self.encryptor_table.c.raw_data,
self.encryptor_table.c.nullable,
self.encryptor_table.c.empty])
.where(self.encryptor_table.c.id == context['id']))
data = result.fetchone()
return data
class TransparentEncryptionNoKeyMixin(AcraCatchLogsMixin):
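    # mixin that runs the transparent encryption tests with a destroyed storage key and
    # verifies that acra-server logs the expected missing-key error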
def setUp(self):
self.checkSkip()
try:
self.init_key_stores()
super().setUp()
except:
self.tearDown()
raise
def prepare_encryptor_config(self, client_id=None):
return super().prepare_encryptor_config(client_id=self.client_id)
def tearDown(self):
if hasattr(self, 'acra'):
stop_process(self.acra)
send_signal_by_process_name('acra-server', signal.SIGKILL)
self.server_keystore.cleanup()
super().tearDown()
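    # create a dedicated keystore with its own client keypair and zone so that the key
    # can be destroyed without affecting other tests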
def init_key_stores(self):
self.client_id = 'test_client_ID'
self.server_keystore = tempfile.TemporaryDirectory()
self.server_keys_dir = os.path.join(self.server_keystore.name, '.acrakeys')
create_client_keypair(name=self.client_id, keys_dir=self.server_keys_dir, only_storage=True)
zones.append(json.loads(subprocess.check_output(
[os.path.join(BINARY_OUTPUT_FOLDER, 'acra-addzone'), '--keys_output_dir={}'.format(self.server_keys_dir)],
cwd=os.getcwd(), timeout=PROCESS_CALL_TIMEOUT).decode('utf-8')))
def fork_acra(self, popen_kwargs: dict=None, **acra_kwargs: dict):
args = {'keys_dir': self.server_keys_dir, 'client_id': self.client_id}
acra_kwargs.update(args)
return super().fork_acra(popen_kwargs, **acra_kwargs)
def testEncryptedInsert(self):
destroy_server_storage_key(client_id=self.client_id, keys_dir=self.server_keys_dir, keystore_version=KEYSTORE_VERSION)
try:
super().testEncryptedInsert()
except:
log = self.read_log(self.acra)
if KEYSTORE_VERSION == 'v1':
no_key_error_msg = 'open {}/.acrakeys/{}_storage_sym: no such file or directory'.format(self.server_keystore.name, self.client_id)
else:
no_key_error_msg = 'key path does not exist'
self.assertIn(no_key_error_msg, log)
class TestTransparentEncryptionWithNoEncryptionKey(TransparentEncryptionNoKeyMixin, TestTransparentEncryption):
pass
class TestTransparentEncryptionWithZone(TestTransparentEncryption):
ZONE = True
def testSearch(self):
self.skipTest("searching with encryption with zones not supported yet")
def testSearchWithEncryptedData(self):
self.skipTest("searching with encryption with zones not supported yet")
def checkZoneIdEncryption(self, zone, id, default_client_id,
specified_client_id, number, zone_id, raw_data,
*args, **kwargs):
result = self.engine1.execute(
sa.select([self.encryptor_table.c.default_client_id,
self.encryptor_table.c.specified_client_id,
sa.cast(zone[ZONE_ID].encode('ascii'), BYTEA),
self.encryptor_table.c.number,
self.encryptor_table.c.zone_id,
self.encryptor_table.c.raw_data,
self.encryptor_table.c.nullable,
self.encryptor_table.c.empty])
.where(self.encryptor_table.c.id == id))
row = result.fetchone()
self.assertIsNotNone(row)
# should be decrypted
self.assertEqual(row['zone_id'], zone_id)
# should be as is
self.assertEqual(row['number'], number)
self.assertEqual(row['raw_data'], raw_data)
# other data should be encrypted
self.assertNotEqual(row['default_client_id'], default_client_id)
self.assertNotEqual(row['specified_client_id'], specified_client_id)
self.assertEqual(row['empty'], b'')
def check_all_decryptions(self, **context):
self.checkZoneIdEncryption(**context)
class TestTransparentEncryptionWithZoneWithNoEncryptionKey(TransparentEncryptionNoKeyMixin, TestTransparentEncryptionWithZone):
pass
class TestPostgresqlBinaryPreparedTransparentEncryption(BaseBinaryPostgreSQLTestCase, TestTransparentEncryption):
"""Testing transparent encryption of prepared statements in PostgreSQL (binary format)."""
FORMAT = AsyncpgExecutor.BinaryFormat
def filterContext(self, context):
# Context contains some extra fields which do not correspond
# to table columns. Remove them for prepared queries.
return {column: value for column, value in context.items()
if column in self.encryptor_table.columns}
def insertRow(self, context):
context = self.filterContext(context)
query, parameters = self.compileQuery(
self.encryptor_table.insert(context),
context,
)
self.executor2.execute_prepared_statement(query, parameters)
def update_data(self, context):
context = self.filterContext(context)
# Exclude the "id" column which is a key.
dataColumns = {column: value for column, value in context.items()
if column != 'id'}
query, parameters = self.compileQuery(
self.encryptor_table.update().
where(self.encryptor_table.c.id == sa.bindparam('id')).
values(dataColumns),
context,
)
self.executor2.execute_prepared_statement(query, parameters)
class TestPostgresqlTextPreparedTransparentEncryption(TestPostgresqlBinaryPreparedTransparentEncryption):
"""Testing transparent encryption of prepared statements in PostgreSQL (text format)."""
FORMAT = AsyncpgExecutor.TextFormat
class TestSetupCustomApiPort(BaseTestCase):
def setUp(self):
pass
def tearDown(self):
pass
def get_acraserver_api_connection_string(self, port=None):
        # use tcp instead of the unix socket which is set as default in tests
return 'tcp://localhost:{}'.format(port)
def testCustomPort(self):
custom_port = 7373
acra = self.fork_acra(
None, incoming_connection_api_port=custom_port)
try:
wait_connection(custom_port)
finally:
stop_process(acra)
class TestEmptyValues(BaseTestCase):
temp_table = sa.Table('test_empty_values', metadata,
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('binary', sa.LargeBinary(length=10), nullable=True),
sa.Column('text', sa.Text, nullable=True),
)
def testEmptyValues(self):
null_value_id = get_random_id()
empty_value_id = get_random_id()
# insert with NULL value
self.engine1.execute(
self.temp_table.insert(),
{'id': null_value_id, 'text': None, 'binary': None})
# insert with empty value
self.engine1.execute(
self.temp_table.insert(),
{'id': empty_value_id, 'text': '', 'binary': b''})
# check null values
result = self.engine1.execute(sa.select([self.temp_table]).where(self.temp_table.c.id == null_value_id))
row = result.fetchone()
if TEST_MYSQL:
# PyMySQL returns empty strings for NULL values
self.assertEqual(row['text'], '')
self.assertEqual(row['binary'], b'')
else:
self.assertIsNone(row['text'])
self.assertIsNone(row['binary'])
# check empty values
result = self.engine1.execute(sa.select([self.temp_table]).where(self.temp_table.c.id == empty_value_id))
row = result.fetchone()
self.assertEqual(row['text'], '')
self.assertEqual(row['binary'], b'')
class TestEncryptionWithIntFields(BaseTestCase):
temp_table = sa.Table('test_integer_data_fields', metadata,
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('data', sa.LargeBinary(length=10), nullable=True),
sa.Column('number', sa.Integer),
sa.Column('binary', sa.LargeBinary(length=10), nullable=True),
)
def testEncryptWithIntFields(self):
value_id = get_random_id()
data = b'42 is the answer'
number = 8800
binary = b'some\x00binary\x01data'
data_encrypted = create_acrastruct(
data,
read_storage_public_key(TLS_CERT_CLIENT_ID_1, KEYS_FOLDER.name)
)
# insert some data
self.engine1.execute(
self.temp_table.insert(),
{'id': value_id, 'data': data_encrypted, 'number': number, 'binary': binary})
# check values (select all)
result = self.engine1.execute(sa.select([self.temp_table]).where(self.temp_table.c.id == value_id))
row = result.fetchone()
self.assertEqual(row['id'], value_id)
self.assertEqual(row['data'], data)
self.assertEqual(row['number'], number)
self.assertEqual(row['binary'], binary)
# check values (select numbers only)
result = self.engine1.execute(
sa
.select([self.temp_table.columns.id, self.temp_table.columns.number])
.where(self.temp_table.c.id == value_id)
)
row = result.fetchone()
self.assertEqual(row['id'], value_id)
self.assertEqual(row['number'], number)
# check values (select encrypted only)
result = self.engine1.execute(
sa
.select([self.temp_table.columns.data])
.where(self.temp_table.c.id == value_id)
)
row = result.fetchone()
self.assertEqual(row['data'], data)
# check values (select everything except encrypted)
result = self.engine1.execute(
sa
.select([self.temp_table.columns.id, self.temp_table.columns.number, self.temp_table.columns.binary])
.where(self.temp_table.c.id == value_id)
)
row = result.fetchone()
self.assertEqual(row['id'], value_id)
self.assertEqual(row['number'], number)
self.assertEqual(row['binary'], binary)
class TestOutdatedServiceConfigs(BaseTestCase, FailedRunProcessMixin):
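    # these tests run the service binaries directly against generated configs,
    # so the usual acra-server/database setup from BaseTestCase is skipped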
def setUp(self):
return
def tearDown(self):
return
def remove_version_from_config(self, path):
config = load_yaml_config(path)
del config['version']
dump_yaml_config(config, path)
def replace_version_in_config(self, version, path):
config = load_yaml_config(path)
config['version'] = version
dump_yaml_config(config, path)
def testStartupWithoutVersionInConfig(self):
files = os.listdir('cmd/')
services = [i for i in files if os.path.isdir(os.path.join('cmd', i))]
self.assertTrue(services)
with tempfile.TemporaryDirectory() as tmp_dir:
# generate configs for tests
subprocess.check_output(['configs/regenerate.sh', tmp_dir], env={'BINARY_FOLDER': BINARY_OUTPUT_FOLDER})
for service in services:
self.remove_version_from_config(os.path.join(tmp_dir, service + '.yaml'))
default_args = {
'acra-server': ['-db_host=127.0.0.1'],
'acra-keys': [],
'acra-heartbeat': ['--logging_format=plaintext'],
}
for service in services:
config_param = '-config_file={}'.format(os.path.join(tmp_dir, '{}.yaml'.format(service)))
args = [os.path.join(BINARY_OUTPUT_FOLDER, service), config_param] + default_args.get(service, [])
stderr = self.getOutputFromProcess(args)
self.assertIn('error="config hasn\'t version key"', stderr)
def testStartupWithOutdatedConfigVersion(self):
files = os.listdir('cmd/')
services = [i for i in files if os.path.isdir(os.path.join('cmd', i))]
self.assertTrue(services)
with tempfile.TemporaryDirectory() as tmp_dir:
# generate configs for tests
subprocess.check_output(['configs/regenerate.sh', tmp_dir], env={'BINARY_FOLDER': BINARY_OUTPUT_FOLDER})
for service in services:
self.replace_version_in_config('0.0.0', os.path.join(tmp_dir, service + '.yaml'))
default_args = {
'acra-server': ['-db_host=127.0.0.1'],
'acra-keys': [],
'acra-heartbeat': ['--logging_format=plaintext'],
}
for service in services:
config_param = '-config_file={}'.format(os.path.join(tmp_dir, '{}.yaml'.format(service)))
args = [os.path.join(BINARY_OUTPUT_FOLDER, service), config_param] + default_args.get(service, [])
stderr = self.getOutputFromProcess(args)
                self.assertRegex(stderr, r'code=508 error="config version \\"0.0.0\\" is not supported, expects \\"[\d.]+\\" version')
def testStartupWithDifferentConfigsPatchVersion(self):
files = os.listdir('cmd/')
services = [i for i in files if os.path.isdir(os.path.join('cmd/', i))]
self.assertTrue(services)
with tempfile.TemporaryDirectory() as tmp_dir:
# generate configs for tests
subprocess.check_output(['configs/regenerate.sh', tmp_dir], env={'BINARY_FOLDER': BINARY_OUTPUT_FOLDER})
for service in services:
config_path = os.path.join(tmp_dir, service + '.yaml')
config = load_yaml_config(config_path)
version = semver.parse(config['version'])
version['patch'] = 100500
config['version'] = semver.format_version(**version)
dump_yaml_config(config, config_path)
default_args = {
'acra-addzone': ['-keys_output_dir={}'.format(KEYS_FOLDER.name)],
'acra-heartbeat': {'args': ['--logging_format=plaintext',
'--connection_string=please-fail'],
'status': 1},
'acra-keymaker': ['-keys_output_dir={}'.format(tmp_dir),
'-keys_public_output_dir={}'.format(tmp_dir),
'--keystore={}'.format(KEYSTORE_VERSION)],
'acra-keys': [],
'acra-poisonrecordmaker': ['-keys_dir={}'.format(tmp_dir)],
'acra-rollback': {'args': ['-keys_dir={}'.format(tmp_dir)],
'status': 1},
'acra-rotate': {'args': ['-keys_dir={}'.format(tmp_dir)],
'status': 0},
'acra-translator': {'connection': 'connection_string',
'args': ['-keys_dir={}'.format(KEYS_FOLDER.name),
# empty id to raise error
'--securesession_id=""'],
'status': 1},
'acra-server': {'args': ['-keys_dir={}'.format(KEYS_FOLDER.name)],
'status': 1},
}
for service in services:
test_data = default_args.get(service, [])
expected_status_code = 0
if isinstance(test_data, dict):
expected_status_code = test_data['status']
service_args = test_data['args']
else:
service_args = test_data
config_param = '-config_file={}'.format(os.path.join(tmp_dir, '{}.yaml'.format(service)))
args = [os.path.join(BINARY_OUTPUT_FOLDER, service), config_param] + service_args
stderr = self.getOutputFromProcess(args)
                self.assertNotRegex(stderr, r'code=508 error="config version \\"[\d.]+\\" is not supported, expects \\"[\d.]+\\" version')
def testStartupWithoutConfig(self):
files = os.listdir('cmd/')
services = [i for i in files if os.path.isdir(os.path.join('cmd/', i))]
self.assertTrue(services)
with tempfile.TemporaryDirectory() as tmp_dir:
default_args = {
'acra-addzone': ['-keys_output_dir={}'.format(KEYS_FOLDER.name)],
'acra-heartbeat': {'args': ['--logging_format=plaintext',
'--connection_string=please-fail'],
'status': 1},
'acra-keymaker': ['-keys_output_dir={}'.format(tmp_dir),
'-keys_public_output_dir={}'.format(tmp_dir),
'--keystore={}'.format(KEYSTORE_VERSION)],
'acra-keys': [],
'acra-poisonrecordmaker': ['-keys_dir={}'.format(tmp_dir)],
'acra-rollback': {'args': ['-keys_dir={}'.format(tmp_dir)],
'status': 1},
'acra-rotate': {'args': ['-keys_dir={}'.format(tmp_dir)],
'status': 0},
'acra-translator': {'connection': 'connection_string',
'args': ['-keys_dir={}'.format(KEYS_FOLDER.name),
# empty id to raise error
'--securesession_id=""'],
'status': 1},
'acra-server': {'args': ['-keys_dir={}'.format(KEYS_FOLDER.name)],
'status': 1},
}
for service in services:
test_data = default_args.get(service, [])
expected_status_code = 0
if isinstance(test_data, dict):
expected_status_code = test_data['status']
service_args = test_data['args']
else:
service_args = test_data
args = [os.path.join(BINARY_OUTPUT_FOLDER, service), '-config_file=""'] + service_args
stderr = self.getOutputFromProcess(args)
                self.assertNotRegex(stderr, r'code=508 error="config version \\"[\d.]+\\" is not supported, expects \\"[\d.]+\\" version')
class TestPgPlaceholders(BaseTestCase):
def checkSkip(self):
if TEST_MYSQL or not TEST_POSTGRESQL:
self.skipTest("test only for postgresql")
elif not TEST_WITH_TLS:
self.skipTest("running tests only with TLS")
def testPgPlaceholders(self):
connection_args = ConnectionArgs(host=get_db_host(), port=self.ACRASERVER_PORT,
user=DB_USER, password=DB_USER_PASSWORD,
dbname=DB_NAME, ssl_ca=TEST_TLS_CA,
ssl_key=TEST_TLS_CLIENT_KEY,
ssl_cert=TEST_TLS_CLIENT_CERT)
executor = AsyncpgExecutor(connection_args)
        # the empty table returns 0 rows from the first select and the union returns our expected data;
        # this way we test placeholders both in the SELECT list and in the WHERE clause
query = "select $1::bytea from {table} where {column}=$1::bytea UNION select $1::bytea;".format(
table=test_table.name, column=test_table.c.data.name)
test_data = b'some data'
data = executor.execute(query, [test_data])
self.assertEqual(len(data), 1)
self.assertEqual(data[0][0], test_data)
        data = executor.execute_prepared_statement(query, [test_data])
        self.assertEqual(len(data), 1)
        self.assertEqual(data[0][0], test_data)
class TLSAuthenticationDirectlyToAcraMixin:
"""Start acra-server TLS mode and use clientID from certificates
self.engine1 uses TEST_TLS_CLIENT_* and self.engine2 uses TEST_TLS_CLIENT_2_* values as TLS credentials"""
def setUp(self):
if not TEST_WITH_TLS:
self.skipTest("Test works only with TLS support on db side")
self.acra_writer_id = extract_client_id_from_cert(tls_cert=TEST_TLS_CLIENT_CERT, extractor=self.get_identifier_extractor_type())
self.assertEqual(create_client_keypair_from_certificate(tls_cert=TEST_TLS_CLIENT_CERT,
extractor=self.get_identifier_extractor_type(), keys_dir=KEYS_FOLDER.name), 0)
# generate encryption keys for second certificate too
self.assertEqual(create_client_keypair_from_certificate(tls_cert=TEST_TLS_CLIENT_2_CERT,
extractor=self.get_identifier_extractor_type(), keys_dir=KEYS_FOLDER.name), 0)
try:
if not self.EXTERNAL_ACRA:
# start acra with configured TLS
self.acra = self.fork_acra(
tls_key=abs_path(TEST_TLS_SERVER_KEY),
tls_cert=abs_path(TEST_TLS_SERVER_CERT),
tls_ca=TEST_TLS_CA,
keys_dir=KEYS_FOLDER.name,
tls_identifier_extractor_type=self.get_identifier_extractor_type())
# create two engines which should use different client's certificates for authentication
base_args = get_connect_args(port=self.ACRASERVER_PORT, sslmode='require')
tls_args_1 = base_args.copy()
tls_args_1.update(get_tls_connection_args(TEST_TLS_CLIENT_KEY, TEST_TLS_CLIENT_CERT))
self.engine1 = sa.create_engine(
get_engine_connection_string(self.get_acraserver_connection_string(self.ACRASERVER_PORT), DB_NAME),
connect_args=tls_args_1)
tls_args_2 = base_args.copy()
tls_args_2.update(get_tls_connection_args(TEST_TLS_CLIENT_2_KEY, TEST_TLS_CLIENT_2_CERT))
self.engine2 = sa.create_engine(
get_engine_connection_string(self.get_acraserver_connection_string(self.ACRASERVER_PORT), DB_NAME),
connect_args=tls_args_2)
self.engine_raw = sa.create_engine(
'{}://{}:{}/{}'.format(DB_DRIVER, DB_HOST, DB_PORT, DB_NAME),
connect_args=connect_args)
self.engines = [self.engine1, self.engine2, self.engine_raw]
metadata.create_all(self.engine_raw)
self.engine_raw.execute('delete from test;')
for engine in self.engines:
count = 0
                # retry with a sleep if acra is not up yet
while True:
try:
if TEST_MYSQL:
engine.execute("select 1;")
else:
engine.execute(
"UPDATE pg_settings SET setting = '{}' "
"WHERE name = 'bytea_output'".format(self.DB_BYTEA))
break
except Exception as e:
time.sleep(SETUP_SQL_COMMAND_TIMEOUT)
count += 1
if count == SQL_EXECUTE_TRY_COUNT:
raise
except:
self.tearDown()
raise
def tearDown(self):
try:
self.log_prometheus_metrics()
self.clear_prometheus_addresses()
except:
pass
try:
self.engine_raw.execute('delete from test;')
except:
pass
for engine in getattr(self, 'engines', []):
engine.dispose()
processes = [getattr(self, 'acra', ProcessStub())]
stop_process(processes)
send_signal_by_process_name('acra-server', signal.SIGKILL)
class TestDirectTLSAuthenticationFailures(TLSAuthenticationBySerialNumberMixin, BaseTestCase):
# override setUp/tearDown from BaseTestCase to avoid extra initialization
def setUp(self):
if not TEST_WITH_TLS:
self.skipTest("Test works only with TLS support on db side")
def tearDown(self):
pass
def testInvalidClientAuthConfiguration(self):
        # try to start the server with --tls_auth=0 while extracting client_id from TLS, which is an invalid combination
        # because tls_auth=0 doesn't require the client's certificate during the handshake
self.assertEqual(create_client_keypair_from_certificate(tls_cert=TEST_TLS_CLIENT_CERT, keys_dir=KEYS_FOLDER.name), 0)
# generate encryption keys for second certificate too
self.assertEqual(create_client_keypair_from_certificate(tls_cert=TEST_TLS_CLIENT_2_CERT, keys_dir=KEYS_FOLDER.name), 0)
with self.assertRaises(Exception) as exc:
self.fork_acra(
tls_key=abs_path(TEST_TLS_SERVER_KEY),
tls_cert=abs_path(TEST_TLS_SERVER_CERT),
tls_ca=TEST_TLS_CA,
tls_auth=0,
keys_dir=KEYS_FOLDER.name,
tls_identifier_extractor_type=self.get_identifier_extractor_type())
        # sometimes the process starts so fast that fork returns a PID, and between the CLI checks and os.Exit(1)
        # the python code starts its connection loop even though the process has already been interrupted
self.assertIn(exc.exception.args[0], ('Can\'t fork', WAIT_CONNECTION_ERROR_MESSAGE))
def testDirectConnectionWithoutCertificate(self):
        # try to start the server with --tls_auth >= 1, extracting client_id from TLS, and connect directly
        # without providing any certificate
self.assertEqual(create_client_keypair_from_certificate(tls_cert=TEST_TLS_CLIENT_CERT, keys_dir=KEYS_FOLDER.name), 0)
# generate encryption keys for second certificate too
self.assertEqual(create_client_keypair_from_certificate(tls_cert=TEST_TLS_CLIENT_2_CERT, keys_dir=KEYS_FOLDER.name), 0)
acra = ProcessStub()
for tls_auth in range(1, 5):
try:
acra = self.fork_acra(
tls_key=abs_path(TEST_TLS_SERVER_KEY),
tls_cert=abs_path(TEST_TLS_SERVER_CERT),
tls_ca=TEST_TLS_CA,
tls_auth=tls_auth,
keys_dir=KEYS_FOLDER.name,
tls_identifier_extractor_type=self.get_identifier_extractor_type())
base_args = get_connect_args(port=self.ACRASERVER_PORT, sslmode='require')
tls_args_1 = base_args.copy()
tls_args_1.update(get_tls_connection_args_without_certificate())
if TEST_POSTGRESQL:
expected_exception = psycopg2.OperationalError
else:
expected_exception = pymysql.err.OperationalError
print(expected_exception)
engine1 = sa.create_engine(
get_engine_connection_string(
self.get_acraserver_connection_string(self.ACRASERVER_PORT), DB_NAME),
connect_args=tls_args_1)
with self.assertRaises(expected_exception) as exc:
# test query
engine1.execute('select 1')
except Exception as exc2:
pass
finally:
stop_process(acra)
class TestTLSAuthenticationDirectlyToAcraByDistinguishedName(TLSAuthenticationDirectlyToAcraMixin, TLSAuthenticationByDistinguishedNameMixin, BaseTestCase):
"""
    Tests the environment where the client's app connects to the DB through acra-server over TLS
    and acra-server extracts the clientID from the client's certificate instead of taking it from the --clientID CLI param
    """
"""
def testServerRead(self):
"""test decrypting with correct client_id and not decrypting with
incorrect client_id or using direct connection to db"""
self.assertEqual(create_client_keypair_from_certificate(tls_cert=TEST_TLS_CLIENT_CERT,
extractor=self.get_identifier_extractor_type(), keys_dir=KEYS_FOLDER.name), 0)
server_public1 = read_storage_public_key(self.acra_writer_id, KEYS_FOLDER.name)
data = get_pregenerated_random_data()
acra_struct = create_acrastruct(
data.encode('ascii'), server_public1)
row_id = get_random_id()
self.log(storage_client_id=self.acra_writer_id,
data=acra_struct, expected=data.encode('ascii'))
self.engine1.execute(
test_table.insert(),
{'id': row_id, 'data': acra_struct, 'raw_data': data})
result = self.engine1.execute(
sa.select([test_table])
.where(test_table.c.id == row_id))
row = result.fetchone()
self.assertEqual(row['data'], row['raw_data'].encode('utf-8'))
self.assertEqual(row['empty'], b'')
result = self.engine2.execute(
sa.select([test_table])
.where(test_table.c.id == row_id))
row = result.fetchone()
self.assertNotEqual(row['data'].decode('ascii', errors='ignore'),
row['raw_data'])
self.assertEqual(row['empty'], b'')
result = self.engine_raw.execute(
sa.select([test_table])
.where(test_table.c.id == row_id))
row = result.fetchone()
self.assertNotEqual(row['data'].decode('ascii', errors='ignore'),
row['raw_data'])
self.assertEqual(row['empty'], b'')
def testReadAcrastructInAcrastruct(self):
"""test correct decrypting acrastruct when acrastruct concatenated to
partial another acrastruct"""
server_public1 = read_storage_public_key(self.acra_writer_id, KEYS_FOLDER.name)
incorrect_data = get_pregenerated_random_data()
correct_data = get_pregenerated_random_data()
suffix_data = get_pregenerated_random_data()[:10]
fake_offset = (3+45+84) - 4
fake_acra_struct = create_acrastruct(
incorrect_data.encode('ascii'), server_public1)[:fake_offset]
inner_acra_struct = create_acrastruct(
correct_data.encode('ascii'), server_public1)
data = fake_acra_struct + inner_acra_struct + suffix_data.encode('ascii')
correct_data = correct_data + suffix_data
row_id = get_random_id()
self.log(storage_client_id=self.acra_writer_id,
data=data,
expected=fake_acra_struct+correct_data.encode('ascii'))
self.engine1.execute(
test_table.insert(),
{'id': row_id, 'data': data, 'raw_data': correct_data})
result = self.engine1.execute(
sa.select([test_table])
.where(test_table.c.id == row_id))
row = result.fetchone()
try:
self.assertEqual(row['data'][fake_offset:],
row['raw_data'].encode('utf-8'))
self.assertEqual(row['data'][:fake_offset], fake_acra_struct[:fake_offset])
except:
print('incorrect data: {}\ncorrect data: {}\ndata: {}\n data len: {}'.format(
incorrect_data, correct_data, row['data'], len(row['data'])))
raise
self.assertEqual(row['empty'], b'')
result = self.engine2.execute(
sa.select([test_table])
.where(test_table.c.id == row_id))
row = result.fetchone()
self.assertNotEqual(row['data'][fake_offset:].decode('ascii', errors='ignore'),
row['raw_data'])
self.assertEqual(row['empty'], b'')
result = self.engine_raw.execute(
sa.select([test_table])
.where(test_table.c.id == row_id))
row = result.fetchone()
self.assertNotEqual(row['data'][fake_offset:].decode('ascii', errors='ignore'),
row['raw_data'])
self.assertEqual(row['empty'], b'')
class TestTLSAuthenticationDirectlyToAcraBySerialNumber(TLSAuthenticationBySerialNumberMixin,
TestTLSAuthenticationDirectlyToAcraByDistinguishedName):
pass
class TestTLSAuthenticationDirectlyToAcraBySerialNumberConnectionsClosed(AcraCatchLogsMixin,TLSAuthenticationBySerialNumberMixin,
TestTLSAuthenticationDirectlyToAcraByDistinguishedName):
"""
    Test that AcraServer properly closes client/DB connections
"""
def testReadAcrastructInAcrastruct(self):
super().testReadAcrastructInAcrastruct()
self.assertIn("Finished processing client's connection", self.read_log(self.acra))
def testServerRead(self):
super().testServerRead()
self.assertIn("Finished processing client's connection", self.read_log(self.acra))
class BaseSearchableTransparentEncryption(TestTransparentEncryption):
encryptor_table = sa.Table(
'test_searchable_transparent_encryption', metadata,
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('specified_client_id',
sa.LargeBinary(length=COLUMN_DATA_SIZE)),
sa.Column('default_client_id',
sa.LargeBinary(length=COLUMN_DATA_SIZE)),
sa.Column('number', sa.Integer),
sa.Column('zone_id', sa.LargeBinary(length=COLUMN_DATA_SIZE)),
sa.Column('raw_data', sa.LargeBinary(length=COLUMN_DATA_SIZE)),
sa.Column('nullable', sa.Text, nullable=True),
sa.Column('searchable', sa.LargeBinary(length=COLUMN_DATA_SIZE)),
sa.Column('searchable_acrablock', sa.LargeBinary(length=COLUMN_DATA_SIZE)),
sa.Column('empty', sa.LargeBinary(length=COLUMN_DATA_SIZE), nullable=False, default=b''),
sa.Column('token_i32', sa.Integer(), nullable=False, default=1),
sa.Column('token_i64', sa.BigInteger(), nullable=False, default=1),
sa.Column('token_str', sa.Text, nullable=False, default=''),
sa.Column('token_bytes', sa.LargeBinary(length=COLUMN_DATA_SIZE), nullable=False, default=b''),
sa.Column('token_email', sa.Text, nullable=False, default=''),
sa.Column('masking', sa.LargeBinary(length=COLUMN_DATA_SIZE), nullable=False, default=b''),
)
ENCRYPTOR_CONFIG = get_encryptor_config('tests/ee_encryptor_config.yaml')
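    # the EE encryptor config enables searchable encryption for the `searchable`/`searchable_acrablock`
    # columns plus the tokenization and masking columns declared in the table above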
def fork_acra(self, popen_kwargs: dict=None, **acra_kwargs: dict):
# Disable keystore cache since it can interfere with rotation tests
acra_kwargs['keystore_cache_size'] = -1
return super(BaseSearchableTransparentEncryption, self).fork_acra(popen_kwargs, **acra_kwargs)
def fetch_raw_data(self, context):
result = self.engine_raw.execute(
sa.select([self.encryptor_table.c.default_client_id,
self.encryptor_table.c.specified_client_id,
sa.cast(context['zone'][ZONE_ID].encode('ascii'), BYTEA),
self.encryptor_table.c.number,
self.encryptor_table.c.zone_id,
self.encryptor_table.c.raw_data,
self.encryptor_table.c.nullable,
self.encryptor_table.c.searchable,
self.encryptor_table.c.empty])
.where(self.encryptor_table.c.id == context['id']))
data = result.fetchone()
return data
def update_data(self, context):
self.engine2.execute(
sa.update(self.encryptor_table)
.where(self.encryptor_table.c.id == context['id'])
.values(default_client_id=context['default_client_id'],
specified_client_id=context['specified_client_id'],
zone_id=context['zone_id'],
raw_data=context['raw_data'],
searchable=context.get('searchable'),
empty=context.get('empty', b''),
nullable=context.get('nullable', None))
)
def get_context_data(self):
context = {
'id': get_random_id(),
'default_client_id': get_pregenerated_random_data().encode('ascii'),
'number': get_random_id(),
'zone_id': get_pregenerated_random_data().encode('ascii'),
'specified_client_id': get_pregenerated_random_data().encode('ascii'),
'raw_data': get_pregenerated_random_data().encode('ascii'),
'zone': zones[0],
'searchable': get_pregenerated_random_data().encode('ascii'),
'searchable_acrablock': get_pregenerated_random_data().encode('ascii'),
'empty': b'',
'nullable': None,
'masking': get_pregenerated_random_data().encode('ascii'),
'token_bytes': get_pregenerated_random_data().encode('ascii'),
'token_email': get_pregenerated_random_data(),
'token_str': get_pregenerated_random_data(),
'token_i32': random.randint(0, 2 ** 16),
'token_i64': random.randint(0, 2 ** 32),
}
return context
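    # insert `count` extra rows whose search field differs from the search term so that
    # the searchable SELECTs below are expected to match exactly one row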
def insertDifferentRows(self, context, count, search_term=None, search_field='searchable'):
if not search_term:
search_term = context[search_field]
temp_context = context.copy()
while count != 0:
new_data = get_pregenerated_random_data().encode('utf-8')
if new_data != search_term:
temp_context[search_field] = new_data
temp_context['id'] = context['id'] + count
self.insertRow(temp_context)
count -= 1
def executeSelect2(self, query, parameters):
"""Execute a SELECT query with parameters via AcraServer for "keypair2"."""
return self.engine2.execute(query, parameters).fetchall()
def executeBulkInsert(self, query, values):
"""Execute a Bulk Insert query with list of values via AcraServer for "TEST_TLS_CLIENT_2_CERT"."""
return self.engine2.execute(query.values(values))
class BaseSearchableTransparentEncryptionBinaryPostgreSQLMixin(BaseBinaryPostgreSQLTestCase, BaseTestCase):
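    # re-implements the query helpers with prepared statements over the PostgreSQL extended protocol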
def executeSelect2(self, query, parameters):
query, parameters = self.compileQuery(query, parameters)
return self.executor2.execute_prepared_statement(query, parameters)
def executeBulkInsert(self, query, values):
"""Execute a Bulk Insert query with list of values via AcraServer for "TEST_TLS_CLIENT_2_CERT"."""
query, parameters = self.compileBulkInsertQuery(query.values(values), values)
return self.executor2.execute_prepared_statement(query, parameters)
class BaseSearchableTransparentEncryptionBinaryMySQLMixin(BaseBinaryMySQLTestCase, BaseTestCase):
def executeSelect2(self, query, parameters):
query, parameters = self.compileQuery(query, parameters)
return self.executor2.execute_prepared_statement(query, parameters)
def executeBulkInsert(self, query, values):
"""Execute a Bulk Insert query with list of values via AcraServer for "TEST_TLS_CLIENT_2_CERT"."""
query, parameters = self.compileBulkInsertQuery(query.values(values), values)
return self.executor2.execute_prepared_statement_no_result(query, parameters)
class TestSearchableTransparentEncryption(BaseSearchableTransparentEncryption):
def testSearch(self):
context = self.get_context_data()
search_term = context['searchable']
# Insert searchable data and some additional different rows
self.insertRow(context)
self.insertDifferentRows(context, count=5)
rows = self.executeSelect2(
sa.select([self.encryptor_table])
.where(self.encryptor_table.c.searchable == sa.bindparam('searchable')),
{'searchable': search_term},
)
self.assertEqual(len(rows), 1)
self.checkDefaultIdEncryption(**context)
self.assertEqual(rows[0]['searchable'], search_term)
def testHashValidation(self):
context = self.get_context_data()
search_term = context['searchable']
# Insert searchable data and some additional different rows
self.insertRow(context)
rows = self.executeSelect2(
sa.select([self.encryptor_table])
.where(self.encryptor_table.c.searchable == sa.bindparam('searchable')),
{'searchable': search_term},
)
self.assertEqual(len(rows), 1)
self.checkDefaultIdEncryption(**context)
self.assertEqual(rows[0]['searchable'], search_term)
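        # tamper with a few bytes of the stored value directly in the DB and verify that
        # AcraServer returns it as-is (hash validation fails) instead of decrypting it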
encrypted_data = self.fetch_raw_data(context)
searchable_encrypted_data = bytearray(encrypted_data['searchable'])
searchable_encrypted_data[5:10] = os.urandom(5)
tamper_searchable_data = searchable_encrypted_data
self.engine_raw.execute(
sa.update(self.encryptor_table)
.where(self.encryptor_table.c.id == context['id'])
.values(searchable=tamper_searchable_data))
result = self.engine2.execute(
sa.select([self.encryptor_table]).where(self.encryptor_table.c.id == context['id']))
row = result.fetchone()
self.assertIsNotNone(row)
self.assertEqual(row['default_client_id'], context['default_client_id'])
self.assertNotEqual(row['searchable'], context['searchable'])
self.assertNotEqual(row['specified_client_id'], context['specified_client_id'])
def testBulkInsertSearch(self):
context = self.get_context_data()
search_term = context['searchable']
search_context = context.copy()
# we should delete redundant `zone` key to compile bulk insert query
# https://docs.sqlalchemy.org/en/13/changelog/migration_08.html#unconsumed-column-names-warning-becomes-an-exception
del search_context['zone']
values = [search_context]
for idx in range(5):
insert_context = self.get_context_data()
new_data = get_pregenerated_random_data().encode('utf-8')
if new_data != search_term:
insert_context['searchable'] = new_data
del insert_context['zone']
values.append(insert_context.copy())
self.executeBulkInsert(self.encryptor_table.insert(), values)
rows = self.executeSelect2(
sa.select([self.encryptor_table])
.where(self.encryptor_table.c.searchable == sa.bindparam('searchable')),
{'searchable': search_term},
)
self.assertEqual(len(rows), 1)
self.checkDefaultIdEncryption(**context)
self.assertEqual(rows[0]['searchable'], search_term)
for value in values:
            value['zone'] = zones[0]
self.checkDefaultIdEncryption(**value)
def testSearchAcraBlock(self):
context = self.get_context_data()
row_id = context['id']
search_term = context['searchable_acrablock']
# Insert searchable data and some additional different rows
self.insertRow(context)
self.insertDifferentRows(context, count=5, search_field='searchable_acrablock')
rows = self.engine_raw.execute(
sa.select([self.encryptor_table.c.searchable_acrablock])
.where(self.encryptor_table.c.id == row_id)).fetchall()
self.assertTrue(rows)
temp_acrastruct = create_acrastruct_with_client_id(b'somedata', TLS_CERT_CLIENT_ID_1)
# AcraBlock should have half of AcraStruct begin tag. Check that searchable_acrablock is not AcraStruct
self.assertNotEqual(rows[0]['searchable_acrablock'][:8], temp_acrastruct[:8])
# skip 33 bytes of hash
self.assertEqual(rows[0]['searchable_acrablock'][33:33+3], CRYPTO_ENVELOPE_HEADER)
rows = self.executeSelect2(
sa.select([self.encryptor_table])
.where(self.encryptor_table.c.searchable_acrablock == sa.bindparam('searchable_acrablock')),
{'searchable_acrablock': search_term})
self.assertEqual(len(rows), 1)
self.checkDefaultIdEncryption(**context)
self.assertEqual(rows[0]['searchable_acrablock'], search_term)
def testSearchWithEncryptedData(self):
context = self.get_context_data()
not_encrypted_term = context['raw_data']
search_term = context['searchable']
encrypted_term = create_acrastruct_with_client_id(
search_term, TLS_CERT_CLIENT_ID_2)
context['searchable'] = encrypted_term
# Insert searchable data and some additional different rows
self.insertRow(context)
# Use plaintext search term here to avoid mismatches
self.insertDifferentRows(context, count=5, search_term=search_term)
rows = self.executeSelect2(
sa.select([self.encryptor_table])
.where(sa.and_(
self.encryptor_table.c.searchable == sa.bindparam('searchable'),
self.encryptor_table.c.raw_data == sa.bindparam('raw_data'))),
{'searchable': search_term,
'raw_data': not_encrypted_term},
)
self.assertEqual(len(rows), 1)
self.checkDefaultIdEncryption(**context)
self.assertEqual(rows[0]['searchable'], search_term)
result = self.engine2.execute(
sa.select([self.encryptor_table])
.where(self.encryptor_table.c.searchable == encrypted_term))
rows = result.fetchall()
self.assertEqual(len(rows), 1)
self.checkDefaultIdEncryption(**context)
self.assertEqual(rows[0]['searchable'], search_term)
def testSearchAcraBlockWithEncryptedData(self):
context = self.get_context_data()
row_id = context['id']
not_encrypted_term = context['raw_data']
search_term = context['searchable_acrablock']
encrypted_term = create_acrastruct_with_client_id(
search_term, TLS_CERT_CLIENT_ID_2)
context['searchable_acrablock'] = encrypted_term
# Insert searchable data and some additional different rows
self.insertRow(context)
# Use plaintext search term here to avoid mismatches
self.insertDifferentRows(context, count=5, search_term=search_term, search_field='searchable_acrablock')
rows = self.engine_raw.execute(
sa.select([self.encryptor_table.c.searchable_acrablock])
.where(self.encryptor_table.c.id == row_id)).fetchall()
self.assertTrue(rows)
# AcraBlock should have half of AcraStruct begin tag. Check that searchable_acrablock is not AcraStruct
self.assertNotEqual(rows[0]['searchable_acrablock'][:8], encrypted_term[:8])
# skip 33 bytes of hash
self.assertEqual(rows[0]['searchable_acrablock'][33:33+4], encrypted_term[:4])
rows = self.executeSelect2(
sa.select([self.encryptor_table])
.where(sa.and_(
self.encryptor_table.c.searchable_acrablock == sa.bindparam('searchable_acrablock'),
self.encryptor_table.c.raw_data == sa.bindparam('raw_data'))),
{'searchable_acrablock': search_term,
'raw_data': not_encrypted_term},
)
self.assertEqual(len(rows), 1)
self.checkDefaultIdEncryption(**context)
self.assertEqual(rows[0]['searchable_acrablock'], search_term)
result = self.engine2.execute(
sa.select([self.encryptor_table])
.where(self.encryptor_table.c.searchable_acrablock == encrypted_term))
rows = result.fetchall()
self.assertEqual(len(rows), 1)
self.checkDefaultIdEncryption(**context)
self.assertEqual(rows[0]['searchable_acrablock'], search_term)
def testRotatedKeys(self):
"""Verify decryption of searchable data with old keys."""
context = self.get_context_data()
# Encrypt searchable data with epoch 1 key
search_term = context['searchable']
encrypted_term = create_acrastruct_with_client_id(
search_term, TLS_CERT_CLIENT_ID_2)
context['searchable'] = encrypted_term
# Insert searchable data and some additional different rows
self.insertRow(context)
# Use plaintext search term here to avoid mismatches
self.insertDifferentRows(context, count=5, search_term=search_term)
        # Encrypt the search term again with the same epoch 1 key;
        # this will result in different encrypted data on the outside
encrypted_term_1 = create_acrastruct_with_client_id(
search_term, TLS_CERT_CLIENT_ID_2)
self.assertNotEqual(encrypted_term_1, encrypted_term)
# However, searchable encryption should still work with that
rows = self.executeSelect2(
sa.select([self.encryptor_table])
.where(self.encryptor_table.c.searchable == sa.bindparam('searchable')),
{'searchable': encrypted_term_1},
)
self.assertEqual(len(rows), 1)
self.assertEqual(rows[0]['searchable'], search_term)
# Now, rotate the encryption keys
create_client_keypair(TLS_CERT_CLIENT_ID_2, only_storage=True)
# Encrypt the search term again, now with the epoch 2 key
encrypted_term_2 = create_acrastruct_with_client_id(
search_term, TLS_CERT_CLIENT_ID_2)
self.assertNotEqual(encrypted_term_2, encrypted_term)
self.assertNotEqual(encrypted_term_2, encrypted_term_1)
# And searchable encryption should still work
rows = self.executeSelect2(
sa.select([self.encryptor_table])
.where(self.encryptor_table.c.searchable == sa.bindparam('searchable')),
{'searchable': encrypted_term_2},
)
self.assertEqual(len(rows), 1)
self.assertEqual(rows[0]['searchable'], search_term)
# If you try the data encrypted with epoch 1 key, it should still work
rows = self.executeSelect2(
sa.select([self.encryptor_table])
.where(self.encryptor_table.c.searchable == sa.bindparam('searchable')),
{'searchable': encrypted_term_1},
)
self.assertEqual(len(rows), 1)
self.assertEqual(rows[0]['searchable'], search_term)
def testRotatedKeysAcraBlock(self):
"""Verify decryption of searchable data with old keys."""
context = self.get_context_data()
row_id = context['id']
# Encrypt searchable data with epoch 1 key
search_term = context['searchable_acrablock']
encrypted_term = create_acrastruct_with_client_id(
search_term, TLS_CERT_CLIENT_ID_2)
context['searchable_acrablock'] = encrypted_term
# Insert searchable data and some additional different rows
self.insertRow(context)
# Use plaintext search term here to avoid mismatches
self.insertDifferentRows(context, count=5, search_term=search_term, search_field='searchable_acrablock')
rows = self.engine_raw.execute(
sa.select([self.encryptor_table.c.searchable_acrablock])
.where(self.encryptor_table.c.id == row_id)).fetchall()
self.assertTrue(rows)
# AcraBlock should have half of AcraStruct begin tag. Check that searchable_acrablock is not AcraStruct
self.assertNotEqual(rows[0]['searchable_acrablock'][:8], encrypted_term[:8])
# skip 33 bytes of hash
self.assertEqual(rows[0]['searchable_acrablock'][33:33+4], encrypted_term[:4])
        # Encrypt the search term again with the same epoch 1 key;
        # this will result in different encrypted data on the outside
encrypted_term_1 = create_acrastruct_with_client_id(
search_term, TLS_CERT_CLIENT_ID_2)
self.assertNotEqual(encrypted_term_1, encrypted_term)
# However, searchable encryption should still work with that
rows = self.executeSelect2(
sa.select([self.encryptor_table])
.where(self.encryptor_table.c.searchable_acrablock == sa.bindparam('searchable_acrablock')),
{'searchable_acrablock': encrypted_term_1},
)
self.assertEqual(len(rows), 1)
self.assertEqual(rows[0]['searchable_acrablock'], search_term)
# Now, rotate the encryption keys
create_client_keypair(TLS_CERT_CLIENT_ID_2, only_storage=True)
# Encrypt the search term again, now with the epoch 2 key
encrypted_term_2 = create_acrastruct_with_client_id(
search_term, TLS_CERT_CLIENT_ID_2)
self.assertNotEqual(encrypted_term_2, encrypted_term)
self.assertNotEqual(encrypted_term_2, encrypted_term_1)
# And searchable encryption should still work
rows = self.executeSelect2(
sa.select([self.encryptor_table])
.where(self.encryptor_table.c.searchable_acrablock == sa.bindparam('searchable_acrablock')),
{'searchable_acrablock': encrypted_term_2},
)
self.assertEqual(len(rows), 1)
self.assertEqual(rows[0]['searchable_acrablock'], search_term)
# If you try the data encrypted with epoch 1 key, it should still work
rows = self.executeSelect2(
sa.select([self.encryptor_table])
.where(self.encryptor_table.c.searchable_acrablock == sa.bindparam('searchable_acrablock')),
{'searchable_acrablock': encrypted_term_1},
)
self.assertEqual(len(rows), 1)
self.assertEqual(rows[0]['searchable_acrablock'], search_term)
rows = self.engine_raw.execute(
sa.select([self.encryptor_table.c.searchable_acrablock])
.where(self.encryptor_table.c.id == row_id)).fetchall()
self.assertTrue(rows)
# AcraBlock should have half of AcraStruct begin tag. Check that searchable_acrablock is not AcraStruct
self.assertNotEqual(rows[0]['searchable_acrablock'][:8], encrypted_term[:8])
# skip 33 bytes of hash
self.assertEqual(rows[0]['searchable_acrablock'][33:33+4], encrypted_term[:4])
class TestSearchableTransparentEncryptionWithDefaultsAcraBlockBinaryPostgreSQL(BaseSearchableTransparentEncryptionBinaryPostgreSQLMixin, TestSearchableTransparentEncryption):
ENCRYPTOR_CONFIG = get_encryptor_config('tests/ee_acrablock_defaults_with_searchable_config.yaml')
class TestSearchableTransparentEncryptionWithDefaultsAcraBlockBinaryMySQL(BaseSearchableTransparentEncryptionBinaryMySQLMixin, TestSearchableTransparentEncryption):
ENCRYPTOR_CONFIG = get_encryptor_config('tests/ee_acrablock_defaults_with_searchable_config.yaml')
class TestSearchableTransparentEncryptionWithDefaultsAcraStructBinaryPostgreSQL(BaseSearchableTransparentEncryptionBinaryPostgreSQLMixin, TestSearchableTransparentEncryption):
ENCRYPTOR_CONFIG = get_encryptor_config('tests/ee_acrastruct_defaults_with_searchable_config.yaml')
class TestSearchableTransparentEncryptionWithDefaultsAcraStructBinaryMySQL(BaseSearchableTransparentEncryptionBinaryMySQLMixin, TestSearchableTransparentEncryption):
ENCRYPTOR_CONFIG = get_encryptor_config('tests/ee_acrastruct_defaults_with_searchable_config.yaml')
class TestSearchableTransparentEncryptionBinaryPostgreSQL(BaseSearchableTransparentEncryptionBinaryPostgreSQLMixin, TestSearchableTransparentEncryption):
pass
class TestSearchableTransparentEncryptionBinaryMySQL(BaseSearchableTransparentEncryptionBinaryMySQLMixin, TestSearchableTransparentEncryption):
pass
class TestTransparentSearchableEncryptionWithZone(BaseSearchableTransparentEncryption):
def testSearch(self):
self.skipTest("searching with encryption with zones not supported yet")
def testSearchWithEncryptedData(self):
self.skipTest("searching with encryption with zones not supported yet")
def testRotatedKeys(self):
self.skipTest("searching with encryption with zones not supported yet")
class BaseTokenization(BaseTestCase):
WHOLECELL_MODE = True
ENCRYPTOR_CONFIG = get_encryptor_config('tests/ee_tokenization_config.yaml')
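    # engine1/executor1 connect as TEST_TLS_CLIENT_CERT while engine2/executor2 connect as
    # TEST_TLS_CLIENT_2_CERT, the client_id specified in the tokenization encryptor config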
def get_specified_client_id(self):
return TLS_CERT_CLIENT_ID_2
def fork_acra(self, popen_kwargs: dict = None, **acra_kwargs: dict):
prepare_encryptor_config(
client_id=self.get_specified_client_id(), zone_id=zones[0][ZONE_ID], config_path=self.ENCRYPTOR_CONFIG)
acra_kwargs.update(encryptor_config_file=get_test_encryptor_config(self.ENCRYPTOR_CONFIG))
return super(BaseTokenization, self).fork_acra(popen_kwargs, **acra_kwargs)
def insert_via_1(self, query, values):
"""Execute SQLAlchemy INSERT query via AcraServer with "TEST_TLS_CLIENT_CERT"."""
return self.engine1.execute(query, values)
def insert_via_1_bulk(self, query, values):
"""Execute SQLAlchemy Bulk INSERT query via AcraServer with "TEST_TLS_CLIENT_CERT"."""
self.engine1.execute(query.values(values))
def fetch_from_1(self, query):
"""Execute SQLAlchemy SELECT query via AcraServer with "TEST_TLS_CLIENT_CERT"."""
return self.engine1.execute(query).fetchall()
def fetch_from_2(self, query):
"""Execute SQLAlchemy SELECT query via AcraServer with "TEST_TLS_CLIENT_2_CERT"."""
return self.engine2.execute(query).fetchall()
class BaseTokenizationWithBoltDB(BaseTokenization):
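    # store tokens in a local BoltDB file ('token1.db') which is removed in tearDown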
def fork_acra(self, popen_kwargs: dict = None, **acra_kwargs: dict):
acra_kwargs.update(token_db='token1.db')
return super(BaseTokenizationWithBoltDB, self).fork_acra(popen_kwargs, **acra_kwargs)
def tearDown(self):
super().tearDown()
os.remove('token1.db')
class BaseTokenizationWithRedis(RedisMixin, BaseTokenization):
def fork_acra(self, popen_kwargs: dict = None, **acra_kwargs: dict):
acra_kwargs.update(
redis_host_port='localhost:6379',
redis_db_tokens=self.TEST_REDIS_TOKEN_DB,
encryptor_config_file=get_test_encryptor_config(self.ENCRYPTOR_CONFIG))
return super(BaseTokenizationWithRedis, self).fork_acra(popen_kwargs, **acra_kwargs)
class BaseTokenizationWithBinaryBindMySQL(BaseTokenization, BaseBinaryMySQLTestCase):
def checkSkip(self):
if not TEST_MYSQL:
self.skipTest("Only for MySQL")
super().checkSkip()
def insert_via_1(self, query, values):
query, parameters = self.compileInsertQuery(query, values)
self.executor1.execute_prepared_statement_no_result(query, parameters)
def insert_via_1_bulk(self, query, values):
"""Execute SQLAlchemy Bulk INSERT query via AcraServer with "TEST_TLS_CLIENT_CERT"."""
query, parameters = self.compileBulkInsertQuery(query.values(values), values)
return self.executor1.execute_prepared_statement_no_result(query, parameters)
def fetch_from_1(self, query):
query, parameters = self.compileQuery(query, literal_binds=True)
return self.executor1.execute_prepared_statement(query, parameters)
def fetch_from_2(self, query):
query, parameters = self.compileQuery(query, literal_binds=True)
return self.executor2.execute_prepared_statement(query, parameters)
class BaseTokenizationWithBinaryPostgreSQL(BaseTokenization, BaseBinaryPostgreSQLTestCase):
"""Verify tokenization with PostgreSQL extended protocol (binary format)."""
FORMAT = AsyncpgExecutor.BinaryFormat
def checkSkip(self):
if not TEST_POSTGRESQL:
self.skipTest("Only for postgresql")
super().checkSkip()
def insert_via_1(self, query, values):
query, parameters = self.compileQuery(query, values)
self.executor1.execute_prepared_statement(query, parameters)
def insert_via_1_bulk(self, query, values):
"""Execute SQLAlchemy Bulk INSERT query via AcraServer with "TEST_TLS_CLIENT_CERT"."""
query, parameters = self.compileBulkInsertQuery(query.values(values), values)
return self.executor1.execute_prepared_statement(query, parameters)
def fetch_from_1(self, query):
query, parameters = self.compileQuery(query, literal_binds=True)
return self.executor1.execute_prepared_statement(query, parameters)
def fetch_from_2(self, query):
query, parameters = self.compileQuery(query, literal_binds=True)
return self.executor2.execute_prepared_statement(query, parameters)
class BaseTokenizationWithTextPostgreSQL(BaseTokenizationWithBinaryPostgreSQL):
"""Verify tokenization with PostgreSQL extended protocol (text format)."""
FORMAT = AsyncpgExecutor.TextFormat
# TODO(ilammy, 2020-10-19): test binary formats
# We need to skip this test only for MySQL but perform it for PostgreSQL.
# This is already done by BaseBinaryPostgreSQLTestCase, but the parent
# overrides checkSkip(). When parent's override is removed, this one
# becomes unnecessary and should be removed too.
def checkSkip(self):
BaseBinaryPostgreSQLTestCase.checkSkip(self)
class BaseTokenizationWithBinaryMySQL(BaseTokenization):
"""Verify tokenization with MySQL binary protocol."""
def checkSkip(self):
if not TEST_MYSQL:
self.skipTest('this test is only for MySQL')
elif not TEST_WITH_TLS:
self.skipTest("running tests only with TLS")
def fetch_from_1(self, query):
return self.execute(query, TEST_TLS_CLIENT_KEY, TEST_TLS_CLIENT_CERT)
def fetch_from_2(self, query):
return self.execute(query, TEST_TLS_CLIENT_2_KEY, TEST_TLS_CLIENT_2_CERT)
def execute(self, query, ssl_key, ssl_cert):
# We need a rendered SQL query here. It will be converted into
# a prepared statement (without arguments) to use MySQL binary
# protocol on the wire.
query = query.compile(compile_kwargs={"literal_binds": True}).string
args = ConnectionArgs(
host=get_db_host(), port=self.ACRASERVER_PORT, dbname=DB_NAME,
user=DB_USER, password=DB_USER_PASSWORD,
ssl_ca=TEST_TLS_CA,
ssl_key=ssl_key,
ssl_cert=ssl_cert)
result = MysqlExecutor(args).execute_prepared_statement(query)
# For some weird reason MySQL connector in prepared statement mode
# does not decode TEXT columns into Python strings. In text mode
# it tries to decode the bytes and returns strings if they decode.
# Do the same here.
for row in result:
for column, value in row.items():
if isinstance(value, (bytes, bytearray)):
try:
row[column] = value.decode('utf8')
except (LookupError, UnicodeDecodeError):
pass
return result
class TestTokenizationWithoutZone(BaseTokenization):
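    """Tokenization bound to client ids (no zones): the data owner gets the
    original values back while other client ids see tokenized values."""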
ZONE = False
def testTokenizationDefaultClientID(self):
default_client_id_table = sa.Table(
'test_tokenization_default_client_id', metadata,
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('nullable_column', sa.Text, nullable=True),
sa.Column('empty', sa.LargeBinary(length=COLUMN_DATA_SIZE), nullable=False, default=b''),
sa.Column('token_i32', sa.Integer()),
sa.Column('token_i64', sa.BigInteger()),
sa.Column('token_str', sa.Text),
sa.Column('token_bytes', sa.LargeBinary(length=COLUMN_DATA_SIZE), nullable=False, default=b''),
sa.Column('token_email', sa.Text),
extend_existing=True,
)
metadata.create_all(self.engine_raw, [default_client_id_table])
self.engine1.execute(default_client_id_table.delete())
data = {
'id': 1,
'nullable_column': None,
'empty': b'',
'token_i32': random_int32(),
'token_i64': random_int64(),
'token_str': random_str(),
'token_bytes': random_bytes(),
'token_email': random_email(),
}
        # insert data
        self.insert_via_1(default_client_id_table.insert(), data)
        # expect that data was encrypted with the client_id that was used to insert (client_id==keypair1)
source_data = self.fetch_from_1(
sa.select([default_client_id_table])
.where(default_client_id_table.c.id == data['id']))
hidden_data = self.fetch_from_2(
sa.select([default_client_id_table])
.where(default_client_id_table.c.id == data['id']))
        if not (len(source_data) == len(hidden_data) == 1):
            self.fail('incorrect len of result data')
        # the data owner gets the source data back
for k in ('token_i32', 'token_i64', 'token_str', 'token_bytes', 'token_email'):
if isinstance(source_data[0][k], bytearray) and isinstance(data[k], str):
self.assertEqual(source_data[0][k], bytearray(data[k], encoding='utf-8'))
else:
self.assertEqual(source_data[0][k], data[k])
self.assertNotEqual(hidden_data[0][k], data[k])
def testTokenizationDefaultClientIDWithBulkInsert(self):
default_client_id_table = sa.Table(
'test_tokenization_default_client_id', metadata,
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('nullable_column', sa.Text, nullable=True),
sa.Column('empty', sa.LargeBinary(length=COLUMN_DATA_SIZE), nullable=False, default=b''),
sa.Column('token_i32', sa.Integer()),
sa.Column('token_i64', sa.BigInteger()),
sa.Column('token_str', sa.Text),
sa.Column('token_bytes', sa.LargeBinary(length=COLUMN_DATA_SIZE), nullable=False, default=b''),
sa.Column('token_email', sa.Text),
extend_existing=True,
)
metadata.create_all(self.engine_raw, [default_client_id_table])
self.engine1.execute(default_client_id_table.delete())
values = []
for idx in range(5):
insert_data = {
'id': 1 + idx,
'nullable_column': None,
'empty': b'',
'token_i32': random_int32(),
'token_i64': random_int64(),
'token_str': random_str(),
'token_bytes': random_bytes(),
'token_email': random_email(),
}
values.append(insert_data)
# bulk insert data
self.insert_via_1_bulk(default_client_id_table.insert(), values)
        # expect that data was encrypted with the client_id that was used to insert (client_id==TEST_TLS_CLIENT_CERT)
source_data = self.fetch_from_1(sa.select([default_client_id_table]))
hidden_data = self.fetch_from_2(sa.select([default_client_id_table]))
if len(source_data) != len(hidden_data):
self.fail('incorrect len of result data')
for idx in range(len(source_data)):
            # the data owner gets the source data back
for k in ('token_i32', 'token_i64', 'token_str', 'token_bytes', 'token_email'):
if isinstance(source_data[idx][k], bytearray) and isinstance(values[idx][k], str):
self.assertEqual(source_data[idx][k], bytearray(values[idx][k], encoding='utf-8'))
else:
self.assertEqual(source_data[idx][k], values[idx][k])
self.assertNotEqual(hidden_data[idx][k], values[idx][k])
def testTokenizationSpecificClientID(self):
specific_client_id_table = sa.Table(
'test_tokenization_specific_client_id', metadata,
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('nullable_column', sa.Text, nullable=True),
sa.Column('empty', sa.LargeBinary(length=COLUMN_DATA_SIZE), nullable=False, default=b''),
sa.Column('token_i32', sa.Integer()),
sa.Column('token_i64', sa.BigInteger()),
sa.Column('token_str', sa.Text),
sa.Column('token_bytes', sa.LargeBinary(length=COLUMN_DATA_SIZE), nullable=False, default=b''),
sa.Column('token_email', sa.Text),
extend_existing=True,
)
data = {
'id': 1,
'nullable_column': None,
'empty': b'',
'token_i32': random_int32(),
'token_i64': random_int64(),
'token_str': random_str(),
'token_bytes': random_bytes(),
'token_email': random_email(),
}
metadata.create_all(self.engine_raw, [specific_client_id_table])
self.engine1.execute(specific_client_id_table.delete())
        # insert data using client_id==TEST_TLS_CLIENT_CERT
        self.insert_via_1(specific_client_id_table.insert(), data)
        # expect that source data is returned for client_id==TEST_TLS_CLIENT_2_CERT, which is defined in the config
source_data = self.fetch_from_2(
sa.select([specific_client_id_table])
.where(specific_client_id_table.c.id == data['id']))
hidden_data = self.fetch_from_1(
sa.select([specific_client_id_table])
.where(specific_client_id_table.c.id == data['id']))
        if not (len(source_data) == len(hidden_data) == 1):
            self.fail('incorrect len of result data')
        # the data owner gets the source data back
for k in ('token_i32', 'token_i64', 'token_str', 'token_bytes', 'token_email'):
if isinstance(source_data[0][k], bytearray) and isinstance(data[k], str):
self.assertEqual(source_data[0][k], bytearray(data[k], encoding='utf-8'))
else:
self.assertEqual(source_data[0][k], data[k])
self.assertNotEqual(hidden_data[0][k], data[k])
def testTokenizationDefaultClientIDStarExpression(self):
default_client_id_table = sa.Table(
'test_tokenization_default_client_id', metadata,
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('nullable_column', sa.Text, nullable=True),
sa.Column('empty', sa.LargeBinary(length=COLUMN_DATA_SIZE), nullable=False, default=b''),
sa.Column('token_i32', sa.Integer()),
sa.Column('token_i64', sa.BigInteger()),
sa.Column('token_str', sa.Text),
sa.Column('token_bytes', sa.LargeBinary(length=COLUMN_DATA_SIZE), nullable=False, default=b''),
sa.Column('token_email', sa.Text),
extend_existing=True,
)
metadata.create_all(self.engine_raw, [default_client_id_table])
self.engine1.execute(default_client_id_table.delete())
data = {
'id': 1,
'nullable_column': None,
'empty': b'',
'token_i32': random_int32(),
'token_i64': random_int64(),
'token_str': random_str(),
'token_bytes': random_bytes(),
'token_email': random_email(),
}
        # insert data
        self.insert_via_1(default_client_id_table.insert(), data)
        # expect that data was encrypted with the client_id that was used to insert (client_id==TEST_TLS_CLIENT_CERT)
source_data = self.fetch_from_1(
sa.select(['*'], from_obj=default_client_id_table)
.where(default_client_id_table.c.id == data['id']))
hidden_data = self.fetch_from_2(
sa.select(['*'], from_obj=default_client_id_table)
.where(default_client_id_table.c.id == data['id']))
        if not (len(source_data) == len(hidden_data) == 1):
            self.fail('incorrect len of result data')
        # the data owner gets the source data back
for k in ('token_i32', 'token_i64', 'token_str', 'token_bytes', 'token_email'):
            # binary data is returned as memoryview objects
if isinstance(source_data[0][k], bytearray) and isinstance(data[k], str):
self.assertEqual(utils.memoryview_to_bytes(source_data[0][k]), bytearray(data[k], encoding='utf-8'))
else:
self.assertEqual(utils.memoryview_to_bytes(source_data[0][k]), data[k])
self.assertNotEqual(utils.memoryview_to_bytes(hidden_data[0][k]), data[k])
class TestReturningProcessingMixing:
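    """Shared checks for INSERT ... RETURNING processing: tokenized columns
    returned via RETURNING must be detokenized only for the data owner."""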
ZONE = False
specific_client_id_table = sa.Table(
'test_tokenization_specific_client_id', metadata,
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('nullable_column', sa.Text, nullable=True),
sa.Column('empty', sa.LargeBinary(length=COLUMN_DATA_SIZE), nullable=False, default=b''),
sa.Column('token_i32', sa.Integer()),
sa.Column('token_i64', sa.BigInteger()),
sa.Column('token_str', sa.Text),
sa.Column('token_bytes', sa.LargeBinary(length=COLUMN_DATA_SIZE), nullable=True, default=b''),
sa.Column('token_email', sa.Text),
extend_existing=True,
)
def insert_with_enum_and_return_data(self):
raise NotImplementedError
def insert_with_star_and_return_data(self):
raise NotImplementedError
def test_insert_returning_with_col_enum(self):
source, hidden, data = self.insert_with_enum_and_return_data()
self.assertEqual(source[1], data['token_str'])
self.assertEqual(source[2], data['token_i64'])
self.assertEqual(source[3], data['token_email'])
self.assertEqual(source[4], data['token_i32'])
self.assertNotEqual(hidden[1], data['token_str'])
self.assertNotEqual(hidden[2], data['token_i64'])
self.assertNotEqual(hidden[3], data['token_email'])
self.assertNotEqual(hidden[4], data['token_i32'])
def test_insert_returning_with_star(self):
source, hidden, data = self.insert_with_star_and_return_data()
self.assertEqual(source[3], data['token_i32'])
self.assertEqual(source[4], data['token_i64'])
self.assertEqual(source[5], data['token_str'])
self.assertEqual(source[7], data['token_email'])
self.assertNotEqual(hidden[3], data['token_i32'])
self.assertNotEqual(hidden[4], data['token_i64'])
self.assertNotEqual(hidden[5], data['token_str'])
self.assertNotEqual(hidden[7], data['token_email'])
class TestReturningProcessingMariaDB(TestReturningProcessingMixing, BaseTokenization):
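    """RETURNING processing checks for MariaDB using raw SQL queries."""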
data = {
'nullable_column': None,
'empty': b'',
'token_i32': random_int32(),
'token_i64': random_int64(),
'token_str': random_str(),
'token_bytes': random_bytes(),
'token_email': random_email(),
}
def checkSkip(self):
if not TEST_MARIADB or TEST_WITH_TLS:
self.skipTest("Only for MariaDB")
super().checkSkip()
def build_raw_query_with_enum(self):
id = get_random_id()
# TODO(zhars, 2021-5-20): rewrite query when sqlalchemy will support RETURNING statements
return 'INSERT INTO test_tokenization_specific_client_id ' \
'(id, empty, token_bytes, token_i32, token_i64, token_str, token_email) ' \
'VALUES ({}, {}, {}, {}, {}, \'{}\', \'{}\') ' \
'RETURNING test_tokenization_specific_client_id.id, test_tokenization_specific_client_id.token_str,' \
' test_tokenization_specific_client_id.token_i64, test_tokenization_specific_client_id.token_email, ' \
'test_tokenization_specific_client_id.token_i32'.format(id, self.data['empty'], self.data['empty'], self.data['token_i32'], self.data['token_i64'], self.data['token_str'], self.data['token_email'])
def build_raw_query_with_star(self):
id = get_random_id()
# TODO(zhars, 2021-5-20): rewrite query when sqlalchemy will support RETURNING statements
return 'INSERT INTO test_tokenization_specific_client_id ' \
'(id, empty, token_bytes, token_i32, token_i64, token_str, token_email) ' \
'VALUES ({}, {}, {}, {}, {}, \'{}\', \'{}\') ' \
'RETURNING *'.format(id, self.data['empty'], self.data['empty'], self.data['token_i32'], self.data['token_i64'], self.data['token_str'], self.data['token_email'])
def insert_with_enum_and_return_data(self):
metadata.create_all(self.engine_raw, [self.specific_client_id_table])
        self.fetch_from_2(sa.select([self.specific_client_id_table]).where(self.specific_client_id_table.c.id == get_random_id()))
source = self.fetch_from_2(self.build_raw_query_with_enum())[0]
hidden = self.fetch_from_1(self.build_raw_query_with_enum())[0]
return source, hidden, self.data
def insert_with_star_and_return_data(self):
metadata.create_all(self.engine_raw, [self.specific_client_id_table])
        self.fetch_from_2(sa.select([self.specific_client_id_table]).where(self.specific_client_id_table.c.id == get_random_id()))
source = self.fetch_from_2(self.build_raw_query_with_star())[0]
hidden = self.fetch_from_1(self.build_raw_query_with_star())[0]
return source, hidden, self.data
class TestReturningProcessingPostgreSQL(TestReturningProcessingMixing, BaseTokenization):
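    """RETURNING processing checks for PostgreSQL using SQLAlchemy returning()."""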
data = {
'nullable_column': None,
'empty': b'',
'token_i32': random_int32(),
'token_i64': random_int64(),
'token_str': random_str(),
'token_bytes': random_bytes(),
'token_email': random_email(),
}
def checkSkip(self):
if not TEST_POSTGRESQL:
self.skipTest("Only for PostgreSQL")
super().checkSkip()
def build_raw_query_with_enum(self):
self.data['id'] = get_random_id()
return self.specific_client_id_table.insert(). \
returning(self.specific_client_id_table.c.id, self.specific_client_id_table.c.token_str, self.specific_client_id_table.c.token_i64,
self.specific_client_id_table.c.token_email, self.specific_client_id_table.c.token_i32), self.data
def build_raw_query_with_star(self):
self.data['id'] = get_random_id()
return self.specific_client_id_table.insert().returning(sa.literal_column('*')), self.data
def insert_with_enum_and_return_data(self):
metadata.create_all(self.engine_raw, [self.specific_client_id_table])
self.fetch_from_2(sa.select([self.specific_client_id_table]).where(self.specific_client_id_table.c.id == get_random_id()))
source_query, source_data = self.build_raw_query_with_enum()
source = self.engine2.execute(source_query, source_data).fetchone()
hidden_query, hidden_data = self.build_raw_query_with_enum()
hidden = self.engine1.execute(hidden_query, hidden_data).fetchone()
return source, hidden, self.data
def insert_with_star_and_return_data(self):
metadata.create_all(self.engine_raw, [self.specific_client_id_table])
self.fetch_from_2(sa.select([self.specific_client_id_table]).where(self.specific_client_id_table.c.id == get_random_id()))
source_query, data = self.build_raw_query_with_star()
source = self.engine2.execute(source_query, data).fetchone()
hidden_query, data = self.build_raw_query_with_star()
hidden = self.engine1.execute(hidden_query, data).fetchone()
return source, hidden, self.data
class TestTokenizationWithZone(BaseTokenization):
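    """Tokenization bound to zone ids: values are detokenized only when the
    correct zone id precedes them in the query result."""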
ZONE = True
def testTokenizationSpecificZoneID(self):
specific_zone_id_table = sa.Table(
'test_tokenization_specific_zone_id', metadata,
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('zone_id', sa.LargeBinary(length=COLUMN_DATA_SIZE)),
sa.Column('nullable_column', sa.Text, nullable=True),
sa.Column('empty', sa.LargeBinary(length=COLUMN_DATA_SIZE), nullable=False, default=b''),
sa.Column('token_i32', sa.Integer()),
sa.Column('token_i64', sa.BigInteger()),
sa.Column('token_str', sa.Text),
sa.Column('token_bytes', sa.LargeBinary(length=COLUMN_DATA_SIZE), nullable=False, default=b''),
sa.Column('token_email', sa.Text),
extend_existing=True,
)
metadata.create_all(self.engine_raw, [specific_zone_id_table])
self.engine1.execute(specific_zone_id_table.delete())
zone_id = zones[0][ZONE_ID]
data = {
'id': 1,
'nullable_column': None,
'empty': b'',
'zone_id': zone_id.encode('ascii'),
'token_i32': random_int32(),
'token_i64': random_int64(),
'token_str': random_str(),
'token_bytes': random_bytes(),
'token_email': random_email(),
}
        # insert data using client_id==TEST_TLS_CLIENT_CERT
        self.insert_via_1(specific_zone_id_table.insert(), data)
        # expect that source data will be returned from acra-servers for any client_id when the correct zone id is used
source_data = self.fetch_from_2(
sa.select([specific_zone_id_table])
.where(specific_zone_id_table.c.id == data['id']))
hidden_data = self.fetch_from_1(
sa.select([specific_zone_id_table])
.where(specific_zone_id_table.c.id == data['id']))
        if not (len(source_data) == len(hidden_data) == 1):
            self.fail('incorrect len of result data')
token_fields = ('token_i32', 'token_i64', 'token_str', 'token_bytes', 'token_email')
        # the data owner gets the source data back
for k in token_fields:
if isinstance(source_data[0][k], bytearray) and isinstance(data[k], str):
self.assertEqual(source_data[0][k], bytearray(data[k], encoding='utf-8'))
self.assertEqual(hidden_data[0][k], bytearray(data[k], encoding='utf-8'))
else:
self.assertEqual(source_data[0][k], data[k])
self.assertEqual(hidden_data[0][k], data[k])
        # expect that source data will not be returned from acra-servers for any client_id when an incorrect zone id is used
columns = [sa.cast(zones[1][ZONE_ID].encode('ascii'), BYTEA)]
# all columns except zone id
columns.extend([i for i in list(specific_zone_id_table.c) if i.name != 'zone_id'])
source_data = self.engine2.execute(
sa.select(columns)
.where(specific_zone_id_table.c.id == data['id']))
source_data = source_data.fetchall()
for i in token_fields:
self.assertNotEqual(source_data[0][i], data[i])
def testTokenizationSpecificZoneIDStarExpression(self):
specific_zone_id_table = sa.Table(
'test_tokenization_specific_zone_id_star_expression', metadata,
sa.Column('id', sa.Integer, primary_key=True),
# don't store zoneID in table
#sa.Column('zone_id', sa.LargeBinary(length=COLUMN_DATA_SIZE)),
sa.Column('nullable_column', sa.Text, nullable=True),
sa.Column('empty', sa.LargeBinary(length=COLUMN_DATA_SIZE), nullable=False, default=b''),
sa.Column('token_i32', sa.Integer()),
sa.Column('token_i64', sa.BigInteger()),
sa.Column('token_str', sa.Text),
sa.Column('token_bytes', sa.LargeBinary(length=COLUMN_DATA_SIZE), nullable=False, default=b''),
sa.Column('token_email', sa.Text),
extend_existing=True,
)
metadata.drop_all(self.engine_raw, [specific_zone_id_table])
metadata.create_all(self.engine_raw, [specific_zone_id_table])
self.engine1.execute(specific_zone_id_table.delete())
data = {
'id': 1,
'nullable_column': None,
'empty': b'',
'token_i32': random_int32(),
'token_i64': random_int64(),
'token_str': random_str(),
'token_bytes': random_bytes(),
'token_email': random_email(),
}
        # insert data using client_id==keypair1
self.insert_via_1(specific_zone_id_table.insert(), data)
CORRECT_ZONE, INCORRECT_ZONE = range(2)
        # build select columns with the correct zone id prepended to the data columns
columns = [
sa.literal(zones[CORRECT_ZONE][ZONE_ID]),
            # mysql doesn't support queries like `select 'string', * from table1`, only a qualified StarExpr like `select 'string', t1.* from table1 as t1`
sa.text('{}.*'.format(specific_zone_id_table.name))
]
        # expect that source data will be returned from all acra-servers when the correct zone id is used
source_data = self.fetch_from_2(
sa.select(columns, from_obj=specific_zone_id_table)
.where(specific_zone_id_table.c.id == data['id']))
hidden_data = self.fetch_from_1(
sa.select(columns, from_obj=specific_zone_id_table)
.where(specific_zone_id_table.c.id == data['id']))
        if not (len(source_data) == len(hidden_data) == 1):
            self.fail('incorrect len of result data')
token_fields = ('token_i32', 'token_i64', 'token_str', 'token_bytes', 'token_email')
        # the data owner gets the source data back
for k in token_fields:
if isinstance(source_data[0][k], bytearray) and isinstance(data[k], str):
self.assertEqual(utils.memoryview_to_bytes(source_data[0][k]), bytearray(data[k], encoding='utf-8'))
self.assertEqual(utils.memoryview_to_bytes(hidden_data[0][k]), bytearray(data[k], encoding='utf-8'))
else:
self.assertEqual(utils.memoryview_to_bytes(source_data[0][k]), data[k])
self.assertEqual(utils.memoryview_to_bytes(hidden_data[0][k]), data[k])
        # expect that source data will not be returned from any acra-server when an incorrect zone id is used
columns = [
sa.literal(zones[INCORRECT_ZONE][ZONE_ID]),
sa.text('{}.*'.format(specific_zone_id_table.name))
]
source_data = self.engine2.execute(
sa.select(columns)
.where(specific_zone_id_table.c.id == data['id']))
source_data = source_data.fetchall()
for i in token_fields:
self.assertNotEqual(utils.memoryview_to_bytes(source_data[0][i]), data[i])
class TestTokenizationWithoutZoneWithBoltDB(BaseTokenizationWithBoltDB, TestTokenizationWithoutZone):
pass
class TestTokenizationWithZoneWithBoltDB(BaseTokenizationWithBoltDB, TestTokenizationWithZone):
pass
class TestTokenizationWithoutZoneWithRedis(BaseTokenizationWithRedis, TestTokenizationWithoutZone):
pass
class TestTokenizationWithZoneWithRedis(BaseTokenizationWithRedis, TestTokenizationWithZone):
pass
class TestTokenizationWithoutZoneBinaryMySQL(BaseTokenizationWithBinaryMySQL, TestTokenizationWithoutZone):
pass
class TestTokenizationWithZoneBinaryMySQL(BaseTokenizationWithBinaryMySQL, TestTokenizationWithZone):
pass
class TestTokenizationWithoutZoneTextPostgreSQL(BaseTokenizationWithTextPostgreSQL, TestTokenizationWithoutZone):
pass
class TestTokenizationWithZoneTextPostgreSQL(BaseTokenizationWithTextPostgreSQL, TestTokenizationWithZone):
pass
class TestTokenizationWithoutZoneBinaryPostgreSQL(BaseTokenizationWithBinaryPostgreSQL, TestTokenizationWithoutZone):
pass
class TestTokenizationWithZoneBinaryPostgreSQL(BaseTokenizationWithBinaryPostgreSQL, TestTokenizationWithZone):
pass
class TestTokenizationWithoutZoneBinaryBindMySQL(BaseTokenizationWithBinaryBindMySQL, TestTokenizationWithoutZone):
pass
class TestTokenizationWithZoneBinaryBindMySQL(BaseTokenizationWithBinaryBindMySQL, TestTokenizationWithZone):
pass
class BaseMasking(BaseTokenization):
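    """Base class for masking tests.

    Uses a masking encryptor config and checks that masked columns keep the
    plaintext mask while the rest of the value is encrypted.
    """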
WHOLECELL_MODE = False
ENCRYPTOR_CONFIG = get_encryptor_config('tests/ee_masking_config.yaml')
def check_crypto_envelope(self, table, row_id):
temp_acrastruct = create_acrastruct_with_client_id(b'somedata', TLS_CERT_CLIENT_ID_1)
        # expect that data was encrypted with the client_id of the acra-server that was used to insert it (client_id==TEST_TLS_CLIENT_CERT)
source_data = self.engine_raw.execute(
sa.select([table])
.where(table.c.id == row_id))
source_data = source_data.fetchone()
for i in ('masked_prefix', 'masked_suffix', 'masked_without_plaintext', 'exact_plaintext_length',
'shorter_plaintext'):
            # check that the data does not contain the AcraStruct tag begin
self.assertNotIn(temp_acrastruct[:8], source_data[i])
            # and check that the data contains the AcraBlock tag begin
self.assertIn(temp_acrastruct[:4], source_data[i])
def get_specified_client_id(self):
return TLS_CERT_CLIENT_ID_2
def fork_acra(self, popen_kwargs: dict = None, **acra_kwargs: dict):
prepare_encryptor_config(
client_id=self.get_specified_client_id(), zone_id=zones[0][ZONE_ID], config_path=self.ENCRYPTOR_CONFIG)
acra_kwargs.update(token_db='token1.db',
encryptor_config_file=get_test_encryptor_config(self.ENCRYPTOR_CONFIG))
return super(BaseTokenization, self).fork_acra(popen_kwargs, **acra_kwargs)
def executeInsert(self, query, values):
"""Execute a Bulk Insert query with list of values via AcraServer for "TEST_TLS_CLIENT_CERT"."""
return self.engine1.execute(query.values(values))
def executeBulkInsert(self, query, values):
"""Execute a Bulk Insert query with list of values via AcraServer for "TEST_TLS_CLIENT_CERT"."""
return self.engine1.execute(query.values(values))
def tearDown(self):
super().tearDown()
os.remove('token1.db')
class BaseMaskingBinaryPostgreSQLMixin(BaseBinaryPostgreSQLTestCase, BaseTestCase):
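    """Mixin that runs masking inserts through the PostgreSQL extended protocol executor."""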
def executeInsert(self, query, values):
"""Execute a Insert query with list of values via AcraServer for "TEST_TLS_CLIENT_CERT"."""
query, parameters = self.compileInsertQuery(query, values)
return self.executor1.execute_prepared_statement(query, parameters)
def executeBulkInsert(self, query, values):
"""Execute a Bulk Insert query with list of values via AcraServer for "TEST_TLS_CLIENT_CERT"."""
query, parameters = self.compileBulkInsertQuery(query.values(values), values)
return self.executor1.execute_prepared_statement(query, parameters)
class BaseMaskingBinaryMySQLMixin(BaseBinaryMySQLTestCase, BaseTestCase):
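    """Mixin that runs masking inserts through the MySQL prepared statement executor."""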
def executeInsert(self, query, values):
"""Execute a Insert query with list of values via AcraServer for "TEST_TLS_CLIENT_CERT"."""
query, parameters = self.compileInsertQuery(query, values)
return self.executor1.execute_prepared_statement_no_result(query, parameters)
def executeBulkInsert(self, query, values):
"""Execute a Bulk Insert query with list of values via AcraServer for "TEST_TLS_CLIENT_CERT"."""
query, parameters = self.compileBulkInsertQuery(query.values(values), values)
return self.executor1.execute_prepared_statement_no_result(query, parameters)
class TestMaskingWithoutZone(BaseMasking):
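    """Masking tests bound to client ids (no zones)."""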
def test_masking_default_client_id(self):
default_client_id_table = sa.Table(
'test_masking_default_client_id', metadata,
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('nullable_column', sa.Text, nullable=True),
sa.Column('empty', sa.LargeBinary(length=COLUMN_DATA_SIZE), nullable=False, default=b''),
sa.Column('masked_prefix', sa.LargeBinary(length=COLUMN_DATA_SIZE), nullable=False, default=b''),
sa.Column('masked_suffix', sa.LargeBinary(length=COLUMN_DATA_SIZE), nullable=False, default=b''),
sa.Column('masked_without_plaintext', sa.LargeBinary(length=COLUMN_DATA_SIZE), nullable=False, default=b''),
sa.Column('exact_plaintext_length', sa.LargeBinary(length=COLUMN_DATA_SIZE), nullable=False, default=b''),
sa.Column('shorter_plaintext', sa.LargeBinary(length=COLUMN_DATA_SIZE), nullable=False, default=b''),
extend_existing=True
)
metadata.create_all(self.engine_raw, [default_client_id_table])
self.engine_raw.execute(default_client_id_table.delete())
data = {
'id': 1,
'nullable_column': None,
'empty': b'',
'masked_prefix': random_bytes(9),
'masked_suffix': random_bytes(9),
'masked_without_plaintext': random_bytes(),
'exact_plaintext_length': random_bytes(10),
'shorter_plaintext': random_bytes(9),
}
        # insert data with a different client_id (keypair2) than the one it should be encrypted with (keypair1)
self.executeInsert(default_client_id_table.insert(), data)
self.check_crypto_envelope(default_client_id_table, data['id'])
        # expect that data was encrypted with the client_id of the acra-server that was used to insert it (client_id==TEST_TLS_CLIENT_CERT)
source_data = self.engine1.execute(
sa.select([default_client_id_table])
.where(default_client_id_table.c.id == data['id']))
source_data = source_data.fetchall()
hidden_data = self.engine2.execute(
sa.select([default_client_id_table])
.where(default_client_id_table.c.id == data['id']))
hidden_data = hidden_data.fetchall()
        if not (len(source_data) == len(hidden_data) == 1):
            self.fail('incorrect len of result data')
for i in ('masked_prefix', 'masked_suffix', 'masked_without_plaintext', 'exact_plaintext_length', 'shorter_plaintext'):
self.assertEqual(source_data[0][i], data[i])
hidden_data = hidden_data[0]
mask_pattern = 'xxxx'.encode('ascii')
# check that mask at correct place
self.assertEqual(hidden_data['masked_prefix'][:len(mask_pattern)], mask_pattern)
# check that len of masked value not equal to source data because acrastruct always longer than plaintext
self.assertNotEqual(len(hidden_data['masked_prefix']), len(data['masked_prefix']))
# check that data after mask is not the same as source data
self.assertNotEqual(hidden_data['masked_prefix'][len(mask_pattern):], data)
# check that data after mask is not the same as source data with same offset as mask length
self.assertNotEqual(hidden_data['masked_prefix'][len(mask_pattern):], data['masked_prefix'][len(mask_pattern):])
# check that mask at correct place
self.assertEqual(hidden_data['masked_suffix'][-len(mask_pattern):], mask_pattern)
# check that len of masked value not equal to source data because acrastruct always longer than plaintext
self.assertNotEqual(len(hidden_data['masked_suffix']), len(data['masked_suffix']))
# check that data before mask is not the same as source data
self.assertNotEqual(hidden_data['masked_suffix'][:-len(mask_pattern)], data)
# check that data after mask is not the same as source data with same offset as mask length
self.assertNotEqual(hidden_data['masked_suffix'][:-len(mask_pattern)], data['masked_suffix'][:-len(mask_pattern)])
self.assertEqual(mask_pattern, hidden_data['masked_without_plaintext'])
# if plaintext length > data, then whole data will be encrypted
self.assertEqual(mask_pattern, hidden_data['exact_plaintext_length'])
self.assertEqual(mask_pattern, hidden_data['shorter_plaintext'])
def test_masking_specific_client_id(self):
specific_client_id_table = sa.Table(
'test_masking_specific_client_id', metadata,
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('nullable_column', sa.Text, nullable=True),
sa.Column('empty', sa.LargeBinary(length=COLUMN_DATA_SIZE), nullable=False, default=b''),
sa.Column('masked_prefix', sa.LargeBinary(length=COLUMN_DATA_SIZE), nullable=False, default=b''),
sa.Column('masked_suffix', sa.LargeBinary(length=COLUMN_DATA_SIZE), nullable=False, default=b''),
sa.Column('masked_without_plaintext', sa.LargeBinary(length=COLUMN_DATA_SIZE), nullable=False, default=b''),
sa.Column('exact_plaintext_length', sa.LargeBinary(length=COLUMN_DATA_SIZE), nullable=False, default=b''),
sa.Column('shorter_plaintext', sa.LargeBinary(length=COLUMN_DATA_SIZE), nullable=False, default=b''),
extend_existing=True
)
metadata.create_all(self.engine_raw, [specific_client_id_table])
self.engine_raw.execute(specific_client_id_table.delete())
data = {
'id': 1,
'nullable_column': None,
'empty': b'',
'masked_prefix': random_bytes(9),
'masked_suffix': random_bytes(9),
'masked_without_plaintext': random_bytes(),
'exact_plaintext_length': random_bytes(10),
'shorter_plaintext': random_bytes(9),
}
        # insert data with a different client_id (keypair1) than the one it should be encrypted with (keypair2)
self.executeInsert(specific_client_id_table.insert(), data)
self.check_crypto_envelope(specific_client_id_table, data['id'])
        # expect that data was encrypted with the client_id of the acra-server that was used to insert it (client_id==TEST_TLS_CLIENT_2_CERT)
source_data = self.engine2.execute(
sa.select([specific_client_id_table])
.where(specific_client_id_table.c.id == data['id']))
source_data = source_data.fetchall()
hidden_data = self.engine1.execute(
sa.select([specific_client_id_table])
.where(specific_client_id_table.c.id == data['id']))
hidden_data = hidden_data.fetchall()
        if not (len(source_data) == len(hidden_data) == 1):
            self.fail('incorrect len of result data')
for i in ('masked_prefix', 'masked_suffix', 'masked_without_plaintext', 'exact_plaintext_length', 'shorter_plaintext'):
self.assertEqual(source_data[0][i], data[i])
hidden_data = hidden_data[0]
mask_pattern = 'xxxx'.encode('ascii')
# check that mask at correct place
self.assertEqual(hidden_data['masked_prefix'][:len(mask_pattern)], mask_pattern)
# check that len of masked value not equal to source data because acrastruct always longer than plaintext
self.assertNotEqual(len(hidden_data['masked_prefix']), len(data['masked_prefix']))
# check that data after mask is not the same as source data
self.assertNotEqual(hidden_data['masked_prefix'][len(mask_pattern):], data)
# check that data after mask is not the same as source data with same offset as mask length
self.assertNotEqual(hidden_data['masked_prefix'][len(mask_pattern):], data['masked_prefix'][len(mask_pattern):])
# check that mask at correct place
self.assertEqual(hidden_data['masked_suffix'][-len(mask_pattern):], mask_pattern)
# check that len of masked value not equal to source data because acrastruct always longer than plaintext
self.assertNotEqual(len(hidden_data['masked_suffix']), len(data['masked_suffix']))
# check that data before mask is not the same as source data
self.assertNotEqual(hidden_data['masked_suffix'][:-len(mask_pattern)], data)
# check that data after mask is not the same as source data with same offset as mask length
self.assertNotEqual(hidden_data['masked_suffix'][:-len(mask_pattern)], data['masked_suffix'][:-len(mask_pattern)])
self.assertEqual(mask_pattern, hidden_data['masked_without_plaintext'])
# if plaintext length > data, then whole data will be encrypted
self.assertEqual(mask_pattern, hidden_data['exact_plaintext_length'])
self.assertEqual(mask_pattern, hidden_data['shorter_plaintext'])
class TestMaskingWithZonePerValue(BaseMasking):
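    """Masking tests where a zone id literal is passed per value in the select clause."""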
ZONE = True
def test_masking_specific_zone_id_bulk(self):
specific_zone_id_table = sa.Table(
'test_masking_specific_zone_id', metadata,
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('nullable_column', sa.Text, nullable=True),
sa.Column('empty', sa.LargeBinary(length=COLUMN_DATA_SIZE), nullable=False, default=b''),
sa.Column('masked_prefix', sa.LargeBinary(length=COLUMN_DATA_SIZE), nullable=False, default=b''),
sa.Column('masked_suffix', sa.LargeBinary(length=COLUMN_DATA_SIZE), nullable=False, default=b''),
sa.Column('masked_without_plaintext', sa.LargeBinary(length=COLUMN_DATA_SIZE), nullable=False, default=b''),
sa.Column('exact_plaintext_length', sa.LargeBinary(length=COLUMN_DATA_SIZE), nullable=False, default=b''),
sa.Column('shorter_plaintext', sa.LargeBinary(length=COLUMN_DATA_SIZE), nullable=False, default=b''),
extend_existing=True
)
metadata.create_all(self.engine_raw, [specific_zone_id_table])
self.engine_raw.execute(specific_zone_id_table.delete())
values = []
for idx in range(3):
data = {
'id': 1 + idx,
'nullable_column': None,
'empty': b'',
'masked_prefix': random_bytes(9),
'masked_suffix': random_bytes(9),
'masked_without_plaintext': random_bytes(),
'exact_plaintext_length': random_bytes(10),
'shorter_plaintext': random_bytes(9),
}
values.append(data)
        # insert data with a different client_id (keypair1) than the one it should be encrypted with (keypair2)
self.executeBulkInsert(specific_zone_id_table.insert(), values)
columns = []
for i in ('masked_prefix', 'masked_suffix', 'masked_without_plaintext', 'exact_plaintext_length', 'shorter_plaintext'):
# create in loop to generate new objects of literal and avoid removing in select clause by sqlalchemy
correct_zone = sa.literal(zones[0][ZONE_ID])
columns.append(correct_zone)
columns.append(getattr(specific_zone_id_table.c, i))
for value in values:
self.check_crypto_envelope(specific_zone_id_table, value['id'])
# check that using any acra-server with correct zone we fetch decrypted data
for engine in (self.engine1, self.engine2):
                # expect that data was encrypted with the client_id of the acra-server that was used to insert it (client_id==TEST_TLS_CLIENT_2_CERT)
response = engine.execute(
sa.select(columns)
.where(specific_zone_id_table.c.id == value['id']))
source_data = response.fetchall()
if len(source_data) != 1:
self.fail('incorrect len of result data')
for i in ('masked_prefix', 'masked_suffix', 'masked_without_plaintext', 'exact_plaintext_length', 'shorter_plaintext'):
self.assertEqual(source_data[0][i], value[i])
incorrect_zone = sa.literal(zones[1][ZONE_ID])
# check that using any acra-server with incorrect zone we fetch masked data
for engine in (self.engine1, self.engine2):
hidden_data = engine.execute(
sa.select([incorrect_zone, specific_zone_id_table])
.where(specific_zone_id_table.c.id == value['id']))
hidden_data = hidden_data.fetchall()
                if not (len(source_data) == len(hidden_data) == 1):
                    self.fail('incorrect len of result data')
for i in ('masked_prefix', 'masked_suffix', 'masked_without_plaintext', 'exact_plaintext_length', 'shorter_plaintext'):
self.assertEqual(source_data[0][i], value[i])
hidden_data = hidden_data[0]
mask_pattern = 'xxxx'.encode('ascii')
# check that mask at correct place
self.assertEqual(hidden_data['masked_prefix'][:len(mask_pattern)], mask_pattern)
# check that len of masked value not equal to source data because acrastruct always longer than plaintext
self.assertNotEqual(len(hidden_data['masked_prefix']), len(value['masked_prefix']))
# check that data after mask is not the same as source data
self.assertNotEqual(hidden_data['masked_prefix'][len(mask_pattern):], value)
# check that data after mask is not the same as source data with same offset as mask length
self.assertNotEqual(hidden_data['masked_prefix'][len(mask_pattern):], value['masked_prefix'][len(mask_pattern):])
# check that mask at correct place
self.assertEqual(hidden_data['masked_suffix'][-len(mask_pattern):], mask_pattern)
# check that len of masked value not equal to source data because acrastruct always longer than plaintext
self.assertNotEqual(len(hidden_data['masked_suffix']), len(value['masked_suffix']))
# check that data before mask is not the same as source data
self.assertNotEqual(hidden_data['masked_suffix'][:-len(mask_pattern)], value)
# check that data after mask is not the same as source data with same offset as mask length
self.assertNotEqual(hidden_data['masked_suffix'][:-len(mask_pattern)], value['masked_suffix'][:-len(mask_pattern)])
self.assertEqual(mask_pattern, hidden_data['masked_without_plaintext'])
# if plaintext length > data, then whole data will be encrypted
self.assertEqual(mask_pattern, hidden_data['exact_plaintext_length'])
self.assertEqual(mask_pattern, hidden_data['shorter_plaintext'])
def test_masking_specific_zone_id(self):
specific_zone_id_table = sa.Table(
'test_masking_specific_zone_id', metadata,
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('nullable_column', sa.Text, nullable=True),
sa.Column('empty', sa.LargeBinary(length=COLUMN_DATA_SIZE), nullable=False, default=b''),
sa.Column('masked_prefix', sa.LargeBinary(length=COLUMN_DATA_SIZE), nullable=False, default=b''),
sa.Column('masked_suffix', sa.LargeBinary(length=COLUMN_DATA_SIZE), nullable=False, default=b''),
sa.Column('masked_without_plaintext', sa.LargeBinary(length=COLUMN_DATA_SIZE), nullable=False, default=b''),
sa.Column('exact_plaintext_length', sa.LargeBinary(length=COLUMN_DATA_SIZE), nullable=False, default=b''),
sa.Column('shorter_plaintext', sa.LargeBinary(length=COLUMN_DATA_SIZE), nullable=False, default=b''),
extend_existing=True
)
metadata.create_all(self.engine_raw, [specific_zone_id_table])
self.engine_raw.execute(specific_zone_id_table.delete())
data = {
'id': 1,
'nullable_column': None,
'empty': b'',
'masked_prefix': random_bytes(9),
'masked_suffix': random_bytes(9),
'masked_without_plaintext': random_bytes(),
'exact_plaintext_length': random_bytes(10),
'shorter_plaintext': random_bytes(9),
}
        # insert data with a different client_id (keypair1) than the one it should be encrypted with (keypair2)
self.engine1.execute(specific_zone_id_table.insert(values=data))
self.check_crypto_envelope(specific_zone_id_table, data['id'])
columns = []
for i in ('masked_prefix', 'masked_suffix', 'masked_without_plaintext', 'exact_plaintext_length', 'shorter_plaintext'):
# create in loop to generate new objects of literal and avoid removing in select clause by sqlalchemy
correct_zone = sa.literal(zones[0][ZONE_ID])
columns.append(correct_zone)
columns.append(getattr(specific_zone_id_table.c, i))
# check that using any acra-server with correct zone we fetch decrypted data
for engine in (self.engine1, self.engine2):
            # expect that data was encrypted with the client_id of the acra-server that was used to insert it (client_id==TEST_TLS_CLIENT_2_CERT)
response = engine.execute(
sa.select(columns)
.where(specific_zone_id_table.c.id == data['id']))
source_data = response.fetchall()
if len(source_data) != 1:
self.fail('incorrect len of result data')
for i in ('masked_prefix', 'masked_suffix', 'masked_without_plaintext', 'exact_plaintext_length', 'shorter_plaintext'):
self.assertEqual(source_data[0][i], data[i])
incorrect_zone = sa.literal(zones[1][ZONE_ID])
# check that using any acra-server with incorrect zone we fetch masked data
for engine in (self.engine1, self.engine2):
hidden_data = engine.execute(
sa.select([incorrect_zone, specific_zone_id_table])
.where(specific_zone_id_table.c.id == data['id']))
hidden_data = hidden_data.fetchall()
            if not (len(source_data) == len(hidden_data) == 1):
                self.fail('incorrect len of result data')
for i in ('masked_prefix', 'masked_suffix', 'masked_without_plaintext', 'exact_plaintext_length', 'shorter_plaintext'):
self.assertEqual(source_data[0][i], data[i])
hidden_data = hidden_data[0]
mask_pattern = 'xxxx'.encode('ascii')
# check that mask at correct place
self.assertEqual(hidden_data['masked_prefix'][:len(mask_pattern)], mask_pattern)
# check that len of masked value not equal to source data because acrastruct always longer than plaintext
self.assertNotEqual(len(hidden_data['masked_prefix']), len(data['masked_prefix']))
# check that data after mask is not the same as source data
self.assertNotEqual(hidden_data['masked_prefix'][len(mask_pattern):], data)
# check that data after mask is not the same as source data with same offset as mask length
self.assertNotEqual(hidden_data['masked_prefix'][len(mask_pattern):], data['masked_prefix'][len(mask_pattern):])
# check that mask at correct place
self.assertEqual(hidden_data['masked_suffix'][-len(mask_pattern):], mask_pattern)
# check that len of masked value not equal to source data because acrastruct always longer than plaintext
self.assertNotEqual(len(hidden_data['masked_suffix']), len(data['masked_suffix']))
# check that data before mask is not the same as source data
self.assertNotEqual(hidden_data['masked_suffix'][:-len(mask_pattern)], data)
# check that data after mask is not the same as source data with same offset as mask length
self.assertNotEqual(hidden_data['masked_suffix'][:-len(mask_pattern)], data['masked_suffix'][:-len(mask_pattern)])
self.assertEqual(mask_pattern, hidden_data['masked_without_plaintext'])
# if plaintext length > data, then whole data will be encrypted
self.assertEqual(mask_pattern, hidden_data['exact_plaintext_length'])
self.assertEqual(mask_pattern, hidden_data['shorter_plaintext'])
class TestMaskingWithZonePerRow(BaseMasking):
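    """Masking tests where a single zone id per row is used (AcraServer is forked with ZONE_FOR_ROW=on)."""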
ZONE = True
def fork_acra(self, popen_kwargs: dict = None, **acra_kwargs: dict):
if popen_kwargs is None:
popen_kwargs = {}
env = popen_kwargs.get('env', {})
env['ZONE_FOR_ROW'] = 'on'
env.update(os.environ)
popen_kwargs['env'] = env
return super(TestMaskingWithZonePerRow, self).fork_acra(popen_kwargs, **acra_kwargs)
def test_masking_specific_zone_id(self):
specific_zone_id_table = sa.Table(
'test_masking_specific_zone_id', metadata,
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('nullable_column', sa.Text, nullable=True),
sa.Column('empty', sa.LargeBinary(length=COLUMN_DATA_SIZE), nullable=False, default=b''),
sa.Column('masked_prefix', sa.LargeBinary(length=COLUMN_DATA_SIZE), nullable=False, default=b''),
sa.Column('masked_suffix', sa.LargeBinary(length=COLUMN_DATA_SIZE), nullable=False, default=b''),
sa.Column('masked_without_plaintext', sa.LargeBinary(length=COLUMN_DATA_SIZE), nullable=False, default=b''),
sa.Column('exact_plaintext_length', sa.LargeBinary(length=COLUMN_DATA_SIZE), nullable=False, default=b''),
sa.Column('shorter_plaintext', sa.LargeBinary(length=COLUMN_DATA_SIZE), nullable=False, default=b''),
extend_existing=True
)
metadata.create_all(self.engine_raw, [specific_zone_id_table])
self.engine_raw.execute(specific_zone_id_table.delete())
data = {
'id': 1,
'nullable_column': None,
'empty': b'',
'masked_prefix': random_bytes(9),
'masked_suffix': random_bytes(9),
'masked_without_plaintext': random_bytes(),
'exact_plaintext_length': random_bytes(10),
'shorter_plaintext': random_bytes(9),
}
        # insert data with a different client_id (keypair1) than the one it should be encrypted with (keypair2)
self.engine1.execute(specific_zone_id_table.insert(values=data))
self.check_crypto_envelope(specific_zone_id_table, data['id'])
columns = [sa.literal(zones[0][ZONE_ID])]
for i in ('masked_prefix', 'masked_suffix', 'masked_without_plaintext', 'exact_plaintext_length', 'shorter_plaintext'):
# create in loop to generate new objects of literal and avoid removing in select clause by sqlalchemy
columns.append(getattr(specific_zone_id_table.c, i))
# check that using any acra-server with correct zone we fetch decrypted data
for engine in (self.engine1, self.engine2):
            # expect that data was encrypted with the client_id of the acra-server that was used to insert it (client_id==TEST_TLS_CLIENT_2_CERT)
response = engine.execute(
sa.select(columns)
.where(specific_zone_id_table.c.id == data['id']))
source_data = response.fetchall()
if len(source_data) != 1:
self.fail('incorrect len of result data')
for i in ('masked_prefix', 'masked_suffix', 'masked_without_plaintext', 'exact_plaintext_length', 'shorter_plaintext'):
self.assertEqual(source_data[0][i], data[i])
incorrect_zone = sa.literal(zones[1][ZONE_ID])
# check that using any acra-server with incorrect zone we fetch masked data
for engine in (self.engine1, self.engine2):
hidden_data = engine.execute(
sa.select([incorrect_zone, specific_zone_id_table])
.where(specific_zone_id_table.c.id == data['id']))
hidden_data = hidden_data.fetchall()
            if not (len(source_data) == len(hidden_data) == 1):
                self.fail('incorrect len of result data')
for i in ('masked_prefix', 'masked_suffix', 'masked_without_plaintext', 'exact_plaintext_length', 'shorter_plaintext'):
self.assertEqual(source_data[0][i], data[i])
hidden_data = hidden_data[0]
mask_pattern = 'xxxx'.encode('ascii')
# check that mask at correct place
self.assertEqual(hidden_data['masked_prefix'][:len(mask_pattern)], mask_pattern)
# check that len of masked value not equal to source data because acrastruct always longer than plaintext
self.assertNotEqual(len(hidden_data['masked_prefix']), len(data['masked_prefix']))
# check that data after mask is not the same as source data
self.assertNotEqual(hidden_data['masked_prefix'][len(mask_pattern):], data)
# check that data after mask is not the same as source data with same offset as mask length
self.assertNotEqual(hidden_data['masked_prefix'][len(mask_pattern):], data['masked_prefix'][len(mask_pattern):])
# check that mask at correct place
self.assertEqual(hidden_data['masked_suffix'][-len(mask_pattern):], mask_pattern)
# check that len of masked value not equal to source data because acrastruct always longer than plaintext
self.assertNotEqual(len(hidden_data['masked_suffix']), len(data['masked_suffix']))
# check that data before mask is not the same as source data
self.assertNotEqual(hidden_data['masked_suffix'][:-len(mask_pattern)], data)
# check that data after mask is not the same as source data with same offset as mask length
self.assertNotEqual(hidden_data['masked_suffix'][:-len(mask_pattern)], data['masked_suffix'][:-len(mask_pattern)])
self.assertEqual(mask_pattern, hidden_data['masked_without_plaintext'])
# if plaintext length > data, then whole data will be encrypted
self.assertEqual(mask_pattern, hidden_data['exact_plaintext_length'])
self.assertEqual(mask_pattern, hidden_data['shorter_plaintext'])
class BaseAcraBlockMasking:
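    """Masking tests that expect AcraBlock (not AcraStruct) as the crypto envelope."""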
ENCRYPTOR_CONFIG = get_encryptor_config('tests/ee_masking_acrablock_config.yaml')
def check_crypto_envelope(self, table, row_id):
temp_acrastruct = create_acrastruct_with_client_id(b'somedata', TLS_CERT_CLIENT_ID_1)
        # expect that data was encrypted with the client_id of the acra-server that was used to insert it (client_id==TEST_TLS_CLIENT_CERT)
source_data = self.engine_raw.execute(
sa.select([table])
.where(table.c.id == row_id))
source_data = source_data.fetchone()
for i in ('masked_prefix', 'masked_suffix', 'masked_without_plaintext', 'exact_plaintext_length',
'shorter_plaintext'):
            # check that the data does not contain the AcraStruct tag begin
self.assertNotIn(temp_acrastruct[:8], source_data[i])
            # and check that the data contains the AcraBlock tag begin
self.assertIn(temp_acrastruct[:4], source_data[i])
class TestMaskingAcraBlockWithoutZone(BaseAcraBlockMasking, TestMaskingWithoutZone):
pass
class TestMaskingAcraBlockWithoutZoneBinaryMySQL(BaseAcraBlockMasking, BaseMaskingBinaryMySQLMixin, TestMaskingWithoutZone):
pass
class TestMaskingAcraBlockWithoutZoneBinaryPostgreSQL(BaseAcraBlockMasking, BaseMaskingBinaryPostgreSQLMixin, TestMaskingWithoutZone):
pass
class TestMaskingAcraBlockWithoutZoneWithDefaults(BaseAcraBlockMasking, TestMaskingWithoutZone):
ENCRYPTOR_CONFIG = get_encryptor_config('tests/ee_masking_acrablock_with_defaults_config.yaml')
class TestMaskingAcraBlockWithZonePerValue(BaseAcraBlockMasking, TestMaskingWithZonePerValue):
pass
class TestMaskingAcraBlockWithZonePerValueBinaryMySQL(BaseAcraBlockMasking, BaseMaskingBinaryMySQLMixin, TestMaskingWithZonePerValue):
pass
class TestMaskingAcraBlockWithZonePerValueBinaryPostgreSQL(BaseAcraBlockMasking, BaseMaskingBinaryPostgreSQLMixin, TestMaskingWithZonePerValue):
pass
class TestMaskingAcraBlockWithZonePerValueWithDefaults(BaseAcraBlockMasking, TestMaskingWithZonePerValue):
ENCRYPTOR_CONFIG = get_encryptor_config('tests/ee_masking_acrablock_with_defaults_config.yaml')
class TestMaskingAcraBlockWithZonePerRow(BaseAcraBlockMasking, TestMaskingWithZonePerRow):
pass
class TestMaskingWithoutZoneConnectorlessWithTLSByDN(TLSAuthenticationByDistinguishedNameMixin, TLSAuthenticationDirectlyToAcraMixin, TestMaskingWithoutZone):
def get_specified_client_id(self):
return extract_client_id_from_cert(tls_cert=TEST_TLS_CLIENT_2_CERT, extractor=self.get_identifier_extractor_type())
class TestMaskingWithoutZoneConnectorlessWithTLSBySerialNumber(TLSAuthenticationBySerialNumberMixin, TLSAuthenticationDirectlyToAcraMixin, TestMaskingWithoutZone):
def get_specified_client_id(self):
return extract_client_id_from_cert(tls_cert=TEST_TLS_CLIENT_2_CERT, extractor=self.get_identifier_extractor_type())
class TestMaskingWithZonePerValueConnectorlessWithTLSByDN(TLSAuthenticationByDistinguishedNameMixin, TLSAuthenticationDirectlyToAcraMixin, TestMaskingWithZonePerValue):
def get_specified_client_id(self):
return extract_client_id_from_cert(tls_cert=TEST_TLS_CLIENT_2_CERT, extractor=self.get_identifier_extractor_type())
class TestMaskingWithZonePerValueConnectorlessWithTLSBySerialNumber(TLSAuthenticationBySerialNumberMixin, TLSAuthenticationDirectlyToAcraMixin, TestMaskingWithZonePerValue):
def get_specified_client_id(self):
return extract_client_id_from_cert(tls_cert=TEST_TLS_CLIENT_2_CERT, extractor=self.get_identifier_extractor_type())
class TestTransparentEncryptionConnectorlessWithTLSBySerialNumber(TLSAuthenticationBySerialNumberMixin, TestTransparentEncryption, TLSAuthenticationDirectlyToAcraMixin):
pass
class TestTransparentEncryptionConnectorlessWithTLSByDN(TLSAuthenticationByDistinguishedNameMixin, TestTransparentEncryption, TLSAuthenticationDirectlyToAcraMixin):
pass
class TestSearchableTransparentEncryptionConnectorlessWithTLSByDN(TLSAuthenticationByDistinguishedNameMixin, TestSearchableTransparentEncryption, TLSAuthenticationDirectlyToAcraMixin):
pass
class TestSearchableTransparentEncryptionConnectorlessWithTLSBySerialNumber(TLSAuthenticationBySerialNumberMixin, TestSearchableTransparentEncryption, TLSAuthenticationDirectlyToAcraMixin):
pass
class TestSearchableTransparentEncryptionWithZoneConnectorlessWithTLSByDN(TLSAuthenticationByDistinguishedNameMixin, TestTransparentSearchableEncryptionWithZone, TLSAuthenticationDirectlyToAcraMixin):
pass
class TestSearchableTransparentEncryptionWithZoneConnectorlessWithTLSBySerialNumber(TLSAuthenticationBySerialNumberMixin, TestTransparentSearchableEncryptionWithZone, TLSAuthenticationDirectlyToAcraMixin):
pass
class TestTokenizationConnectorlessWithTLSBySerialNumber(TLSAuthenticationBySerialNumberMixin, TLSAuthenticationDirectlyToAcraMixin, TestTokenizationWithoutZone):
def get_specified_client_id(self):
return extract_client_id_from_cert(tls_cert=TEST_TLS_CLIENT_2_CERT, extractor=self.get_identifier_extractor_type())
class TestTokenizationConnectorlessWithTLSByDN(TLSAuthenticationByDistinguishedNameMixin, TLSAuthenticationDirectlyToAcraMixin, TestTokenizationWithoutZone):
def get_specified_client_id(self):
return extract_client_id_from_cert(tls_cert=TEST_TLS_CLIENT_2_CERT, extractor=self.get_identifier_extractor_type())
class TestTokenizationConnectorlessWithZoneWithTLSBySerialNumber(TLSAuthenticationBySerialNumberMixin, TLSAuthenticationDirectlyToAcraMixin, TestTokenizationWithZone):
def get_specified_client_id(self):
return extract_client_id_from_cert(tls_cert=TEST_TLS_CLIENT_2_CERT, extractor=self.get_identifier_extractor_type())
class TestTokenizationConnectorlessWithZoneWithTLSByDN(TLSAuthenticationByDistinguishedNameMixin, TLSAuthenticationDirectlyToAcraMixin, TestTokenizationWithZone):
def get_specified_client_id(self):
return extract_client_id_from_cert(tls_cert=TEST_TLS_CLIENT_2_CERT, extractor=self.get_identifier_extractor_type())
class TestEmptyPreparedStatementQueryPostgresql(BaseTestCase):
def checkSkip(self):
if not TEST_POSTGRESQL:
self.skipTest("Only for postgresql")
super().checkSkip()
def testPassedEmptyQuery(self):
# no matter which connector to use
executor = AsyncpgExecutor(ConnectionArgs(
host=get_db_host(), port=self.ACRASERVER_PORT, dbname=DB_NAME,
user=DB_USER, password=DB_USER_PASSWORD,
format=AsyncpgExecutor.BinaryFormat,
ssl_ca=TEST_TLS_CA,
ssl_key=TEST_TLS_CLIENT_KEY,
ssl_cert=TEST_TLS_CLIENT_CERT
))
result = executor.execute(query='')
self.assertIsNotNone(result)
result = executor.execute_prepared_statement(query='')
self.assertIsNotNone(result)
        # just check that PostgreSQL denies empty queries for the SimpleQuery protocol
executor = Psycopg2Executor(ConnectionArgs(
host=get_db_host(), port=self.ACRASERVER_PORT, dbname=DB_NAME,
user=DB_USER, password=DB_USER_PASSWORD,
ssl_ca=TEST_TLS_CA,
ssl_key=TEST_TLS_CLIENT_KEY,
ssl_cert=TEST_TLS_CLIENT_CERT
))
with self.assertRaises(psycopg2.ProgrammingError) as exc:
executor.execute(query='')
self.assertEqual(exc.exception.args[0].lower(), "can't execute an empty query")
with self.assertRaises(psycopg2.errors.SyntaxError) as exc:
executor.execute_prepared_statement(query='')
self.assertIn('syntax error at end of input', exc.exception.args[0].lower())
class TestEmptyPreparedStatementQueryMysql(BaseTestCase):
def checkSkip(self):
if not TEST_MYSQL:
self.skipTest("Only for mysql")
super().checkSkip()
def testNotPassedEmptyQuery(self):
# no matter which client_id to use
executor = MysqlExecutor(ConnectionArgs(
host=get_db_host(), port=self.ACRASERVER_PORT, dbname=DB_NAME,
user=DB_USER, password=DB_USER_PASSWORD,
ssl_ca=TEST_TLS_CA,
ssl_key=TEST_TLS_CLIENT_KEY,
ssl_cert=TEST_TLS_CLIENT_CERT))
with self.assertRaises(mysql.connector.errors.ProgrammingError) as exc:
executor.execute_prepared_statement(query='')
self.assertEqual(exc.exception.errno, 1065)
self.assertEqual(exc.exception.sqlstate, '42000')
self.assertEqual(exc.exception.msg.lower(), 'query was empty')
class TestKeymakerCertificateKeysFailures(unittest.TestCase):
def testFailureOnUsageClientIDAndCertificate(self):
with tempfile.TemporaryDirectory() as folder:
# by default --client_id=client, so we define only --tls_cert
with self.assertRaises(subprocess.CalledProcessError) as exc:
subprocess.check_output(
[os.path.join(BINARY_OUTPUT_FOLDER, 'acra-keymaker'),
'--keystore={}'.format(KEYSTORE_VERSION),
'--keys_output_dir={}'.format(folder),
'--keys_public_output_dir={}'.format(folder),
'--tls_cert={}'.format(TEST_TLS_CLIENT_CERT)],
env={'ACRA_MASTER_KEY': get_master_key()},
stderr=subprocess.STDOUT)
self.assertIn("You can either specify identifier for keys".lower(), exc.exception.output.decode('utf8').lower())
self.assertEqual(exc.exception.returncode, 1)
def testFailureEmptyExtractorType(self):
with tempfile.TemporaryDirectory() as folder:
with self.assertRaises(subprocess.CalledProcessError) as exc:
subprocess.check_output(
[os.path.join(BINARY_OUTPUT_FOLDER, 'acra-keymaker'),
'--keystore={}'.format(KEYSTORE_VERSION),
'--keys_output_dir={}'.format(folder),
'--keys_public_output_dir={}'.format(folder),
'--client_id=',
'--tls_cert={}'.format(TEST_TLS_CLIENT_CERT),
'--tls_identifier_extractor_type=""'],
env={'ACRA_MASTER_KEY': get_master_key()},
stderr=subprocess.STDOUT)
self.assertIn("invalid identifier extractor type".lower(), exc.exception.output.decode('utf8').lower())
self.assertEqual(exc.exception.returncode, 1)
class BaseKeymakerCertificateKeys:
def testSuccessKeyGeneration(self):
with tempfile.TemporaryDirectory() as folder:
key_id = extract_client_id_from_cert(tls_cert=TEST_TLS_CLIENT_CERT, extractor=self.get_identifier_extractor_type())
            # check that the key does not exist yet
with self.assertRaises(subprocess.CalledProcessError) as exc:
read_storage_private_key(folder, key_id)
self.assertEqual(exc.exception.returncode, 1)
subprocess.check_output(
[os.path.join(BINARY_OUTPUT_FOLDER, 'acra-keymaker'),
'--keystore={}'.format(KEYSTORE_VERSION),
'--keys_output_dir={}'.format(folder),
'--keys_public_output_dir={}'.format(folder),
'--client_id=',
'--tls_cert={}'.format(TEST_TLS_CLIENT_CERT),
'--tls_identifier_extractor_type={}'.format(self.get_identifier_extractor_type())],
env={'ACRA_MASTER_KEY': get_master_key()},
stderr=subprocess.STDOUT)
# check that key exists
self.assertIsNotNone(read_storage_private_key(folder, key_id))
class TestKeymakerCertificateKeysBySerialNumber(TLSAuthenticationBySerialNumberMixin, BaseKeymakerCertificateKeys,
unittest.TestCase):
pass
class TestKeymakerCertificateKeysByDistinguishedName(TLSAuthenticationByDistinguishedNameMixin,
BaseKeymakerCertificateKeys, unittest.TestCase):
pass
class TestTransparentAcraBlockEncryption(TestTransparentEncryption):
WHOLECELL_MODE = False
encryptor_table = sa.Table('test_transparent_acrablock_encryption', metadata,
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('specified_client_id',
sa.LargeBinary(length=COLUMN_DATA_SIZE)),
sa.Column('default_client_id',
sa.LargeBinary(length=COLUMN_DATA_SIZE)),
sa.Column('number', sa.Integer),
sa.Column('zone_id', sa.LargeBinary(length=COLUMN_DATA_SIZE)),
sa.Column('raw_data', sa.LargeBinary(length=COLUMN_DATA_SIZE)),
sa.Column('nullable', sa.Text, nullable=True),
sa.Column('empty', sa.LargeBinary(length=COLUMN_DATA_SIZE), nullable=False, default=b''),
sa.Column('token_i64', sa.BigInteger(), nullable=False, default=1),
sa.Column('token_str', sa.Text, nullable=False, default=''),
sa.Column('token_bytes', sa.LargeBinary(length=COLUMN_DATA_SIZE), nullable=False, default=b''),
sa.Column('masked_prefix', sa.LargeBinary(length=COLUMN_DATA_SIZE), nullable=False, default=b''),
)
ENCRYPTOR_CONFIG = get_encryptor_config('tests/ee_acrablock_config.yaml')
def testAcraStructReEncryption(self):
specified_id = TLS_CERT_CLIENT_ID_1
default_id = TLS_CERT_CLIENT_ID_2
test_data = get_pregenerated_random_data().encode('utf-8')
specified_acrastruct = create_acrastruct_with_client_id(test_data, specified_id)
default_acrastruct = create_acrastruct_with_client_id(test_data, default_id)
row_id = get_random_id()
zone = zones[0]
data = {'specified_client_id': specified_acrastruct,
'default_client_id': default_acrastruct,
'zone_id': test_data,
'id': row_id,
'masked_prefix': get_pregenerated_random_data().encode('ascii'),
'token_bytes': get_pregenerated_random_data().encode('ascii'),
'token_str': get_pregenerated_random_data(),
'token_i64': random.randint(0, 2 ** 32),
}
self.insertRow(data)
raw_data = self.engine_raw.execute(
sa.select([self.encryptor_table.c.specified_client_id,
self.encryptor_table.c.default_client_id,
sa.LargeBinary().bind_expression(zone[ZONE_ID].encode('ascii')),
self.encryptor_table.c.zone_id,
self.encryptor_table.c.masked_prefix,
self.encryptor_table.c.token_bytes,
self.encryptor_table.c.token_str,
self.encryptor_table.c.token_i64])
.where(self.encryptor_table.c.id == row_id))
raw_data = raw_data.fetchone()
self.assertNotEqual(raw_data['specified_client_id'], test_data)
self.assertNotEqual(raw_data['default_client_id'], test_data)
self.assertEqual(raw_data['specified_client_id'][:3], CRYPTO_ENVELOPE_HEADER)
self.assertEqual(raw_data['default_client_id'][:3], CRYPTO_ENVELOPE_HEADER)
self.assertNotEqual(raw_data['zone_id'], test_data)
        # the envelope header is the same regardless of which AcraStruct the first bytes come from
self.assertEqual(raw_data['zone_id'][:3], CRYPTO_ENVELOPE_HEADER)
for i in ('masked_prefix', 'token_bytes', 'token_str', 'token_i64'):
self.assertNotEqual(raw_data[i], data[i])
decrypted_data = self.engine2.execute(
sa.select([self.encryptor_table.c.specified_client_id,
self.encryptor_table.c.default_client_id,
sa.LargeBinary().bind_expression(zone[ZONE_ID].encode('ascii')),
self.encryptor_table.c.zone_id,
self.encryptor_table.c.masked_prefix,
self.encryptor_table.c.token_bytes,
self.encryptor_table.c.token_str,
self.encryptor_table.c.token_i64])
.where(self.encryptor_table.c.id == row_id))
decrypted_data = decrypted_data.fetchone()
self.assertNotEqual(decrypted_data['specified_client_id'], specified_acrastruct)
self.assertEqual(decrypted_data['default_client_id'], test_data)
        # must not be decrypted because zone mode is off
self.assertNotEqual(decrypted_data['zone_id'], test_data)
for i in ('masked_prefix', 'token_bytes', 'token_str', 'token_i64'):
self.assertEqual(decrypted_data[i], data[i])
class TestTransparentAcraBlockEncryptionMissingExtraLog(TestTransparentAcraBlockEncryption):
def fork_acra(self, popen_kwargs: dict=None, **acra_kwargs: dict):
self.log_file = tempfile.NamedTemporaryFile('w+', encoding='utf-8')
acra_kwargs['log_to_file'] = self.log_file.name
acra_kwargs['poison_detect_enable'] = 'true'
return super().fork_acra(popen_kwargs, **acra_kwargs)
def testAcraStructReEncryption(self):
super().testAcraStructReEncryption()
with open(self.log_file.name, 'r') as f:
logs = f.read()
self.assertNotIn('invalid AcraBlock', logs)
self.assertNotIn("Can't decrypt AcraBlock", logs)
def testEncryptedInsert(self):
super().testEncryptedInsert()
with open(self.log_file.name, 'r') as f:
logs = f.read()
self.assertNotIn('invalid AcraBlock', logs)
self.assertNotIn("Can't decrypt AcraBlock", logs)
class TestTransparentAcraBlockEncryptionWithDefaults(TestTransparentAcraBlockEncryption):
ENCRYPTOR_CONFIG = get_encryptor_config('tests/ee_acrablock_config_with_defaults.yaml')
class TestTransparentAcraBlockEncryptionWithZone(TestTransparentAcraBlockEncryption, TestTransparentEncryptionWithZone):
ZONE = True
zone_encryptor_table = sa.Table('test_transparent_acrablock_encryption_with_zone', metadata,
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('specified_client_id',
sa.LargeBinary(length=COLUMN_DATA_SIZE)),
sa.Column('default_client_id',
sa.LargeBinary(length=COLUMN_DATA_SIZE)),
sa.Column('number', sa.Integer),
sa.Column('zone_id', sa.LargeBinary(length=COLUMN_DATA_SIZE)),
sa.Column('raw_data', sa.LargeBinary(length=COLUMN_DATA_SIZE)),
sa.Column('nullable', sa.Text, nullable=True),
sa.Column('empty', sa.LargeBinary(length=COLUMN_DATA_SIZE), nullable=False, default=b''),
sa.Column('token_i64', sa.BigInteger(), nullable=False, default=1),
sa.Column('token_str', sa.Text, nullable=False, default=''),
sa.Column('token_bytes', sa.LargeBinary(length=COLUMN_DATA_SIZE), nullable=False, default=b''),
sa.Column('masked_prefix', sa.LargeBinary(length=COLUMN_DATA_SIZE), nullable=False, default=b''),
)
def testAcraStructReEncryption(self):
specified_id = TLS_CERT_CLIENT_ID_1
default_id = TLS_CERT_CLIENT_ID_2
test_data = get_pregenerated_random_data().encode('utf-8')
specified_acrastruct = create_acrastruct_with_client_id(test_data, specified_id)
default_acrastruct = create_acrastruct_with_client_id(test_data, default_id)
zone = zones[0]
zone_acrastruct = create_acrastruct(test_data, b64decode(zone[ZONE_PUBLIC_KEY]), zone[ZONE_ID].encode('utf-8'))
row_id = get_random_id()
data = {'specified_client_id': specified_acrastruct,
'default_client_id': default_acrastruct,
'zone_id': zone_acrastruct,
'id': row_id,
'masked_prefix': get_pregenerated_random_data().encode('ascii'),
'token_bytes': get_pregenerated_random_data().encode('ascii'),
'token_str': get_pregenerated_random_data(),
'token_i64': random.randint(0, 2 ** 32),
}
self.engine2.execute(self.zone_encryptor_table.insert(), data)
raw_data = self.engine_raw.execute(
sa.select([self.zone_encryptor_table.c.specified_client_id,
self.zone_encryptor_table.c.default_client_id,
sa.literal(zone[ZONE_ID]),
self.zone_encryptor_table.c.zone_id,
self.zone_encryptor_table.c.masked_prefix,
self.zone_encryptor_table.c.token_bytes,
self.zone_encryptor_table.c.token_str,
self.zone_encryptor_table.c.token_i64])
.where(self.zone_encryptor_table.c.id == row_id))
raw_data = raw_data.fetchone()
        # raw values should start with the crypto envelope header (begin tag), not the plaintext
self.assertEqual(raw_data['specified_client_id'][:3], CRYPTO_ENVELOPE_HEADER)
self.assertNotEqual(raw_data['specified_client_id'], test_data)
self.assertEqual(raw_data['default_client_id'][:3], CRYPTO_ENVELOPE_HEADER)
self.assertNotEqual(raw_data['default_client_id'], test_data)
self.assertEqual(raw_data['zone_id'][:3], CRYPTO_ENVELOPE_HEADER)
self.assertNotEqual(raw_data['zone_id'], test_data)
for i in ('masked_prefix', 'token_bytes', 'token_str', 'token_i64'):
self.assertNotEqual(raw_data[i], data[i])
decrypted_data = self.engine2.execute(
sa.select([self.zone_encryptor_table.c.specified_client_id,
self.zone_encryptor_table.c.default_client_id,
sa.literal(zone[ZONE_ID]),
self.zone_encryptor_table.c.zone_id,
self.zone_encryptor_table.c.masked_prefix,
self.zone_encryptor_table.c.token_bytes,
self.zone_encryptor_table.c.token_str,
self.zone_encryptor_table.c.token_i64])
.where(self.zone_encryptor_table.c.id == row_id))
decrypted_data = decrypted_data.fetchone()
self.assertEqual(decrypted_data['specified_client_id'][:3], CRYPTO_ENVELOPE_HEADER)
self.assertEqual(decrypted_data['default_client_id'][:3], CRYPTO_ENVELOPE_HEADER)
self.assertEqual(decrypted_data['zone_id'], test_data)
for i in ('masked_prefix', 'token_bytes', 'token_str', 'token_i64'):
self.assertEqual(decrypted_data[i], data[i])
class TestTransparentAcraBlockEncryptionWithZoneWithDefaults(TestTransparentAcraBlockEncryptionWithZone):
ENCRYPTOR_CONFIG = get_encryptor_config('tests/ee_acrablock_config_with_defaults.yaml')
class TestInvalidCryptoEnvelope(unittest.TestCase):
ENCRYPTOR_CONFIG = get_encryptor_config('tests/ee_encryptor_config.yaml')
def test_invalid_defaults(self):
with open(self.ENCRYPTOR_CONFIG, 'r') as f:
config = yaml.safe_load(f)
if 'defaults' not in config:
config['defaults'] = {
'crypto_envelope': 'invalid',
}
with open(get_test_encryptor_config(self.ENCRYPTOR_CONFIG), 'w') as f:
yaml.dump(config, f)
with self.assertRaises(Exception) as e:
BaseTestCase().fork_acra(encryptor_config_file=get_test_encryptor_config(self.ENCRYPTOR_CONFIG))
self.assertEqual(str(e.exception), WAIT_CONNECTION_ERROR_MESSAGE)
def test_invalid_specified_values(self):
with open(self.ENCRYPTOR_CONFIG, 'r') as f:
config = yaml.safe_load(f)
for table in config['schemas']:
for column in table['encrypted']:
column['crypto_envelope'] = 'invalid'
with open(get_test_encryptor_config(self.ENCRYPTOR_CONFIG), 'w') as f:
yaml.dump(config, f)
with self.assertRaises(Exception) as e:
BaseTestCase().fork_acra(encryptor_config_file=get_test_encryptor_config(self.ENCRYPTOR_CONFIG))
self.assertEqual(str(e.exception), WAIT_CONNECTION_ERROR_MESSAGE)
class TestRegressionInvalidOctalEncoding(BaseTokenizationWithBinaryPostgreSQL):
def testOctalIntegerValue(self):
default_client_id_table = sa.Table(
'test_tokenization_default_client_id', metadata,
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('nullable_column', sa.Text, nullable=True),
sa.Column('empty', sa.LargeBinary(length=COLUMN_DATA_SIZE), nullable=False, default=b''),
sa.Column('token_i32', sa.Integer()),
sa.Column('token_i64', sa.BigInteger()),
sa.Column('token_str', sa.Text),
sa.Column('token_bytes', sa.LargeBinary(length=COLUMN_DATA_SIZE), nullable=False, default=b''),
sa.Column('token_email', sa.Text),
extend_existing=True,
)
metadata.create_all(self.engine_raw, [default_client_id_table])
self.engine1.execute(default_client_id_table.delete())
data = {
'id': 1,
'nullable_column': None,
'empty': b'',
            # \111 is an octal escape that decodes to byte 73 ('I'); 1546727729 == 0x5C313131,
            # whose bytes spell the literal text '\111' and used to break the uint32 conversion
'token_i32': 1546727729,
'token_i64': random_int64(),
'token_str': random_str(),
'token_bytes': random_bytes(),
'token_email': random_email(),
}
#self.engine_raw.execute(default_client_id_table.insert(), data)
        # insert the data
        self.insert_via_1(default_client_id_table.insert(), data)
        # expect that data was encrypted with the client_id used for the insert (client_id == TEST_TLS_CLIENT_CERT)
source_data = self.fetch_from_1(
sa.select([default_client_id_table])
.where(default_client_id_table.c.id == data['id']))
hidden_data = self.fetch_from_2(
sa.select([default_client_id_table])
.where(default_client_id_table.c.id == data['id']))
if len(source_data) != len(hidden_data) != 1:
self.fail('incorrect len of result data')
        # the data owner gets the source (plaintext) data back
for k in ('token_i32', 'token_i64', 'token_str', 'token_bytes', 'token_email'):
if isinstance(source_data[0][k], bytearray) and isinstance(data[k], str):
self.assertEqual(source_data[0][k], bytearray(data[k], encoding='utf-8'))
else:
self.assertEqual(source_data[0][k], data[k])
self.assertNotEqual(hidden_data[0][k], data[k])
if __name__ == '__main__':
import xmlrunner
output_path = os.environ.get('TEST_XMLOUTPUT', '')
if output_path:
with open(output_path, 'wb') as output:
unittest.main(testRunner=xmlrunner.XMLTestRunner(output=output))
else:
unittest.main()
| cossacklabs/acra | tests/test.py | Python | apache-2.0 | 375,193 | 0.003038 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# This file is part of GNUWiNetwork,
# Copyright (C) 2014 by
# Pablo Belzarena, Gabriel Gomez Sena, Victor Gonzalez Barbone,
# Facultad de Ingenieria, Universidad de la Republica, Uruguay.
#
# GNUWiNetwork is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# GNUWiNetwork is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNUWiNetwork. If not, see <http://www.gnu.org/licenses/>.
#
'''PSK modulation transmit / receive block.
'''
import sys
sys.path +=['..']
from gnuradio import digital
import gwnevents.api_events as api_events
import gwnTxRxL1_channel as TxRxLayer1
import gwnblocks.gwnblock as gwn
import math
class ChannelQPSK(gwn.GWNBlock):
'''PSK modulation block.
'''
def __init__(self,noise_voltage=0.01,frequency_offset=0.01,epsilon=1.001,taps=(1+0.5j, ),):
'''Constructor.
'''
super(ChannelQPSK,self).__init__(1, 'GNURadioChannelQPSK', 2, 2,1)
#super(TunTapInterface, self).__init__(1,'TunTapInterface', 2, 2)
self.set_timer(0, False, 3, 10)
self.rx_conn = gwn.gwninport.AQueueConnector()
self.tx_conn = gwn.gwninport.AQueueConnector()
self.rx_queue = self.rx_conn.lsevents
self.tx_queue = self.tx_conn.lsevents
self.set_connection_in(self.rx_conn, 1)
self.set_connection_out(self.tx_conn, 1)
self.tb = TxRxLayer1.gwn_sim_top_block(self.rx_queue,self.tx_queue,noise_voltage,frequency_offset,epsilon,taps,)
self.tb.start() # start flow graph
return
def process_data(self, port_type, port_nr, ev):
'''Process data function for PSK block.
'''
# print " ------------------------------------"
# print ev
# print port_type,port_nr
# print "-------------------------------------"
        # TEST: the timer is enabled only to exercise carrier sensing
if port_type == "intimer":
self.sense_carrier()
if port_type == 'inport' and port_nr == 0:
frame = ev.frmpkt
self.write_out(1, frame) # 1, to GNU radio
elif port_type == 'inport' and port_nr == 1:
frame = ev # ev is a frame received
if not frame:
print 'PSK: an empty frame from L1'
else:
event = api_events.mkevent("DataData")
event.frmpkt = frame
self.write_out(0, event)
return
def set_rx_freq(self, value):
'''Set receive frequency.
'''
        self.tb.set_freq(value)
def set_tx_freq(self, value):
'''Set transmit frequency.
'''
self.tb.set_freq(value)
def sense_carrier(self):
'''Sense carrier function.
'''
print " channel dbs sensed : "
aux = self.tb.hier_rx_0.analog_probe_avg_mag_sqrd_x_0.level()
if aux >0:
print 10*math.log10(aux)
    def stop(self):
        '''PSK block stop function.
        This stop function is required to stop GNU Radio threads. It overrides the generic block stop function: it first stops the locally started flow graph, waits for it to finish, and finally invokes the generic stop function of the super class (generic block).
        '''
        self.tb.stop()           # stop the flow graph started in the constructor
        self.tb.wait()           # wait for it to finish
        print("top block stopped")
        super(ChannelQPSK, self).stop()
class dotdict(dict):
'''dot.notation access to dictionary attributes.
'''
def __getattr__(self, attr):
return self.get(attr)
__setattr__= dict.__setitem__
__delattr__= dict.__delitem__
def main():
    g = ChannelQPSK()
if __name__ == '__main__':
try:
main()
except KeyboardInterrupt:
pass
| vagonbar/GNUnetwork | gwn/blocks/libio/gnuradio/new/gwnChannelqpsk.py | Python | gpl-3.0 | 4,398 | 0.007731 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import routes
from keystone.common import wsgi
from keystone.controllers.token import TokenController
from keystone.controllers.roles import RolesController
from keystone.controllers.staticfiles import StaticFilesController
from keystone.controllers.tenant import TenantController
from keystone.controllers.user import UserController
from keystone.controllers.version import VersionController
from keystone.controllers.extensions import ExtensionsController
import keystone.contrib.extensions.admin as extension
logger = logging.getLogger(__name__) # pylint: disable=C0103
class AdminApi(wsgi.Router):
"""WSGI entry point for admin Keystone API requests."""
def __init__(self):
mapper = routes.Mapper()
# Load extensions first so they can override core if they need to
extension.get_extension_configurer().configure(mapper)
# Token Operations
auth_controller = TokenController()
mapper.connect("/tokens", controller=auth_controller,
action="authenticate",
conditions=dict(method=["POST"]))
mapper.connect("/tokens/{token_id}", controller=auth_controller,
action="validate_token",
conditions=dict(method=["GET"]))
mapper.connect("/tokens/{token_id}", controller=auth_controller,
action="check_token",
conditions=dict(method=["HEAD"]))
        # Do we need this? The API doesn't define a delete-token call.
mapper.connect("/tokens/{token_id}", controller=auth_controller,
action="delete_token",
conditions=dict(method=["DELETE"]))
mapper.connect("/tokens/{token_id}/endpoints",
controller=auth_controller,
action="endpoints",
conditions=dict(method=["GET"]))
# Tenant Operations
tenant_controller = TenantController()
mapper.connect("/tenants", controller=tenant_controller,
action="get_tenants", conditions=dict(method=["GET"]))
mapper.connect("/tenants/{tenant_id}",
controller=tenant_controller,
action="get_tenant", conditions=dict(method=["GET"]))
roles_controller = RolesController()
mapper.connect("/tenants/{tenant_id}/users/{user_id}/roles",
controller=roles_controller, action="get_user_roles",
conditions=dict(method=["GET"]))
# User Operations
user_controller = UserController()
mapper.connect("/users/{user_id}",
controller=user_controller,
action="get_user",
conditions=dict(method=["GET"]))
mapper.connect("/users/{user_id}/roles",
controller=roles_controller, action="get_user_roles",
conditions=dict(method=["GET"]))
# Miscellaneous Operations
version_controller = VersionController()
mapper.connect("/", controller=version_controller,
action="get_version_info", file="admin/version",
conditions=dict(method=["GET"]))
extensions_controller = ExtensionsController()
mapper.connect("/extensions",
controller=extensions_controller,
action="get_extensions_info",
conditions=dict(method=["GET"]))
# Static Files Controller
static_files_controller = StaticFilesController()
mapper.connect("/identityadminguide.pdf",
controller=static_files_controller,
action="get_pdf_contract",
root="content/admin/", pdf="identityadminguide.pdf",
conditions=dict(method=["GET"]))
mapper.connect("/identity-admin.wadl",
controller=static_files_controller,
action="get_wadl_contract",
root="content/admin/", wadl="identity-admin.wadl",
conditions=dict(method=["GET"]))
mapper.connect("/common.ent",
controller=static_files_controller,
action="get_wadl_contract",
root="content/common/", wadl="common.ent",
conditions=dict(method=["GET"]))
mapper.connect("/xsd/{xsd}",
controller=static_files_controller,
action="get_xsd_contract",
root="content/common/",
conditions=dict(method=["GET"]))
mapper.connect("/xsd/atom/{xsd}",
controller=static_files_controller,
action="get_xsd_atom_contract",
root="content/common/",
conditions=dict(method=["GET"]))
mapper.connect("/xslt/{file:.*}",
controller=static_files_controller,
action="get_static_file",
root="content/common/", path="xslt/",
mimetype="application/xml",
conditions=dict(method=["GET"]))
mapper.connect("/js/{file:.*}",
controller=static_files_controller,
action="get_static_file",
root="content/common/", path="js/",
mimetype="application/javascript",
conditions=dict(method=["GET"]))
mapper.connect("/style/{file:.*}",
controller=static_files_controller,
action="get_static_file",
root="content/common/", path="style/",
mimetype="application/css",
conditions=dict(method=["GET"]))
mapper.connect("/samples/{file:.*}",
controller=static_files_controller,
action="get_static_file",
root="content/common/", path="samples/",
conditions=dict(method=["GET"]))
super(AdminApi, self).__init__(mapper)
| admiyo/keystone | keystone/routers/admin.py | Python | apache-2.0 | 6,755 | 0.009326 |
# Copyright 2016 Mycroft AI, Inc.
#
# This file is part of Mycroft Core.
#
# Mycroft Core is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Mycroft Core is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Mycroft Core. If not, see <http://www.gnu.org/licenses/>.
from os.path import dirname
from adapt.intent import IntentBuilder
from mycroft.messagebus.message import Message
from mycroft.skills.core import MycroftSkill
__author__ = 'seanfitz'
class NapTimeSkill(MycroftSkill):
def __init__(self):
super(NapTimeSkill, self).__init__(name="NapTimeSkill")
def initialize(self):
self.load_data_files(dirname(__file__))
naptime_intent = IntentBuilder("NapTimeIntent").require(
"SleepCommand").build()
self.register_intent(naptime_intent, self.handle_intent)
def handle_intent(self, message):
self.emitter.emit(Message('recognizer_loop:sleep'))
self.speak_dialog("sleep")
def stop(self):
pass
def create_skill():
return NapTimeSkill()
| ethanaward/mycroft-core | mycroft/skills/naptime/__init__.py | Python | gpl-3.0 | 1,477 | 0 |
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for ssd_mobilenet_v1_fpn_feature_extractor.
By using parameterized test decorator, this test serves for both Slim-based and
Keras-based Mobilenet V1 FPN feature extractors in SSD.
"""
from absl.testing import parameterized
import numpy as np
import tensorflow as tf
from tensorflow.contrib import slim as contrib_slim
from object_detection.models import ssd_feature_extractor_test
from object_detection.models import ssd_mobilenet_v1_fpn_feature_extractor
from object_detection.models import ssd_mobilenet_v1_fpn_keras_feature_extractor
slim = contrib_slim
@parameterized.parameters(
{'use_keras': False},
{'use_keras': True},
)
class SsdMobilenetV1FpnFeatureExtractorTest(
ssd_feature_extractor_test.SsdFeatureExtractorTestBase):
def _create_feature_extractor(self, depth_multiplier, pad_to_multiple,
is_training=True, use_explicit_padding=False,
use_keras=False):
"""Constructs a new feature extractor.
Args:
depth_multiplier: float depth multiplier for feature extractor
pad_to_multiple: the nearest multiple to zero pad the input height and
width dimensions to.
is_training: whether the network is in training mode.
use_explicit_padding: Use 'VALID' padding for convolutions, but prepad
inputs so that the output dimensions are the same as if 'SAME' padding
were used.
use_keras: if True builds a keras-based feature extractor, if False builds
a slim-based one.
Returns:
an ssd_meta_arch.SSDFeatureExtractor object.
"""
min_depth = 32
if use_keras:
return (ssd_mobilenet_v1_fpn_keras_feature_extractor.
SSDMobileNetV1FpnKerasFeatureExtractor(
is_training=is_training,
depth_multiplier=depth_multiplier,
min_depth=min_depth,
pad_to_multiple=pad_to_multiple,
conv_hyperparams=self._build_conv_hyperparams(
add_batch_norm=False),
freeze_batchnorm=False,
inplace_batchnorm_update=False,
use_explicit_padding=use_explicit_padding,
use_depthwise=True,
name='MobilenetV1_FPN'))
else:
return (ssd_mobilenet_v1_fpn_feature_extractor.
SSDMobileNetV1FpnFeatureExtractor(
is_training,
depth_multiplier,
min_depth,
pad_to_multiple,
self.conv_hyperparams_fn,
use_depthwise=True,
use_explicit_padding=use_explicit_padding))
def test_extract_features_returns_correct_shapes_256(self, use_keras):
image_height = 256
image_width = 256
depth_multiplier = 1.0
pad_to_multiple = 1
expected_feature_map_shape = [(2, 32, 32, 256), (2, 16, 16, 256),
(2, 8, 8, 256), (2, 4, 4, 256),
(2, 2, 2, 256)]
self.check_extract_features_returns_correct_shape(
2, image_height, image_width, depth_multiplier, pad_to_multiple,
expected_feature_map_shape, use_explicit_padding=False,
use_keras=use_keras)
self.check_extract_features_returns_correct_shape(
2, image_height, image_width, depth_multiplier, pad_to_multiple,
expected_feature_map_shape, use_explicit_padding=True,
use_keras=use_keras)
def test_extract_features_returns_correct_shapes_384(self, use_keras):
image_height = 320
image_width = 320
depth_multiplier = 1.0
pad_to_multiple = 1
expected_feature_map_shape = [(2, 40, 40, 256), (2, 20, 20, 256),
(2, 10, 10, 256), (2, 5, 5, 256),
(2, 3, 3, 256)]
self.check_extract_features_returns_correct_shape(
2, image_height, image_width, depth_multiplier, pad_to_multiple,
expected_feature_map_shape, use_explicit_padding=False,
use_keras=use_keras)
self.check_extract_features_returns_correct_shape(
2, image_height, image_width, depth_multiplier, pad_to_multiple,
expected_feature_map_shape, use_explicit_padding=True,
use_keras=use_keras)
def test_extract_features_with_dynamic_image_shape(self, use_keras):
image_height = 256
image_width = 256
depth_multiplier = 1.0
pad_to_multiple = 1
expected_feature_map_shape = [(2, 32, 32, 256), (2, 16, 16, 256),
(2, 8, 8, 256), (2, 4, 4, 256),
(2, 2, 2, 256)]
self.check_extract_features_returns_correct_shapes_with_dynamic_inputs(
2, image_height, image_width, depth_multiplier, pad_to_multiple,
expected_feature_map_shape, use_explicit_padding=False,
use_keras=use_keras)
self.check_extract_features_returns_correct_shapes_with_dynamic_inputs(
2, image_height, image_width, depth_multiplier, pad_to_multiple,
expected_feature_map_shape, use_explicit_padding=True,
use_keras=use_keras)
def test_extract_features_returns_correct_shapes_with_pad_to_multiple(
self, use_keras):
image_height = 299
image_width = 299
depth_multiplier = 1.0
pad_to_multiple = 32
expected_feature_map_shape = [(2, 40, 40, 256), (2, 20, 20, 256),
(2, 10, 10, 256), (2, 5, 5, 256),
(2, 3, 3, 256)]
self.check_extract_features_returns_correct_shape(
2, image_height, image_width, depth_multiplier, pad_to_multiple,
expected_feature_map_shape, use_explicit_padding=False,
use_keras=use_keras)
self.check_extract_features_returns_correct_shape(
2, image_height, image_width, depth_multiplier, pad_to_multiple,
expected_feature_map_shape, use_explicit_padding=True,
use_keras=use_keras)
def test_extract_features_returns_correct_shapes_enforcing_min_depth(
self, use_keras):
image_height = 256
image_width = 256
depth_multiplier = 0.5**12
pad_to_multiple = 1
expected_feature_map_shape = [(2, 32, 32, 32), (2, 16, 16, 32),
(2, 8, 8, 32), (2, 4, 4, 32),
(2, 2, 2, 32)]
self.check_extract_features_returns_correct_shape(
2, image_height, image_width, depth_multiplier, pad_to_multiple,
expected_feature_map_shape, use_explicit_padding=False,
use_keras=use_keras)
self.check_extract_features_returns_correct_shape(
2, image_height, image_width, depth_multiplier, pad_to_multiple,
expected_feature_map_shape, use_explicit_padding=True,
use_keras=use_keras)
def test_extract_features_raises_error_with_invalid_image_size(
self, use_keras):
image_height = 32
image_width = 32
depth_multiplier = 1.0
pad_to_multiple = 1
self.check_extract_features_raises_error_with_invalid_image_size(
image_height, image_width, depth_multiplier, pad_to_multiple,
use_keras=use_keras)
def test_preprocess_returns_correct_value_range(self, use_keras):
image_height = 256
image_width = 256
depth_multiplier = 1
pad_to_multiple = 1
test_image = np.random.rand(2, image_height, image_width, 3)
feature_extractor = self._create_feature_extractor(depth_multiplier,
pad_to_multiple,
use_keras=use_keras)
preprocessed_image = feature_extractor.preprocess(test_image)
self.assertTrue(np.all(np.less_equal(np.abs(preprocessed_image), 1.0)))
def test_variables_only_created_in_scope(self, use_keras):
depth_multiplier = 1
pad_to_multiple = 1
scope_name = 'MobilenetV1'
self.check_feature_extractor_variables_under_scope(
depth_multiplier, pad_to_multiple, scope_name, use_keras=use_keras)
def test_variable_count(self, use_keras):
depth_multiplier = 1
pad_to_multiple = 1
variables = self.get_feature_extractor_variables(
depth_multiplier, pad_to_multiple, use_keras=use_keras)
self.assertEqual(len(variables), 153)
def test_fused_batchnorm(self, use_keras):
image_height = 256
image_width = 256
depth_multiplier = 1
pad_to_multiple = 1
image_placeholder = tf.placeholder(tf.float32,
[1, image_height, image_width, 3])
feature_extractor = self._create_feature_extractor(depth_multiplier,
pad_to_multiple,
use_keras=use_keras)
preprocessed_image = feature_extractor.preprocess(image_placeholder)
if use_keras:
_ = feature_extractor(preprocessed_image)
else:
_ = feature_extractor.extract_features(preprocessed_image)
self.assertTrue(
any('FusedBatchNorm' in op.type
for op in tf.get_default_graph().get_operations()))
if __name__ == '__main__':
tf.test.main()
| alexgorban/models | research/object_detection/models/ssd_mobilenet_v1_fpn_feature_extractor_test.py | Python | apache-2.0 | 9,782 | 0.002249 |
from constraint_solver import pywrapcp
def main():
solver = pywrapcp.Solver("time limit test")
n = 10
x = [solver.IntVar(1, n, "x[%i]" % i) for i in range(n)]
solver.Add(solver.AllDifferent(x, True))
solution = solver.Assignment()
solution.Add(x)
db = solver.Phase(x,
solver.CHOOSE_FIRST_UNBOUND,
solver.ASSIGN_MIN_VALUE)
time_limit = 10000
branch_limit = 100000000
failures_limit = 100000000
solutions_limit = 10000000
limits = (
solver.Limit(
time_limit, branch_limit, failures_limit, solutions_limit, True))
search_log = solver.SearchLog(1000)
solver.NewSearch(db, [limits, search_log])
num_solutions = 0
while solver.NextSolution():
print "x:", [x[i].Value() for i in range(n)]
num_solutions += 1
solver.EndSearch()
print
print "num_solutions:", num_solutions
print "failures:", solver.failures()
print "branches:", solver.branches()
print "wall_time:", solver.wall_time()
if __name__ == "__main__":
main()
| capturePointer/or-tools | examples/tests/issue4.py | Python | apache-2.0 | 1,031 | 0.022308 |
#!/usr/bin/python
# Copyright (c) 2015 IBM
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: os_flavor_facts
short_description: Retrieve facts about one or more flavors
author: "David Shrewsbury (@Shrews)"
version_added: "2.1"
description:
- Retrieve facts about available OpenStack instance flavors. By default,
facts about ALL flavors are retrieved. Filters can be applied to get
facts for only matching flavors. For example, you can filter on the
amount of RAM available to the flavor, or the number of virtual CPUs
available to the flavor, or both. When specifying multiple filters,
*ALL* filters must match on a flavor before that flavor is returned as
a fact.
notes:
- This module creates a new top-level C(openstack_flavors) fact, which
contains a list of unsorted flavors.
requirements:
- "python >= 2.6"
- "openstacksdk"
options:
name:
description:
- A flavor name. Cannot be used with I(ram) or I(vcpus) or I(ephemeral).
ram:
description:
- "A string used for filtering flavors based on the amount of RAM
(in MB) desired. This string accepts the following special values:
'MIN' (return flavors with the minimum amount of RAM), and 'MAX'
(return flavors with the maximum amount of RAM)."
- "A specific amount of RAM may also be specified. Any flavors with this
exact amount of RAM will be returned."
- "A range of acceptable RAM may be given using a special syntax. Simply
prefix the amount of RAM with one of these acceptable range values:
'<', '>', '<=', '>='. These values represent less than, greater than,
less than or equal to, and greater than or equal to, respectively."
vcpus:
description:
- A string used for filtering flavors based on the number of virtual
CPUs desired. Format is the same as the I(ram) parameter.
limit:
description:
- Limits the number of flavors returned. All matching flavors are
returned by default.
ephemeral:
description:
- A string used for filtering flavors based on the amount of ephemeral
storage. Format is the same as the I(ram) parameter
version_added: "2.3"
availability_zone:
description:
- Ignored. Present for backwards compatibility
extends_documentation_fragment: openstack
'''
EXAMPLES = '''
# Gather facts about all available flavors
- os_flavor_facts:
cloud: mycloud
# Gather facts for the flavor named "xlarge-flavor"
- os_flavor_facts:
cloud: mycloud
name: "xlarge-flavor"
# Get all flavors that have exactly 512 MB of RAM.
- os_flavor_facts:
cloud: mycloud
ram: "512"
# Get all flavors that have 1024 MB or more of RAM.
- os_flavor_facts:
cloud: mycloud
ram: ">=1024"
# Get a single flavor that has the minimum amount of RAM. Using the 'limit'
# option will guarantee only a single flavor is returned.
- os_flavor_facts:
cloud: mycloud
ram: "MIN"
limit: 1
# Get all flavors with 1024 MB of RAM or more, AND exactly 2 virtual CPUs.
- os_flavor_facts:
cloud: mycloud
ram: ">=1024"
vcpus: "2"
# Get all flavors with 1024 MB of RAM or more, exactly 2 virtual CPUs, and
# less than 30gb of ephemeral storage.
- os_flavor_facts:
cloud: mycloud
ram: ">=1024"
vcpus: "2"
ephemeral: "<30"
'''
RETURN = '''
openstack_flavors:
description: Dictionary describing the flavors.
returned: On success.
type: complex
contains:
id:
description: Flavor ID.
returned: success
type: string
sample: "515256b8-7027-4d73-aa54-4e30a4a4a339"
name:
description: Flavor name.
returned: success
type: string
sample: "tiny"
disk:
description: Size of local disk, in GB.
returned: success
type: int
sample: 10
ephemeral:
description: Ephemeral space size, in GB.
returned: success
type: int
sample: 10
ram:
description: Amount of memory, in MB.
returned: success
type: int
sample: 1024
swap:
description: Swap space size, in MB.
returned: success
type: int
sample: 100
vcpus:
description: Number of virtual CPUs.
returned: success
type: int
sample: 2
is_public:
description: Make flavor accessible to the public.
returned: success
type: bool
sample: true
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.openstack import openstack_full_argument_spec, openstack_module_kwargs, openstack_cloud_from_module
def main():
argument_spec = openstack_full_argument_spec(
name=dict(required=False, default=None),
ram=dict(required=False, default=None),
vcpus=dict(required=False, default=None),
limit=dict(required=False, default=None, type='int'),
ephemeral=dict(required=False, default=None),
)
module_kwargs = openstack_module_kwargs(
mutually_exclusive=[
['name', 'ram'],
['name', 'vcpus'],
['name', 'ephemeral']
]
)
module = AnsibleModule(argument_spec, **module_kwargs)
name = module.params['name']
vcpus = module.params['vcpus']
ram = module.params['ram']
ephemeral = module.params['ephemeral']
limit = module.params['limit']
filters = {}
if vcpus:
filters['vcpus'] = vcpus
if ram:
filters['ram'] = ram
if ephemeral:
filters['ephemeral'] = ephemeral
sdk, cloud = openstack_cloud_from_module(module)
try:
if name:
flavors = cloud.search_flavors(filters={'name': name})
else:
flavors = cloud.list_flavors()
if filters:
flavors = cloud.range_search(flavors, filters)
if limit is not None:
flavors = flavors[:limit]
module.exit_json(changed=False,
ansible_facts=dict(openstack_flavors=flavors))
except sdk.exceptions.OpenStackCloudException as e:
module.fail_json(msg=str(e))
if __name__ == '__main__':
main()
| mheap/ansible | lib/ansible/modules/cloud/openstack/os_flavor_facts.py | Python | gpl-3.0 | 6,809 | 0.000587 |
# Copyright 2014 CloudFounders NV
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This package contains the unit tests for OVS Cinder Plugin for OpenStack
Tested on Plugin version 1.0.2a
"""
| mflu/openvstorage_centos | openstack/tests/__init__.py | Python | apache-2.0 | 694 | 0 |
import os
import gettext as gettext_module
from django import http
from django.conf import settings
from django.utils import importlib
from django.utils.translation import check_for_language, activate, to_locale, get_language
from django.utils.text import javascript_quote
from django.utils.encoding import smart_unicode
from django.utils.formats import get_format_modules, get_format
from django.utils import six
def set_language(request):
"""
Redirect to a given url while setting the chosen language in the
session or cookie. The url and the language code need to be
specified in the request parameters.
Since this view changes how the user will see the rest of the site, it must
only be accessed as a POST request. If called as a GET request, it will
redirect to the page in the request (the 'next' parameter) without changing
any state.
"""
next = request.REQUEST.get('next', None)
if not next:
next = request.META.get('HTTP_REFERER', None)
if not next:
next = '/'
response = http.HttpResponseRedirect(next)
if request.method == 'POST':
lang_code = request.POST.get('language', None)
if lang_code and check_for_language(lang_code):
if hasattr(request, 'session'):
request.session['django_language'] = lang_code
else:
response.set_cookie(settings.LANGUAGE_COOKIE_NAME, lang_code)
return response
def get_formats():
"""
Returns all formats strings required for i18n to work
"""
FORMAT_SETTINGS = (
'DATE_FORMAT', 'DATETIME_FORMAT', 'TIME_FORMAT',
'YEAR_MONTH_FORMAT', 'MONTH_DAY_FORMAT', 'SHORT_DATE_FORMAT',
'SHORT_DATETIME_FORMAT', 'FIRST_DAY_OF_WEEK', 'DECIMAL_SEPARATOR',
'THOUSAND_SEPARATOR', 'NUMBER_GROUPING',
'DATE_INPUT_FORMATS', 'TIME_INPUT_FORMATS', 'DATETIME_INPUT_FORMATS'
)
result = {}
for module in [settings] + get_format_modules(reverse=True):
for attr in FORMAT_SETTINGS:
result[attr] = get_format(attr)
src = []
for k, v in result.items():
if six.PY3:
k = k.encode('ascii')
if isinstance(v, six.string_types + (int,)):
src.append("formats['%s'] = '%s';\n" % (javascript_quote(k), javascript_quote(smart_unicode(v))))
elif isinstance(v, (tuple, list)):
v = [javascript_quote(smart_unicode(value)) for value in v]
src.append("formats['%s'] = ['%s'];\n" % (javascript_quote(k), "', '".join(v)))
return ''.join(src)
NullSource = """
/* gettext identity library */
function gettext(msgid) { return msgid; }
function ngettext(singular, plural, count) { return (count == 1) ? singular : plural; }
function gettext_noop(msgid) { return msgid; }
function pgettext(context, msgid) { return msgid; }
function npgettext(context, singular, plural, count) { return (count == 1) ? singular : plural; }
"""
LibHead = """
/* gettext library */
var catalog = new Array();
"""
LibFoot = """
function gettext(msgid) {
var value = catalog[msgid];
if (typeof(value) == 'undefined') {
return msgid;
} else {
return (typeof(value) == 'string') ? value : value[0];
}
}
function ngettext(singular, plural, count) {
value = catalog[singular];
if (typeof(value) == 'undefined') {
return (count == 1) ? singular : plural;
} else {
return value[pluralidx(count)];
}
}
function gettext_noop(msgid) { return msgid; }
function pgettext(context, msgid) {
var value = gettext(context + '\x04' + msgid);
if (value.indexOf('\x04') != -1) {
value = msgid;
}
return value;
}
function npgettext(context, singular, plural, count) {
var value = ngettext(context + '\x04' + singular, context + '\x04' + plural, count);
if (value.indexOf('\x04') != -1) {
value = ngettext(singular, plural, count);
}
return value;
}
"""
LibFormatHead = """
/* formatting library */
var formats = new Array();
"""
LibFormatFoot = """
function get_format(format_type) {
var value = formats[format_type];
if (typeof(value) == 'undefined') {
return format_type;
} else {
return value;
}
}
"""
SimplePlural = """
function pluralidx(count) { return (count == 1) ? 0 : 1; }
"""
InterPolate = r"""
function interpolate(fmt, obj, named) {
if (named) {
return fmt.replace(/%\(\w+\)s/g, function(match){return String(obj[match.slice(2,-2)])});
} else {
return fmt.replace(/%s/g, function(match){return String(obj.shift())});
}
}
"""
PluralIdx = r"""
function pluralidx(n) {
var v=%s;
if (typeof(v) == 'boolean') {
return v ? 1 : 0;
} else {
return v;
}
}
"""
def null_javascript_catalog(request, domain=None, packages=None):
"""
Returns "identity" versions of the JavaScript i18n functions -- i.e.,
versions that don't actually do anything.
"""
src = [NullSource, InterPolate, LibFormatHead, get_formats(), LibFormatFoot]
return http.HttpResponse(''.join(src), 'text/javascript')
def javascript_catalog(request, domain='djangojs', packages=None):
"""
Returns the selected language catalog as a javascript library.
Receives the list of packages to check for translations in the
packages parameter either from an infodict or as a +-delimited
string from the request. Default is 'django.conf'.
Additionally you can override the gettext domain for this view,
but usually you don't want to do that, as JavaScript messages
go to the djangojs domain. But this might be needed if you
deliver your JavaScript source from Django templates.
"""
if request.GET:
if 'language' in request.GET:
if check_for_language(request.GET['language']):
activate(request.GET['language'])
if packages is None:
packages = ['django.conf']
if isinstance(packages, six.string_types):
packages = packages.split('+')
packages = [p for p in packages if p == 'django.conf' or p in settings.INSTALLED_APPS]
default_locale = to_locale(settings.LANGUAGE_CODE)
locale = to_locale(get_language())
t = {}
paths = []
en_selected = locale.startswith('en')
en_catalog_missing = True
# paths of requested packages
for package in packages:
p = importlib.import_module(package)
path = os.path.join(os.path.dirname(p.__file__), 'locale')
paths.append(path)
# add the filesystem paths listed in the LOCALE_PATHS setting
paths.extend(list(reversed(settings.LOCALE_PATHS)))
    # first load all English language files for defaults
for path in paths:
try:
catalog = gettext_module.translation(domain, path, ['en'])
t.update(catalog._catalog)
except IOError:
pass
else:
# 'en' is the selected language and at least one of the packages
# listed in `packages` has an 'en' catalog
if en_selected:
en_catalog_missing = False
# next load the settings.LANGUAGE_CODE translations if it isn't english
if default_locale != 'en':
for path in paths:
try:
catalog = gettext_module.translation(domain, path, [default_locale])
except IOError:
catalog = None
if catalog is not None:
t.update(catalog._catalog)
# last load the currently selected language, if it isn't identical to the default.
if locale != default_locale:
# If the currently selected language is English but it doesn't have a
# translation catalog (presumably due to being the language translated
# from) then a wrong language catalog might have been loaded in the
# previous step. It needs to be discarded.
if en_selected and en_catalog_missing:
t = {}
else:
locale_t = {}
for path in paths:
try:
catalog = gettext_module.translation(domain, path, [locale])
except IOError:
catalog = None
if catalog is not None:
locale_t.update(catalog._catalog)
if locale_t:
t = locale_t
src = [LibHead]
plural = None
if '' in t:
for l in t[''].split('\n'):
if l.startswith('Plural-Forms:'):
plural = l.split(':',1)[1].strip()
if plural is not None:
# this should actually be a compiled function of a typical plural-form:
# Plural-Forms: nplurals=3; plural=n%10==1 && n%100!=11 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2;
plural = [el.strip() for el in plural.split(';') if el.strip().startswith('plural=')][0].split('=',1)[1]
src.append(PluralIdx % plural)
else:
src.append(SimplePlural)
csrc = []
pdict = {}
for k, v in t.items():
if k == '':
continue
if isinstance(k, six.string_types):
csrc.append("catalog['%s'] = '%s';\n" % (javascript_quote(k), javascript_quote(v)))
elif isinstance(k, tuple):
if k[0] not in pdict:
pdict[k[0]] = k[1]
else:
pdict[k[0]] = max(k[1], pdict[k[0]])
csrc.append("catalog['%s'][%d] = '%s';\n" % (javascript_quote(k[0]), k[1], javascript_quote(v)))
else:
raise TypeError(k)
csrc.sort()
for k, v in pdict.items():
src.append("catalog['%s'] = [%s];\n" % (javascript_quote(k), ','.join(["''"]*(v+1))))
src.extend(csrc)
src.append(LibFoot)
src.append(InterPolate)
src.append(LibFormatHead)
src.append(get_formats())
src.append(LibFormatFoot)
src = ''.join(src)
return http.HttpResponse(src, 'text/javascript')
| vsajip/django | django/views/i18n.py | Python | bsd-3-clause | 9,782 | 0.002556 |
__author__ = 'leif'
from django.contrib import admin
from models import *
admin.site.register(GameExperiment)
admin.site.register(UserProfile)
admin.site.register(MaxHighScore)
| leifos/boxes | treasure-houses/asg/admin.py | Python | mit | 178 | 0 |
"""Test."""
import pytest
TM_TABLE = [
([0, 1, 1, 0, 1], True),
([0], True),
([1], False),
([0, 1, 0, 0], False),
]
@pytest.mark.parametrize("n, result", TM_TABLE)
def test_is_thue_morse(n, result):
"""Test."""
from is_thue_morse import is_thue_morse
assert is_thue_morse(n) == result
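# A possible implementation sketch of the function under test (an assumption, not the
# project's actual is_thue_morse module): the n-th Thue-Morse digit is the bit parity
# of n, so the input is valid iff it is a prefix of that sequence.
#
#   def is_thue_morse(seq):
#       return all(bit == bin(i).count('1') % 2 for i, bit in enumerate(seq))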
| rrustia/code-katas | src/test_is_thue_morse.py | Python | mit | 318 | 0 |
import getpass
from distutils import log
from distutils.command import upload as orig
class upload(orig.upload):
"""
Override default upload behavior to obtain password
in a variety of different ways.
"""
def run(self):
try:
orig.upload.run(self)
finally:
self.announce(
"WARNING: Uploading via this command is deprecated, use twine "
"to upload instead (https://pypi.org/p/twine/)",
log.WARN
)
def finalize_options(self):
orig.upload.finalize_options(self)
self.username = (
self.username or
getpass.getuser()
)
# Attempt to obtain password. Short circuit evaluation at the first
# sign of success.
self.password = (
self.password or
self._load_password_from_keyring() or
self._prompt_for_password()
)
def _load_password_from_keyring(self):
"""
Attempt to load password from keyring. Suppress Exceptions.
"""
try:
keyring = __import__('keyring')
return keyring.get_password(self.repository, self.username)
except Exception:
pass
def _prompt_for_password(self):
"""
Prompt for a password on the tty. Suppress Exceptions.
"""
try:
return getpass.getpass()
except (Exception, KeyboardInterrupt):
pass
| astaninger/speakout | venv/lib/python3.6/site-packages/setuptools/command/upload.py | Python | mit | 1,493 | 0 |
# -*- encoding: utf-8 -*-
#
# Copyright 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Fake drivers used in testing.
"""
from ironic.common import exception
from ironic.drivers import base
from ironic.drivers.modules import fake
from ironic.drivers.modules import ipminative
from ironic.drivers.modules import ipmitool
from ironic.drivers.modules import pxe
from ironic.drivers.modules import seamicro
from ironic.drivers.modules import ssh
from ironic.drivers import utils
from ironic.openstack.common import importutils
class FakeDriver(base.BaseDriver):
"""Example implementation of a Driver."""
def __init__(self):
self.power = fake.FakePower()
self.deploy = fake.FakeDeploy()
self.a = fake.FakeVendorA()
self.b = fake.FakeVendorB()
self.mapping = {'first_method': self.a,
'second_method': self.b}
self.vendor = utils.MixinVendorInterface(self.mapping)
self.console = fake.FakeConsole()
class FakeIPMIToolDriver(base.BaseDriver):
"""Example implementation of a Driver."""
def __init__(self):
self.power = ipmitool.IPMIPower()
self.deploy = fake.FakeDeploy()
self.vendor = ipmitool.VendorPassthru()
class FakePXEDriver(base.BaseDriver):
"""Example implementation of a Driver."""
def __init__(self):
self.power = fake.FakePower()
self.deploy = pxe.PXEDeploy()
self.vendor = pxe.VendorPassthru()
class FakeSSHDriver(base.BaseDriver):
"""Example implementation of a Driver."""
def __init__(self):
self.power = ssh.SSHPower()
self.deploy = fake.FakeDeploy()
class FakeIPMINativeDriver(base.BaseDriver):
"""Example implementation of a Driver."""
def __init__(self):
self.power = ipminative.NativeIPMIPower()
self.deploy = fake.FakeDeploy()
self.vendor = ipminative.VendorPassthru()
class FakeSeaMicroDriver(base.BaseDriver):
"""Fake SeaMicro driver."""
def __init__(self):
if not importutils.try_import('seamicroclient'):
raise exception.DriverNotFound('FakeSeaMicroDriver')
self.power = seamicro.Power()
self.deploy = fake.FakeDeploy()
self.vendor = seamicro.VendorPassthru()
| varunarya10/ironic | ironic/drivers/fake.py | Python | apache-2.0 | 2,806 | 0 |
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Weapon()
result.template = "object/weapon/ranged/vehicle/shared_vehicle_atst_ranged.iff"
result.attribute_template_id = 10
result.stfName("obj_n","unknown_weapon")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result | anhstudios/swganh | data/scripts/templates/object/weapon/ranged/vehicle/shared_vehicle_atst_ranged.py | Python | mit | 454 | 0.048458 |
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Provides the data access object (DAO) for Organizations."""
import MySQLdb
from google.cloud.security.common.data_access import dao
from google.cloud.security.common.data_access import errors as db_errors
from google.cloud.security.common.data_access import violation_map as vm
from google.cloud.security.common.util import log_util
LOGGER = log_util.get_logger(__name__)
class ViolationDao(dao.Dao):
"""Data access object (DAO) for rule violations."""
def insert_violations(self, violations, resource_name,
snapshot_timestamp=None):
"""Import violations into database.
Args:
violations: An iterator of RuleViolations.
resource_name: String that defines a resource
snapshot_timestamp: The snapshot timestamp to associate these
violations with.
Return:
A tuple of (int, list) containing the count of inserted rows and
a list of violations that encountered an error during insert.
Raise:
MySQLError if snapshot table could not be created.
"""
try:
# Make sure to have a reasonable timestamp to use.
if not snapshot_timestamp:
snapshot_timestamp = self.get_latest_snapshot_timestamp(
('PARTIAL_SUCCESS', 'SUCCESS'))
# Create the violations snapshot table.
snapshot_table = self._create_snapshot_table(
resource_name, snapshot_timestamp)
except MySQLdb.Error, e:
raise db_errors.MySQLError(resource_name, e)
inserted_rows = 0
violation_errors = []
for violation in violations:
for formatted_violation in _format_violation(violation,
resource_name):
try:
self.execute_sql_with_commit(
resource_name,
vm.VIOLATION_INSERT_MAP[resource_name](snapshot_table),
formatted_violation)
inserted_rows += 1
except MySQLdb.Error, e:
LOGGER.error('Unable to insert violation %s due to %s',
formatted_violation, e)
violation_errors.append(formatted_violation)
return (inserted_rows, violation_errors)
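# A hypothetical usage sketch (the resource name and caller context are assumptions,
# not part of this module):
#
#   dao = ViolationDao()
#   inserted_count, failed_rows = dao.insert_violations(
#       violations, resource_name, snapshot_timestamp=None)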
def _format_violation(violation, resource_name):
"""Violation formating stub that uses a map to call the formating
function for the resource.
Args:
violation: An iterator of RuleViolations.
resource_name: String that defines a resource
Returns:
Formatted violations
"""
formatted_output = vm.VIOLATION_MAP[resource_name](violation)
return formatted_output
| felixbb/forseti-security | google/cloud/security/common/data_access/violation_dao.py | Python | apache-2.0 | 3,393 | 0 |
'''
Copyright 2017 Dell Inc. or its subsidiaries. All Rights Reserved.
Author(s):
Norton Luo
This test validates the AMQP messages sent out during the workflow, as well as node delete and discovery.
It also validates the web hook API and the node registration function.
The test chooses a node and resets it. Once the reset starts, the node is deleted so that it runs through the
discovery workflow again. AMQP and webhook listeners are launched beforehand in separate worker threads to monitor the messages.
'''
from sm_plugin import smp_get_stream_monitor
from time import sleep
import gevent
import gevent.queue
import random
import flogging
import unittest
import json
from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler
import fit_common
import env_ip_helpers
import test_api_utils
from nose.plugins.attrib import attr
from nosedep import depends
logs = flogging.get_loggers()
WEBHOOK_PORT = 9889
class RequestHandler(BaseHTTPRequestHandler):
def do_POST(self):
logs.debug('POST came in on test-http-server')
request_headers = self.headers
content_length = request_headers.getheaders('content-length')
if content_length:
length = int(content_length[0])
else:
length = 0
webhook_body = str(self.rfile.read(length))
logs.tdl.debug('body is: %s', webhook_body)
self.send_response(200)
self.server.do_post_queue.put(webhook_body)
class HttpWorker(gevent.Greenlet):
def __init__(self, port, timeout=10):
super(HttpWorker, self).__init__()
self.__server = HTTPServer(('', port), RequestHandler)
self.__server.timeout = timeout
self.__server.do_post_queue = gevent.queue.Queue()
testhost_ipv4 = env_ip_helpers.get_testhost_ip()
self.ipv4_address = testhost_ipv4
self.ipv4_port = port
@property
def do_post_queue(self):
return self.__server.do_post_queue
def dispose(self):
logs.debug('http service shutdown')
def _run(self):
self.__server.handle_request()
@attr(all=True, regression=False, smoke=False)
class test_node_rediscover_amqp_message(unittest.TestCase):
@classmethod
def setUpClass(cls):
# Get the stream-monitor plugin for AMQP
cls._amqp_sp = smp_get_stream_monitor('amqp')
# Create the "all events" tracker
cls._on_events_tracker = cls._amqp_sp.create_tracker('on-events-all', 'on.events', '#')
# We have context information that needs to be passed from test-to-test. Set up the template
# space.
cls._run_context = {
'start_nodeid': None,
'start_node_uuid': None,
'reboot_graphid': None,
'rediscovered_nodeid': None
}
# Set up the web-serverlet to get the callback from the hooks part
        # of the api. We do this here so the server stays up for the required
# tests!
cls._serverworker = HttpWorker(WEBHOOK_PORT, 300)
cls._serverworker.start()
@classmethod
def tearDownClass(cls):
cls._serverworker.dispose()
def setUp(self):
# attach a processor to the on-events-tracker amqp tracker. Then we can
        # attach individual match-clauses to this in each test-case.
self.__qproc = self._amqp_sp.get_tracker_queue_processor(self._on_events_tracker)
def __set_run_context(self, key, value):
assert key in self._run_context, \
'{} not a run-context variable'.format(key)
assert self._run_context[key] is None, \
'attempt to set existing run-context for {} to {}, was already {}'.format(
key, value, self._run_context[key])
self._run_context[key] = value
def __get_run_context(self, key):
assert key in self._run_context, \
'{} not a run-context variable'.format(key)
assert self._run_context[key] is not None, \
'attempt to get unset run-context for {}'.format(key)
return self._run_context[key]
def __set_web_hook(self):
mondata = fit_common.rackhdapi('/api/current/hooks')
self.assertTrue(
mondata['status'] < 209,
'Incorrect HTTP return code, could not check hooks. expected<209, got:' + str(mondata['status']))
ip = self._serverworker.ipv4_address
# ip = '172.31.110.34'
port = self._serverworker.ipv4_port
hookurl = "http://" + str(ip) + ":" + str(port)
for hooks in mondata['json']:
if hooks['url'] == hookurl:
logs.debug("Hook URL already exist in RackHD")
return
response = fit_common.rackhdapi(
'/api/current/hooks',
action='post',
payload={
"name": "FITdiscovery",
"url": hookurl,
"filters": [{"type": "node",
"action": "discovered"}]})
self.assertTrue(
response['status'] < 209,
'Incorrect HTTP return code, expected<209, got:' + str(response['status']))
def __apply_obmsetting_to_node(self, nodeid):
usr = None
# pwd = ''
response = fit_common.rackhdapi(
'/api/2.0/nodes/' + nodeid + '/catalogs/bmc')
bmcip = response['json']['data']['IP Address']
# Try credential record in config file
for creds in fit_common.fitcreds()['bmc']:
if fit_common.remote_shell(
'ipmitool -I lanplus -H ' + bmcip + ' -U ' + creds['username'] + ' -P ' +
creds['password'] + ' fru')['exitcode'] == 0:
usr = creds['username']
pwd = creds['password']
break
        # Put the credentials into OBM settings
if usr is not None:
payload = {
"service": "ipmi-obm-service",
"config": {
"host": bmcip,
"user": usr,
"password": pwd},
"nodeId": nodeid}
api_data = fit_common.rackhdapi("/api/2.0/obms", action='put', payload=payload)
if api_data['status'] == 201:
return True
return False
def __check_skupack(self):
sku_installed = fit_common.rackhdapi('/api/2.0/skus')['json']
if len(sku_installed) < 2:
return False
else:
return True
def __process_web_message(self, webhook_body):
try:
webhook_body_json = json.loads(webhook_body)
except ValueError:
self.fail("FAILURE - The message body is not json format!")
self.assertIn("action", webhook_body_json, "action field is not contained in the discover message")
self.assertEquals(
webhook_body_json['action'], "discovered",
"action field not correct! expect {0}, get {1}"
.format("discovered", webhook_body_json['action']))
self.assertIn("data", webhook_body_json, "data field is not contained in the discover message")
self.assertIn("nodeId", webhook_body_json["data"], "nodeId is not contained in the discover message")
self.assertNotEquals(
webhook_body_json["data"]["nodeId"], "", "nodeId generated in discovery doesn't include valid data ")
self.assertIn(
"ipMacAddresses", webhook_body_json["data"], "ipMacAddresses is not contained in the discover message")
self.assertNotEquals(
webhook_body_json["data"]["ipMacAddresses"], "",
"ipMacAddresses generated during node discovery doesn't include valid data ")
def __build_info_vblock(self, message_type, action, typeid, nodeid):
expected_payload = {
"type": message_type,
"action": action,
"typeId": typeid,
"nodeId": nodeid,
"severity": "information",
"createdAt": "<<present>>",
"data": "<<present>>",
"version": "1.0"
}
expected_rk = "{}.{}.information.{}.{}".format(message_type, action, typeid, nodeid)
ex = {
'body': expected_payload,
'routing_key': expected_rk
}
return ex
def __build_simple_graph_vblock(self, action, graphid, status):
ex = {
'body': {
'status': status
},
'routing_key': 'graph.{}.{}'.format(action, graphid)
}
return ex
def __wait_for_uuid(self):
node_uuid = self.__get_run_context('start_node_uuid')
        logs.debug('Beginning wait for uuid %s to reappear', node_uuid)
for dummy in range(0, 20):
sleep(30)
rest_data = fit_common.rackhdapi('/redfish/v1/Systems/')
if rest_data['json']['Members@odata.count'] == 0:
continue
node_collection = rest_data['json']['Members']
for computenode in node_collection:
nodeidurl = computenode['@odata.id']
api_data = fit_common.rackhdapi(nodeidurl)
if api_data['status'] > 399:
break
if node_uuid == api_data['json']['UUID']:
return True
logs.debug("Time out to find the node with uuid!")
return False
def __check_added_node_CB(self, other_event):
body = other_event.body
if "nodeId" not in body:
return False
new_nodid = body["nodeId"]
if self.__wait_for_uuid():
self.__set_run_context('rediscovered_nodeid', new_nodid)
logs.debug('Located node again with id=%s', new_nodid)
return True
return False
def __node_discover(self, node_uuid):
logs.debug('Validate node discovery registration web hook Message')
self._process_web_message(30)
def test_rediscover_pick_node(self):
node_collection = test_api_utils.get_node_list_by_type("compute")
self.assertNotEquals(node_collection, [], "No compute node found!")
for dummy in node_collection:
nodeid = node_collection[random.randint(0, len(node_collection) - 1)]
if fit_common.rackhdapi('/api/2.0/nodes/' + nodeid)['json']['name'] != "Management Server":
break
self.__set_run_context('start_nodeid', nodeid)
node_uuid = fit_common.rackhdapi('/redfish/v1/Systems/' + nodeid)['json']['UUID']
logs.debug('UUID of selected Node is %s', node_uuid)
self.__set_run_context('start_node_uuid', node_uuid)
@depends(after='test_rediscover_pick_node')
def test_rediscover_check_obm(self):
nodeid = self.__get_run_context('start_nodeid')
node_obm = fit_common.rackhdapi('/api/2.0/nodes/' + nodeid)['json']['obms']
if node_obm == []:
logs.trl.debug('Picked node does not have OBM settings. Trying to add them')
applied = self.__apply_obmsetting_to_node(self.__get_run_context('start_nodeid'))
self.assertTrue(applied, 'Failed to apply obm settings')
@depends(after='test_rediscover_check_obm')
def test_rediscover_reboot_kickoff(self):
nodeid = self.__get_run_context('start_nodeid')
# first setup the web-hook to monitor (see test_rediscover_rackd_discover_hook) for
# rackhd hook messages.
self.__set_web_hook()
# now give the node a kick
response = fit_common.rackhdapi(
'/redfish/v1/Systems/' + nodeid + '/Actions/ComputerSystem.Reset', action='post',
payload={"ResetType": "ForceRestart"})
self.assertTrue(
response['status'] < 209, 'Incorrect HTTP return code, expected<209, got:' + str(response['status']))
graphid = response['json']["@odata.id"].split('/redfish/v1/TaskService/Tasks/')[1]
self.__set_run_context('reboot_graphid', graphid)
@depends(after='test_rediscover_reboot_kickoff', before='test_rediscover_node_delete')
def test_rediscover_reboot_graph_ampq_flow(self):
nodeid = self.__get_run_context('start_nodeid')
graphid = self.__get_run_context('reboot_graphid')
# Push a new match-group onto this processor. Ordered=true makes it so the the matchers
# in it need to occur in order.
self.__qproc.open_group(ordered=True)
self.__qproc.match_on_routekey('basic-start', routing_key='graph.started.{}'.format(graphid),
validation_block=self.__build_simple_graph_vblock('started', graphid, 'running'))
self.__qproc.match_on_routekey('info-start', routing_key='graph.started.information.{}.#'.format(graphid),
validation_block=self.__build_info_vblock('graph', 'started', graphid, nodeid))
# note: the (3,4) bounding here is actually there because of a flaw in the and-group matcher.
# it's a sticky enough thing to fix, so I'm letting it go for now. Basically, what
# happens is we get 3 progress messages, the basic-finish, and one more progress. But
# if we call out the 4th one as its own matcher (and put 3,3 here), this
# one overmatches.
self.__qproc.match_on_routekey('graph-task-progress',
routing_key='graph.progress.updated.information.{}.#'.format(graphid),
min=3, max=4,
validation_block=self.__build_info_vblock('graph', 'progress.updated', graphid, nodeid))
self.__qproc.match_on_routekey(description='basic-finish', routing_key='graph.finished.{}'.format(graphid),
validation_block=self.__build_simple_graph_vblock('finished', graphid, 'succeeded'))
self.__qproc.match_on_routekey(description='info-finish', routing_key='graph.finished.information.{}.#'.format(graphid),
validation_block=self.__build_info_vblock('graph', 'finished', graphid, nodeid))
self.__qproc.close_group()
results = self._amqp_sp.finish(timeout=30)
results[0].assert_errors(self)
@depends(after='test_rediscover_reboot_kickoff')
def test_rediscover_node_delete(self):
nodeid = self.__get_run_context('start_nodeid')
result = fit_common.rackhdapi('/api/2.0/nodes/' + nodeid, action='delete')
self.assertTrue(result['status'] < 209, 'Was expecting response code < 209. Got ' + str(result['status']))
self.__qproc.match_on_routekey('node-removed-information',
routing_key='node.removed.information.{}.#'.format(nodeid),
validation_block=self.__build_info_vblock('node', 'removed', nodeid, nodeid))
results = self._amqp_sp.finish(timeout=30)
results[0].assert_errors(self)
@depends(after='test_rediscover_node_delete')
def test_rediscover_node_discover_rebooted_node(self):
# look for node-adds until one happens that has the same uuid as the old deleted node
self.__qproc.match_on_routekey('node-added', routing_key='node.added.#', match_CB=self.__check_added_node_CB)
results = self._amqp_sp.finish(timeout=300)
results[0].assert_errors(self)
@depends(after='test_rediscover_node_discover_rebooted_node', before='test_rediscover_node_registration')
def test_rediscover_node_discover_sku_assign(self):
nodeid = self.__get_run_context('rediscovered_nodeid')
        skupack_installed = self.__check_skupack()
        if not skupack_installed:
raise unittest.SkipTest('skupack is not installed, skipping sku assigned message check')
self.__qproc.match_on_routekey('sku-assigned', routing_key='node.sku.assigned.information.{}.#'.format(nodeid),
validation_block=self.__build_info_vblock('node', 'sku.assigned', nodeid, nodeid))
results = self._amqp_sp.finish(timeout=300)
results[0].assert_errors(self)
@depends(after='test_rediscover_node_discover_rebooted_node')
def test_rediscover_node_registration(self):
nodeid = self.__get_run_context('rediscovered_nodeid')
self.__qproc.match_on_routekey('node-discovered-information',
routing_key='node.discovered.information.{}.#'.format(nodeid),
validation_block=self.__build_info_vblock('node', 'discovered', nodeid, nodeid))
results = self._amqp_sp.finish(timeout=300)
results[0].assert_errors(self)
@depends(after='test_rediscover_node_registration')
def test_rediscover_rackd_discover_hook(self):
found_msgs = []
while True:
try:
m = self._serverworker.do_post_queue.get(timeout=0.1)
except gevent.queue.Empty:
m = None
if m is None:
break
self.__process_web_message(m)
found_msgs.append(m)
found_count = len(found_msgs)
self.assertNotEqual(found_count, 0, 'No discovery message was posted back via the webhook')
self.assertEqual(found_count, 1, 'Received more than one posted webhook: {}'.format(found_msgs))
@depends(after='test_rediscover_node_registration')
def test_rediscover_node_discover_obm_settings(self):
nodeid = self.__get_run_context('rediscovered_nodeid')
applied = self.__apply_obmsetting_to_node(nodeid)
self.assertTrue(applied, 'failed to apply obm settings to rediscovered node {}'.format(nodeid))
self.__qproc.match_on_routekey('obm-assigned', routing_key='node.obms.assigned.information.{}.#'.format(nodeid),
validation_block=self.__build_info_vblock('node', 'obms.assigned', nodeid, nodeid))
self.__qproc.match_on_routekey('node-accessible', routing_key='node.accessible.information.{}.#'.format(nodeid),
validation_block=self.__build_info_vblock('node', 'accessible', nodeid, nodeid))
results = self._amqp_sp.finish(timeout=300)
results[0].assert_errors(self)
if __name__ == '__main__':
unittest.main()
| RackHD/RackHD | test/tests/amqp/test_amqp_node_rediscover.py | Python | apache-2.0 | 18,627 | 0.003382 |
#!/usr/bin/env python
from __future__ import print_function
from collections import Counter
from operator import itemgetter
import os
_path = os.path.abspath(os.path.dirname(__file__))
SOURCE = os.path.join(_path, 'poems_for_wordcount.txt')
DESTINATION = os.path.join(_path, 'poem_words_out.txt')
def sort_word_counts(word_dict):
# first sort to get k by alpha
sorted_by_key = sorted(word_dict.items(), key=itemgetter(0))
# then reverse sort on number of occurrences (v) to get list in desc order
    return sorted(sorted_by_key, key=itemgetter(1), reverse=True)
def main():
with open(SOURCE, 'rb') as source, open(DESTINATION, 'wb') as destination:
word_counts = Counter(source.read().lower().split())
for item in sort_word_counts(word_counts):
print("{} {}".format(*item), file=destination)
def test_sort_word_counts():
word_list = 'you watch the brown fox jumped over the fence'.split()
word_counts = Counter(word_list)
sorted_list = sort_word_counts(word_counts)
assert sorted_list[0][0] == 'the'
assert sorted_list[1][0] == 'brown'
assert sorted_list[-1][0] == 'you'
def test_output():
main()
output = open(DESTINATION, 'rb').readlines()
word, count = output[0].split()
assert len(output) == 3518
assert word == 'the'
assert int(count) == 1085
if __name__ == '__main__':
main()
| clarkkarenl/brautbot | wordcount.py | Python | artistic-2.0 | 1,390 | 0 |
__author__ = 'Nick Apperley'
# -*- coding: utf-8 -*-
#
# Establishes an OpenVPN connection using an OVPN file. Based on a Hacking Lab Python script
# (http://media.hacking-lab.com/largefiles/livecd/z_openvpn_config/backtrack/vpn-with-python.py). Requires Python 3
# and the pexpect library (module).
import pexpect
from invalid_credentials_error import InvalidCredentialsError
# Set the timeout in seconds.
timeout = 15
def open_vpn_connection(username, password, conf_dir, ovpn_file):
process = pexpect.spawn('openvpn %s' % ovpn_file, cwd=conf_dir, timeout=timeout)
try:
process.expect('Enter Auth Username:')
process.sendline(username)
process.expect('Enter Auth Password:')
process.sendline(password)
print('Connecting...')
process.expect('Initialization Sequence Completed')
print('Connected')
except pexpect.EOF:
print('Invalid username and/or password')
raise InvalidCredentialsError('Invalid OpenVPN username and/or password')
except pexpect.TIMEOUT:
print('Connection failed!')
raise TimeoutError('Cannot connect to OpenVPN server')
return process
def close_vpn_connection(process):
if process is not None:
process.kill(0)
print('Disconnected')
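

# Illustrative usage sketch -- not part of the original module. The username,
# password, config directory, and .ovpn file name below are hypothetical
# placeholders; substitute values that match your own OpenVPN setup.
if __name__ == '__main__':
    vpn_process = None
    try:
        vpn_process = open_vpn_connection('demo_user', 'demo_pass',
                                          '/etc/openvpn', 'client.ovpn')
        # ... use the tunnel here ...
    except (InvalidCredentialsError, TimeoutError) as error:
        print('Could not establish the VPN connection: %s' % error)
    finally:
        close_vpn_connection(vpn_process)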
| napperley/OpenVPN_Tunneler | openvpn.py | Python | apache-2.0 | 1,290 | 0.003101 |
from __future__ import print_function
from future import standard_library
standard_library.install_aliases()
from builtins import str
import json, requests
import os, errno
import urllib.request, urllib.parse, urllib.error
def mkdir_p(path):
try:
os.makedirs(path)
except OSError as exc: # Python >2.5
if exc.errno == errno.EEXIST and os.path.isdir(path):
pass
else: raise
collections = []
next_url_url = "http://neurovault.org/api/collections/?format=json"
target_folder = "D:/scratch/neurovault_backup"
while next_url_url:
print("fetching %s"%next_url_url)
resp = requests.get(url=next_url_url)
data = json.loads(resp.text)
collections += [res for res in data['results'] if res['DOI'] != None]
next_url_url = data['next']
print("Fetched metadata of %d collections"%len(collections))
images_url_template = "http://neurovault.org/api/collections/%d/images/"
for collection in collections:
next_url = images_url_template%collection['id']
images = []
while next_url:
print("fetching %s"%next_url)
resp = requests.get(url=next_url)
data = json.loads(resp.text)
images += data['results']
next_url = data['next']
if len(images) == 0:
collections.remove(collection)
continue
mkdir_p(target_folder + "/%d"%collection['id'])
json.dump(images, open(target_folder + "/%d/images.json"%collection['id'], "w"), indent=4, sort_keys=True)
for image in images:
print("fetching %s"%image['file'])
try:
urllib.request.urlretrieve(image['file'], target_folder + "/%d/"%collection['id'] + str(image['id']) + ".nii.gz")
except:
print("failed to download %s"%image['file'])
json.dump(collections, open(target_folder + "/collections.json", "w"), indent=4, sort_keys=True)
| chrisfilo/NeuroVault | scripts/prepare_sdr_package.py | Python | mit | 1,853 | 0.013492 |
# Copyright (c) 2014 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.common.utils import data_utils
from tempest.scenario.data_processing.client_tests import base
from tempest import test
class JobTest(base.BaseDataProcessingTest):
def _check_create_job(self):
job_binary = {
'name': data_utils.rand_name('sahara-job-binary'),
'url': 'swift://sahara-container.sahara/example.jar',
'description': 'Test job binary',
'extra': {
'user': 'test',
'password': '123'
}
}
# create job_binary
job_binary = self.create_job_binary(**job_binary)
self.job = {
'job_type': 'Pig',
'mains': [job_binary.id]
}
job_name = data_utils.rand_name('sahara-job')
# create job
job = self.create_job(job_name, **self.job)
# check that job created successfully
self.assertEqual(job_name, job.name)
return job.id, job.name
def _check_job_list(self, job_id, job_name):
# check for job in list
job_list = self.client.jobs.list()
jobs_info = [(job.id, job.name) for job in job_list]
self.assertIn((job_id, job_name), jobs_info)
def _check_get_job(self, job_id, job_name):
# check job fetch by id
job = self.client.jobs.get(job_id)
self.assertEqual(job_name, job.name)
def _check_delete_job(self, job_id):
# delete job by id
self.client.jobs.delete(job_id)
# check that job really deleted
job_list = self.client.jobs.list()
self.assertNotIn(job_id, [job.id for job in job_list])
@test.services('data_processing')
def test_job(self):
job_id, job_name = self._check_create_job()
self._check_job_list(job_id, job_name)
self._check_get_job(job_id, job_name)
self._check_delete_job(job_id)
| esikachev/scenario | sahara/tests/tempest/scenario/data_processing/client_tests/test_jobs.py | Python | apache-2.0 | 2,479 | 0 |
#!/usr/bin/env python3
# Copyright 2021 Anapaya Systems
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import logging
import pathlib
import subprocess
import time
from typing import List
import sys
from http import client
from plumbum import cli
from acceptance.common import base
from acceptance.common import docker
from acceptance.common import scion
from python.lib import scion_addr
import toml
logger = logging.getLogger(__name__)
class Test(base.TestBase):
"""
Test that in a topology with multiple ASes, every AS is capable of
requesting renewed certificates. The test verifies that each AS has loaded
the renewed certificate.
The test is split into multiple steps:
1. Start the topology.
2. For each AS in the topology, create a new private key and request
certificate chain renewal. The renewed chain is verified against the
TRC.
3. Remove the previous private key from the control servers.
4. Ensure that the new private key and certificate are loaded by observing
the http endpoint.
5. Check connectivity with an end to end test.
6. Stop all control servers and purge the state. This includes deleting
all databases with cached data, including the path and trust database.
7. Restart control servers and check connectivity again.
"""
end2end = cli.SwitchAttr(
"end2end_integration",
str,
default="./bin/end2end_integration",
help="The end2end_integration binary " +
"(default: ./bin/end2end_integration)",
)
def main(self):
if not self.nested_command:
try:
self.setup()
# Give some time for the topology to start.
time.sleep(10)
self._run()
finally:
self.teardown()
def _run(self):
isd_ases = scion.ASList.load("%s/gen/as_list.yml" %
self.test_state.artifacts).all
cs_configs = self._cs_configs()
logger.info("==> Start renewal process")
for isd_as in isd_ases:
logging.info("===> Start renewal: %s" % isd_as)
self._renewal_request(isd_as)
logger.info("==> Check key and certificate reloads")
self._check_key_cert(cs_configs)
logger.info("==> Check connectivity")
subprocess.run(
[self.end2end, "-d", "-outDir", self.test_state.artifacts],
check=True)
logger.info("==> Shutting down control servers and purging caches")
for container in self.list_containers("scion_sd.*"):
self.test_state.dc("rm", container)
for container in self.list_containers("scion_cs.*"):
self.stop_container(container)
for cs_config in cs_configs:
files = list((pathlib.Path(self.test_state.artifacts) /
"gen-cache").glob("%s*" % cs_config.stem))
for db_file in files:
db_file.unlink()
logger.info("Deleted files: %s" % [file.name for file in files])
logger.info("==> Restart containers")
self.setup_start()
time.sleep(5)
logger.info("==> Check connectivity")
subprocess.run(
[self.end2end, "-d", "-outDir", self.test_state.artifacts],
check=True)
logger.info("==> Backup mode")
for isd_as in isd_ases:
logging.info("===> Start renewal: %s" % isd_as)
self._renewal_request(isd_as, mode="--backup")
def _renewal_request(
self,
isd_as: scion_addr.ISD_AS,
mode: str = "--force",
):
as_dir = self._to_as_dir(isd_as)
docker_dir = pathlib.Path("/share") / self._rel(as_dir)
def read_file(filename: str) -> str:
with open(as_dir / "crypto/as" / filename) as f:
return f.read()
chain_name = "ISD%s-AS%s.pem" % (isd_as.isd_str(),
isd_as.as_file_fmt())
old_chain = read_file(chain_name)
old_key = read_file("cp-as.key")
chain = docker_dir / "crypto/as" / chain_name
args = [
chain,
docker_dir / "crypto/as/cp-as.key",
mode,
"--trc",
docker_dir / "certs/ISD1-B1-S1.trc",
"--sciond",
self.execute("tester_%s" % isd_as.file_fmt(), "sh", "-c",
"echo $SCION_DAEMON").strip(),
*self._local_flags(isd_as),
]
logger.info("Requesting certificate chain renewal: %s" %
chain.relative_to(docker_dir))
logger.info(
self.execute("tester_%s" % isd_as.file_fmt(), "./bin/scion-pki",
"certificate", "renew", *args))
logger.info("Verify renewed certificate chain")
verify_out = self.execute("tester_%s" % isd_as.file_fmt(),
"./bin/scion-pki", "certificate", "verify",
chain, "--trc",
"/share/gen/trcs/ISD1-B1-S1.trc")
logger.info(str(verify_out).rstrip("\n"))
renewed_chain = read_file(chain_name)
renewed_key = read_file("cp-as.key")
if renewed_chain == old_chain:
raise Exception(
"renewed chain does not differ from previous chain")
if renewed_key == old_key:
raise Exception("renewed key does not differ from previous key")
def _check_key_cert(self, cs_configs: List[pathlib.Path]):
not_ready = [*cs_configs]
for _ in range(5):
logger.info(
"Checking if all control servers have reloaded the key and certificate..."
)
for cs_config in not_ready:
conn = client.HTTPConnection(self._http_endpoint(cs_config))
conn.request("GET", "/signer")
resp = conn.getresponse()
if resp.status != 200:
logger.info("Unexpected response: %d %s", resp.status,
resp.reason)
continue
isd_as = scion_addr.ISD_AS(cs_config.stem[2:-2])
as_dir = self._to_as_dir(isd_as)
chain_name = "ISD%s-AS%s.pem" % (isd_as.isd_str(),
isd_as.as_file_fmt())
pld = json.loads(resp.read().decode("utf-8"))
if pld["subject_key_id"] != self._extract_skid(
as_dir / "crypto/as" / chain_name):
continue
logger.info(
"Control server successfully loaded new key and certificate: %s"
% self._rel(cs_config))
not_ready.remove(cs_config)
if not not_ready:
break
time.sleep(3)
else:
logger.error(
"Control servers without reloaded key and certificate: %s" %
[cs_config.name for cs_config in not_ready])
sys.exit(1)
def _http_endpoint(self, cs_config: pathlib.Path):
with open(cs_config, "r") as f:
cfg = toml.load(f)
return cfg["metrics"]["prometheus"]
def _extract_skid(self, file: pathlib.Path):
out = subprocess.check_output(
['openssl', 'x509', '-in', file, '-noout', '-text'])
lines = out.splitlines()
for i, v in enumerate(lines):
if v.decode("utf-8").find("Subject Key Identifier") > -1:
skid = lines[i + 1].decode("utf-8").split()[-1].replace(
":", " ").upper()
break
return skid
def _rel(self, path: pathlib.Path):
return path.relative_to(pathlib.Path(self.test_state.artifacts))
def _to_as_dir(self, isd_as: scion_addr.ISD_AS) -> pathlib.Path:
return pathlib.Path("%s/gen/AS%s" %
(self.test_state.artifacts, isd_as.as_file_fmt()))
def _cs_configs(self) -> List[pathlib.Path]:
return list(
pathlib.Path("%s/gen" %
self.test_state.artifacts).glob("AS*/cs*.toml"))
def _local_flags(self, isd_as: scion_addr.ISD_AS) -> List[str]:
return [
"--local",
self.execute("tester_%s" % isd_as.file_fmt(), "sh", "-c",
"echo $SCION_LOCAL_ADDR").strip(),
]
if __name__ == "__main__":
base.register_commands(Test)
base.TestBase.test_state = base.TestState(scion.SCIONDocker(),
docker.Compose())
Test.run()
| netsec-ethz/scion | acceptance/cert_renewal/test.py | Python | apache-2.0 | 9,210 | 0.000326 |
from beerlog import app
app.run(debug=True)
| toddself/beerlog2 | runserver.py | Python | apache-2.0 | 44 | 0 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""A twisted UDP interface that is similar to the built-in socket interface."""
import traceback
from twisted.internet import reactor
from twisted.internet.protocol import DatagramProtocol
class UDPSocket(DatagramProtocol):
def __init__(self, host, port):
self.host = host
self.port = port
self.task = None
reactor.callWhenRunning(self.connect)
def connect(self):
self.task = reactor.listenUDP(0, self)
def connectTransport(self, ip):
self.transport.connect(ip, self.port)
def startProtocol(self):
"""Start the protocol. Resolve the host in case it is a hostname,
then call connect on the resulting ip and configured port."""
reactor.resolve(self.host).addCallback(self.connectTransport)
def sendto(self, msg, addr):
# ignore the addr, because we only send to one place
try:
self.transport.write(msg)
except AttributeError:
# trying to log before twisted is running, nothing we can really do
pass
except AssertionError:
# trying to log before connection yields an assertion error
pass
def stopProtocol(self):
self.task.stopListening()
DatagramProtocol.stopProtocol(self)
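

# Minimal usage sketch -- not part of the original module. The host, port,
# and payload below are hypothetical; they only show how the class plugs into
# the twisted reactor (sendto() silently drops data until the connection is
# established, per the error handling above).
if __name__ == '__main__':
    sock = UDPSocket('localhost', 8126)
    # give the resolve/connect a moment before writing, then shut down
    reactor.callLater(1, sock.sendto, b'pylogd.example:1|c', None)
    reactor.callLater(2, reactor.stop)
    reactor.run()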
| hiidef/pylogd | pylogd/twisted/socket.py | Python | mit | 1,335 | 0.001498 |
# Copyright 2016 Casey Jaymes
# This file is part of PySCAP.
#
# PySCAP is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PySCAP is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with PySCAP. If not, see <http://www.gnu.org/licenses/>.
import logging
from scap.Model import Model
logger = logging.getLogger(__name__)
class OvalVariablesElement(Model):
MODEL_MAP = {
'tag_name' : 'oval_variables',
'elements': [
{'tag_name': 'generator', 'class': 'scap.model.oval_5.GeneratorType'},
{'tag_name': 'variables', 'class': 'VariablesType', 'min': 0, 'max': 1},
{'xmlns': 'http://www.w3.org/2000/09/xmldsig#', 'tag_name': 'Signature', 'min': 0, 'max': 1},
],
}
| cjaymes/pyscap | src/scap/model/oval_5/var/OvalVariablesElement.py | Python | gpl-3.0 | 1,169 | 0.004277 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.1 on 2017-07-27 13:08
from __future__ import unicode_literals
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('comments', '0003_auto_20170726_1348'),
]
operations = [
migrations.AlterField(
model_name='comment',
name='parent',
field=models.ForeignKey(default=-1, null=True, on_delete=django.db.models.deletion.CASCADE,
to='comments.Comment'),
),
]
| goyal-sidd/BLT | comments/migrations/0004_auto_20170727_1308.py | Python | agpl-3.0 | 598 | 0.001672 |
"""
fixtures and setup for testing the sim_setup subpackage
"""
import pytest
@pytest.fixture(scope='function') # the created directories should be unique
def molecule(path_test_data, tmp_path):
from paratemp import cd
from paratemp.sim_setup import Molecule
    path_mol2 = path_test_data / 'water.mol2'
    # Note: this instantiation will make a new directory!
    with cd(tmp_path):
        mol = Molecule(path_mol2)
return mol, tmp_path
@pytest.fixture
def molecule_w_params(molecule):
mol, tmp_path = molecule
mol.parameterize()
return mol, tmp_path
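

# Illustrative sketch of how these fixtures might be consumed -- not part of
# the original module; a real test would live in a test_*.py module rather
# than in conftest.py. Only the (mol, tmp_path) tuple returned above is
# assumed here.
def _example_molecule_usage(molecule):
    mol, tmp_path = molecule
    assert tmp_path.exists()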
| theavey/ParaTemp | tests/test_sim_setup/conftest.py | Python | apache-2.0 | 584 | 0 |
# -*- encoding: utf-8 -*-
# Copyright (C) 2015 Alejandro López Espinosa (kudrom)
import datetime
import random
class Date(object):
"""
Descriptor for a date datum
"""
def __init__(self, variance, **kwargs):
"""
@param variance is the maximum variance of time
allowed for the generation of random data.
"""
self.variance = variance
def generate(self):
"""
Generates random data for the descriptor.
This is called by the DataSchemaManager.generate
"""
now = datetime.datetime.now().strftime("%s")
return int(now) + random.randrange(0, self.variance)
def validate(self, data):
"""
Validates @param data against the descriptor.
This is called by the DataSchemaManager.validate
"""
return True
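

# Minimal usage sketch -- not part of the original module. A variance of
# 3600 seconds is an arbitrary illustrative value.
if __name__ == '__main__':
    descriptor = Date(variance=3600)
    sample = descriptor.generate()
    print(sample, descriptor.validate(sample))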
| kudrom/lupulo | lupulo/descriptors/date.py | Python | gpl-2.0 | 883 | 0 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Tests for the timemachine plist plugin."""
import unittest
from plaso.parsers.plist_plugins import timemachine
from tests.parsers.plist_plugins import test_lib
class TimeMachinePluginTest(test_lib.PlistPluginTestCase):
"""Tests for the timemachine plist plugin."""
def testProcess(self):
"""Tests the Process function."""
plist_name = 'com.apple.TimeMachine.plist'
plugin = timemachine.TimeMachinePlugin()
storage_writer = self._ParsePlistFileWithPlugin(
plugin, [plist_name], plist_name)
self.assertEqual(storage_writer.number_of_warnings, 0)
self.assertEqual(storage_writer.number_of_events, 13)
# The order in which PlistParser generates events is nondeterministic
# hence we sort the events.
events = list(storage_writer.GetSortedEvents())
expected_timestamps = [
1379165051000000, 1380098455000000, 1380810276000000, 1381883538000000,
1382647890000000, 1383351739000000, 1384090020000000, 1385130914000000,
1386265911000000, 1386689852000000, 1387723091000000, 1388840950000000,
1388842718000000]
timestamps = sorted([event.timestamp for event in events])
self.assertEqual(timestamps, expected_timestamps)
expected_event_values = {
'data_type': 'plist:key',
'desc': (
'TimeMachine Backup in BackUpFast '
'(5B33C22B-A4A1-4024-A2F5-C9979C4AAAAA)'),
'key': 'item/SnapshotDates',
'root': '/Destinations'}
self.CheckEventValues(storage_writer, events[1], expected_event_values)
if __name__ == '__main__':
unittest.main()
| Onager/plaso | tests/parsers/plist_plugins/timemachine.py | Python | apache-2.0 | 1,644 | 0.001825 |
# from . import test_partner_import
from . import test_product_import
| iw3hxn/LibrERP | data_migration/tests/__init__.py | Python | agpl-3.0 | 70 | 0 |
"""simpleneed URL Configuration.
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from .views import (
NeedLocationViewSet, ContactViewSet, MoodViewSet, GenderViewSet,
NeedViewSet, RoamViewSet, StatsViewSet, LocatedElementViewSet,
MessageViewSet, SupplyLocationViewSet
)
from rest_framework import routers
router = routers.DefaultRouter()
router.register(r'moods', MoodViewSet)
router.register(r'needs', NeedViewSet)
router.register(r'genders', GenderViewSet)
router.register(r'needlocations', NeedLocationViewSet)
router.register(r'contacts', ContactViewSet)
router.register(r'roams', RoamViewSet)
router.register(r'stats', StatsViewSet)
router.register(r'messages', MessageViewSet)
router.register(r'locatedelements', LocatedElementViewSet)
router.register(r'supplylocations', SupplyLocationViewSet)
urlpatterns = [url(r'', include(router.urls))]
| b3j0f/simpleneed | www/simpleneed/urls.py | Python | mit | 1,471 | 0 |
from django.utils.datastructures import SortedDict
from bencode import bencode, bdecode
def sort_dict(D):
result = SortedDict()
for key in sorted(D.keys()):
if type(D[key]) is dict:
D[key] = sort_dict(D[key])
result[key] = D[key]
return result | abshkd/benzene | torrents/utils/__init__.py | Python | bsd-3-clause | 254 | 0.031496 |
from jsonrpc import ServiceProxy
import sys
import string
# ===== BEGIN USER SETTINGS =====
# if you do not set these you will be prompted for a password for every command
rpcuser = ""
rpcpass = ""
# ====== END USER SETTINGS ======
if rpcpass == "":
access = ServiceProxy("http://127.0.0.1:4496")
else:
access = ServiceProxy("http://"+rpcuser+":"+rpcpass+"@127.0.0.1:4496")
cmd = sys.argv[1].lower()
if cmd == "backupwallet":
try:
path = raw_input("Enter destination path/filename: ")
print access.backupwallet(path)
except:
print "\n---An error occurred---\n"
elif cmd == "getaccount":
try:
addr = raw_input("Enter a Corecoin address: ")
print access.getaccount(addr)
except:
print "\n---An error occurred---\n"
elif cmd == "getaccountaddress":
try:
acct = raw_input("Enter an account name: ")
print access.getaccountaddress(acct)
except:
print "\n---An error occurred---\n"
elif cmd == "getaddressesbyaccount":
try:
acct = raw_input("Enter an account name: ")
print access.getaddressesbyaccount(acct)
except:
print "\n---An error occurred---\n"
elif cmd == "getbalance":
try:
acct = raw_input("Enter an account (optional): ")
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.getbalance(acct, mc)
except:
print access.getbalance()
except:
print "\n---An error occurred---\n"
elif cmd == "getblockbycount":
try:
height = raw_input("Height: ")
print access.getblockbycount(height)
except:
print "\n---An error occurred---\n"
elif cmd == "getblockcount":
try:
print access.getblockcount()
except:
print "\n---An error occurred---\n"
elif cmd == "getblocknumber":
try:
print access.getblocknumber()
except:
print "\n---An error occurred---\n"
elif cmd == "getconnectioncount":
try:
print access.getconnectioncount()
except:
print "\n---An error occurred---\n"
elif cmd == "getdifficulty":
try:
print access.getdifficulty()
except:
print "\n---An error occurred---\n"
elif cmd == "getgenerate":
try:
print access.getgenerate()
except:
print "\n---An error occurred---\n"
elif cmd == "gethashespersec":
try:
print access.gethashespersec()
except:
print "\n---An error occurred---\n"
elif cmd == "getinfo":
try:
print access.getinfo()
except:
print "\n---An error occurred---\n"
elif cmd == "getnewaddress":
try:
acct = raw_input("Enter an account name: ")
try:
print access.getnewaddress(acct)
except:
print access.getnewaddress()
except:
print "\n---An error occurred---\n"
elif cmd == "getreceivedbyaccount":
try:
acct = raw_input("Enter an account (optional): ")
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.getreceivedbyaccount(acct, mc)
except:
print access.getreceivedbyaccount()
except:
print "\n---An error occurred---\n"
elif cmd == "getreceivedbyaddress":
try:
addr = raw_input("Enter a Corecoin address (optional): ")
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.getreceivedbyaddress(addr, mc)
except:
print access.getreceivedbyaddress()
except:
print "\n---An error occurred---\n"
elif cmd == "gettransaction":
try:
txid = raw_input("Enter a transaction ID: ")
print access.gettransaction(txid)
except:
print "\n---An error occurred---\n"
elif cmd == "getwork":
try:
data = raw_input("Data (optional): ")
try:
            print access.getwork(data)
        except:
            print access.getwork()
except:
print "\n---An error occurred---\n"
elif cmd == "help":
try:
cmd = raw_input("Command (optional): ")
try:
print access.help(cmd)
except:
print access.help()
except:
print "\n---An error occurred---\n"
elif cmd == "listaccounts":
try:
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.listaccounts(mc)
except:
print access.listaccounts()
except:
print "\n---An error occurred---\n"
elif cmd == "listreceivedbyaccount":
try:
mc = raw_input("Minimum confirmations (optional): ")
incemp = raw_input("Include empty? (true/false, optional): ")
try:
print access.listreceivedbyaccount(mc, incemp)
except:
print access.listreceivedbyaccount()
except:
print "\n---An error occurred---\n"
elif cmd == "listreceivedbyaddress":
try:
mc = raw_input("Minimum confirmations (optional): ")
incemp = raw_input("Include empty? (true/false, optional): ")
try:
print access.listreceivedbyaddress(mc, incemp)
except:
print access.listreceivedbyaddress()
except:
print "\n---An error occurred---\n"
elif cmd == "listtransactions":
try:
acct = raw_input("Account (optional): ")
count = raw_input("Number of transactions (optional): ")
frm = raw_input("Skip (optional):")
try:
print access.listtransactions(acct, count, frm)
except:
print access.listtransactions()
except:
print "\n---An error occurred---\n"
elif cmd == "move":
try:
frm = raw_input("From: ")
to = raw_input("To: ")
amt = raw_input("Amount:")
mc = raw_input("Minimum confirmations (optional): ")
comment = raw_input("Comment (optional): ")
try:
print access.move(frm, to, amt, mc, comment)
except:
print access.move(frm, to, amt)
except:
print "\n---An error occurred---\n"
elif cmd == "sendfrom":
try:
frm = raw_input("From: ")
to = raw_input("To: ")
amt = raw_input("Amount:")
mc = raw_input("Minimum confirmations (optional): ")
comment = raw_input("Comment (optional): ")
commentto = raw_input("Comment-to (optional): ")
try:
print access.sendfrom(frm, to, amt, mc, comment, commentto)
except:
print access.sendfrom(frm, to, amt)
except:
print "\n---An error occurred---\n"
elif cmd == "sendmany":
try:
frm = raw_input("From: ")
to = raw_input("To (in format address1:amount1,address2:amount2,...): ")
mc = raw_input("Minimum confirmations (optional): ")
comment = raw_input("Comment (optional): ")
try:
print access.sendmany(frm,to,mc,comment)
except:
print access.sendmany(frm,to)
except:
print "\n---An error occurred---\n"
elif cmd == "sendtoaddress":
try:
to = raw_input("To (in format address1:amount1,address2:amount2,...): ")
amt = raw_input("Amount:")
comment = raw_input("Comment (optional): ")
commentto = raw_input("Comment-to (optional): ")
try:
print access.sendtoaddress(to,amt,comment,commentto)
except:
print access.sendtoaddress(to,amt)
except:
print "\n---An error occurred---\n"
elif cmd == "setaccount":
try:
addr = raw_input("Address: ")
acct = raw_input("Account:")
print access.setaccount(addr,acct)
except:
print "\n---An error occurred---\n"
elif cmd == "setgenerate":
try:
gen= raw_input("Generate? (true/false): ")
cpus = raw_input("Max processors/cores (-1 for unlimited, optional):")
try:
print access.setgenerate(gen, cpus)
except:
print access.setgenerate(gen)
except:
print "\n---An error occurred---\n"
elif cmd == "settxfee":
try:
amt = raw_input("Amount:")
print access.settxfee(amt)
except:
print "\n---An error occurred---\n"
elif cmd == "stop":
try:
print access.stop()
except:
print "\n---An error occurred---\n"
elif cmd == "validateaddress":
try:
addr = raw_input("Address: ")
print access.validateaddress(addr)
except:
print "\n---An error occurred---\n"
elif cmd == "walletpassphrase":
try:
pwd = raw_input("Enter wallet passphrase: ")
access.walletpassphrase(pwd, 60)
print "\n---Wallet unlocked---\n"
except:
print "\n---An error occurred---\n"
elif cmd == "walletpassphrasechange":
try:
pwd = raw_input("Enter old wallet passphrase: ")
pwd2 = raw_input("Enter new wallet passphrase: ")
access.walletpassphrasechange(pwd, pwd2)
print
print "\n---Passphrase changed---\n"
except:
print
print "\n---An error occurred---\n"
print
else:
print "Command not found or not supported"
| TimMaylon/corecoin | contrib/bitrpc/bitrpc.py | Python | mit | 7,838 | 0.038147 |
from __future__ import unicode_literals
from django.http import Http404
class Resolver404(Http404):
pass
class NoReverseMatch(Exception):
pass
| yephper/django | django/urls/exceptions.py | Python | bsd-3-clause | 167 | 0 |
"""
.. module: lemur.decorators
:copyright: (c) 2015 by Netflix Inc., see AUTHORS for more
:license: Apache, see LICENSE for more details.
"""
from builtins import str
from datetime import timedelta
from flask import make_response, request, current_app
from functools import update_wrapper
# this is only used for dev
def crossdomain(origin=None, methods=None, headers=None,
max_age=21600, attach_to_all=True,
automatic_options=True): # pragma: no cover
if methods is not None:
methods = ', '.join(sorted(x.upper() for x in methods))
if headers is not None and not isinstance(headers, str):
headers = ', '.join(x.upper() for x in headers)
if not isinstance(origin, str):
origin = ', '.join(origin)
if isinstance(max_age, timedelta):
max_age = max_age.total_seconds()
def get_methods():
if methods is not None:
return methods
options_resp = current_app.make_default_options_response()
return options_resp.headers['allow']
def decorator(f):
def wrapped_function(*args, **kwargs):
if automatic_options and request.method == 'OPTIONS':
resp = current_app.make_default_options_response()
else:
resp = make_response(f(*args, **kwargs))
if not attach_to_all and request.method != 'OPTIONS':
return resp
h = resp.headers
h['Access-Control-Allow-Origin'] = origin
h['Access-Control-Allow-Methods'] = get_methods()
h['Access-Control-Max-Age'] = str(max_age)
h['Access-Control-Allow-Headers'] = "Origin, X-Requested-With, Content-Type, Accept, Authorization "
h['Access-Control-Allow-Credentials'] = 'true'
return resp
f.provide_automatic_options = False
return update_wrapper(wrapped_function, f)
return decorator
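

# Illustrative usage sketch -- not part of the original module. The Flask app
# and route below are hypothetical; they only show how the decorator is meant
# to be stacked under @app.route during development.
if __name__ == '__main__':  # pragma: no cover
    from flask import Flask

    app = Flask(__name__)

    @app.route('/status', methods=['GET', 'OPTIONS'])
    @crossdomain(origin='*', methods=['GET'], automatic_options=True)
    def status():
        return 'ok'

    app.run(port=5001)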
| shaunstanislaus/lemur | lemur/decorators.py | Python | apache-2.0 | 1,945 | 0.000514 |
from models import Event, EventRole
from common.adminForms import CommonAdminForm
class EventAdminForm(CommonAdminForm):
class Meta(CommonAdminForm.Meta):
model = Event
class EventRoleAdminForm(CommonAdminForm):
class Meta(CommonAdminForm.Meta):
model = EventRole
| alfred82santa/tarrabme2 | src/events/adminForms.py | Python | gpl-3.0 | 298 | 0.010067 |