| text | repo_name | path | language | license | size | score |
|---|---|---|---|---|---|---|
| stringlengths 6-947k | stringlengths 5-100 | stringlengths 4-231 | stringclasses 1 value | stringclasses 15 values | int64 6-947k | float64 0-0.34 |
#!/usr/bin/python
#
# This is a free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This Ansible library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this library. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: ec2_eni
short_description: Create and optionally attach an Elastic Network Interface (ENI) to an instance
description:
- Create and optionally attach an Elastic Network Interface (ENI) to an instance. If an ENI ID is provided, an attempt is made to update the existing ENI. By passing 'None' as the instance_id, an ENI can be detached from an instance.
version_added: "2.0"
author: Rob White, wimnat [at] gmail.com, @wimnat
options:
eni_id:
description:
- The ID of the ENI
required: false
default: null
instance_id:
description:
- Instance ID that you wish to attach ENI to. To detach an ENI from an instance, use 'None'.
required: false
default: null
private_ip_address:
description:
- Private IP address.
required: false
default: null
subnet_id:
description:
- ID of subnet in which to create the ENI. Only required when state=present.
required: false
description:
description:
- Optional description of the ENI.
required: false
default: null
security_groups:
description:
- List of security groups associated with the interface. Only used when state=present.
required: false
default: null
state:
description:
- Create or delete ENI.
required: false
default: present
choices: [ 'present', 'absent' ]
device_index:
description:
- The index of the device for the network interface attachment on the instance.
required: false
default: 0
force_detach:
description:
- Force detachment of the interface. This applies either when explicitly detaching the interface by setting instance_id to None or when deleting an interface with state=absent.
required: false
default: no
delete_on_termination:
description:
- Delete the interface when the instance it is attached to is terminated. You can only specify this flag when the interface is being modified, not on creation.
required: false
source_dest_check:
description:
- By default, interfaces perform source/destination checks. NAT instances however need this check to be disabled. You can only specify this flag when the interface is being modified, not on creation.
required: false
extends_documentation_fragment:
- aws
- ec2
'''
EXAMPLES = '''
# Note: These examples do not set authentication details, see the AWS Guide for details.
# Create an ENI. As no security group is defined, ENI will be created in default security group
- ec2_eni:
private_ip_address: 172.31.0.20
subnet_id: subnet-xxxxxxxx
state: present
# Create an ENI and attach it to an instance
- ec2_eni:
instance_id: i-xxxxxxx
device_index: 1
private_ip_address: 172.31.0.20
subnet_id: subnet-xxxxxxxx
state: present
# Destroy an ENI, detaching it from any instance if necessary
- ec2_eni:
eni_id: eni-xxxxxxx
force_detach: yes
state: absent
# Update an ENI
- ec2_eni:
eni_id: eni-xxxxxxx
description: "My new description"
state: present
# Detach an ENI from an instance
- ec2_eni:
eni_id: eni-xxxxxxx
instance_id: None
state: present
### Delete an interface on termination
# First create the interface
- ec2_eni:
instance_id: i-xxxxxxx
device_index: 1
private_ip_address: 172.31.0.20
subnet_id: subnet-xxxxxxxx
state: present
register: eni
# Modify the interface to enable the delete_on_termination flag
- ec2_eni:
eni_id: "{{ eni.interface.id }}"
delete_on_termination: true
'''
import time
import xml.etree.ElementTree as ET
import re
try:
import boto.ec2
from boto.exception import BotoServerError
HAS_BOTO = True
except ImportError:
HAS_BOTO = False
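# BotoServerError exposes the raw AWS XML error body as e.args[2]; this helper
# extracts the text of the first <Message> element so fail_json can report a
# readable error string.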
def get_error_message(xml_string):
root = ET.fromstring(xml_string)
for message in root.findall('.//Message'):
return message.text
def get_eni_info(interface):
interface_info = {'id': interface.id,
'subnet_id': interface.subnet_id,
'vpc_id': interface.vpc_id,
'description': interface.description,
'owner_id': interface.owner_id,
'status': interface.status,
'mac_address': interface.mac_address,
'private_ip_address': interface.private_ip_address,
'source_dest_check': interface.source_dest_check,
'groups': dict((group.id, group.name) for group in interface.groups),
}
if interface.attachment is not None:
interface_info['attachment'] = {'attachment_id': interface.attachment.id,
'instance_id': interface.attachment.instance_id,
'device_index': interface.attachment.device_index,
'status': interface.attachment.status,
'attach_time': interface.attachment.attach_time,
'delete_on_termination': interface.attachment.delete_on_termination,
}
return interface_info
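# Poll the ENI every few seconds until it reaches the requested state:
# "detached" means the attachment has disappeared, "attached" means the
# attachment reports an "attached" status. Note: there is no timeout here.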
def wait_for_eni(eni, status):
while True:
time.sleep(3)
eni.update()
# If the status is detached we just need attachment to disappear
if eni.attachment is None:
if status == "detached":
break
else:
if status == "attached" and eni.attachment.status == "attached":
break
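# Create the ENI (reusing an existing one if compare_eni() finds a match),
# optionally attach it to an instance, and report the result back to Ansible.
# If the attach call fails, the freshly created ENI is deleted before re-raising.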
def create_eni(connection, module):
instance_id = module.params.get("instance_id")
if instance_id == 'None':
instance_id = None
device_index = module.params.get("device_index")
subnet_id = module.params.get('subnet_id')
private_ip_address = module.params.get('private_ip_address')
description = module.params.get('description')
security_groups = module.params.get('security_groups')
changed = False
try:
eni = compare_eni(connection, module)
if eni is None:
eni = connection.create_network_interface(subnet_id, private_ip_address, description, security_groups)
if instance_id is not None:
try:
eni.attach(instance_id, device_index)
except BotoServerError:
eni.delete()
raise
# Wait to allow creation / attachment to finish
wait_for_eni(eni, "attached")
eni.update()
changed = True
except BotoServerError as e:
module.fail_json(msg=get_error_message(e.args[2]))
module.exit_json(changed=changed, interface=get_eni_info(eni))
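# Update description, security groups, source/dest check and delete-on-termination
# on an existing ENI, and attach or detach it: an instance_id of 'None' detaches,
# a real instance id attaches at the requested device_index.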
def modify_eni(connection, module):
eni_id = module.params.get("eni_id")
instance_id = module.params.get("instance_id")
if instance_id == 'None':
instance_id = None
do_detach = True
else:
do_detach = False
device_index = module.params.get("device_index")
description = module.params.get('description')
security_groups = module.params.get('security_groups')
force_detach = module.params.get("force_detach")
source_dest_check = module.params.get("source_dest_check")
delete_on_termination = module.params.get("delete_on_termination")
changed = False
try:
# Get the eni with the eni_id specified
eni_result_set = connection.get_all_network_interfaces(eni_id)
eni = eni_result_set[0]
if description is not None:
if eni.description != description:
connection.modify_network_interface_attribute(eni.id, "description", description)
changed = True
if security_groups is not None:
if sorted(get_sec_group_list(eni.groups)) != sorted(security_groups):
connection.modify_network_interface_attribute(eni.id, "groupSet", security_groups)
changed = True
if source_dest_check is not None:
if eni.source_dest_check != source_dest_check:
connection.modify_network_interface_attribute(eni.id, "sourceDestCheck", source_dest_check)
changed = True
if delete_on_termination is not None:
if eni.attachment is not None:
if eni.attachment.delete_on_termination is not delete_on_termination:
connection.modify_network_interface_attribute(eni.id, "deleteOnTermination", delete_on_termination, eni.attachment.id)
changed = True
else:
module.fail_json(msg="Can not modify delete_on_termination as the interface is not attached")
if eni.attachment is not None and instance_id is None and do_detach is True:
eni.detach(force_detach)
wait_for_eni(eni, "detached")
changed = True
else:
if instance_id is not None:
eni.attach(instance_id, device_index)
wait_for_eni(eni, "attached")
changed = True
except BotoServerError as e:
module.fail_json(msg=get_error_message(e.args[2]))
eni.update()
module.exit_json(changed=changed, interface=get_eni_info(eni))
def delete_eni(connection, module):
eni_id = module.params.get("eni_id")
force_detach = module.params.get("force_detach")
try:
eni_result_set = connection.get_all_network_interfaces(eni_id)
eni = eni_result_set[0]
if force_detach is True:
if eni.attachment is not None:
eni.detach(force_detach)
# Wait to allow detachment to finish
wait_for_eni(eni, "detached")
eni.update()
eni.delete()
changed = True
else:
eni.delete()
changed = True
module.exit_json(changed=changed)
except BotoServerError as e:
msg = get_error_message(e.args[2])
regex = re.compile('The networkInterface ID \'.*\' does not exist')
if regex.search(msg) is not None:
module.exit_json(changed=False)
else:
module.fail_json(msg=get_error_message(e.args[2]))
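# Return an existing ENI whose subnet, private IP, description and security
# groups all match the requested parameters (used by create_eni for idempotency),
# or None if no such interface exists.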
def compare_eni(connection, module):
eni_id = module.params.get("eni_id")
subnet_id = module.params.get('subnet_id')
private_ip_address = module.params.get('private_ip_address')
description = module.params.get('description')
security_groups = module.params.get('security_groups')
try:
all_eni = connection.get_all_network_interfaces(eni_id)
for eni in all_eni:
remote_security_groups = get_sec_group_list(eni.groups)
if (eni.subnet_id == subnet_id) and (eni.private_ip_address == private_ip_address) and (eni.description == description) and (remote_security_groups == security_groups):
return eni
except BotoServerError as e:
module.fail_json(msg=get_error_message(e.args[2]))
return None
def get_sec_group_list(groups):
# Build list of remote security groups
remote_security_groups = []
for group in groups:
remote_security_groups.append(group.id.encode())
return remote_security_groups
def main():
argument_spec = ec2_argument_spec()
argument_spec.update(
dict(
eni_id = dict(default=None),
instance_id = dict(default=None),
private_ip_address = dict(),
subnet_id = dict(),
description = dict(),
security_groups = dict(type='list'),
device_index = dict(default=0, type='int'),
state = dict(default='present', choices=['present', 'absent']),
force_detach = dict(default='no', type='bool'),
source_dest_check = dict(default=None, type='bool'),
delete_on_termination = dict(default=None, type='bool')
)
)
module = AnsibleModule(argument_spec=argument_spec)
if not HAS_BOTO:
module.fail_json(msg='boto required for this module')
region, ec2_url, aws_connect_params = get_aws_connection_info(module)
if region:
try:
connection = connect_to_aws(boto.ec2, region, **aws_connect_params)
except (boto.exception.NoAuthHandlerFound, AnsibleAWSError), e:
module.fail_json(msg=str(e))
else:
module.fail_json(msg="region must be specified")
state = module.params.get("state")
eni_id = module.params.get("eni_id")
if state == 'present':
if eni_id is None:
if module.params.get("subnet_id") is None:
module.fail_json(msg="subnet_id must be specified when state=present")
create_eni(connection, module)
else:
modify_eni(connection, module)
elif state == 'absent':
if eni_id is None:
module.fail_json(msg="eni_id must be specified")
else:
delete_eni(connection, module)
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *
# this is magic, see lib/ansible/module_common.py
#<<INCLUDE_ANSIBLE_MODULE_COMMON>>
if __name__ == '__main__':
main()
| chepazzo/ansible-modules-extras | cloud/amazon/ec2_eni.py | Python | gpl-3.0 | 13,881 | 0.004322 |
from django.conf.urls import include, url
from django.contrib import admin
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'', include('dju_privateurl.urls')),
]
| liminspace/dju-privateurl | tests/urls.py | Python | mit | 188 | 0 |
from patients.models import Patient, Next_of_Kin, Vitals, Visits, Diagnosis, Medication, History, Documents
from django import forms
class PatientForm(forms.ModelForm):
class Meta:
model = Patient
class Next_of_KinForm(forms.ModelForm):
class Meta:
model = Next_of_Kin
class VitalsForm(forms.ModelForm):
class Meta:
model = Vitals
class VisitsForm(forms.ModelForm):
class Meta:
model = Visits
class DiagnosisForm(forms.ModelForm):
class Meta:
model = Diagnosis
class MedicationForm(forms.ModelForm):
class Meta:
model = Medication
class HistoryForm(forms.ModelForm):
class Meta:
model = History
class DocumentsForm(forms.ModelForm):
class Meta:
model = Documents
| ianjuma/usiu-app-dir | benchcare/patients/forms.py | Python | gpl-2.0 | 775 | 0.00129 |
# http://rosalind.info/problems/long/
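# Greedy shortest-superstring assembly: repeatedly merge into the accumulator
# any read that overlaps one of its ends by more than half the read's length
# (the problem guarantees such an overlap always exists).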
def superstring(arr, accumulator=''):
# We now have all strings
if len(arr) == 0:
return accumulator
# Initial call
elif len(accumulator) == 0:
accumulator = arr.pop(0)
return superstring(arr, accumulator)
# Recursive call
else:
for i in range(len(arr)):
sample = arr[i]
l = len(sample)
for p in range(l / 2):
q = l - p
if accumulator.startswith(sample[p:]):
arr.pop(i)
return superstring(arr, sample[:p] + accumulator)
if accumulator.endswith(sample[:q]):
arr.pop(i)
return superstring(arr, accumulator + sample[q:])
f = open("rosalind_long.txt", "r")
dnas = {}
currentKey = ''
for content in f:
# Beginning of a new sample
if '>' in content:
key = content.rstrip().replace('>', '')
currentKey = key
dnas[currentKey] = ''
else:
dnas[currentKey] += content.rstrip()
print superstring(dnas.values())
| AntoineAugusti/katas | rosalind/long.py | Python | mit | 1,108 | 0.001805 |
import glob
import os
from os.path import join
import numpy as n
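# For each SDSS plate, write a PBS batch script that sets up the Python
# environment and runs combine_model_spectra.py once for every spFly*.fits
# file found under that plate's stellarpop directory.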
def writeScript(rootName, plate, env):
f=open(rootName+".sh",'w')
f.write("#!/bin/bash \n")
f.write("#PBS -l walltime=40:00:00 \n")
f.write("#PBS -o "+plate+".o.$PBS_JOBID \n")
f.write("#PBS -e "+plate+".e$PBS_JOBID \n")
f.write("#PBS -M comparat@mpe.mpg.de \n")
f.write("module load apps/anaconda/2.4.1 \n")
f.write("module load apps/python/2.7.8/gcc-4.4.7 \n")
f.write("export PYTHONPATH=$PYTHONPATH:/users/comparat/pySU/galaxy/python/ \n")
f.write("export PYTHONPATH=$PYTHONPATH:/users/comparat/pySU/spm/python/ \n")
f.write(" \n")
f.write("cd /users/comparat/pySU/spm/bin \n")
specList = n.array(glob.glob(os.path.join(os.environ[env], 'stellarpop-m11-chabrier', 'stellarpop', plate, 'spFly*.fits')))
data = n.array([os.path.basename(specName).split('-') for specName in specList])
for el in data :
f.write("python combine_model_spectra.py "+el[1]+" "+el[2]+" "+el[3]+" "+env+" \n")
f.write(" \n")
f.close()
env="SDSSDR12_DIR"
plates = n.loadtxt( join(os.environ[env], "catalogs", "plateNumberList"), unpack=True, dtype='str')
for plate in plates:
rootName = join(os.environ['HOME'], "batch_combine_sdss", plate)
writeScript(rootName, plate, env)
| JohanComparat/pySU | spm/bin/combine_model_spectra_write_scripts.py | Python | cc0-1.0 | 1,246 | 0.028892 |
#coding:utf-8
import functools
from cactus.utils.internal import getargspec
from cactus.plugin import defaults
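# Dispatcher over the loaded plugins: every hook name in defaults.DEFAULTS that
# is not defined explicitly on the manager is exposed as a partial of call(),
# which invokes that method on each plugin in ORDER-sorted sequence.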
class PluginManager(object):
def __init__(self, site, loaders):
self.site = site
self.loaders = loaders
self.reload()
for plugin_method in defaults.DEFAULTS:
if not hasattr(self, plugin_method):
setattr(self, plugin_method, functools.partial(self.call, plugin_method))
def reload(self):
plugins = []
for loader in self.loaders:
plugins.extend(loader.load())
self.plugins = sorted(plugins, key=lambda plugin: plugin.ORDER)
def call(self, method, *args, **kwargs):
"""
Call each plugin
"""
for plugin in self.plugins:
_meth = getattr(plugin, method)
_meth(*args, **kwargs)
def preBuildPage(self, site, page, context, data):
"""
Special call as we have changed the API for this.
We have two calling conventions:
- The new one, which passes page, context, data
- The deprecated one, which also passes the site (Now accessible via the page)
"""
for plugin in self.plugins:
# Find the correct calling convention
new = [page, context, data]
deprecated = [site, page, context, data]
arg_lists = dict((len(l), l) for l in [deprecated, new])
try:
# Try to find the best calling convention
n_args = len(getargspec(plugin.preBuildPage).args)
# Just use the new calling convention if there's fancy usage of
# *args, **kwargs that we can't control.
arg_list = arg_lists.get(n_args, new)
except NotImplementedError:
# If we can't get the number of args, use the new one.
arg_list = new
# Call with the best calling convention we have.
# If that doesn't work, then we'll let the error escalate.
context, data = plugin.preBuildPage(*arg_list)
return context, data
| ibarria0/Cactus | cactus/plugin/manager.py | Python | bsd-3-clause | 2,122 | 0.001885 |
#
# NineMSN CatchUp TV Video API Library
#
# This code is forked from Network Ten CatchUp TV Video API Library
# Copyright (c) 2013 Adam Malcontenti-Wilson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
from brightcove.core import APIObject, Field, DateTimeField, ListField, EnumField
from brightcove.objects import ItemCollection, enum
ChannelNameEnum = enum('ten', 'eleven', 'one')
PlaylistTypeEnum = enum('full_episodes', 'web_extras', 'news', 'season', 'week', 'category', 'special', 'preview')
MediaDeliveryEnum = enum('default', 'http', 'http_ios')
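# Brightcove returns mediaDeliveryType as an integer; EnumNumField translates
# that integer index into the matching member name of the enum supplied at
# construction time.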
class EnumNumField(Field):
def __init__(self, enum_cls, help=None):
self.help = help
self.enum_cls = enum_cls
def to_python(self, value):
for i, field in enumerate(self.enum_cls._fields):
if i == value:
return field
raise Exception('Invalid Enum: %s' % value)
def from_python(self, value):
return self.enum_cls._fields[value]
class Playlist(APIObject):
_fields = ['name', 'type', 'season', 'week', 'query']
type = EnumField(PlaylistTypeEnum)
def __repr__(self):
return '<Playlist name=\'{0}\'>'.format(self.name)
class Show(APIObject):
_fields = ['showName', 'channelName', 'videoLink', 'mobileLink', 'logo', 'fanart', 'playlists']
channelName = EnumField(ChannelNameEnum)
playlists = ListField(Playlist)
def __repr__(self):
return '<Show name=\'{0}\'>'.format(self.showName)
class AMFRendition(APIObject):
_fields = ['defaultURL', 'audioOnly', 'mediaDeliveryType', 'encodingRate',
'frameHeight', 'frameWidth', 'size',
'videoCodec', 'videoContainer']
mediaDeliveryType = EnumNumField(MediaDeliveryEnum)
def __repr__(self):
return '<Rendition bitrate=\'{0}\' type=\'{1}\' frameSize=\'{2}x{3}\'>'.format(self.encodingRate, self.mediaDeliveryType, self.frameWidth, self.frameHeight)
class ShowItemCollection(ItemCollection):
_item_class = Show
items = ListField(Show)
class PlaylistItemCollection(ItemCollection):
_item_class = Playlist
items = ListField(Playlist)
class MediaRenditionItemCollection(ItemCollection):
_item_class = AMFRendition
items = ListField(AMFRendition)
| predakanga/plugin.video.catchuptv.au.ninemsn | resources/lib/ninemsnvideo/objects.py | Python | mit | 3,248 | 0.011084 |
from datetime import datetime
from sqlalchemy.orm import reconstructor, relationship, backref
from sqlalchemy.schema import Column, ForeignKey
from sqlalchemy.types import Integer, Unicode, Boolean, DateTime
from sqlalchemy import BigInteger
from sqlalchemy.sql.expression import false, or_
from sqlalchemy.ext.associationproxy import association_proxy
from openspending.core import db
from openspending.model.common import (MutableDict, JSONType,
DatasetFacetMixin)
class DataOrg(db.Model):
""" The dataset is the core entity of any access to data. All
requests to the actual data store are routed through it, as well
as data loading and model generation.
The dataset keeps an in-memory representation of the data model
(including all dimensions and measures) which can be used to
generate necessary queries.
"""
__tablename__ = 'dataorg'
__searchable__ = ['label', 'description']
id = Column(Integer, primary_key=True)
label = Column(Unicode(2000))
description = Column(Unicode())
ORTemplate = Column(MutableDict.as_mutable(JSONType), default=dict)
mappingTemplate = Column(MutableDict.as_mutable(JSONType), default=dict)
prefuncs = Column(MutableDict.as_mutable(JSONType), default=dict)
lastUpdated = Column(DateTime, onupdate=datetime.utcnow)
#metadataorg_id = Column(Integer, ForeignKey('metadataorg.id'))
# metadataorg = relationship(MetadataOrg,
# backref=backref('dataorgs', lazy='dynamic'))
def __init__(self, dataorg=None):
if not dataorg:
return
self.label = dataorg.get('label')
self.description = dataorg.get('description')
self.ORTemplate = dataorg.get('ORTemplate', {})
self.mappingTemplate = dataorg.get('mappingTemplate', {})
self.prefuncs = dataorg.get('prefuncs', {})
self.lastUpdated = datetime.utcnow()
def touch(self):
""" Update the dataset timestamp. This is used for cache
invalidation. """
self.updated_at = datetime.utcnow()
db.session.add(self)
def to_json_dump(self):
""" Returns a JSON representation of an SQLAlchemy-backed object.
"""
json = {}
json['fields'] = {}
json['pk'] = getattr(self, 'id')
json['model'] = "DataOrg"
fields = ['label','description','ORTemplate','mappingTemplate','prefuncs']
for field in fields:
json['fields'][field] = getattr(self, field)
return json
@classmethod
def import_json_dump(cls, theobj):
fields = ['label','description','ORTemplate','mappingTemplate','prefuncs']
classobj = cls()
for field in fields:
setattr(classobj, field, theobj['fields'][field])
#classobj.set(field, theobj['fields'][field])
db.session.add(classobj)
db.session.commit()
return classobj.id
def __repr__(self):
return "<DataOrg(%r,%r)>" % (self.id, self.label)
def update(self, dataorg):
self.label = dataorg.get('label')
self.description = dataorg.get('description')
self.ORTemplate = dataorg.get('ORTemplate', {})
self.mappingTemplate = dataorg.get('mappingTemplate', {})
self.prefuncs = dataorg.get('prefuncs', {})
self.lastUpdated = datetime.utcnow()
def as_dict(self):
return {
'id' : self.id,
'label': self.label,
'description': self.description,
'lastUpdated': self.lastUpdated
}
@classmethod
def get_all_admin(cls, order=True):
""" Query available datasets based on dataset visibility. """
q = db.session.query(cls)
if order:
q = q.order_by(cls.label.asc())
return q
@classmethod
def get_all(cls, order=True):
""" Query available datasets based on dataset visibility. """
q = db.session.query(cls)
if order:
q = q.order_by(cls.label.asc())
return q
@classmethod
def all(cls, order=True):
""" Query available datasets based on dataset visibility. """
q = db.session.query(cls)
if order:
q = q.order_by(cls.label.asc())
return q
@classmethod
def by_name(cls, label):
return db.session.query(cls).filter_by(label=label).first()
@classmethod
def by_id(cls, id):
return db.session.query(cls).filter_by(id=id).first()
#TODO
# class MetadataOrgSettings(colander.MappingSchema):
# fullname = colander.SchemaNode(colander.String())
# email = colander.SchemaNode(colander.String(),
# validator=colander.Email())
# public_email = colander.SchemaNode(colander.Boolean(), missing=False)
# twitter = colander.SchemaNode(colander.String(), missing=None,
# validator=colander.Length(max=140))
# public_twitter = colander.SchemaNode(colander.Boolean(), missing=False)
# password1 = colander.SchemaNode(colander.String(),
# missing=None, default=None)
# password2 = colander.SchemaNode(colander.String(),
# missing=None, default=None)
# script_root = colander.SchemaNode(colander.String(),
# missing=None, default=None) | USStateDept/FPA_Core | openspending/model/dataorg.py | Python | agpl-3.0 | 5,452 | 0.004585 |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from oslo_utils import uuidutils
from neutron.agent.common import config as agent_config
from neutron.agent.l3 import router_info
from neutron.agent.linux import ip_lib
from neutron.common import constants as l3_constants
from neutron.common import exceptions as n_exc
from neutron.tests import base
_uuid = uuidutils.generate_uuid
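# These tests patch IPWrapper so no real namespace or "ip" commands run;
# _check_agent_method_called() only asserts that the expected "ip route"
# argument lists were passed to the mocked namespace executor.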
class TestRouterInfo(base.BaseTestCase):
def setUp(self):
super(TestRouterInfo, self).setUp()
conf = agent_config.setup_conf()
self.ip_cls_p = mock.patch('neutron.agent.linux.ip_lib.IPWrapper')
ip_cls = self.ip_cls_p.start()
self.mock_ip = mock.MagicMock()
ip_cls.return_value = self.mock_ip
self.ri_kwargs = {'agent_conf': conf,
'interface_driver': mock.sentinel.interface_driver}
def _check_agent_method_called(self, calls):
self.mock_ip.netns.execute.assert_has_calls(
[mock.call(call, check_exit_code=False) for call in calls],
any_order=True)
def test_routing_table_update(self):
ri = router_info.RouterInfo(_uuid(), {}, **self.ri_kwargs)
ri.router = {}
fake_route1 = {'destination': '135.207.0.0/16',
'nexthop': '1.2.3.4'}
fake_route2 = {'destination': '135.207.111.111/32',
'nexthop': '1.2.3.4'}
ri.update_routing_table('replace', fake_route1)
expected = [['ip', 'route', 'replace', 'to', '135.207.0.0/16',
'via', '1.2.3.4']]
self._check_agent_method_called(expected)
ri.update_routing_table('delete', fake_route1)
expected = [['ip', 'route', 'delete', 'to', '135.207.0.0/16',
'via', '1.2.3.4']]
self._check_agent_method_called(expected)
ri.update_routing_table('replace', fake_route2)
expected = [['ip', 'route', 'replace', 'to', '135.207.111.111/32',
'via', '1.2.3.4']]
self._check_agent_method_called(expected)
ri.update_routing_table('delete', fake_route2)
expected = [['ip', 'route', 'delete', 'to', '135.207.111.111/32',
'via', '1.2.3.4']]
self._check_agent_method_called(expected)
def test_update_routing_table(self):
# Just verify the correct namespace was used in the call
uuid = _uuid()
netns = 'qrouter-' + uuid
fake_route1 = {'destination': '135.207.0.0/16',
'nexthop': '1.2.3.4'}
ri = router_info.RouterInfo(uuid, {'id': uuid}, **self.ri_kwargs)
ri._update_routing_table = mock.Mock()
ri.update_routing_table('replace', fake_route1)
ri._update_routing_table.assert_called_once_with('replace',
fake_route1,
netns)
def test_routes_updated(self):
ri = router_info.RouterInfo(_uuid(), {}, **self.ri_kwargs)
ri.router = {}
fake_old_routes = []
fake_new_routes = [{'destination': "110.100.31.0/24",
'nexthop': "10.100.10.30"},
{'destination': "110.100.30.0/24",
'nexthop': "10.100.10.30"}]
ri.routes = fake_old_routes
ri.router['routes'] = fake_new_routes
ri.routes_updated(fake_old_routes, fake_new_routes)
expected = [['ip', 'route', 'replace', 'to', '110.100.30.0/24',
'via', '10.100.10.30'],
['ip', 'route', 'replace', 'to', '110.100.31.0/24',
'via', '10.100.10.30']]
self._check_agent_method_called(expected)
ri.routes = fake_new_routes
fake_new_routes = [{'destination': "110.100.30.0/24",
'nexthop': "10.100.10.30"}]
ri.router['routes'] = fake_new_routes
ri.routes_updated(ri.routes, fake_new_routes)
expected = [['ip', 'route', 'delete', 'to', '110.100.31.0/24',
'via', '10.100.10.30']]
self._check_agent_method_called(expected)
fake_new_routes = []
ri.router['routes'] = fake_new_routes
ri.routes_updated(ri.routes, fake_new_routes)
expected = [['ip', 'route', 'delete', 'to', '110.100.30.0/24',
'via', '10.100.10.30']]
self._check_agent_method_called(expected)
def test_add_ports_address_scope_iptables(self):
ri = router_info.RouterInfo(_uuid(), {}, **self.ri_kwargs)
port = {
'id': _uuid(),
'fixed_ips': [{'ip_address': '172.9.9.9'}],
'address_scopes': {l3_constants.IP_VERSION_4: '1234'}
}
ipv4_mangle = ri.iptables_manager.ipv4['mangle'] = mock.MagicMock()
ri.get_address_scope_mark_mask = mock.Mock(return_value='fake_mark')
ri.get_internal_device_name = mock.Mock(return_value='fake_device')
ri.rt_tables_manager = mock.MagicMock()
ri.process_external_port_address_scope_routing = mock.Mock()
ri.process_floating_ip_address_scope_rules = mock.Mock()
ri.iptables_manager._apply = mock.Mock()
ri.router[l3_constants.INTERFACE_KEY] = [port]
ri.process_address_scope()
ipv4_mangle.add_rule.assert_called_once_with(
'scope', ri.address_scope_mangle_rule('fake_device', 'fake_mark'))
class BasicRouterTestCaseFramework(base.BaseTestCase):
def _create_router(self, router=None, **kwargs):
if not router:
router = mock.MagicMock()
self.agent_conf = mock.Mock()
self.router_id = _uuid()
return router_info.RouterInfo(self.router_id,
router,
self.agent_conf,
mock.sentinel.interface_driver,
**kwargs)
class TestBasicRouterOperations(BasicRouterTestCaseFramework):
def test_get_floating_ips(self):
router = mock.MagicMock()
router.get.return_value = [mock.sentinel.floating_ip]
ri = self._create_router(router)
fips = ri.get_floating_ips()
self.assertEqual([mock.sentinel.floating_ip], fips)
def test_process_floating_ip_nat_rules(self):
ri = self._create_router()
fips = [{'fixed_ip_address': mock.sentinel.ip,
'floating_ip_address': mock.sentinel.fip}]
ri.get_floating_ips = mock.Mock(return_value=fips)
ri.iptables_manager = mock.MagicMock()
ipv4_nat = ri.iptables_manager.ipv4['nat']
ri.floating_forward_rules = mock.Mock(
return_value=[(mock.sentinel.chain, mock.sentinel.rule)])
ri.process_floating_ip_nat_rules()
# Be sure that the rules are cleared first and apply is called last
self.assertEqual(mock.call.clear_rules_by_tag('floating_ip'),
ipv4_nat.mock_calls[0])
self.assertEqual(mock.call.apply(), ri.iptables_manager.mock_calls[-1])
# Be sure that add_rule is called somewhere in the middle
ipv4_nat.add_rule.assert_called_once_with(mock.sentinel.chain,
mock.sentinel.rule,
tag='floating_ip')
def test_process_floating_ip_nat_rules_removed(self):
ri = self._create_router()
ri.get_floating_ips = mock.Mock(return_value=[])
ri.iptables_manager = mock.MagicMock()
ipv4_nat = ri.iptables_manager.ipv4['nat']
ri.process_floating_ip_nat_rules()
# Be sure that the rules are cleared first and apply is called last
self.assertEqual(mock.call.clear_rules_by_tag('floating_ip'),
ipv4_nat.mock_calls[0])
self.assertEqual(mock.call.apply(), ri.iptables_manager.mock_calls[-1])
# Be sure that add_rule is called somewhere in the middle
self.assertFalse(ipv4_nat.add_rule.called)
def test_process_floating_ip_address_scope_rules_diff_scopes(self):
ri = self._create_router()
fips = [{'fixed_ip_address': mock.sentinel.ip,
'floating_ip_address': mock.sentinel.fip,
'fixed_ip_address_scope': 'scope1'}]
ri.get_floating_ips = mock.Mock(return_value=fips)
ri._get_external_address_scope = mock.Mock(return_value='scope2')
ipv4_mangle = ri.iptables_manager.ipv4['mangle'] = mock.MagicMock()
ri.floating_mangle_rules = mock.Mock(
return_value=[(mock.sentinel.chain1, mock.sentinel.rule1)])
ri.get_external_device_name = mock.Mock()
ri.process_floating_ip_address_scope_rules()
# Be sure that the rules are cleared first
self.assertEqual(mock.call.clear_rules_by_tag('floating_ip'),
ipv4_mangle.mock_calls[0])
# Be sure that add_rule is called somewhere in the middle
self.assertEqual(1, ipv4_mangle.add_rule.call_count)
self.assertEqual(mock.call.add_rule(mock.sentinel.chain1,
mock.sentinel.rule1,
tag='floating_ip'),
ipv4_mangle.mock_calls[1])
def test_process_floating_ip_address_scope_rules_same_scopes(self):
ri = self._create_router()
fips = [{'fixed_ip_address': mock.sentinel.ip,
'floating_ip_address': mock.sentinel.fip,
'fixed_ip_address_scope': 'scope1'}]
ri.get_floating_ips = mock.Mock(return_value=fips)
ri._get_external_address_scope = mock.Mock(return_value='scope1')
ipv4_mangle = ri.iptables_manager.ipv4['mangle'] = mock.MagicMock()
ri.process_floating_ip_address_scope_rules()
# Be sure that the rules are cleared first
self.assertEqual(mock.call.clear_rules_by_tag('floating_ip'),
ipv4_mangle.mock_calls[0])
# Be sure that add_rule is not called somewhere in the middle
self.assertFalse(ipv4_mangle.add_rule.called)
def test_process_floating_ip_mangle_rules_removed(self):
ri = self._create_router()
ri.get_floating_ips = mock.Mock(return_value=[])
ipv4_mangle = ri.iptables_manager.ipv4['mangle'] = mock.MagicMock()
ri.process_floating_ip_address_scope_rules()
# Be sure that the rules are cleared first
self.assertEqual(mock.call.clear_rules_by_tag('floating_ip'),
ipv4_mangle.mock_calls[0])
# Be sure that add_rule is not called somewhere in the middle
self.assertFalse(ipv4_mangle.add_rule.called)
def _test_add_fip_addr_to_device_error(self, device):
ri = self._create_router()
ip = '15.1.2.3'
result = ri._add_fip_addr_to_device(
{'id': mock.sentinel.id, 'floating_ip_address': ip}, device)
device.addr.add.assert_called_with(ip + '/32')
return result
def test__add_fip_addr_to_device(self):
result = self._test_add_fip_addr_to_device_error(mock.Mock())
self.assertTrue(result)
def test__add_fip_addr_to_device_error(self):
device = mock.Mock()
device.addr.add.side_effect = RuntimeError
result = self._test_add_fip_addr_to_device_error(device)
self.assertFalse(result)
def test_process_snat_dnat_for_fip(self):
ri = self._create_router()
ri.process_floating_ip_nat_rules = mock.Mock(side_effect=Exception)
self.assertRaises(n_exc.FloatingIpSetupException,
ri.process_snat_dnat_for_fip)
ri.process_floating_ip_nat_rules.assert_called_once_with()
def test_put_fips_in_error_state(self):
ri = self._create_router()
ri.router = mock.Mock()
ri.router.get.return_value = [{'id': mock.sentinel.id1},
{'id': mock.sentinel.id2}]
statuses = ri.put_fips_in_error_state()
expected = [{mock.sentinel.id1: l3_constants.FLOATINGIP_STATUS_ERROR,
mock.sentinel.id2: l3_constants.FLOATINGIP_STATUS_ERROR}]
self.assertNotEqual(expected, statuses)
def test_configure_fip_addresses(self):
ri = self._create_router()
ri.process_floating_ip_addresses = mock.Mock(
side_effect=Exception)
self.assertRaises(n_exc.FloatingIpSetupException,
ri.configure_fip_addresses,
mock.sentinel.interface_name)
ri.process_floating_ip_addresses.assert_called_once_with(
mock.sentinel.interface_name)
def test_get_router_cidrs_returns_cidrs(self):
ri = self._create_router()
addresses = ['15.1.2.2/24', '15.1.2.3/32']
device = mock.MagicMock()
device.addr.list.return_value = [{'cidr': addresses[0]},
{'cidr': addresses[1]}]
self.assertEqual(set(addresses), ri.get_router_cidrs(device))
@mock.patch.object(ip_lib, 'IPDevice')
class TestFloatingIpWithMockDevice(BasicRouterTestCaseFramework):
def test_process_floating_ip_addresses_remap(self, IPDevice):
fip_id = _uuid()
fip = {
'id': fip_id, 'port_id': _uuid(),
'floating_ip_address': '15.1.2.3',
'fixed_ip_address': '192.168.0.2',
'status': l3_constants.FLOATINGIP_STATUS_DOWN
}
IPDevice.return_value = device = mock.Mock()
device.addr.list.return_value = [{'cidr': '15.1.2.3/32'}]
ri = self._create_router()
ri.get_floating_ips = mock.Mock(return_value=[fip])
fip_statuses = ri.process_floating_ip_addresses(
mock.sentinel.interface_name)
self.assertEqual({fip_id: l3_constants.FLOATINGIP_STATUS_ACTIVE},
fip_statuses)
self.assertFalse(device.addr.add.called)
self.assertFalse(device.addr.delete.called)
def test_process_router_with_disabled_floating_ip(self, IPDevice):
fip_id = _uuid()
fip = {
'id': fip_id, 'port_id': _uuid(),
'floating_ip_address': '15.1.2.3',
'fixed_ip_address': '192.168.0.2'
}
ri = self._create_router()
ri.floating_ips = [fip]
ri.get_floating_ips = mock.Mock(return_value=[])
fip_statuses = ri.process_floating_ip_addresses(
mock.sentinel.interface_name)
self.assertIsNone(fip_statuses.get(fip_id))
def test_process_router_floating_ip_with_device_add_error(self, IPDevice):
IPDevice.return_value = device = mock.Mock(side_effect=RuntimeError)
device.addr.list.return_value = []
fip_id = _uuid()
fip = {
'id': fip_id, 'port_id': _uuid(),
'floating_ip_address': '15.1.2.3',
'fixed_ip_address': '192.168.0.2',
'status': 'DOWN'
}
ri = self._create_router()
ri.add_floating_ip = mock.Mock(
return_value=l3_constants.FLOATINGIP_STATUS_ERROR)
ri.get_floating_ips = mock.Mock(return_value=[fip])
fip_statuses = ri.process_floating_ip_addresses(
mock.sentinel.interface_name)
self.assertEqual({fip_id: l3_constants.FLOATINGIP_STATUS_ERROR},
fip_statuses)
# TODO(mrsmith): refactor for DVR cases
def test_process_floating_ip_addresses_remove(self, IPDevice):
IPDevice.return_value = device = mock.Mock()
device.addr.list.return_value = [{'cidr': '15.1.2.3/32'}]
ri = self._create_router()
ri.remove_floating_ip = mock.Mock()
ri.router.get = mock.Mock(return_value=[])
fip_statuses = ri.process_floating_ip_addresses(
mock.sentinel.interface_name)
self.assertEqual({}, fip_statuses)
ri.remove_floating_ip.assert_called_once_with(device, '15.1.2.3/32')
| wolverineav/neutron | neutron/tests/unit/agent/l3/test_router_info.py | Python | apache-2.0 | 16,514 | 0 |
import magics
__all__ = ['by_version', 'by_magic']
_fallback = {
'EXTENDED_ARG': None,
'hasfree': [],
}
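# Lazy proxy over the version-specific disassembly modules (decompyle.dis_15
# through dis_25); attribute lookups that the module does not provide fall
# back to the _fallback defaults above.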
class dis(object):
def __init__(self, version, module):
self._version = version
from __builtin__ import __import__
self._module = __import__('decompyle.%s' % module, globals(),
locals(), 'decompyle')
def __getattr__(self, attr):
try:
val = self._module.__dict__[attr]
except KeyError, e:
if _fallback.has_key(attr):
val = _fallback[attr]
else:
raise e
return val
by_version = {
'1.5': dis('1.5', 'dis_15'),
'1.6': dis('1.6', 'dis_16'),
'2.0': dis('2.0', 'dis_20'),
'2.1': dis('2.1', 'dis_21'),
'2.2': dis('2.2', 'dis_22'),
'2.3': dis('2.3', 'dis_23'),
'2.4': dis('2.4', 'dis_24'),
'2.5': dis('2.5', 'dis_25'),
}
by_magic = dict( [ (mag, by_version[ver])
for mag, ver in magics.versions.iteritems() ] )
if __name__ == '__main__':
for m, ver in by_magic.items():
magics.__show(ver, m)
print by_version['2.2'].hasjrel
| devyn/unholy | decompyle/decompyle/dis_files.py | Python | mit | 1,163 | 0.006019 |
import _plotly_utils.basevalidators
class ColorValidator(_plotly_utils.basevalidators.ColorValidator):
def __init__(
self, plotly_name="color", parent_name="layout.xaxis.tickfont", **kwargs
):
super(ColorValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "ticks"),
**kwargs
)
| plotly/plotly.py | packages/python/plotly/plotly/validators/layout/xaxis/tickfont/_color.py | Python | mit | 418 | 0.002392 |
# ####################### BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .mbcharsetprober import MultiByteCharSetProber
from .codingstatemachine import CodingStateMachine
from .chardistribution import Big5DistributionAnalysis
from .mbcssm import Big5SMModel
class Big5Prober(MultiByteCharSetProber):
def __init__(self):
MultiByteCharSetProber.__init__(self)
self._mCodingSM = CodingStateMachine(Big5SMModel)
self._mDistributionAnalyzer = Big5DistributionAnalysis()
self.reset()
def get_charset_name(self):
return "Big5"
| insiderr/insiderr-app | app/modules/requests/packages/chardet/big5prober.py | Python | gpl-3.0 | 1,685 | 0.000593 |
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
from frappe.utils import get_request_site_address, encode
from frappe.model.document import Document
from six.moves.urllib.parse import quote
from frappe.website.router import resolve_route
from frappe.website.doctype.website_theme.website_theme import add_website_theme
class WebsiteSettings(Document):
def validate(self):
self.validate_top_bar_items()
self.validate_footer_items()
self.validate_home_page()
def validate_home_page(self):
if frappe.flags.in_install:
return
if self.home_page and not resolve_route(self.home_page):
frappe.msgprint(_("Invalid Home Page") + " (Standard pages - index, login, products, blog, about, contact)")
self.home_page = ''
def validate_top_bar_items(self):
"""validate url in top bar items"""
for top_bar_item in self.get("top_bar_items"):
if top_bar_item.parent_label:
parent_label_item = self.get("top_bar_items", {"label": top_bar_item.parent_label})
if not parent_label_item:
# invalid item
frappe.throw(_("{0} does not exist in row {1}").format(top_bar_item.parent_label, top_bar_item.idx))
elif not parent_label_item[0] or parent_label_item[0].url:
# parent cannot have url
frappe.throw(_("{0} in row {1} cannot have both URL and child items").format(top_bar_item.parent_label,
top_bar_item.idx))
def validate_footer_items(self):
"""validate url in top bar items"""
for footer_item in self.get("footer_items"):
if footer_item.parent_label:
parent_label_item = self.get("footer_items", {"label": footer_item.parent_label})
if not parent_label_item:
# invalid item
frappe.throw(_("{0} does not exist in row {1}").format(footer_item.parent_label, footer_item.idx))
elif not parent_label_item[0] or parent_label_item[0].url:
# parent cannot have url
frappe.throw(_("{0} in row {1} cannot have both URL and child items").format(footer_item.parent_label,
footer_item.idx))
def on_update(self):
self.clear_cache()
def clear_cache(self):
# make js and css
# clear web cache (for menus!)
frappe.clear_cache(user = 'Guest')
from frappe.website.render import clear_cache
clear_cache()
# clears role based home pages
frappe.clear_cache()
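# Build the context dict used by every website page render: navigation items,
# selected Website Settings fields, hook-provided JS/CSS includes and the
# active website theme.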
def get_website_settings():
hooks = frappe.get_hooks()
context = frappe._dict({
'top_bar_items': get_items('top_bar_items'),
'footer_items': get_items('footer_items'),
"post_login": [
{"label": _("My Account"), "url": "/me"},
# {"class": "divider"},
{"label": _("Logout"), "url": "/?cmd=web_logout"}
]
})
settings = frappe.get_single("Website Settings")
for k in ["banner_html", "brand_html", "copyright", "twitter_share_via",
"facebook_share", "google_plus_one", "twitter_share", "linked_in_share",
"disable_signup", "hide_footer_signup", "head_html", "title_prefix",
"navbar_search"]:
if hasattr(settings, k):
context[k] = settings.get(k)
if settings.address:
context["footer_address"] = settings.address
for k in ["facebook_share", "google_plus_one", "twitter_share", "linked_in_share",
"disable_signup"]:
context[k] = int(context.get(k) or 0)
if frappe.request:
context.url = quote(str(get_request_site_address(full_address=True)), safe="/:")
context.encoded_title = quote(encode(context.title or ""), str(""))
for update_website_context in hooks.update_website_context or []:
frappe.get_attr(update_website_context)(context)
context.web_include_js = hooks.web_include_js or []
context.web_include_css = hooks.web_include_css or []
via_hooks = frappe.get_hooks("website_context")
for key in via_hooks:
context[key] = via_hooks[key]
if key not in ("top_bar_items", "footer_items", "post_login") \
and isinstance(context[key], (list, tuple)):
context[key] = context[key][-1]
add_website_theme(context)
if not context.get("favicon"):
context["favicon"] = "/assets/frappe/images/favicon.png"
if settings.favicon and settings.favicon != "attach_files:":
context["favicon"] = settings.favicon
return context
def get_items(parentfield):
all_top_items = frappe.db.sql("""\
select * from `tabTop Bar Item`
where parent='Website Settings' and parentfield= %s
order by idx asc""", parentfield, as_dict=1)
top_items = [d for d in all_top_items if not d['parent_label']]
# attach child items to top bar
for d in all_top_items:
if d['parent_label']:
for t in top_items:
if t['label']==d['parent_label']:
if not 'child_items' in t:
t['child_items'] = []
t['child_items'].append(d)
break
return top_items
| StrellaGroup/frappe | frappe/website/doctype/website_settings/website_settings.py | Python | mit | 4,709 | 0.026545 |
# =============================================================================
# Copyright (C) 2010 Diego Duclos
#
# This file is part of pyfa.
#
# pyfa is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pyfa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with pyfa. If not, see <http://www.gnu.org/licenses/>.
# =============================================================================
# noinspection PyPackageRequirements
import wx
from gui.statsView import StatsView
from gui.bitmap_loader import BitmapLoader
from gui.pyfa_gauge import PyGauge
import gui.mainFrame
from gui.chrome_tabs import EVT_NOTEBOOK_PAGE_CHANGED
from gui.utils import fonts
from eos.saveddata.module import Hardpoint
from gui.utils.numberFormatter import formatAmount
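# Stats pane showing fitting resources (hardpoints, CPU, powergrid and the
# various bays). toggleContext() swaps which bay row is visible depending on
# whether the Drones, Fighters or Cargo tab is active in the additions pane.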
class ResourcesViewFull(StatsView):
name = "resourcesViewFull"
contexts = ["drone", "fighter", "cargo"]
def __init__(self, parent):
StatsView.__init__(self)
self.parent = parent
self.mainFrame = gui.mainFrame.MainFrame.getInstance()
self.mainFrame.additionsPane.notebook.Bind(EVT_NOTEBOOK_PAGE_CHANGED, self.pageChanged)
def pageChanged(self, event):
page = self.mainFrame.additionsPane.getName(event.GetSelection())
if page == "Cargo":
self.toggleContext("cargo")
elif page == "Fighters":
self.toggleContext("fighter")
else:
self.toggleContext("drone")
def toggleContext(self, context):
# Apparently you cannot .Hide(True) on a Window, otherwise I would just .Hide(context !== x).
# This is a gimpy way to toggle this shit
for x in self.contexts:
bitmap = getattr(self, "bitmapFull{}Bay".format(x.capitalize()))
base = getattr(self, "baseFull{}Bay".format(x.capitalize()))
if context == x:
bitmap.Show()
base.Show(True)
else:
bitmap.Hide()
base.Hide(True)
fighter_sizer = getattr(self, "boxSizerFighter")
drone_sizer = getattr(self, "boxSizerDrones")
if context != "fighter":
fighter_sizer.ShowItems(False)
drone_sizer.ShowItems(True)
else:
fighter_sizer.ShowItems(True)
drone_sizer.ShowItems(False)
self.panel.Layout()
self.headerPanel.Layout()
def getHeaderText(self, fit):
return "Resources"
def getTextExtentW(self, text):
width, height = self.parent.GetTextExtent(text)
return width
def populatePanel(self, contentPanel, headerPanel):
contentSizer = contentPanel.GetSizer()
root = wx.BoxSizer(wx.VERTICAL)
contentSizer.Add(root, 0, wx.EXPAND, 0)
sizer = wx.BoxSizer(wx.HORIZONTAL)
root.Add(sizer, 0, wx.EXPAND)
root.Add(wx.StaticLine(contentPanel, wx.ID_ANY, style=wx.HORIZONTAL), 0, wx.EXPAND)
sizerResources = wx.BoxSizer(wx.HORIZONTAL)
root.Add(sizerResources, 1, wx.EXPAND, 0)
parent = self.panel = contentPanel
self.headerPanel = headerPanel
panel = "full"
base = sizerResources
sizer.AddStretchSpacer()
# Turrets & launcher hardslots display
tooltipText = {"turret": "Turret hardpoints", "launcher": "Launcher hardpoints", "drones": "Drones active",
"fighter": "Fighter squadrons active", "calibration": "Calibration"}
for type_ in ("turret", "launcher", "drones", "fighter", "calibration"):
box = wx.BoxSizer(wx.HORIZONTAL)
bitmap = BitmapLoader.getStaticBitmap("%s_big" % type_, parent, "gui")
tooltip = wx.ToolTip(tooltipText[type_])
bitmap.SetToolTip(tooltip)
box.Add(bitmap, 0, wx.ALIGN_CENTER)
sizer.Add(box, 0, wx.ALIGN_CENTER)
suffix = {'turret': 'Hardpoints', 'launcher': 'Hardpoints', 'drones': 'Active', 'fighter': 'Tubes',
'calibration': 'Points'}
lbl = wx.StaticText(parent, wx.ID_ANY, "0")
setattr(self, "label%sUsed%s%s" % (panel.capitalize(), type_.capitalize(), suffix[type_].capitalize()), lbl)
box.Add(lbl, 0, wx.ALIGN_CENTER | wx.LEFT, 5)
box.Add(wx.StaticText(parent, wx.ID_ANY, "/"), 0, wx.ALIGN_CENTER)
lbl = wx.StaticText(parent, wx.ID_ANY, "0")
setattr(self, "label%sTotal%s%s" % (panel.capitalize(), type_.capitalize(), suffix[type_].capitalize()),
lbl)
box.Add(lbl, 0, wx.ALIGN_CENTER)
setattr(self, "boxSizer{}".format(type_.capitalize()), box)
# Hack - We add a spacer after each thing, but we are always hiding something. The spacer is stil there.
# This way, we only have one space after the drones/fighters
if type_ != "drones":
sizer.AddStretchSpacer()
gauge_font = wx.Font(fonts.NORMAL, wx.SWISS, wx.NORMAL, wx.NORMAL, False)
# PG, Cpu & drone stuff
tooltipText = {"cpu": "CPU", "pg": "PowerGrid", "droneBay": "Drone bay", "fighterBay": "Fighter bay",
"droneBandwidth": "Drone bandwidth", "cargoBay": "Cargo bay"}
for i, group in enumerate((("cpu", "pg"), ("cargoBay", "droneBay", "fighterBay", "droneBandwidth"))):
main = wx.BoxSizer(wx.VERTICAL)
base.Add(main, 1, wx.ALIGN_CENTER)
for type_ in group:
capitalizedType = type_[0].capitalize() + type_[1:]
bitmap = BitmapLoader.getStaticBitmap(type_ + "_big", parent, "gui")
tooltip = wx.ToolTip(tooltipText[type_])
bitmap.SetToolTip(tooltip)
stats = wx.BoxSizer(wx.VERTICAL)
absolute = wx.BoxSizer(wx.HORIZONTAL)
stats.Add(absolute, 0, wx.EXPAND)
b = wx.BoxSizer(wx.HORIZONTAL)
main.Add(b, 1, wx.ALIGN_CENTER)
b.Add(bitmap, 0, wx.ALIGN_BOTTOM)
b.Add(stats, 1, wx.EXPAND)
lbl = wx.StaticText(parent, wx.ID_ANY, "0")
setattr(self, "label%sUsed%s" % (panel.capitalize(), capitalizedType), lbl)
absolute.Add(lbl, 0, wx.ALIGN_LEFT | wx.LEFT, 3)
absolute.Add(wx.StaticText(parent, wx.ID_ANY, "/"), 0, wx.ALIGN_LEFT)
lbl = wx.StaticText(parent, wx.ID_ANY, "0")
setattr(self, "label%sTotal%s" % (panel.capitalize(), capitalizedType), lbl)
absolute.Add(lbl, 0, wx.ALIGN_LEFT)
units = {"cpu": " tf", "pg": " MW", "droneBandwidth": " mbit/s", "droneBay": " m\u00B3",
"fighterBay": " m\u00B3", "cargoBay": " m\u00B3"}
lbl = wx.StaticText(parent, wx.ID_ANY, "%s" % units[type_])
absolute.Add(lbl, 0, wx.ALIGN_LEFT)
# Gauges modif. - Darriele
gauge = PyGauge(parent, gauge_font, 1)
gauge.SetValueRange(0, 0)
gauge.SetMinSize((self.getTextExtentW("1.999M/1.99M MW"), 23))
gauge.SetFractionDigits(2)
setattr(self, "gauge%s%s" % (panel.capitalize(), capitalizedType), gauge)
stats.Add(gauge, 0, wx.ALIGN_CENTER)
setattr(self, "base%s%s" % (panel.capitalize(), capitalizedType), b)
setattr(self, "bitmap%s%s" % (panel.capitalize(), capitalizedType), bitmap)
self.toggleContext("drone")
def refreshPanel(self, fit):
# If we did anything interesting, we'd update our labels to reflect the new fit's stats here
stats = (
("label%sUsedTurretHardpoints", lambda: fit.getHardpointsUsed(Hardpoint.TURRET), 0, 0, 0),
("label%sTotalTurretHardpoints", lambda: fit.ship.getModifiedItemAttr('turretSlotsLeft'), 0, 0, 0),
("label%sUsedLauncherHardpoints", lambda: fit.getHardpointsUsed(Hardpoint.MISSILE), 0, 0, 0),
("label%sTotalLauncherHardpoints", lambda: fit.ship.getModifiedItemAttr('launcherSlotsLeft'), 0, 0, 0),
("label%sUsedDronesActive", lambda: fit.activeDrones, 0, 0, 0),
("label%sTotalDronesActive", lambda: fit.extraAttributes["maxActiveDrones"], 0, 0, 0),
("label%sUsedFighterTubes", lambda: fit.fighterTubesUsed, 3, 0, 9),
("label%sTotalFighterTubes", lambda: fit.ship.getModifiedItemAttr("fighterTubes"), 3, 0, 9),
("label%sUsedCalibrationPoints", lambda: fit.calibrationUsed, 0, 0, 0),
("label%sTotalCalibrationPoints", lambda: fit.ship.getModifiedItemAttr('upgradeCapacity'), 0, 0, 0),
("label%sUsedPg", lambda: fit.pgUsed, 4, 0, 9),
("label%sUsedCpu", lambda: fit.cpuUsed, 4, 0, 9),
("label%sTotalPg", lambda: fit.ship.getModifiedItemAttr("powerOutput"), 4, 0, 9),
("label%sTotalCpu", lambda: fit.ship.getModifiedItemAttr("cpuOutput"), 4, 0, 9),
("label%sUsedDroneBay", lambda: fit.droneBayUsed, 3, 0, 9),
("label%sUsedFighterBay", lambda: fit.fighterBayUsed, 3, 0, 9),
("label%sUsedDroneBandwidth", lambda: fit.droneBandwidthUsed, 3, 0, 9),
("label%sTotalDroneBay", lambda: fit.ship.getModifiedItemAttr("droneCapacity"), 3, 0, 9),
("label%sTotalDroneBandwidth", lambda: fit.ship.getModifiedItemAttr("droneBandwidth"), 3, 0, 9),
("label%sTotalFighterBay", lambda: fit.ship.getModifiedItemAttr("fighterCapacity"), 3, 0, 9),
("label%sUsedCargoBay", lambda: fit.cargoBayUsed, 3, 0, 9),
("label%sTotalCargoBay", lambda: fit.ship.getModifiedItemAttr("capacity"), 3, 0, 9),
)
panel = "Full"
usedTurretHardpoints = 0
labelUTH = ""
totalTurretHardpoints = 0
labelTTH = ""
usedLauncherHardpoints = 0
labelULH = ""
totalLauncherHardPoints = 0
labelTLH = ""
usedDronesActive = 0
labelUDA = ""
totalDronesActive = 0
labelTDA = ""
usedFighterTubes = 0
labelUFT = ""
totalFighterTubes = 0
labelTFT = ""
usedCalibrationPoints = 0
labelUCP = ""
totalCalibrationPoints = 0
labelTCP = ""
for labelName, value, prec, lowest, highest in stats:
label = getattr(self, labelName % panel)
value = value() if fit is not None else 0
value = value if value is not None else 0
if labelName % panel == "label%sUsedTurretHardpoints" % panel:
usedTurretHardpoints = value
labelUTH = label
elif labelName % panel == "label%sTotalTurretHardpoints" % panel:
totalTurretHardpoints = value
labelTTH = label
elif labelName % panel == "label%sUsedLauncherHardpoints" % panel:
usedLauncherHardpoints = value
labelULH = label
elif labelName % panel == "label%sTotalLauncherHardpoints" % panel:
totalLauncherHardPoints = value
labelTLH = label
elif labelName % panel == "label%sUsedDronesActive" % panel:
usedDronesActive = value
labelUDA = label
elif labelName % panel == "label%sTotalDronesActive" % panel:
totalDronesActive = value
labelTDA = label
elif labelName % panel == "label%sUsedFighterTubes" % panel:
usedFighterTubes = value
labelUFT = label
elif labelName % panel == "label%sTotalFighterTubes" % panel:
totalFighterTubes = value
labelTFT = label
elif labelName % panel == "label%sUsedCalibrationPoints" % panel:
usedCalibrationPoints = value
labelUCP = label
elif labelName % panel == "label%sTotalCalibrationPoints" % panel:
totalCalibrationPoints = value
labelTCP = label
if isinstance(value, str):
label.SetLabel(value)
label.SetToolTip(wx.ToolTip(value))
else:
label.SetLabel(formatAmount(value, prec, lowest, highest))
label.SetToolTip(wx.ToolTip("%.1f" % value))
colorWarn = wx.Colour(204, 51, 51)
colorNormal = wx.SystemSettings.GetColour(wx.SYS_COLOUR_WINDOWTEXT)
if usedTurretHardpoints > totalTurretHardpoints:
colorT = colorWarn
else:
colorT = colorNormal
if usedLauncherHardpoints > totalLauncherHardPoints:
colorL = colorWarn
else:
colorL = colorNormal
if usedDronesActive > totalDronesActive:
colorD = colorWarn
else:
colorD = colorNormal
if usedFighterTubes > totalFighterTubes:
colorF = colorWarn
else:
colorF = colorNormal
if usedCalibrationPoints > totalCalibrationPoints:
colorC = colorWarn
else:
colorC = colorNormal
labelUTH.SetForegroundColour(colorT)
labelTTH.SetForegroundColour(colorT)
labelULH.SetForegroundColour(colorL)
labelTLH.SetForegroundColour(colorL)
labelUDA.SetForegroundColour(colorD)
labelTDA.SetForegroundColour(colorD)
labelUFT.SetForegroundColour(colorF)
labelTFT.SetForegroundColour(colorF)
labelUCP.SetForegroundColour(colorC)
labelTCP.SetForegroundColour(colorC)
if fit is not None:
resMax = (
lambda: fit.ship.getModifiedItemAttr("cpuOutput"),
lambda: fit.ship.getModifiedItemAttr("powerOutput"),
lambda: fit.ship.getModifiedItemAttr("droneCapacity"),
lambda: fit.ship.getModifiedItemAttr("fighterCapacity"),
lambda: fit.ship.getModifiedItemAttr("droneBandwidth"),
lambda: fit.ship.getModifiedItemAttr("capacity"),
)
else:
resMax = None
i = 0
for resourceType in ("cpu", "pg", "droneBay", "fighterBay", "droneBandwidth", "cargoBay"):
if fit is not None:
capitalizedType = resourceType[0].capitalize() + resourceType[1:]
gauge = getattr(self, "gauge%s%s" % (panel, capitalizedType))
resUsed = getattr(fit, "%sUsed" % resourceType)
gauge.SetValueRange(resUsed or 0, resMax[i]() or 0)
i += 1
else:
capitalizedType = resourceType[0].capitalize() + resourceType[1:]
gauge = getattr(self, "gauge%s%s" % (panel, capitalizedType))
gauge.SetValueRange(0, 0)
i += 1
self.panel.Layout()
self.headerPanel.Layout()
ResourcesViewFull.register()
| bsmr-eve/Pyfa | gui/builtinStatsViews/resourcesViewFull.py | Python | gpl-3.0 | 15,284 | 0.002748 |
from django.template import RequestContext
from django.shortcuts import render_to_response
from django.http import HttpResponse, HttpResponseRedirect, Http404
from django.forms.models import modelformset_factory
from worldcup.common.msg_util import *
from worldcup.common.user_util import *
from worldcup.matches.models import *
from worldcup.predictions.models import *
from worldcup.predictions.forms import *
from worldcup.teams.models import get_team_not_determined
from worldcup.predictions.standings import get_current_standings
from datetime import datetime
def view_prediction_list(request):
"""
"Landing Page" with links to forms and some stats
"""
if not request.user.is_authenticated():
return view_auth_page(request)
lu = get_username(request)
lu.update({'match_types' :MatchType.objects.all()
, 'number_predictions' : Prediction.objects.all().count()
, 'number_users' : Prediction.objects.values('user').distinct().count()
, 'standings' : get_current_standings()
, 'num_matches_played' : Match.objects.filter(score_recorded=True).count()
})
return render_to_response('predictions/prediction_home.html', lu, context_instance=RequestContext(request) )
def get_new_prediction(user, match_type, match):
if user is None or match_type is None:
return None
if match_type.name == MATCH_TYPE_KNOCKOUT_STAGE:
p = PredictionStage2(user=user
, match=match
, team1=get_team_not_determined()
, team2=get_team_not_determined()
)
else:
        p = Prediction(user=user, match=match)
p.save()
return p
def get_users_predictions(request, user, match_type):
"""For a given user and match type, return Prediction objects.
If they don't exist, create them."""
if user is None or match_type is None:
return None
#msgt('get_users_predictions')
#msg('match_type: [%s]' % match_type)
# minimal check, either user has no predictions or all of them
num_matches = Match.objects.filter(match_type=match_type).count()
if match_type.name == MATCH_TYPE_KNOCKOUT_STAGE:
PredictionObj = eval('PredictionStage2')
else:
PredictionObj = eval('Prediction')
# get user's predictions for this match type
qset = PredictionObj.objects.filter(user=user, match__match_type=match_type)
#msg(qset)
if qset.count() == 0:
#
# need to create Predictions for this user
#msg('zero qset')
for m in Match.objects.filter(match_type=match_type):
get_new_prediction(user, match_type, m)
#p = PredictionObj(user=user, match=m)
#p.save()
return PredictionObj.objects.filter(user=user, match__match_type=match_type)
elif qset.count() == num_matches:
#
# correct number of Predictions
#msg('matched: %s' % num_matches)
return qset
else:
#
# wrong number of predictions, create new ones
#msg('wrong number of Predictions [%s]'% qset)
for m in Match.objects.filter(match_type=match_type):
#msg('match: %s' % m)
if PredictionObj.objects.filter(user=user, match=m).count() > 0:
pass
else:
get_new_prediction(user, match_type, m)
return PredictionObj.objects.filter(user=request.user, match__match_type=match_type)
#msg('wrong number of Predictions: %s' % qset.count())
#assert(False, "wrong number of Predictions")
#return None
def view_prediction_saved_success(request, match_type_slug):
if not request.user.is_authenticated():
return view_auth_page(request)
lu = get_username(request)
try:
match_type = MatchType.objects.get(slug=match_type_slug)
lu.update({ 'match_type' : match_type })
except MatchType.DoesNotExist:
lu.update({ 'Err_found':True
, 'MatchType_not_Found': True})
return render_to_response('predictions/add_prediction_success.html', lu, context_instance=RequestContext(request))
qset = get_users_predictions(request, request.user, match_type)
lu.update({ 'predictions':qset })
return render_to_response('predictions/add_prediction_success.html', lu, context_instance=RequestContext(request) )
def view_prediction_form2(request, match_type_slug):
"""
Prediction form for the group stage
"""
if not request.user.is_authenticated():
return view_auth_page(request)
lu = get_username(request)
try:
match_type = MatchType.objects.get(slug=match_type_slug)
lu.update({ 'match_type' : match_type })
except MatchType.DoesNotExist:
lu.update({ 'Err_found':True
, 'MatchType_not_Found': True})
return render_to_response('predictions/add_prediction.html', lu, context_instance=RequestContext(request) )
# Is it too late to make a prediction?
#
if datetime.now() > match_type.last_day_to_predict:
lu.update({ 'Err_found':True
, 'Too_late_to_predict': True})
return render_to_response('predictions/add_prediction.html', lu, context_instance=RequestContext(request) )
lu.update({ 'user' : request.user })
PredictionFormSet = modelformset_factory(Prediction, form=PredictionForm, extra=0)
qset = get_users_predictions(request, request.user, match_type)
if request.method == 'POST':
#deal with posting the data
formset = PredictionFormSet(request.POST, queryset=qset)
if formset.is_valid():
formset.save()
redirect_url = reverse('view_prediction_saved_success'
, kwargs={ 'match_type_slug':match_type.slug })
return HttpResponseRedirect(redirect_url)
#else:
# msg(formset.errors)
else:
formset = PredictionFormSet(queryset=qset)
lu.update({ 'formset':formset })
return render_to_response('predictions/add_prediction.html', lu, context_instance=RequestContext(request) )
| raprasad/worldcup | worldcup/worldcup/predictions/views.py | Python | mit | 6,415 | 0.018083 |
import recurly
class RecurlyError(Exception):
@classmethod
def error_from_status(cls, status):
return recurly.errors.ERROR_MAP.get(status, "")
class ApiError(RecurlyError):
def __init__(self, message, error):
super(ApiError, self).__init__(message)
self.error = error
class NetworkError(RecurlyError):
pass
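# Illustrative usage (not part of the original module):
#
#   error_cls = RecurlyError.error_from_status(404)
#   # looks up recurly.errors.ERROR_MAP; returns "" when the status is unmapped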
| recurly/recurly-client-python | recurly/base_errors.py | Python | mit | 353 | 0 |
'''
Contains physical constants used in snow modeling.
@var a_gravity: Gravitational acceleration [m s-2]
@var eta0: Viscosity of snow at T=0C and density=0 [N s m-2 = kg m-1 s-1]
@var rho_air: Density of air [kg m-3], dry air at 0 C and 100 kPa
@var rho_water: Density of water [kg m-3]
@var rho_ice: Density of ice [kg m-3]
@var k_ice0: Thermal conductivity of ice [W m-1 K-1] at 0 C
@var k_ice10: Thermal conductivity of ice [W m-1 K-1] at -10 C
@var secperday: Seconds per day [s]
@var boltzmann: Boltzmann constant [J K-1].
The Boltzmann constant (k or kB) is the physical constant relating energy
at the particle level with temperature observed at the bulk level.
It is the gas constant R divided by the Avogadro constant NA: k = \frac{R}{N_{\rm A}}\,
It has the same units as entropy.
@var boltzmann_eV: Boltzmann constant [eV K-1]
@author: kmu
@since: 25. mai 2010
'''
# gravitational acceleration [m s-2]
a_gravity = 9.81
# Viscosity of snow at T=0C and density=0 [N s m-2 = kg m-1 s-1]
eta0 = 3.6e6
# Density of air [kg m-3], dry air at 0 C and 100 kPa
rho_air = 1.2754
# Density of water [kg m-3]
rho_water = 1000.0
# Density of ice [kg m-3]
rho_ice = 916.0
# Thermal conductivity of ice [W m-1 K-1]
k_ice0 = 2.22 # at 0 C
k_ice10 = 2.30 # at -10 C
# Seconds per day [s]
secperday = 86400.0
# Boltzmann constant [J K-1]
# The Boltzmann constant (k or kB) is the physical constant relating energy
# at the particle level with temperature observed at the bulk level.
# It is the gas constant R divided by the Avogadro constant NA:
# k = \frac{R}{N_{\rm A}}\,
# It has the same units as entropy.
boltzmann = 1.380650424e-23
boltzmann_eV = 8.61734315e-5 # [eV K-1]
# Stefan-Boltzmann constant [W m-2 K-4]
stefan_boltzmann = 5.67040004e-8 | kmunve/pysenorge | pysenorge/constants.py | Python | gpl-3.0 | 1,844 | 0.005965 |
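# Illustrative check, not part of the original constants module: the eV-based
# Boltzmann constant follows from the J-based one via the elementary charge,
# k_eV = k_J / e. The elementary-charge value below is an added assumption.
if __name__ == "__main__":
    _elementary_charge = 1.602176487e-19 # [C]
    print("boltzmann / e = %.8e eV K-1" % (boltzmann / _elementary_charge))
    print("boltzmann_eV  = %.8e eV K-1" % boltzmann_eV)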
# Copyright 2014-2020 Scalyr Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Benchmarks which test JSON serialization and deserialization with various json libraries.
"""
from __future__ import absolute_import
import json
import six
import pytest
from scalyr_agent.util import set_json_lib
from scalyr_agent.util import get_json_lib
from scalyr_agent.util import json_encode
from scalyr_agent.util import json_decode
import scalyr_agent.util
from .utils import generate_random_dict
from .utils import read_bytes_from_log_fixture_file
# We cache some data to avoid loading it for each test. Keep in mind that the
# "setup" / loading phase is not included in the actual benchmark timing data.
CACHED_TEST_DATA = {
"encode": {},
"decode": {},
} # type: dict
@pytest.mark.parametrize("sort_keys", [False, True], ids=["no_sort_keys", "sort_keys"])
@pytest.mark.parametrize("keys_count", [10, 100, 1000])
@pytest.mark.parametrize("json_lib", ["json", "ujson", "orjson"])
@pytest.mark.benchmark(group="json_encode")
def test_json_encode_with_custom_options(benchmark, json_lib, keys_count, sort_keys):
# NOTE: orjson doesn't support sort_keys=True
if json_lib == "orjson":
if not six.PY3:
pytest.skip(
"Skipping under Python 2, orjson is only available for Python 3"
)
elif sort_keys is True:
pytest.skip("orjson doesn't support sort_keys=True")
set_json_lib(json_lib)
scalyr_agent.util.SORT_KEYS = sort_keys
data = generate_random_dict(keys_count=keys_count)
def run_benchmark():
return json_encode(data)
result = benchmark.pedantic(run_benchmark, iterations=50, rounds=100)
assert get_json_lib() == json_lib
assert scalyr_agent.util.SORT_KEYS == sort_keys
assert isinstance(result, six.text_type)
assert json_decode(result) == data
# fmt: off
@pytest.mark.parametrize("log_tuple",
[
("agent_debug_5_mb.log", 3 * 1024),
("agent_debug_5_mb.log", 500 * 1024),
],
ids=[
"agent_debug_log_3k",
"agent_debug_log_500k",
],
)
# fmt: on
@pytest.mark.parametrize("json_lib", ["json", "ujson", "orjson"])
@pytest.mark.benchmark(group="json_encode")
def test_json_encode(benchmark, json_lib, log_tuple):
if not six.PY3 and json_lib == "orjson":
pytest.skip("Skipping under Python 2, orjson is only available for Python 3")
return
_test_json_encode(benchmark, json_lib, log_tuple)
# fmt: off
@pytest.mark.parametrize("log_tuple",
[
("json_log_5_mb.log", 3 * 1024),
("json_log_5_mb.log", 500 * 1024),
],
ids=[
"json_log_3k",
"json_log_500k",
],
)
# fmt: on
@pytest.mark.parametrize("json_lib", ["json", "ujson", "orjson"])
@pytest.mark.benchmark(group="json_decode")
def test_json_decode(benchmark, json_lib, log_tuple):
if not six.PY3 and json_lib == "orjson":
pytest.skip("Skipping under Python 2, orjson is only available for Python 3")
return
_test_json_decode(benchmark, json_lib, log_tuple)
def _test_json_encode(benchmark, json_lib, log_tuple):
"""
:param json_lib: JSON library to use.
:param log_tuple: Tuple with (log_filename, log_bytes_to_use).
"""
set_json_lib(json_lib)
file_name, bytes_to_read = log_tuple
if log_tuple not in CACHED_TEST_DATA["encode"]:
data = read_bytes_from_log_fixture_file(file_name, bytes_to_read)
data = six.ensure_text(data)
CACHED_TEST_DATA["encode"][log_tuple] = data
data = CACHED_TEST_DATA["encode"][log_tuple]
def run_benchmark():
return json_encode(data)
result = benchmark.pedantic(run_benchmark, iterations=20, rounds=50)
assert get_json_lib() == json_lib
assert isinstance(result, six.text_type)
# assert json.dumps(data) == result
def _test_json_decode(benchmark, json_lib, log_tuple):
"""
:param json_lib: JSON library to use.
:param log_tuple: Tuple with (log_filename, log_bytes_to_use).
"""
set_json_lib(json_lib)
file_name, bytes_to_read = log_tuple
if log_tuple not in CACHED_TEST_DATA["decode"]:
data = read_bytes_from_log_fixture_file(file_name, bytes_to_read).strip()
obj = {"lines": []}
for line in data.split(b"\n"):
line_decoded = json.loads(six.ensure_text(line))
obj["lines"].append(line_decoded)
data = json.dumps(obj)
CACHED_TEST_DATA["decode"][log_tuple] = six.ensure_text(data)
data = CACHED_TEST_DATA["decode"][log_tuple]
def run_benchmark():
return json_decode(data)
result = benchmark.pedantic(run_benchmark, iterations=20, rounds=50)
assert get_json_lib() == json_lib
assert isinstance(result, dict)
# assert json.loads(result) == data
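# Illustrative invocation (not part of the original module), assuming the
# pytest-benchmark plugin used by the 'benchmark' fixture above is installed:
#
#   pytest benchmarks/micro/test_json_serialization.py --benchmark-only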
| imron/scalyr-agent-2 | benchmarks/micro/test_json_serialization.py | Python | apache-2.0 | 5,318 | 0.00188 |
""".. Ignore pydocstyle D400.
==================
Storage Management
==================
.. automodule:: resolwe.storage.management.commands.run_storage_manager
:members:
"""
| genialis/resolwe | resolwe/storage/management/__init__.py | Python | apache-2.0 | 180 | 0 |
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class PyLrudict(PythonPackage):
""" A fast LRU cache"""
homepage = "https://github.com/amitdev/lru-dict"
url = "https://pypi.io/packages/source/l/lru-dict/lru-dict-1.1.6.tar.gz"
version('1.1.6', 'b33f54f1257ab541f4df4bacc7509f5a')
depends_on('python@2.7:')
depends_on('py-setuptools', type=('build'))
| krafczyk/spack | var/spack/repos/builtin/packages/py-lrudict/package.py | Python | lgpl-2.1 | 1,591 | 0.001257 |
"""Sections variable used for grouping Gjoneska 2015 GO IDs."""
__copyright__ = "Copyright (C) 2016-2018, DV Klopfenstein, H Tang. All rights reserved."
__author__ = "DV Klopfenstein"
SECTIONS = [ # 18 sections
("immune", [ # 15 GO-headers
"GO:0002376", # BP 564 L01 D01 M immune system process
"GO:0002682", # BP 1,183 L02 D02 AB regulation of immune system process
"GO:0030155", # BP 246 L02 D02 AB regulation of cell adhesion
"GO:0006955", # BP 100 L02 D02 GM immune response
"GO:0001817", # BP 476 L03 D03 AB regulation of cytokine production
"GO:0001775", # BP 162 L03 D03 CD cell activation
"GO:0001816", # BP 110 L03 D03 DK cytokine production
"GO:1903037", # BP 155 L04 D04 AB regulation of leukocyte cell-cell adhesion
"GO:0034097", # BP 59 L04 D04 G response to cytokine
"GO:0006954", # BP 25 L04 D04 G inflammatory response
"GO:0045087", # BP 25 L03 D04 GM innate immune response
"GO:0002521", # BP 72 L05 D05 CDF leukocyte differentiation
"GO:0007229", # BP 0 L05 D05 AB integrin-mediated signaling pathway
"GO:0050900", # BP 57 L02 D06 CDMN leukocyte migration
"GO:0042130", # BP 9 L07 D08 AB negative regulation of T cell proliferation
#"GO:0002252", # BP 138 L02 D02 L immune effector process
]),
("viral/bacteria", [ # 4 GO-headers
"GO:0016032", # BP 301 L03 D04 CJ viral process
"GO:0050792", # BP 119 L03 D04 AB regulation of viral process
"GO:0098542", # BP 37 L03 D05 GJ defense response to other organism
"GO:0009617", # BP 12 L03 D05 GJ response to bacterium
]),
("neuro", [ # 25 GO-headers
"GO:0099531", # BP 32 L01 D01 U presynaptic process in chemical synaptic Xmission
"GO:0042391", # BP 117 L03 D03 A regulation of membrane potential
"GO:0050877", # BP 96 L03 D03 K neurological system process
"GO:0050808", # BP 20 L03 D03 CDI synapse organization
"GO:0007272", # BP 13 L03 D03 CD ensheathment of neurons
"GO:0051960", # BP 236 L04 D04 AB regulation of nervous system development
"GO:0050804", # BP 120 L03 D04 AB modulation of synaptic transmission
"GO:0097485", # BP 34 L04 D04 CD neuron projection guidance
"GO:0031644", # BP 30 L04 D04 AB regulation of neurological system process
"GO:0031175", # BP 14 L04 D04 CDI neuron projection development
"GO:0035418", # BP 14 L04 D04 H protein localization to synapse
"GO:0007399", # BP 0 L04 D04 F nervous system development
"GO:0050767", # BP 192 L05 D05 AB regulation of neurogenesis
"GO:0030182", # BP 71 L05 D05 CDF neuron differentiation
"GO:0099536", # BP 40 L04 D05 CDR synaptic signaling
"GO:0048666", # BP 29 L04 D05 CDF neuron development
"GO:0010001", # BP 17 L05 D05 CDF glial cell differentiation
"GO:0051969", # BP 5 L03 D05 AB regulation of transmission of nerve impulse
"GO:0022008", # BP 3 L05 D05 CDF neurogenesis
"GO:0007158", # BP 0 L04 D05 DP neuron cell-cell adhesion
"GO:0014002", # BP 1 L05 D06 CDF astrocyte development
"GO:0048812", # BP 27 L05 D07 CDFI neuron projection morphogenesis
"GO:0048667", # BP 6 L06 D07 CDFI cell morphogenesis involved in neuron differen.
"GO:0072578", # BP 5 L05 D07 CDHI neurotransmitter-gated ion channel clustering
"GO:0007409", # BP 23 L06 D08 CDFI axonogenesis
]),
("cell death", [ # 6 GO-headers
"GO:0010941", # BP 316 L03 D03 AB regulation of cell death
"GO:0008219", # BP 104 L03 D03 CD cell death
"GO:0060548", # BP 103 L04 D04 AB negative regulation of cell death
"GO:0097190", # BP 22 L04 D04 AB apoptotic signaling pathway
"GO:0097527", # BP 0 L04 D04 AB necroptotic signaling pathway
"GO:0008637", # BP 7 L05 D05 CI apoptotic mitochondrial changes
]),
("lipid", [ # 7 GO-headers
"GO:0006629", # BP 623 L03 D03 DE lipid metabolic process
"GO:0019216", # BP 243 L04 D04 AB regulation of lipid metabolic process
"GO:0032368", # BP 130 L04 D04 AB regulation of lipid transport
"GO:0033993", # BP 112 L04 D04 G response to lipid
"GO:0006869", # BP 93 L04 D05 DH lipid transport
"GO:0055088", # BP 10 L05 D05 A lipid homeostasis
"GO:0042158", # BP 3 L05 D06 CE lipoprotein biosynthetic process
]),
("adhesion", [ # 3 GO-headers
"GO:0022610", # BP 194 L01 D01 P biological adhesion
"GO:0030155", # BP 246 L02 D02 AB regulation of cell adhesion
"GO:0007155", # BP 165 L02 D02 P cell adhesion
]),
("cell cycle", [ # 9 GO-headers
"GO:0022402", # BP 463 L02 D02 C cell cycle process
"GO:0022403", # BP 46 L02 D02 S cell cycle phase
"GO:0051726", # BP 411 L03 D03 AB regulation of cell cycle
"GO:0051301", # BP 54 L03 D03 CD cell division
"GO:0007049", # BP 12 L03 D03 CD cell cycle
"GO:0070192", # BP 17 L03 D05 CIL chromosome organization in meiotic cell cycle
"GO:0007051", # BP 19 L03 D06 CDI spindle organization
"GO:0007067", # BP 1 L04 D06 CI mitotic nuclear division
"GO:0030071", # BP 11 L06 D09 AB regulation of mitotic metaphase/anaphase transition
]),
("chromosome", [ # 9 GO-headers
"GO:0032259", # BP 119 L02 D02 E methylation
"GO:0051983", # BP 108 L03 D03 AB regulation of chromosome segregation
"GO:0007059", # BP 11 L03 D03 CD chromosome segregation
"GO:0006325", # BP 184 L04 D04 CI chromatin organization
"GO:0051276", # BP 107 L04 D04 CI chromosome organization
"GO:0032204", # BP 29 L03 D06 AB regulation of telomere maintenance
"GO:0034502", # BP 21 L06 D06 H protein localization to chromosome
"GO:0031497", # BP 11 L05 D06 CI chromatin assembly
"GO:0006334", # BP 3 L06 D07 CI nucleosome assembly
]),
("development", [ # 10 GO-headers
"GO:0032502", # BP 3,173 L01 D01 F developmental process
"GO:0022414", # BP 847 L01 D01 L reproductive process
"GO:0050793", # BP 1,881 L02 D02 AB regulation of developmental process
"GO:0048856", # BP 1,016 L02 D02 F anatomical structure development
"GO:0048646", # BP 331 L02 D02 F anatomical structure formation in morphogenesis
"GO:0007568", # BP 18 L03 D03 DF aging
"GO:0022604", # BP 129 L04 D04 AB regulation of cell morphogenesis
"GO:0000902", # BP 65 L04 D05 CDFI cell morphogenesis
"GO:0045765", # BP 14 L04 D05 AB regulation of angiogenesis
]),
("extracellular matrix", [ # 1 GO-headers
"GO:0030198", # BP 27 L04 D04 CDI extracellular matrix organization
]),
("ion", [ # 3 GO-headers
"GO:0006811", # BP 422 L04 D04 H ion transport
"GO:0055085", # BP 330 L04 D04 H transmembrane transport
"GO:0006874", # BP 33 L08 D09 ACD cellular calcium ion homeostasis
]),
("localization", [ # 3 GO-headers
"GO:0051179", # BP 2,142 L01 D01 H localization
"GO:0040011", # BP 394 L01 D01 N locomotion
"GO:0032879", # BP 1,682 L02 D02 AB regulation of localization
]),
("membrane", [ # 1 GO-headers
"GO:0061024", # BP 273 L03 D03 CI membrane organization
]),
("metabolic", [ # 7 GO-headers
"GO:0008152", # BP 6,418 L01 D01 E metabolic process
"GO:0019222", # BP 3,243 L02 D02 AB regulation of metabolic process
"GO:0009056", # BP 1,369 L02 D02 E catabolic process
"GO:0044281", # BP 2,139 L03 D03 DE small molecule metabolic process
"GO:0050790", # BP 620 L03 D03 A regulation of catalytic activity
"GO:0051186", # BP 373 L03 D03 CE cofactor metabolic process
"GO:0006259", # BP 300 L04 D06 CE DNA metabolic process
]),
("phosphorylation", [ # 3 GO-headers
"GO:0006793", # BP 798 L03 D03 CE phosphorus metabolic process
"GO:0016310", # BP 138 L05 D05 CE phosphorylation
"GO:0006468", # BP 97 L06 D07 CE protein phosphorylation
]),
("signaling", [ # 4 GO-headers
"GO:0023052", # BP 116 L01 D01 R signaling
"GO:0023051", # BP 1,364 L02 D02 AB regulation of signaling
"GO:0007165", # BP 717 L03 D03 AB signal transduction
"GO:0007267", # BP 99 L03 D04 CDR cell-cell signaling
]),
("stimulus", [ # 4 GO-headers
"GO:0050896", # BP 2,218 L01 D01 G response to stimulus
"GO:0048583", # BP 2,377 L02 D02 AB regulation of response to stimulus
"GO:0006950", # BP 492 L02 D02 G response to stress
"GO:0080134", # BP 940 L03 D03 AB regulation of response to stress
]),
("prolif_differ", [ # 3 GO-headers
"GO:0008283", # BP 158 L02 D02 D cell proliferation
"GO:0030154", # BP 494 L04 D04 CDF cell differentiation
"GO:0045595", # BP 828 L03 D03 AB regulation of cell differentiation
"GO:0042127", # BP 268 L03 D03 AB regulation of cell proliferation
]),
]
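# Illustrative only (not part of the original module): a minimal sketch of how
# the (section name, GO header IDs) pairs above can be walked.
if __name__ == "__main__":
    for _section_name, _goids in SECTIONS:
        print("{S:22} {N:3} GO header IDs".format(S=_section_name, N=len(_goids)))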
# Copyright (C) 2016-2018, DV Klopfenstein, H Tang. All rights reserved.
| tanghaibao/goatools | goatools/test_data/sections/gjoneska_pfenning.py | Python | bsd-2-clause | 9,848 | 0.017465 |
# This file is autogenerated. Do not edit it manually.
# If you want change the content of this file, edit
#
# spec/fixtures/responses/whois.biz/status_available
#
# and regenerate the tests with the following script
#
# $ scripts/generate_tests.py
#
from nose.tools import *
from dateutil.parser import parse as time_parse
import yawhois
class TestWhoisBizStatusAvailable(object):
def setUp(self):
fixture_path = "spec/fixtures/responses/whois.biz/status_available.txt"
host = "whois.biz"
part = yawhois.record.Part(open(fixture_path, "r").read(), host)
self.record = yawhois.record.Record(None, [part])
def test_status(self):
eq_(self.record.status, None)
def test_available(self):
eq_(self.record.available, True)
def test_domain(self):
eq_(self.record.domain, "u34jedzcq.biz")
def test_nameservers(self):
eq_(self.record.nameservers.__class__.__name__, 'list')
eq_(self.record.nameservers, [])
def test_admin_contacts(self):
eq_(self.record.admin_contacts.__class__.__name__, 'list')
eq_(self.record.admin_contacts, [])
def test_registered(self):
eq_(self.record.registered, False)
def test_created_on(self):
eq_(self.record.created_on, None)
def test_registrar(self):
eq_(self.record.registrar, None)
def test_registrant_contacts(self):
eq_(self.record.registrant_contacts.__class__.__name__, 'list')
eq_(self.record.registrant_contacts, [])
def test_technical_contacts(self):
eq_(self.record.technical_contacts.__class__.__name__, 'list')
eq_(self.record.technical_contacts, [])
def test_updated_on(self):
eq_(self.record.updated_on, None)
def test_domain_id(self):
eq_(self.record.domain_id, None)
def test_expires_on(self):
eq_(self.record.expires_on, None)
| huyphan/pyyawhois | test/record/parser/test_response_whois_biz_status_available.py | Python | mit | 1,928 | 0.002593 |
#!/usr/bin/env python3
# See [1] https://pubs.acs.org/doi/pdf/10.1021/j100247a015
# Banerjee, 1985
# [2] https://aip.scitation.org/doi/abs/10.1063/1.2104507
# Heyden, 2005
# [3] https://onlinelibrary.wiley.com/doi/abs/10.1002/jcc.540070402
# Baker, 1985
# [4] 10.1007/s002140050387
# Bofill, 1998, Restricted-Step-RFO
# [5] https://link.springer.com/article/10.1007/s00214-016-1847-3
# Birkholz, 2016
import numpy as np
from pysisyphus.optimizers.HessianOptimizer import HessianOptimizer
class RSRFOptimizer(HessianOptimizer):
"""Optimizer to find first-order saddle points."""
rfo_dict = {
"min": (0, "min"),
"max": (-1, "max"),
}
def __init__(self, geometry, max_micro_cycles=50, **kwargs):
super().__init__(geometry, **kwargs)
self.max_micro_cycles = int(max_micro_cycles)
assert max_micro_cycles >= 1
self.alpha0 = 1
self.alpha_max = 1e8
def solve_rfo(self, rfo_mat, kind="min"):
        # Use eig instead of eigh here: the unscaled RFO matrix may be
        # symmetric, but the scaled ones aren't anymore, so a symmetric
        # eigensolver is not generally applicable.
eigenvalues, eigenvectors = np.linalg.eig(rfo_mat)
eigenvalues = eigenvalues.real
eigenvectors = eigenvectors.real
sorted_inds = np.argsort(eigenvalues)
        # Depending on whether we want to minimize (maximize) along
# the mode(s) in the rfo mat we have to select the smallest
# (biggest) eigenvalue and corresponding eigenvector.
first_or_last, verbose = self.rfo_dict[kind]
ind = sorted_inds[first_or_last]
# Given sorted eigenvalue-indices (sorted_inds) use the first
# (smallest eigenvalue) or the last (largest eigenvalue) index.
step_nu = eigenvectors.T[ind]
nu = step_nu[-1]
self.log(f"nu_{verbose}={nu:.4e}")
# Scale eigenvector so that its last element equals 1. The
        # final step is the scaled eigenvector without the last element.
step = step_nu[:-1] / nu
eigval = eigenvalues[ind]
self.log(f"eigenvalue_{verbose}={eigval:.4e}")
return step, eigval, nu
def optimize(self):
forces = self.geometry.forces
self.forces.append(forces)
self.energies.append(self.geometry.energy)
if self.cur_cycle > 0:
self.update_trust_radius()
self.update_hessian()
H = self.H
if self.geometry.internal:
H = self.geometry.internal.project_hessian(self.H)
eigvals, eigvecs = np.linalg.eigh(H)
# Transform to eigensystem of hessian
forces_trans = eigvecs.T.dot(forces)
# Minimize energy along all modes
min_mat = np.asarray(np.bmat((
(np.diag(eigvals), -forces_trans[:,None]),
(-forces_trans[None,:], [[0]])
)))
alpha = self.alpha0
min_diag_indices = np.diag_indices(eigvals.size)
for mu in range(self.max_micro_cycles):
assert alpha > 0, "alpha should not be negative"
self.log(f"RS-RFO micro cycle {mu:02d}, alpha={alpha:.6f}")
# We only have to update one eigenvalue
min_mat_scaled = min_mat.copy()
min_mat_scaled[min_diag_indices] /= alpha
min_mat_scaled[:-1,-1] /= alpha
rfo_step, eigval_min, nu_min = self.solve_rfo(min_mat_scaled, "min")
# As of Eq. (8a) of [4] max_eigval and min_eigval also
# correspond to:
# eigval_min_ = -forces_trans.dot(rfo_step)
# np.testing.assert_allclose(eigval_min, eigval_min_)
# Create the full PRFO step
rfo_norm = np.linalg.norm(rfo_step)
self.log(f"rfo_norm={rfo_norm:.6f}")
inside_trust = rfo_norm < self.trust_radius + 1e-3
if inside_trust:
self.log("step is inside trust radius. breaking.")
break
elif alpha > self.alpha_max:
print("alpha > alpha_max. breaking.")
break
# Derivative of the squared step w.r.t. alpha
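            # d|step|^2/dalpha = 2*eigval_min / (1 + |step|^2 * alpha)
            #                    * sum(F_i^2 / (eigval_i - eigval_min*alpha)^3)
            # (closed form evaluated below; cf. the step-restriction scheme in [4])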
tval = 2*eigval_min/(1+rfo_norm**2 * alpha)
numer = forces_trans**2
denom = (eigvals - eigval_min * alpha)**3
quot = np.sum(numer / denom)
self.log(f"quot={quot:.6f}")
dstep2_dalpha = (2*eigval_min/(1+rfo_norm**2 * alpha)
* np.sum(forces_trans**2
/ ((eigvals - eigval_min * alpha)**3)
)
)
self.log(f"analytic deriv.={dstep2_dalpha:.6f}")
# Update alpha
alpha_step = (2*(self.trust_radius*rfo_norm - rfo_norm**2)
/ dstep2_dalpha
)
self.log(f"alpha_step={alpha_step:.4f}")
alpha += alpha_step
self.log("")
# Right now the step is still given in the Hessians eigensystem. We
# transform it back now.
step = eigvecs.dot(rfo_step)
step_norm = np.linalg.norm(step)
# This would correspond to "pure" RFO without the iterative
# step-restriction. Here we will just scale down the step, if it
# is too big.
if self.max_micro_cycles == 1 and step_norm > self.trust_radius:
self.log("Scaled down step")
step = step / step_norm * self.trust_radius
step_norm = np.linalg.norm(step)
self.log(f"norm(step)={np.linalg.norm(step):.6f}")
# Calculating the energy change from eigval_min and nu_min seems to give
# big problems.
# predicted_energy_change = 1/2 * eigval_min / nu_min**2
predicted_change = step.dot(-forces) + 0.5 * step.dot(self.H).dot(step)
self.predicted_energy_changes.append(predicted_change)
self.log("")
return step
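# ---------------------------------------------------------------------------
# Illustrative only -- not part of the original optimizer. A minimal sketch of
# the unscaled RFO step described in [1]/[3]: augment the Hessian with the
# gradient, diagonalize, and rescale the eigenvector of the lowest eigenvalue
# so its last component equals 1. The toy Hessian and gradient are made up.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    H_toy = np.diag((0.5, 2.0))      # toy model Hessian (already diagonal)
    g_toy = np.array((0.3, -0.4))    # toy gradient (= -forces)
    rfo_mat_toy = np.block([[H_toy, g_toy[:, None]],
                            [g_toy[None, :], np.zeros((1, 1))]])
    w_toy, v_toy = np.linalg.eigh(rfo_mat_toy)
    step_nu_toy = v_toy[:, 0]        # eigenvector of the smallest eigenvalue
    step_toy = step_nu_toy[:-1] / step_nu_toy[-1]
    print("toy RFO step:", step_toy)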
| eljost/pysisyphus | deprecated/optimizers/RSRFOptimizer.py | Python | gpl-3.0 | 5,961 | 0.001342 |
from time import sleep
class TestPyTest:
def testOne(self):
sleep(1.5) # To check duration
assert 4 == 2*2
def testTwo(self):
assert True
def testThree():
assert 4 == 2*2
| paplorinc/intellij-community | python/testData/testRunner/env/pytest/test1.py | Python | apache-2.0 | 209 | 0.014354 |
"""Facebook platform for notify component."""
import json
import logging
from aiohttp.hdrs import CONTENT_TYPE
import requests
import voluptuous as vol
from homeassistant.const import CONTENT_TYPE_JSON
import homeassistant.helpers.config_validation as cv
from homeassistant.components.notify import (
ATTR_DATA,
ATTR_TARGET,
PLATFORM_SCHEMA,
BaseNotificationService,
)
_LOGGER = logging.getLogger(__name__)
CONF_PAGE_ACCESS_TOKEN = "page_access_token"
BASE_URL = "https://graph.facebook.com/v2.6/me/messages"
CREATE_BROADCAST_URL = "https://graph.facebook.com/v2.11/me/message_creatives"
SEND_BROADCAST_URL = "https://graph.facebook.com/v2.11/me/broadcast_messages"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Required(CONF_PAGE_ACCESS_TOKEN): cv.string}
)
def get_service(hass, config, discovery_info=None):
"""Get the Facebook notification service."""
return FacebookNotificationService(config[CONF_PAGE_ACCESS_TOKEN])
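# Illustrative configuration.yaml entry (not part of this module); the accepted
# keys are defined by PLATFORM_SCHEMA above:
#
#   notify:
#     - platform: facebook
#       page_access_token: YOUR_PAGE_ACCESS_TOKEN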
class FacebookNotificationService(BaseNotificationService):
"""Implementation of a notification service for the Facebook service."""
def __init__(self, access_token):
"""Initialize the service."""
self.page_access_token = access_token
def send_message(self, message="", **kwargs):
"""Send some message."""
payload = {"access_token": self.page_access_token}
targets = kwargs.get(ATTR_TARGET)
data = kwargs.get(ATTR_DATA)
body_message = {"text": message}
if data is not None:
body_message.update(data)
# Only one of text or attachment can be specified
if "attachment" in body_message:
body_message.pop("text")
if not targets:
_LOGGER.error("At least 1 target is required")
return
# broadcast message
if targets[0].lower() == "broadcast":
broadcast_create_body = {"messages": [body_message]}
_LOGGER.debug("Broadcast body %s : ", broadcast_create_body)
resp = requests.post(
CREATE_BROADCAST_URL,
data=json.dumps(broadcast_create_body),
params=payload,
headers={CONTENT_TYPE: CONTENT_TYPE_JSON},
timeout=10,
)
_LOGGER.debug("FB Messager broadcast id %s : ", resp.json())
# at this point we get broadcast id
broadcast_body = {
"message_creative_id": resp.json().get("message_creative_id"),
"notification_type": "REGULAR",
}
resp = requests.post(
SEND_BROADCAST_URL,
data=json.dumps(broadcast_body),
params=payload,
headers={CONTENT_TYPE: CONTENT_TYPE_JSON},
timeout=10,
)
if resp.status_code != 200:
log_error(resp)
# non-broadcast message
else:
for target in targets:
# If the target starts with a "+", it's a phone number,
# otherwise it's a user id.
if target.startswith("+"):
recipient = {"phone_number": target}
else:
recipient = {"id": target}
body = {"recipient": recipient, "message": body_message}
resp = requests.post(
BASE_URL,
data=json.dumps(body),
params=payload,
headers={CONTENT_TYPE: CONTENT_TYPE_JSON},
timeout=10,
)
if resp.status_code != 200:
log_error(resp)
def log_error(response):
"""Log error message."""
obj = response.json()
error_message = obj["error"]["message"]
error_code = obj["error"]["code"]
_LOGGER.error(
"Error %s : %s (Code %s)", response.status_code, error_message, error_code
)
| fbradyirl/home-assistant | homeassistant/components/facebook/notify.py | Python | apache-2.0 | 3,953 | 0.000253 |
from __future__ import unicode_literals
from djblets.webapi.errors import WebAPIError
class WebAPITokenGenerationError(Exception):
"""An error generating a Web API token."""
pass
#
# Standard error messages
#
UNSPECIFIED_DIFF_REVISION = WebAPIError(
200,
'Diff revision not specified.',
http_status=400) # 400 Bad Request
INVALID_DIFF_REVISION = WebAPIError(
201,
'Invalid diff revision.',
http_status=404) # 404 Not Found
INVALID_ACTION = WebAPIError(
202,
'Invalid action specified.',
http_status=400) # 400 Bad Request
INVALID_CHANGE_NUMBER = WebAPIError(
203,
'The commit ID specified could not be found.',
http_status=404) # 404 Not Found
CHANGE_NUMBER_IN_USE = WebAPIError(
204,
'The commit ID specified has already been used.',
http_status=409) # 409 Conflict
MISSING_REPOSITORY = WebAPIError(
205,
'There was no repository found at the specified path.',
http_status=400) # 400 Bad Request
INVALID_REPOSITORY = WebAPIError(
206,
'The repository path specified is not in the list of known repositories.',
http_status=400) # 400 Bad Request
REPO_FILE_NOT_FOUND = WebAPIError(
207,
'The file was not found in the repository.',
http_status=400) # 400 Bad Request
INVALID_USER = WebAPIError(
208,
'User does not exist.',
http_status=400) # 400 Bad Request
REPO_NOT_IMPLEMENTED = WebAPIError(
209,
'The specified repository is not able to perform this action.',
http_status=501) # 501 Not Implemented
REPO_INFO_ERROR = WebAPIError(
210,
'There was an error fetching extended information for this repository.',
http_status=500) # 500 Internal Server Error
NOTHING_TO_PUBLISH = WebAPIError(
211,
'You attempted to publish a review request without any modifications.',
http_status=400) # 400 Bad Request
EMPTY_CHANGESET = WebAPIError(
212,
'The commit ID specified represents an empty changeset.',
http_status=400) # 400 Bad Request
SERVER_CONFIG_ERROR = WebAPIError(
213,
'There was an error storing configuration on the server.',
http_status=500) # 500 Internal Server Error
BAD_HOST_KEY = WebAPIError(
214,
    'The SSH key on the host does not match the stored key.',
http_status=403) # 403 Forbidden
UNVERIFIED_HOST_KEY = WebAPIError(
215,
'The SSH key on the host is unverified.',
http_status=403) # 403 Forbidden
UNVERIFIED_HOST_CERT = WebAPIError(
216,
'The HTTPS certificate on the host is unverified.',
http_status=403) # 403 Forbidden
MISSING_USER_KEY = WebAPIError(
217,
'A public SSH key was requested, but no SSH key was available to send.',
http_status=403) # 403 Forbidden
REPO_AUTHENTICATION_ERROR = WebAPIError(
218,
'Unable to authenticate with the repository using the provided '
'credentials.',
http_status=403) # 403 Forbidden
DIFF_EMPTY = WebAPIError(
219,
'The specified diff file is empty.',
http_status=400) # 400 Bad Request
DIFF_TOO_BIG = WebAPIError(
220,
'The specified diff file is too large.',
http_status=400) # 400 Bad Request
FILE_RETRIEVAL_ERROR = WebAPIError(
221,
'There was an error fetching a source file.',
http_status=500) # 500 Internal Server Error
HOSTINGSVC_AUTH_ERROR = WebAPIError(
222,
'There was an error authorizing with a service.',
http_status=403) # 403 Forbidden
GROUP_ALREADY_EXISTS = WebAPIError(
223,
'A group with this name already exists.',
http_status=409) # 409 Conflict
DIFF_PARSE_ERROR = WebAPIError(
224,
'The specified diff file could not be parsed.',
http_status=400) # 400 Bad Request
PUBLISH_ERROR = WebAPIError(
225,
'An error occurred during publishing.',
http_status=500) # 500 Internal Server Error
USER_QUERY_ERROR = WebAPIError(
226,
'An error occurred querying the user list.',
http_status=500) # 500 Internal Server Error
COMMIT_ID_ALREADY_EXISTS = WebAPIError(
227,
'Review request with this commit ID already exists in the repository.',
http_status=409) # 409 Conflict
TOKEN_GENERATION_FAILED = WebAPIError(
228,
'There was an error generating the API token. Please try again.',
http_status=500) # 500 Internal Server Error.
REPOSITORY_ALREADY_EXISTS = WebAPIError(
229,
'A repository with this name already exists.',
http_status=409) # 409 Conflict
CLOSE_ERROR = WebAPIError(
230,
'An error occurred while closing the review request.',
http_status=500) # 500 Internal Server Error
REOPEN_ERROR = WebAPIError(
231,
'An error occurred while reopening the review request.',
http_status=500) # 500 Internal Server Error
| custode/reviewboard | reviewboard/webapi/errors.py | Python | mit | 4,753 | 0 |
from t_core.composer import Composer
from t_core.matcher import Matcher
from t_core.iterator import Iterator
from t_core.rewriter import Rewriter
from t_core.resolver import Resolver
class ARule(Composer):
'''
Applies the transformation on one match.
'''
def __init__(self, LHS, RHS):
'''
Applies the transformation on one match.
@param LHS: The pre-condition pattern (LHS + NACs).
@param RHS: The post-condition pattern (RHS).
'''
super(ARule, self).__init__()
self.M = Matcher(condition=LHS, max=1)
self.I = Iterator(max_iterations=1)
self.W = Rewriter(condition=RHS)
def packet_in(self, packet):
self.exception = None
self.is_success = False
# Match
packet = self.M.packet_in(packet)
if not self.M.is_success:
self.exception = self.M.exception
return packet
# Choose the only match
packet = self.I.packet_in(packet)
if not self.I.is_success:
self.exception = self.I.exception
return packet
# Rewrite
packet = self.W.packet_in(packet)
if not self.W.is_success:
self.exception = self.W.exception
return packet
# Output success packet
self.is_success = True
return packet
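# Illustrative use (not part of the original module); LHS and RHS are assumed
# to be pre-built pre-/post-condition patterns as expected by Matcher/Rewriter:
#
#   rule = ARule(LHS, RHS)
#   packet = rule.packet_in(packet)
#   if not rule.is_success:
#       handle(rule.exception)   # 'handle' is a placeholder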
class ARule_r(ARule):
'''
Applies the transformation on one match.
'''
def __init__(self, LHS, RHS, external_matches_only=False, custom_resolution=lambda packet: False):
'''
Applies the transformation on one match.
@param LHS: The pre-condition pattern (LHS + NACs).
@param RHS: The post-condition pattern (RHS).
@param external_matches_only: Resolve conflicts ignoring the matches found in this ARule.
@param custom_resolution: Override the default resolution function.
'''
        super(ARule_r, self).__init__(LHS, RHS)
self.R = Resolver(external_matches_only=external_matches_only,
custom_resolution=custom_resolution)
def packet_in(self, packet):
packet = super(ARule_r, self).packet_in(packet)
# is_success is True
if self.exception is None:
# Resolve any conflicts if necessary
packet = self.R.packet_in(packet)
if not self.R.is_success:
self.exception = self.R.exception
return packet
# Output success packet
else:
self.is_success = False
return packet
| levilucio/SyVOLT | t_core/tc_python/arule.py | Python | mit | 2,673 | 0.001871 |
# Copyright 2004-2012 Tom Rothamel <pytom@bishoujo.us>
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import renpy.display
from renpy.display.render import render, Render, Matrix2D
# This file contains displayables that are image-like, because they take
# up a rectangular area of the screen, and do not respond to input.
class Solid(renpy.display.core.Displayable):
"""
:doc: disp_imagelike
    A displayable that fills the area it is assigned with `color`.
::
image white = Solid("#fff")
"""
def __init__(self, color, **properties):
super(Solid, self).__init__(**properties)
if color is not None:
self.color = renpy.easy.color(color)
else:
self.color = None
def visit(self):
return [ ]
def render(self, width, height, st, at):
color = self.color or self.style.color
rv = Render(width, height)
if color is None or width <= 0 or height <= 0:
return rv
SIZE = 10
if width < SIZE or height < SIZE:
tex = renpy.display.draw.solid_texture(width, height, color)
else:
tex = renpy.display.draw.solid_texture(SIZE, SIZE, color)
rv.forward = Matrix2D(1.0 * SIZE / width, 0, 0, 1.0 * SIZE / height)
rv.reverse = Matrix2D(1.0 * width / SIZE, 0, 0, 1.0 * height / SIZE)
rv.blit(tex, (0, 0))
return rv
class Frame(renpy.display.core.Displayable):
"""
:doc: disp_imagelike
    :args: (image, left, top, right=None, bottom=None, tile=False, **properties)
A displayable that resizes an image to fill the available area,
while preserving the width and height of its borders. is often
used as the background of a window or button.
.. figure:: frame_example.png
Using a frame to resize an image to double its size.
`image`
An image manipulator that will be resized by this frame.
`left`
The size of the border on the left side.
`top`
The size of the border on the top.
`right`
The size of the border on the right side. If None, defaults
to `left`.
`bottom`
        The size of the border on the bottom. If None, defaults to `top`.
`tile`
If true, tiling is used to resize sections of the image,
rather than scaling.
::
# Resize the background of the text window if it's too small.
init python:
style.window.background = Frame("frame.png", 10, 10)
"""
__version__ = 1
def after_upgrade(self, version):
if version < 2:
self.left = self.xborder
self.right = self.xborder
self.top = self.yborder
self.bottom = self.yborder
def __init__(self, image, left, top, right=None, bottom=None, bilinear=True, tile=False, **properties):
super(Frame, self).__init__(**properties)
self.image = renpy.easy.displayable(image)
self.tile = tile
if right is None:
right = left
if bottom is None:
bottom = top
self.left = left
self.top = top
self.right = right
self.bottom = bottom
def render(self, width, height, st, at):
crend = render(self.image, width, height, st, at)
sw, sh = crend.get_size()
sw = int(sw)
sh = int(sh)
dw = int(width)
dh = int(height)
bw = self.left + self.right
bh = self.top + self.bottom
xborder = min(bw, sw - 2, dw)
if xborder:
left = self.left * xborder / bw
right = self.right * xborder / bw
else:
left = 0
right = 0
yborder = min(bh, sh - 2, dh)
if yborder:
top = self.top * yborder / bh
bottom = self.bottom * yborder / bh
else:
top = 0
bottom = 0
if renpy.display.draw.info["renderer"] == "sw":
return self.sw_render(crend, dw, dh, left, top, right, bottom)
def draw(x0, x1, y0, y1):
# Compute the coordinates of the left, right, top, and
# bottom sides of the region, for both the source and
# destination surfaces.
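            # x0/y0 >= 0 are offsets from the left/top edge, while x1/y1 <= 0
            # are offsets from the right/bottom edge (0 meaning the far edge
            # itself), so e.g. draw(left, -right, 0, top) covers the stretchable
            # middle strip of the top border.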
# left side.
if x0 >= 0:
dx0 = x0
sx0 = x0
else:
dx0 = dw + x0
sx0 = sw + x0
# right side.
if x1 > 0:
dx1 = x1
sx1 = x1
else:
dx1 = dw + x1
sx1 = sw + x1
# top side.
if y0 >= 0:
dy0 = y0
sy0 = y0
else:
dy0 = dh + y0
sy0 = sh + y0
# bottom side
if y1 > 0:
dy1 = y1
sy1 = y1
else:
dy1 = dh + y1
sy1 = sh + y1
# Quick exit.
if sx0 == sx1 or sy0 == sy1:
return
# Compute sizes.
csw = sx1 - sx0
csh = sy1 - sy0
cdw = dx1 - dx0
cdh = dy1 - dy0
if csw <= 0 or csh <= 0 or cdh <= 0 or cdw <= 0:
return
# Get a subsurface.
cr = crend.subsurface((sx0, sy0, csw, csh))
# Scale or tile if we have to.
if csw != cdw or csh != cdh:
if self.tile:
newcr = Render(cdw, cdh)
newcr.clipping = True
for x in xrange(0, cdw, csw):
for y in xrange(0, cdh, csh):
newcr.blit(cr, (x, y))
cr = newcr
else:
newcr = Render(cdw, cdh)
newcr.forward = Matrix2D(1.0 * csw / cdw, 0, 0, 1.0 * csh / cdh)
newcr.reverse = Matrix2D(1.0 * cdw / csw, 0, 0, 1.0 * cdh / csh)
newcr.blit(cr, (0, 0))
cr = newcr
# Blit.
rv.blit(cr, (dx0, dy0))
return
rv = Render(dw, dh)
self.draw_pattern(draw, left, top, right, bottom)
return rv
def draw_pattern(self, draw, left, top, right, bottom):
# Top row.
if top:
if left:
draw(0, left, 0, top)
draw(left, -right, 0, top)
if right:
draw(-right, 0, 0, top)
# Middle row.
if left:
draw(0, left, top, -bottom)
draw(left, -right, top, -bottom)
if right:
draw(-right, 0, top, -bottom)
# Bottom row.
if bottom:
if left:
draw(0, left, -bottom, 0)
draw(left, -right, -bottom, 0)
if right:
draw(-right, 0, -bottom, 0)
def sw_render(self, crend, dw, dh, left, top, right, bottom):
source = crend.render_to_texture(True)
sw, sh = source.get_size()
dest = renpy.display.swdraw.surface(dw, dh, True)
rv = dest
def draw(x0, x1, y0, y1):
# Compute the coordinates of the left, right, top, and
# bottom sides of the region, for both the source and
# destination surfaces.
# left side.
if x0 >= 0:
dx0 = x0
sx0 = x0
else:
dx0 = dw + x0
sx0 = sw + x0
# right side.
if x1 > 0:
dx1 = x1
sx1 = x1
else:
dx1 = dw + x1
sx1 = sw + x1
# top side.
if y0 >= 0:
dy0 = y0
sy0 = y0
else:
dy0 = dh + y0
sy0 = sh + y0
# bottom side
if y1 > 0:
dy1 = y1
sy1 = y1
else:
dy1 = dh + y1
sy1 = sh + y1
# Quick exit.
if sx0 == sx1 or sy0 == sy1 or dx1 <= dx0 or dy1 <= dy0:
return
# Compute sizes.
srcsize = (sx1 - sx0, sy1 - sy0)
dstsize = (int(dx1 - dx0), int(dy1 - dy0))
# Get a subsurface.
surf = source.subsurface((sx0, sy0, srcsize[0], srcsize[1]))
# Scale or tile if we have to.
if dstsize != srcsize:
if self.tile:
tilew, tileh = srcsize
dstw, dsth = dstsize
surf2 = renpy.display.pgrender.surface_unscaled(dstsize, surf)
for y in range(0, dsth, tileh):
for x in range(0, dstw, tilew):
surf2.blit(surf, (x, y))
surf = surf2
else:
surf2 = renpy.display.scale.real_transform_scale(surf, dstsize)
surf = surf2
# Blit.
dest.blit(surf, (dx0, dy0))
self.draw_pattern(draw, left, top, right, bottom)
rrv = renpy.display.render.Render(dw, dh)
rrv.blit(rv, (0, 0))
rrv.depends_on(crend)
# And, finish up.
return rrv
def visit(self):
return [ self.image ]
| MSEMJEJME/Get-Dumped | renpy/display/imagelike.py | Python | gpl-2.0 | 10,715 | 0.004666 |
# Copyright 2013-2016 Jaap Karssenberg <jaap.karssenberg@gmail.com>
'''This module defines the L{main()} function for executing the zim
application. It also defines a number of command classes that implement
specific commandline commands and a singleton application object that
takes care of the process life cycle.
'''
# TODO:
# - implement weakvalue dict to ensure uniqueness of notebook objects
import os
import sys
import logging
import signal
logger = logging.getLogger('zim')
import zim
import zim.fs
import zim.errors
import zim.config
import zim.config.basedirs
from zim import __version__
from zim.utils import get_module, lookup_subclass
from zim.errors import Error
from zim.notebook import Notebook, Path, \
get_notebook_list, resolve_notebook, build_notebook
from zim.formats import get_format
from zim.config import ConfigManager
from zim.plugins import PluginManager
from .command import Command, GtkCommand, UsageError, GetoptError
from .ipc import dispatch as _ipc_dispatch
from .ipc import start_listening as _ipc_start_listening
class HelpCommand(Command):
'''Class implementing the C{--help} command'''
usagehelp = '''\
usage: zim [OPTIONS] [NOTEBOOK [PAGE]]
or: zim --server [OPTIONS] [NOTEBOOK]
or: zim --export [OPTIONS] NOTEBOOK [PAGE]
or: zim --search NOTEBOOK QUERY
or: zim --index NOTEBOOK
or: zim --plugin PLUGIN [ARGUMENTS]
or: zim --manual [OPTIONS] [PAGE]
or: zim --help
'''
optionhelp = '''\
General Options:
--gui run the editor (this is the default)
--server run the web server
--export export to a different format
--search run a search query on a notebook
--index build an index for a notebook
--plugin call a specific plugin function
--manual open the user manual
-V, --verbose print information to terminal
-D, --debug print debug messages
-v, --version print version and exit
-h, --help print this text
GUI Options:
--list show the list with notebooks instead of
opening the default notebook
--geometry window size and position as WxH+X+Y
--fullscreen start in fullscreen mode
--standalone start a single instance, no background process
Server Options:
--port port to use (defaults to 8080)
--template name of the template to use
--private serve only to localhost
--gui run the gui wrapper for the server
Export Options:
-o, --output output directory (mandatory option)
--format format to use (defaults to 'html')
--template name of the template to use
--root-url url to use for the document root
--index-page index page name
-r, --recursive when exporting a page, also export sub-pages
-s, --singlefile export all pages to a single output file
-O, --overwrite force overwriting existing file(s)
Search Options:
None
Index Options:
-f, --flush flush the index first and force re-building
Try 'zim --manual' for more help.
'''
def run(self):
print(self.usagehelp)
print(self.optionhelp) # TODO - generate from commands
class VersionCommand(Command):
'''Class implementing the C{--version} command'''
def run(self):
print('zim %s\n' % zim.__version__)
print(zim.__copyright__, '\n')
print(zim.__license__)
class NotebookLookupError(Error):
'''Error when failing to locate a notebook'''
description = _('Could not find the file or folder for this notebook')
# T: Error verbose description
class NotebookCommand(Command):
'''Base class for commands that act on a notebook'''
def get_default_or_only_notebook(self):
'''Helper to get a default notebook'''
notebooks = get_notebook_list()
if notebooks.default:
uri = notebooks.default.uri
elif len(notebooks) == 1:
uri = notebooks[0].uri
else:
return None
return resolve_notebook(uri, pwd=self.pwd) # None if not found
def get_notebook_argument(self):
'''Get the notebook and page arguments for this command
@returns: a 2-tuple of an L{NotebookInfo} object and an
optional L{Path} or C{(None, None)} if the notebook
argument is optional and not given
@raises NotebookLookupError: if the notebook is mandatory and
not given, or if it is given but could not be resolved
'''
assert self.arguments[0] in ('NOTEBOOK', '[NOTEBOOK]')
args = self.get_arguments()
notebook = args[0]
if notebook is None:
if self.arguments[0] == 'NOTEBOOK': # not optional
raise NotebookLookupError(_('Please specify a notebook'))
# T: Error when looking up a notebook
else:
return None, None
notebookinfo = resolve_notebook(notebook, pwd=self.pwd)
if not notebookinfo:
raise NotebookLookupError(_('Could not find notebook: %s') % notebook)
# T: error message
if len(self.arguments) > 1 \
and self.arguments[1] in ('PAGE', '[PAGE]') \
and args[1] is not None:
pagename = Path.makeValidPageName(args[1])
return notebookinfo, Path(pagename)
else:
return notebookinfo, None
def build_notebook(self, ensure_uptodate=True):
'''Get the L{Notebook} object for this command
Tries to automount the file location if needed.
@param ensure_uptodate: if C{True} index is updated when needed.
Only set to C{False} when index update is handled explicitly
(e.g. in the main gui).
@returns: a L{Notebook} object and a L{Path} object or C{None}
@raises NotebookLookupError: if the notebook could not be
resolved or is not given
@raises FileNotFoundError: if the notebook location does not
exist and could not be mounted.
'''
# Explicit page argument has priority over implicit from uri
# mounting is attempted by zim.notebook.build_notebook()
notebookinfo, page = self.get_notebook_argument() # can raise NotebookLookupError
if not notebookinfo:
raise NotebookLookupError(_('Please specify a notebook'))
notebook, uripage = build_notebook(notebookinfo) # can raise FileNotFound
if ensure_uptodate and not notebook.index.is_uptodate:
for info in notebook.index.update_iter():
#logger.info('Indexing %s', info)
pass # TODO meaningful info for above message
return notebook, page or uripage
class GuiCommand(NotebookCommand, GtkCommand):
'''Class implementing the C{--gui} command and run the gtk interface'''
arguments = ('[NOTEBOOK]', '[PAGE]')
options = (
('list', '', 'show the list with notebooks instead of\nopening the default notebook'),
('geometry=', '', 'window size and position as WxH+X+Y'),
('fullscreen', '', 'start in fullscreen mode'),
('standalone', '', 'start a single instance, no background process'),
)
def build_notebook(self, ensure_uptodate=False):
# Bit more complicated here due to options to use default and
# allow using notebookdialog to prompt
# Explicit page argument has priority over implicit from uri
# mounting is attempted by zim.notebook.build_notebook()
from zim.notebook import FileNotFoundError
def prompt_notebook_list():
import zim.gui.notebookdialog
return zim.gui.notebookdialog.prompt_notebook()
# Can return None if dialog is cancelled
used_default = False
page = None
if self.opts.get('list'):
notebookinfo = prompt_notebook_list()
else:
notebookinfo, page = self.get_notebook_argument()
if notebookinfo is None:
notebookinfo = self.get_default_or_only_notebook()
used_default = notebookinfo is not None
if notebookinfo is None:
notebookinfo = prompt_notebook_list()
if notebookinfo is None:
return None, None # Cancelled prompt
try:
notebook, uripage = build_notebook(notebookinfo) # can raise FileNotFound
except FileNotFoundError:
if used_default:
# Default notebook went missing? Fallback to dialog to allow changing it
notebookinfo = prompt_notebook_list()
if notebookinfo is None:
return None, None # Cancelled prompt
notebook, uripage = build_notebook(notebookinfo) # can raise FileNotFound
else:
raise
if ensure_uptodate and not notebook.index.is_uptodate:
for info in notebook.index.update_iter():
#logger.info('Indexing %s', info)
pass # TODO meaningful info for above message
return notebook, page or uripage
def run(self):
from gi.repository import Gtk
from zim.gui.mainwindow import MainWindow
windows = [
w for w in Gtk.Window.list_toplevels()
if isinstance(w, MainWindow)
]
notebook, page = self.build_notebook()
if notebook is None:
logger.debug('NotebookDialog cancelled - exit')
return
for window in windows:
if window.notebook.uri == notebook.uri:
self._present_window(window, page)
return window
else:
return self._run_new_window(notebook, page)
def _present_window(self, window, page):
window.present()
if page:
window.open_page(page)
geometry = self.opts.get('geometry', None)
if geometry is not None:
window.parse_geometry(geometry)
if self.opts.get('fullscreen', False):
window.toggle_fullscreen(True)
def _run_new_window(self, notebook, page):
from gi.repository import GObject
from zim.gui.mainwindow import MainWindow
pluginmanager = PluginManager()
preferences = ConfigManager.preferences['General']
preferences.setdefault('plugins_list_version', 'none')
if preferences['plugins_list_version'] != '0.70':
if not preferences['plugins']:
pluginmanager.load_plugins_from_preferences(
[ # Default plugins
'pageindex', 'pathbar', 'toolbar',
'insertsymbol', 'printtobrowser',
'versioncontrol', 'osx_menubar'
]
)
else:
# Upgrade version <0.70 where these were core functions
pluginmanager.load_plugins_from_preferences(['pageindex', 'pathbar'])
if 'calendar' in pluginmanager.failed:
ConfigManager.preferences['JournalPlugin'] = \
ConfigManager.preferences['CalendarPlugin']
pluginmanager.load_plugins_from_preferences(['journal'])
preferences['plugins_list_version'] = '0.70'
window = MainWindow(
notebook,
page=page,
**self.get_options('geometry', 'fullscreen')
)
window.present()
if not window.notebook.index.is_uptodate:
window._uiactions.check_and_update_index(update_only=True) # XXX
else:
# Start a lightweight background check of the index
# put a small delay to ensure window is shown before we start
def start_background_check():
notebook.index.start_background_check(notebook)
return False # only run once
GObject.timeout_add(500, start_background_check)
return window
class ManualCommand(GuiCommand):
'''Like L{GuiCommand} but always opens the manual'''
arguments = ('[PAGE]',)
options = tuple(t for t in GuiCommand.options if t[0] != 'list')
# exclude --list
def run(self):
from zim.config import data_dir
self.arguments = ('NOTEBOOK', '[PAGE]') # HACK
self.args.insert(0, data_dir('manual').path)
return GuiCommand.run(self)
class ServerCommand(NotebookCommand):
'''Class implementing the C{--server} command and running the web
server.
'''
arguments = ('NOTEBOOK',)
options = (
('port=', 'p', 'port number to use (defaults to 8080)'),
('template=', 't', 'name or path of the template to use'),
('standalone', '', 'start a single instance, no background process'),
('private', '', 'serve only to localhost')
)
def run(self):
import zim.www
self.opts['port'] = int(self.opts.get('port', 8080))
self.opts.setdefault('template', 'Default')
notebook, page = self.build_notebook()
is_public = not self.opts.get('private', False)
self.server = httpd = zim.www.make_server(notebook, public=is_public, **self.get_options('template', 'port'))
		# server attribute used in testing to stop server in thread
logger.info("Serving HTTP on %s port %i...", httpd.server_name, httpd.server_port)
httpd.serve_forever()
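# A minimal sketch of the same flow as ServerCommand.run() above, for direct
# programmatic use; it assumes an already opened `notebook` object and mirrors
# the defaults ('Default' template, port 8080) that run() falls back to.
def _example_make_server(notebook, port=8080):
	import zim.www
	httpd = zim.www.make_server(notebook, public=True, template='Default', port=port)
	httpd.serve_forever()  # blocks until the process is stopped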
class ServerGuiCommand(NotebookCommand, GtkCommand):
'''Like L{ServerCommand} but uses the graphical interface for the
server defined in L{zim.gui.server}.
'''
arguments = ('[NOTEBOOK]',)
options = (
('port=', 'p', 'port number to use (defaults to 8080)'),
('template=', 't', 'name or path of the template to use'),
('standalone', '', 'start a single instance, no background process'),
)
def run(self):
import zim.gui.server
self.opts['port'] = int(self.opts.get('port', 8080))
notebookinfo, page = self.get_notebook_argument()
if notebookinfo is None:
# Prefer default to be selected in drop down, user can still change
notebookinfo = self.get_default_or_only_notebook()
window = zim.gui.server.ServerWindow(
notebookinfo,
public=True,
**self.get_options('template', 'port')
)
window.show_all()
return window
class ExportCommand(NotebookCommand):
'''Class implementing the C{--export} command'''
arguments = ('NOTEBOOK', '[PAGE]')
options = (
('format=', '', 'format to use (defaults to \'html\')'),
('template=', '', 'name or path of the template to use'),
('output=', 'o', 'output folder, or output file name'),
('root-url=', '', 'url to use for the document root'),
('index-page=', '', 'index page name'),
('recursive', 'r', 'when exporting a page, also export sub-pages'),
('singlefile', 's', 'export all pages to a single output file'),
('overwrite', 'O', 'overwrite existing file(s)'),
)
def get_exporter(self, page):
from zim.fs import File, Dir
from zim.export import \
build_mhtml_file_exporter, \
build_single_file_exporter, \
build_page_exporter, \
build_notebook_exporter
format = self.opts.get('format', 'html')
if not 'output' in self.opts:
raise UsageError(_('Output location needed for export')) # T: error in export command
output = Dir(self.opts['output'])
if not output.isdir():
output = File(self.opts.get('output'))
template = self.opts.get('template', 'Default')
if output.exists() and not self.opts.get('overwrite'):
if output.isdir():
if len(output.list()) > 0:
raise Error(_('Output folder exists and not empty, specify "--overwrite" to force export')) # T: error message for export
else:
pass
else:
raise Error(_('Output file exists, specify "--overwrite" to force export')) # T: error message for export
if format == 'mhtml':
self.ignore_options('index-page')
if output.isdir():
raise UsageError(_('Need output file to export MHTML')) # T: error message for export
exporter = build_mhtml_file_exporter(
output, template,
document_root_url=self.opts.get('root-url'),
)
elif self.opts.get('singlefile'):
self.ignore_options('index-page')
if output.exists() and output.isdir():
ext = get_format(format).info['extension']
output = output.file(page.basename) + '.' + ext
exporter = build_single_file_exporter(
output, format, template, namespace=page,
document_root_url=self.opts.get('root-url'),
)
elif page:
self.ignore_options('index-page')
if output.exists() and output.isdir():
ext = get_format(format).info['extension']
output = output.file(page.basename) + '.' + ext
exporter = build_page_exporter(
output, format, template, page,
document_root_url=self.opts.get('root-url'),
)
else:
if not output.exists():
output = Dir(output.path)
elif not output.isdir():
raise UsageError(_('Need output folder to export full notebook')) # T: error message for export
exporter = build_notebook_exporter(
output, format, template,
index_page=self.opts.get('index-page'),
document_root_url=self.opts.get('root-url'),
)
return exporter
def run(self):
from zim.export.selections import AllPages, SinglePage, SubPages
notebook, page = self.build_notebook()
notebook.index.check_and_update()
if page and self.opts.get('recursive'):
selection = SubPages(notebook, page)
elif page:
selection = SinglePage(notebook, page)
else:
selection = AllPages(notebook)
exporter = self.get_exporter(page)
exporter.export(selection)
class SearchCommand(NotebookCommand):
'''Class implementing the C{--search} command'''
arguments = ('NOTEBOOK', 'QUERY')
def run(self):
from zim.search import SearchSelection, Query
notebook, p = self.build_notebook()
n, query = self.get_arguments()
if query and not query.isspace():
logger.info('Searching for: %s', query)
query = Query(query)
else:
raise ValueError('Empty query')
selection = SearchSelection(notebook)
selection.search(query)
for path in sorted(selection, key=lambda p: p.name):
print(path.name)
class IndexCommand(NotebookCommand):
'''Class implementing the C{--index} command'''
arguments = ('NOTEBOOK',)
options = (
('flush', 'f', 'flush the index first and force re-building'),
)
def run(self):
# Elevate logging level of indexer to ensure "zim --index -V" gives
		# some meaningful output
def elevate_index_logging(log_record):
if log_record.levelno == logging.DEBUG:
log_record.levelno = logging.INFO
log_record.levelname = 'INFO'
return True
mylogger = logging.getLogger('zim.notebook.index')
mylogger.setLevel(logging.DEBUG)
mylogger.addFilter(elevate_index_logging)
notebook, p = self.build_notebook(ensure_uptodate=False)
if self.opts.get('flush'):
notebook.index.flush()
notebook.index.update()
else:
# Effectively the same as check_and_update_index ui action
logger.info('Checking notebook index')
notebook.index.check_and_update()
logger.info('Index up to date!')
commands = {
'help': HelpCommand,
'version': VersionCommand,
'gui': GuiCommand,
'manual': ManualCommand,
'server': ServerCommand,
'servergui': ServerGuiCommand,
'export': ExportCommand,
'search': SearchCommand,
'index': IndexCommand,
}
def build_command(args, pwd=None):
'''Parse all commandline options
@returns: a L{Command} object
@raises UsageError: if args is not correct
'''
args = list(args)
if args and args[0] == '--plugin':
args.pop(0)
try:
cmd = args.pop(0)
except IndexError:
raise UsageError('Missing plugin name')
try:
mod = get_module('zim.plugins.' + cmd)
klass = lookup_subclass(mod, Command)
except:
if '-D' in args or '--debug' in args:
logger.exception('Error while loading: zim.plugins.%s.Command', cmd)
# Can't use following because log level not yet set:
# logger.debug('Error while loading: zim.plugins.%s.Command', cmd, exc_info=sys.exc_info())
raise UsageError('Could not load commandline command for plugin "%s"' % cmd)
else:
if args and args[0].startswith('--') and args[0][2:] in commands:
cmd = args.pop(0)[2:]
if cmd == 'server' and '--gui' in args:
args.remove('--gui')
cmd = 'servergui'
elif args and args[0] == '-v':
args.pop(0)
cmd = 'version'
elif args and args[0] == '-h':
args.pop(0)
cmd = 'help'
else:
cmd = 'gui' # default
klass = commands[cmd]
obj = klass(cmd, pwd=pwd)
obj.parse_options(*args)
return obj
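# A minimal sketch of how build_command() maps typical command lines onto the
# command classes registered above; 'some_notebook' is a placeholder argument.
def _example_build_command():
	gui_cmd = build_command(['some_notebook'])  # no explicit command -> 'gui'
	server_gui_cmd = build_command(['--server', '--gui', 'some_notebook'])  # rewritten to 'servergui'
	assert isinstance(gui_cmd, GuiCommand)
	assert isinstance(server_gui_cmd, ServerGuiCommand)
	return gui_cmd, server_gui_cmd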
class ZimApplication(object):
	'''This object is responsible for managing the life cycle of the
application process.
To do so, it decides whether to dispatch a command to an already
running zim process or to handle it in the current process.
For gtk based commands it keeps track of the toplevel objects
for re-use and to be able to end the process when no toplevel
objects are left.
'''
def __init__(self):
self._running = False
self._log_started = False
self._standalone = False
self._windows = set()
@property
def toplevels(self):
return iter(self._windows)
@property
def notebooks(self):
return frozenset(
w.notebook for w in self.toplevels
if hasattr(w, 'notebook')
)
def get_mainwindow(self, notebook, _class=None):
'''Returns an existing L{MainWindow} for C{notebook} or C{None}'''
from zim.gui.mainwindow import MainWindow
_class = _class or MainWindow # test seam
for w in self.toplevels:
if isinstance(w, _class) and w.notebook.uri == notebook.uri:
return w
else:
return None
def present(self, notebook, page=None):
'''Present notebook and page in a mainwindow, may not return for
standalone processes.
'''
uri = notebook if isinstance(notebook, str) else notebook.uri
pagename = page if isinstance(page, str) else page.name
self.run('--gui', uri, pagename)
def add_window(self, window):
if not window in self._windows:
logger.debug('Add window: %s', window.__class__.__name__)
assert hasattr(window, 'destroy')
window.connect('destroy', self._on_destroy_window)
self._windows.add(window)
def remove_window(self, window):
logger.debug('Remove window: %s', window.__class__.__name__)
try:
self._windows.remove(window)
except KeyError:
pass
def _on_destroy_window(self, window):
self.remove_window(window)
if not self._windows:
from gi.repository import Gtk
logger.debug('Last toplevel destroyed, quit')
if Gtk.main_level() > 0:
Gtk.main_quit()
def run(self, *args, **kwargs):
'''Run a commandline command, either in this process, an
existing process, or a new process.
@param args: commandline arguments
@param kwargs: optional arguments for L{build_command}
'''
PluginManager().load_plugins_from_preferences(
ConfigManager.preferences['General']['plugins']
)
cmd = build_command(args, **kwargs)
self._run_cmd(cmd, args) # test seam
def _run_cmd(self, cmd, args):
if not self._log_started:
self._log_start()
if self._running:
# This is not the first command that we process
if isinstance(cmd, GtkCommand):
if self._standalone or cmd.standalone_process:
self._spawn_standalone(args)
else:
w = cmd.run()
if w is not None:
self.add_window(w)
w.present()
else:
cmd.run()
else:
# Although a-typical, this path could be re-entrant if a
# run_local() dispatches another command - therefore we set
# standalone before calling run_local()
if isinstance(cmd, GtkCommand):
self._standalone = self._standalone or cmd.standalone_process
if cmd.run_local():
return
if not self._standalone and self._try_dispatch(args, cmd.pwd):
pass # We are done
else:
self._running = True
self._run_main_loop(cmd)
else:
cmd.run()
def _run_main_loop(self, cmd):
# Run for the 1st gtk command in a primary process,
# but can still be standalone process
from gi.repository import Gtk
from gi.repository import GObject
#######################################################################
# WARNING: commented out "GObject.threads_init()" because it leads to
# various segfaults on linux. See github issue #7
# However without this init, gobject does not properly release the
# python GIL during C calls, so threads may block while main loop is
# waiting. Thus threads become very slow and unpredictable unless we
# actively monitor them from the mainloop, causing python to run
		# frequently. So be very careful relying on threads.
		# Re-evaluate when we are above PyGObject 3.10.2 - threading should
		# work better there even without this statement. (But even then,
# no Gtk calls from threads, just "GObject.idle_add()". )
# Kept for windows, because we need thread to run ipc listener, and no
# crashes observed there.
if os.name == 'nt':
GObject.threads_init()
#######################################################################
from zim.gui.widgets import gtk_window_set_default_icon
gtk_window_set_default_icon()
zim.errors.set_use_gtk(True)
self._setup_signal_handling()
if self._standalone:
logger.debug('Starting standalone process')
else:
logger.debug('Starting primary process')
self._daemonize()
if not _ipc_start_listening(self._handle_incoming):
logger.warn('Failure to setup socket, falling back to "--standalone" mode')
self._standalone = True
w = cmd.run()
if w is not None:
self.add_window(w)
while self._windows:
Gtk.main()
for toplevel in list(self._windows):
try:
toplevel.destroy()
except:
logger.exception('Exception while destroying window')
self.remove_window(toplevel) # force removal
# start main again if toplevels remaining ..
		# exit immediately if no toplevel created
def _log_start(self):
self._log_started = True
logger.info('This is zim %s', __version__)
level = logger.getEffectiveLevel()
if level == logging.DEBUG:
import sys
import os
import zim.config
logger.debug('Python version is %s', str(sys.version_info))
logger.debug('Platform is %s', os.name)
zim.config.log_basedirs()
def _setup_signal_handling(self):
def handle_sigterm(signal, frame):
from gi.repository import Gtk
logger.info('Got SIGTERM, quit')
if Gtk.main_level() > 0:
Gtk.main_quit()
signal.signal(signal.SIGTERM, handle_sigterm)
def _spawn_standalone(self, args):
from zim import ZIM_EXECUTABLE
from zim.applications import Application
args = list(args)
if not '--standalone' in args:
args.append('--standalone')
# more detailed logging has lower number, so WARN > INFO > DEBUG
loglevel = logging.getLogger().getEffectiveLevel()
if loglevel <= logging.DEBUG:
args.append('-D',)
elif loglevel <= logging.INFO:
args.append('-V',)
Application([ZIM_EXECUTABLE] + args).spawn()
def _try_dispatch(self, args, pwd):
try:
_ipc_dispatch(pwd, *args)
except AssertionError as err:
logger.debug('Got error in dispatch: %s', str(err))
return False
except Exception:
logger.exception('Got error in dispatch')
return False
else:
logger.debug('Dispatched command %r', args)
return True
def _handle_incoming(self, pwd, *args):
self.run(*args, pwd=pwd)
def _daemonize(self):
# Decouple from parent environment
# and redirect standard file descriptors
os.chdir(zim.fs.Dir('~').path)
# Using HOME because this folder will not disappear normally
# and because it is a sane starting point for file choosers etc.
try:
			si = open(os.devnull, 'r')
os.dup2(si.fileno(), sys.stdin.fileno())
except:
pass
loglevel = logging.getLogger().getEffectiveLevel()
if loglevel <= logging.INFO and sys.stdout.isatty() and sys.stderr.isatty():
# more detailed logging has lower number, so WARN > INFO > DEBUG
# log to file unless output is a terminal and logging <= INFO
pass
else:
# Redirect output to file
dir = zim.fs.get_tmpdir()
zim.debug_log_file = os.path.join(dir.path, "zim.log")
err_stream = open(zim.debug_log_file, "w")
# Try to flush standards out and error, if there
for pipe in (sys.stdout, sys.stderr):
if pipe is not None:
try:
pipe.flush()
except OSError:
pass
# First try to dup handles for anyone who still has a reference
# if that fails, just set them (maybe not real files in the first place)
try:
os.dup2(err_stream.fileno(), sys.stdout.fileno())
os.dup2(err_stream.fileno(), sys.stderr.fileno())
except:
sys.stdout = err_stream
sys.stderr = err_stream
# Re-initialize logging handler, in case it keeps reference
# to the old stderr object
rootlogger = logging.getLogger()
try:
for handler in rootlogger.handlers:
rootlogger.removeHandler(handler)
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter('%(levelname)s: %(message)s'))
rootlogger.addHandler(handler)
except:
pass
if rootlogger.getEffectiveLevel() != logging.DEBUG:
rootlogger.setLevel(logging.DEBUG) # else file remains empty
self._log_start()
ZIM_APPLICATION = ZimApplication() # Singleton per process
def main(*argv):
'''Run full zim application
@returns: exit code (if error handled, else just raises)
'''
import zim.config
# Check if we can find our own data files
_file = zim.config.data_file('zim.png')
if not (_file and _file.exists()): #pragma: no cover
raise AssertionError(
'ERROR: Could not find data files in path: \n'
'%s\n'
'Try setting XDG_DATA_DIRS'
% list(map(str, zim.config.data_dirs()))
)
try:
ZIM_APPLICATION.run(*argv[1:])
except KeyboardInterrupt:
# Don't show error dialog for this error..
logger.error('KeyboardInterrupt')
return 1
except Exception:
zim.errors.exception_handler('Exception in main()')
return 1
else:
return 0
| jaap-karssenberg/zim-desktop-wiki | zim/main/__init__.py | Python | gpl-2.0 | 28,210 | 0.026728 |
__author__ = 'jhala'
import Helpers
import json
import os
''' creates a location lookup, and an associated image lookup '''
''' main '''
if __name__ == "__main__":
fil = r"c:\capstone\featureInfo.csv"
outLoc = r"c:\capstone\locationLookup.json"
imageBaseDir="C:\\Users\\jhala\\angular-seed\\app\\images\\"
fileArr = Helpers.fileInfo(fil)
headerArr = Helpers.getHeader(fileArr)
locationsDict = {}
locations=[]
locationsFinal=[]
locationId = 0
rowId = 0
existingLoc={}
for row in fileArr[1:]:
colCounter = 0
thisImageCount=0
imagesForThisLocation=[]
imagesForThisLocationTmp=[]
for col in row:
if headerArr[colCounter] == 'imgName':
imgName = col.replace("E:/GeoImages/", "")
locationArr = imgName.split("/")
locationName = locationArr[0]
if not os.path.exists(imageBaseDir + locationName ):
break
if len(locationArr[0:len(locationArr) - 1]) > 1:
print "Nested loc alert"
print locationArr[0]
try:
locIndex=locations.index(locationName)
imagesForThisLocationTmp = locationsFinal[locIndex]['images']
imagesForThisLocationTmp.append( { 'name' : imgName})
locationsFinal[locIndex] = { 'name' : locationsFinal[locIndex]['name'] , 'id' : locationsFinal[locIndex]['id'] , 'numImages' : locationsFinal[locIndex]['numImages']+1 , 'images' : imagesForThisLocationTmp }
except ValueError:
locationId += 1
locations.append(locationName)
thisImageCount += 1
imagesForThisLocation = { 'name': imgName}
locationsFinal.append({ 'name' : locationName , 'id' : locationId, 'numImages' : thisImageCount, 'images': [ imagesForThisLocation ]})
break
colCounter += 1
rowId += 1
ol = open(outLoc,'w')
json.dump(locationsFinal,ol,indent=4, separators=(',', ': '))
ol.close()
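# A sketch of the record shape this script writes to locationLookup.json; the
# location and image names below are made-up examples.
EXAMPLE_LOCATION_RECORD = {
    'name': 'SomePlace',
    'id': 1,
    'numImages': 2,
    'images': [{'name': 'SomePlace/img_001.jpg'}, {'name': 'SomePlace/img_002.jpg'}],
}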
| dbk138/ImageRegionRecognition-FrontEnd | PythonScripts/LocationLookup.py | Python | mit | 2,163 | 0.017568 |
# pylint: disable=missing-docstring
import json
from urlparse import urlparse
from django.core.urlresolvers import reverse
from django.http import QueryDict
from django.test import TestCase
import jwt
import provider.scope
from oauth2_provider.models import TrustedClient
from oauth2_provider.tests.util import normpath
from oauth2_provider.tests.factories import (
UserFactory,
ClientFactory,
AccessTokenFactory,
TrustedClientFactory
)
class BaseTestCase(TestCase):
def setUp(self):
self.client_secret = 'some_secret'
self.auth_client = ClientFactory(client_secret=self.client_secret)
self.password = 'some_password'
self.user_factory = UserFactory
self.user = None
self.access_token = None
self.set_user(self.make_user())
def make_user(self):
return self.user_factory(password=self.password)
def set_user(self, user):
self.user = user
def set_trusted(self, client, trusted=True):
if trusted:
TrustedClientFactory.create(client=client)
else:
TrustedClient.objects.filter(client=client).delete()
class OAuth2TestCase(BaseTestCase):
def setUp(self):
super(OAuth2TestCase, self).setUp()
def login_and_authorize(self, scope=None, claims=None, trusted=False):
""" Login into client using OAuth2 authorization flow. """
self.set_trusted(self.auth_client, trusted)
self.client.login(username=self.user.username, password=self.password)
payload = {
'client_id': self.auth_client.client_id,
'redirect_uri': self.auth_client.redirect_uri,
'response_type': 'code',
'state': 'some_state',
}
_add_values(payload, 'id_token', scope, claims)
response = self.client.get(reverse('oauth2:capture'), payload)
self.assertEqual(302, response.status_code)
response = self.client.get(reverse('oauth2:authorize'), payload)
return response
def get_access_token_response(self, scope=None, claims=None):
""" Get a new access token using the OAuth2 authorization flow. """
response = self.login_and_authorize(scope, claims, trusted=True)
self.assertEqual(302, response.status_code)
self.assertEqual(reverse('oauth2:redirect'), normpath(response['Location']))
response = self.client.get(reverse('oauth2:redirect'))
self.assertEqual(302, response.status_code)
query = QueryDict(urlparse(response['Location']).query)
payload = {
'grant_type': 'authorization_code',
'client_id': self.auth_client.client_id,
'client_secret': self.client_secret,
'code': query['code'],
}
_add_values(payload, 'id_token', scope, claims)
response = self.client.post(reverse('oauth2:access_token'), payload)
return response
class IDTokenTestCase(OAuth2TestCase):
def get_id_token_values(self, scope=None, claims=None):
""" Get a new id_token using the OIDC authorization flow. """
self.assertIn('openid', scope.split())
response = self.get_access_token_response(scope, claims)
self.assertEqual(response.status_code, 200)
values = json.loads(response.content)
self.assertIn('access_token', values)
id_token = values['id_token']
secret = self.auth_client.client_secret
audience = self.auth_client.client_id
self.assertValidIDToken(id_token, secret, audience)
scopes = values['scope'].split()
claims = self.parse_id_token(id_token)
# Should always be included
self.assertIn('iss', claims)
self.assertIn('sub', claims)
return scopes, claims
def parse_id_token(self, id_token):
claims = jwt.decode(id_token, verify=False)
return claims
def assertValidIDToken(self, id_token, secret, audience):
try:
jwt.decode(id_token, secret, audience=audience)
except jwt.DecodeError:
assert False
class UserInfoTestCase(BaseTestCase):
def setUp(self):
super(UserInfoTestCase, self).setUp()
self.path = reverse('oauth2:user_info')
self.set_user(self.user)
def set_user(self, user):
super(UserInfoTestCase, self).set_user(user)
self.access_token = AccessTokenFactory(user=self.user, client=self.auth_client)
def set_access_token_scope(self, scope):
self.access_token.scope = provider.scope.to_int(*scope.split())
self.access_token.save() # pylint: disable=no-member
def get_with_authorization(self, path, access_token=None, payload=None):
kwargs = {}
if access_token:
kwargs['HTTP_AUTHORIZATION'] = 'Bearer %s' % access_token
return self.client.get(path, payload, **kwargs)
def get_userinfo(self, token=None, scope=None, claims=None):
payload = _add_values({}, 'userinfo', scope, claims)
response = self.get_with_authorization(self.path, token, payload)
values = json.loads(response.content)
return response, values
def _add_values(data, endpoint, scope=None, claims=None):
if scope:
data['scope'] = scope
if claims:
data['claims'] = json.dumps({endpoint: claims})
return data
| GbalsaC/bitnamiP | venv/src/oauth2-provider/oauth2_provider/tests/base.py | Python | agpl-3.0 | 5,353 | 0.000374 |
import logging, couchdb, oauth2, json, sys
from decorator import decorator
from pylons import config, request as r, response as res, session
from pylons.controllers.util import abort
from functools import wraps
log = logging.getLogger(__name__)
appConfig = config['app_conf']
class Error(RuntimeError):
"""Generic exception class."""
def __init__(self, message='OAuth error occurred.'):
self._message = message
@property
def message(self):
"""A hack to get around the deprecation errors in 2.6."""
return self._message
def __str__(self):
return self._message
class BadOAuthSignature(Error):
pass
class OAuthJSONEncoder(json.JSONEncoder):
def default(self, o):
if isinstance(o, (oauth2.Consumer, oauth2.Token)):
return { "key": o.key, "secret": o.secret }
elif isinstance(o, Exception):
return {
"type": type(o).__name__,
"message": o.message }
try:
return json.JSONEncoder.default(self, o)
except Exception as e:
log.exception("Encoded Type: {0}\nrepr: {1}".format(type(o), repr(o)))
raise e
class CouchDBOAuthUtil():
def __init__(self, couchdb_dba_url=appConfig['couchdb.url.dbadmin'], users_db=appConfig['couchdb.db.users'], oauth_view=appConfig['couchdb.db.users.oauthview']):
self.server = couchdb.Server(couchdb_dba_url)
self.users = self.server[users_db]
self.oauth_view = oauth_view
def find_possible(self, consumer, token, mapper=None):
def wrap_row(row):
# log.error("wrap_row: "+json.dumps(row))
row_result = {}
if "doc" in row:
row_result["name"] = row["doc"]["name"]
row_result["consumer"] = oauth2.Consumer(key=consumer, secret=row["doc"]["oauth"]["consumer_keys"][consumer])
row_result["token"] = oauth2.Token(key=token, secret=row["doc"]["oauth"]["tokens"][token])
row_result["id"] = row["doc"]["_id"]
row_result["roles"] = row["doc"]["roles"]
if mapper:
mapper(row_result, row)
return row_result
view_opts = {
"key":[consumer, token],
"include_docs":True
}
view_results = self.users.view(self.oauth_view, wrapper=wrap_row, **view_opts)
return view_results.rows
def check_request(self, request, mapper=None):
http_method = request.method
http_url = request.host_url + request.path_info
headers = request.headers
query_string = request.query_string
info = None
parameters = None
# log.error("*** CHECK_REQUEST *** "+json.dumps({
# "query_string": query_string,
# "headers": {}.update(headers),
# "http_method": http_method,
# "http_url": http_url
# }))
oa_request = oauth2.Request.from_request(http_method, http_url, headers, query_string=query_string)
if oa_request and all([ x in oa_request for x in ['oauth_consumer_key', 'oauth_token']]):
server = oauth2.Server()
server.add_signature_method(oauth2.SignatureMethod_HMAC_SHA1())
last_exc = None
for row in self.find_possible(oa_request['oauth_consumer_key'], oa_request['oauth_token'], mapper):
try:
parameters = server.verify_request(oa_request, row["consumer"], row["token"])
except oauth2.Error as e:
last_exc = BadOAuthSignature("OAuth2 Error: %s" % e.message)
except:
import sys
log.exception("Caught Exception in CouchDBOAuthUtil")
last_exc = BadOAuthSignature(sys.exc_info()[1])
if parameters != None:
info = row
break
if parameters == None and last_exc != None:
raise last_exc
return (parameters, info)
_authobj = CouchDBOAuthUtil()
DEFAULT_SESSION_KEY = "oauth"
class status(object):
Okay = "Okay"
NoSignature = "No Signature"
BadSignature = "Bad Signature"
Error = "Error"
Unknown = "Unknown"
def authorize(session_key=DEFAULT_SESSION_KEY, service_doc=None, roles=None, mapper=None, realm=None, pre_cond=None, post_cond=None):
_roles = roles
_mapper = mapper
_session_key=session_key
_realm = realm or ""
_pre_cond = pre_cond
_post_cond = post_cond
_service_doc = service_doc
def wrapper(fn, self, *args, **kwargs):
if _service_doc:
sdoc = _service_doc()
try:
if "oauth" not in sdoc["service_auth"]["service_authz"]:
return fn(self, *args, **kwargs)
except:
raise ValueError("Missing service_document for checking if OAUTH access is enabled.")
if _pre_cond:
precond = cont = _pre_cond()
else:
precond = cont = True
if precond:
success = { "status": status.Unknown, "user": None, "parameters": None }
try:
success["parameters"], success["user"] = _authobj.check_request(r._current_obj(), _mapper)
if success["parameters"] is None:
success["status"] = status.NoSignature
else:
success["status"] = status.Okay
except BadOAuthSignature as e:
success["status"] = status.BadSignature
success["detail"] = e.message
cont = False
except:
success["status"] = status.Error
success["detail"] = repr(sys.exc_info())
log.exception("Caught Exception in authorize")
cont = False
sess = session._current_obj()
sess[_session_key] = success
# log.error("in wrap:"+repr(sess[_session_key]))
if cont and _roles:
cont = UserHasRoles(_session_key, _roles)
if _post_cond:
cont = _post_cond(cont)
if cont:
try:
return fn(self, *args, **kwargs)
finally:
pass
else:
h = {"WWW-Authenticate": "OAuth realm=\"{0}\"".format(_realm)}
log.error("Authorization Required")
res.headers.update(h)
abort(401, "OAuth Authorization Required", headers=h)
return decorator(wrapper)
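# A minimal usage sketch for the authorize() decorator on a Pylons controller
# action; the role and realm values below are hypothetical. After a successful
# check the decorator stores its result under session[DEFAULT_SESSION_KEY],
# where the action can inspect it.
@authorize(roles=["publisher"], realm="Learning Registry")
def _example_protected_action(self):
    return session[DEFAULT_SESSION_KEY]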
def UserHasRoles(session_key, roles=[] ):
hasRoles = False
try:
s = session._current_obj()
hasRoles = all([role in s[session_key]["user"]["roles"] for role in roles])
except:
pass
return hasRoles
| jimklo/LearningRegistry | LR/lr/lib/oauth.py | Python | apache-2.0 | 6,907 | 0.007963 |
# encoding: utf-8
# module PyKDE4.kio
# from /usr/lib/python3/dist-packages/PyKDE4/kio.cpython-34m-x86_64-linux-gnu.so
# by generator 1.135
# no doc
# imports
import PyKDE4.kdeui as __PyKDE4_kdeui
import PyQt4.QtCore as __PyQt4_QtCore
import PyQt4.QtGui as __PyQt4_QtGui
class KIconDialog(__PyKDE4_kdeui.KDialog):
# no doc
def getIcon(self, *args, **kwargs): # real signature unknown
pass
def iconSize(self, *args, **kwargs): # real signature unknown
pass
def newIconName(self, *args, **kwargs): # real signature unknown
pass
def openDialog(self, *args, **kwargs): # real signature unknown
pass
def setCustomLocation(self, *args, **kwargs): # real signature unknown
pass
def setIconSize(self, *args, **kwargs): # real signature unknown
pass
def setStrictIconSize(self, *args, **kwargs): # real signature unknown
pass
def setup(self, *args, **kwargs): # real signature unknown
pass
def showDialog(self, *args, **kwargs): # real signature unknown
pass
def slotOk(self, *args, **kwargs): # real signature unknown
pass
def strictIconSize(self, *args, **kwargs): # real signature unknown
pass
def __init__(self, *args, **kwargs): # real signature unknown
pass
| ProfessorX/Config | .PyCharm30/system/python_stubs/-1247971765/PyKDE4/kio/KIconDialog.py | Python | gpl-2.0 | 1,319 | 0.010614 |
#
# ICRAR - International Centre for Radio Astronomy Research
# (c) UWA - The University of Western Australia, 2020
# Copyright by UWA (in the framework of the ICRAR)
# All rights reserved
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307 USA
#
from ..common import tool
def include_dir(_parser, _args):
from . import get_include_dir
print(get_include_dir())
def register_commands():
tool.cmdwrap('nm', 'Starts a Node Manager', 'dlg.manager.cmdline:dlgNM')
tool.cmdwrap('dim', 'Starts a Drop Island Manager', 'dlg.manager.cmdline:dlgDIM')
tool.cmdwrap('mm', 'Starts a Master Manager', 'dlg.manager.cmdline:dlgMM')
tool.cmdwrap('replay', 'Starts a Replay Manager', 'dlg.manager.cmdline:dlgReplay')
tool.cmdwrap('daemon', 'Starts a DALiuGE Daemon process', 'dlg.manager.proc_daemon:run_with_cmdline')
tool.cmdwrap('proxy', 'A reverse proxy to be used in restricted environments to contact the Drop Managers', 'dlg.deploy.pawsey.dfms_proxy:run')
tool.cmdwrap('monitor', 'A proxy to be used in conjunction with the dlg proxy in restricted environments', 'dlg.deploy.pawsey.dfms_monitor:run')
tool.cmdwrap('include_dir', 'Print the directory where C header files can be found', include_dir) | steve-ord/daliuge | daliuge-engine/dlg/runtime/tool_commands.py | Python | lgpl-2.1 | 1,948 | 0.00462 |
from typing import Any
from typing import Dict
from sqlalchemy.orm import declarative_base
from sqlalchemy.orm import declared_attr
Base = declarative_base()
class Foo(Base):
@declared_attr
def __tablename__(cls) -> str:
return "name"
@declared_attr
def __mapper_args__(cls) -> Dict[Any, Any]:
return {}
@declared_attr
def __table_args__(cls) -> Dict[Any, Any]:
return {}
| sqlalchemy/sqlalchemy | test/ext/mypy/plugin_files/issue_7321.py | Python | mit | 427 | 0 |
"""Schema migrations."""
| yongwen/makahiki | makahiki/apps/widgets/smartgrid_play_tester/migrations/__init__.py | Python | mit | 25 | 0 |
from django.apps import AppConfig
from django.utils.translation import ugettext as __, ugettext_lazy as _
class TaskerConfig(AppConfig):
name = 'django_tasker'
verbose_name = _('tasker')
| wooyek/django-tasker | django_tasker/apps.py | Python | mit | 197 | 0 |
import numpy as np
from sklearn import linear_model
import matplotlib.pyplot as plt
from scipy.special import zeta
from .distribution import frequency_distribution, powerlaw_series, random_series
from .utils import unique
from math import pow, e, log, sqrt
import sys
import random
def least_square_regression(x, y, xlabel = "x", ylabel = "y", prefix="", suffix=""):
"""
Perform least square regression to find the best fit line and returns the slope of the line.
**Parameters**
x : List of values along x axis.
y : List of values along y axis.
"""
X = np.asarray(x).reshape((len(x), 1))
Y = np.asarray(y).reshape((len(y), 1))
regr = linear_model.LinearRegression()
regr.fit(X, Y)
label_string = "Best fit line, y = "+str(regr.coef_[0][0])+" * x + "+str(regr.intercept_[0])
print(label_string)
print("Residual sum of squares: %.2f" % np.mean((regr.predict(X) - Y) ** 2))
print("Variance score: %.2f" % regr.score(X, Y))
# Plot outputs
original_data, = plt.plot(X, Y,'go', label="original data")
# best_fit_line, = plt.plot(X, map(lambda x: pow(e, -x), X), 'bo', label=label_string)
best_fit_line, = plt.plot(X, regr.predict(X), color='blue', linewidth=3, label=label_string)
plt.title("Least Square Regression"+suffix)
plt.ylabel(ylabel)
plt.xlabel(xlabel)
curves = [original_data, best_fit_line]
labels = [curve.get_label() for curve in curves]
plt.legend(curves, labels)
plt.savefig(prefix+"least_square_regression_fit"+suffix+".png")
plt.show()
return regr.coef_[0][0]
def estimate_scaling_parameter(series, xmin = 1, discrete = False):
"""
    Perform Method of Maximum Likelihood (MLE) to find the best fit value of Alpha.
**Parameters**
series : series of data to be fit.
xmin : Float/Integer, xmin for the distribution - assumed to be known before-hand. Default value is 1.0
    discrete : Boolean, whether to treat series as discrete or continuous. Default value is False.
**Returns**
Estimated Alpha value.
"""
normalizing_constant = 0.0
if(discrete):
normalizing_constant = 0.5
partial_sum = 0.0
count = 0.0
# print series
for x in series:
partial_sum += log (x/(xmin - normalizing_constant))
count+=1
Alpha = 1.0 + count*(1/partial_sum)
return Alpha
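# The loop above is the maximum-likelihood estimator of Clauset, Shalizi and
# Newman (2009): alpha_hat = 1 + n / sum_i ln(x_i / xmin) for continuous data,
# with xmin replaced by (xmin - 0.5) as the discrete approximation. A small
# usage sketch with made-up data:
def _example_estimate_scaling_parameter():
    data = [1.0, 1.2, 1.5, 2.0, 3.0, 5.0, 9.0]  # assumed to already satisfy x >= xmin
    return estimate_scaling_parameter(data, xmin=1)  # a float alpha > 1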
def estimate_parameters(series, min_size_series = 50, discrete = False):
"""
Apply Clauset et al.'s method to find the best fit value of xmin and Alpha.
**Parameters**
series : series of data to be fit.
min_size_series : Minimum possible size of the distribution to which power-law fit will be attempted. Fitting power-law to a very small series would give biased results where power-law may appear to be a good fit even when data is not drawn from power-law distribution. The default value is taken to be 50 as suggested in the paper.
    discrete : Boolean, whether to treat series as discrete or continuous. Default value is False.
**Returns**
Tuple of (Estimated xmin, Estimated Alpha value, minimum KS statistics score).
"""
sorted_series = sorted(series)
xmin_candidates = []
x_prev = sorted_series[0]
xmin_candidates.append(x_prev)
for x in sorted_series:
if(x>x_prev):
x_prev = x
xmin_candidates.append(x_prev)
    ks_statistics_min = sys.maxsize
xmin_result = 0
Alpha_result = 2
for xmin in xmin_candidates[:-1*(min_size_series-1)]:
data = [x for x in sorted_series if x>=xmin]
estimated_Alpha = estimate_scaling_parameter(data, xmin)
if(discrete):
Px = [zeta(estimated_Alpha, x)/zeta(estimated_Alpha, xmin) for x in unique(data)]
else:
Px = [pow(float(x)/xmin, 1 - estimated_Alpha ) for x in unique(data)]
n = len(Px)
Sx = [i[1]/n for i in frequency_distribution(data, pdf=False)]
ks_statistics = max( [abs(Sx[counter] - Px[counter]) for counter in range(0, n)] )
if(ks_statistics<ks_statistics_min):
ks_statistics_min = ks_statistics
xmin_result = xmin
Alpha_result = estimated_Alpha
return (xmin_result, Alpha_result, ks_statistics_min)
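# For every candidate xmin the function above computes the Kolmogorov-Smirnov
# distance D = max |Sx - Px| between the empirical distribution (Sx) and the
# fitted model (Px), and keeps the candidate that minimises D, following
# Clauset et al.'s method. A usage sketch with synthetic data (parameter values
# are arbitrary):
def _example_estimate_parameters():
    data = [x for x in powerlaw_series(n=200, xmin=5, Alpha=2.5)]
    return estimate_parameters(data, min_size_series=50)  # (xmin_hat, alpha_hat, min KS distance)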
def generate_dataset(series, xmin, alpha, epsilon = 0.01):
"""
Generator to generate datasets for goodness_of_fit test.
**Parameters**
series : series of data on which the power-law model was fitted.
xmin : xmin for the fitted power-law model.
alpha : alpha for the fitted power-law model.
epsilon : desired accuracy in p-value. Default is set to 0.01
**Returns**
A generator to generate list of numbers (datasets).
"""
number_of_datasets = int(round(0.25/(epsilon**2)) +1)
print(number_of_datasets)
n = len(series)
non_powerlaw_series = [x for x in series if x<xmin]
ntail = n - len(non_powerlaw_series)
p = float(ntail)/n
# print p
# print ntail
# print n
for i in range(0, number_of_datasets):
dataset = []
count_powerlaw_series = 0
# how many numbers are to be picked from powerlaw distribution
for random_number in random_series(n):
if(random_number<=p):
count_powerlaw_series+=1
# generate number from power-law distribution
else:
# pick number from non_powerlaw_series
dataset.append(random.choice(non_powerlaw_series))
dataset = dataset + [i for i in powerlaw_series(Alpha = alpha, xmin = xmin, n = count_powerlaw_series)]
yield dataset
def goodness_of_fit(series, xmin, alpha, ks_statistics, epsilon = 0.01, min_size_series = 50):
"""
Function to calculate the p-value as a measure of goodness_of_fit for the fitted model.
**Parameters**
series : series of data on which the power-law model was fitted.
xmin : xmin for the fitted power-law model.
alpha : alpha for the fitted power-law model.
ks_statistics : KS statistics for the fitted power-law model.
epsilon : desired accuracy in p-value. Default is set to 0.01.
min_size_series : Minimum possible size of the distribution to which power-law fit will be attempted. This value is used when fitting power-law to the generated datasets. The default value is taken to be 50. For further details, see `estimate_parameters()`.
**Returns**
p-value for the fitted model.
"""
count_dataset = 0.0
# number of synthetic datasets tested
n1 = 0.0
# number of synthetic datasets where ks value is greater than ks value for given data
for dataset in generate_dataset(series=series, xmin=xmin, alpha=alpha, epsilon=epsilon):
count_dataset+=1.0
(xmin_dataset, alpha_dataset, ks_statistics_dataset) = estimate_parameters(series=dataset, min_size_series = min_size_series)
if(ks_statistics_dataset>ks_statistics):
n1+=1.0
return n1/count_dataset
if __name__ == "__main__":
n = 10
data = [i for i in powerlaw_series(n=n, xmin = 20, Alpha = 2.6)]
# print data
(xmin, alpha, ks_statistics) = estimate_parameters(series=data, min_size_series = 5)
print("xmin = "+str(xmin))
print("alpha = "+str(alpha))
print(goodness_of_fit(series=data, xmin=xmin, alpha=alpha, ks_statistics=ks_statistics, epsilon = 0.01, min_size_series = 50))
| shagunsodhani/powerlaw | powerlaw/regression.py | Python | mit | 7,543 | 0.012727 |
#!/usr/bin/env python
from setuptools import setup, find_packages
REPO_NAME = 'chickenzord/dotenvy'
VERSION = '0.2.0'
ARCHIVE_URL = 'https://github.com/%s/archive/v%s.tar.gz' % (REPO_NAME, VERSION)
setup(
# packaging
packages=find_packages('src'),
package_dir={'': 'src'},
package_data={},
install_requires=[
'future',
],
setup_requires=[
'pytest-runner',
'flake8',
],
tests_require=[
'pytest',
'pytest-cov',
'pytest-travis-fold',
'mock',
'backports.tempfile',
],
entry_points={
"console_scripts": ['dotenvy = dotenvy.cli:main']
},
zip_safe=False,
# metadata
name='dotenvy',
version=VERSION,
author='Akhyar Amarullah',
author_email='akhyrul@gmail.com',
description='Dotenv handler for Python',
long_description=open('README.rst').read(),
download_url=ARCHIVE_URL,
license='MIT',
keywords=['dotenv', 'configuration', 'environment'],
url='https://github.com/%s' % (REPO_NAME),
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Intended Audience :: Developers',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Software Development :: Libraries',
'Topic :: Utilities',
],
)
| chickenzord/dotenvy | setup.py | Python | mit | 1,627 | 0 |
# coding: utf8
"""
webencodings.tests
~~~~~~~~~~~~~~~~~~
A basic test suite for Encoding.
:copyright: Copyright 2012 by Simon Sapin
:license: BSD, see LICENSE for details.
"""
from __future__ import unicode_literals
from . import (lookup, LABELS, decode, encode, iter_decode, iter_encode,
IncrementalDecoder, IncrementalEncoder, UTF8)
def assert_raises(exception, function, *args, **kwargs):
try:
function(*args, **kwargs)
except exception:
return
else: # pragma: no cover
raise AssertionError('Did not raise %s.' % exception)
def test_labels():
assert lookup('utf-8').name == 'utf-8'
assert lookup('Utf-8').name == 'utf-8'
assert lookup('UTF-8').name == 'utf-8'
assert lookup('utf8').name == 'utf-8'
assert lookup('utf8').name == 'utf-8'
assert lookup('utf8 ').name == 'utf-8'
assert lookup(' \r\nutf8\t').name == 'utf-8'
assert lookup('u8') is None # Python label.
assert lookup('utf-8 ') is None # Non-ASCII white space.
assert lookup('US-ASCII').name == 'windows-1252'
assert lookup('iso-8859-1').name == 'windows-1252'
assert lookup('latin1').name == 'windows-1252'
assert lookup('LATIN1').name == 'windows-1252'
assert lookup('latin-1') is None
assert lookup('LATİN1') is None # ASCII-only case insensitivity.
def test_all_labels():
for label in LABELS:
assert decode(b'', label) == ''
assert encode('', label) == b''
for repeat in [0, 1, 12]:
output, _ = iter_decode([b''] * repeat, label)
assert list(output) == []
assert list(iter_encode([''] * repeat, label)) == []
decoder = IncrementalDecoder(label)
assert decoder.decode(b'') == ''
assert decoder.decode(b'', final=True) == ''
encoder = IncrementalEncoder(label)
assert encoder.encode('') == b''
assert encoder.encode('', final=True) == b''
# All encoding names are valid labels too:
for name in set(LABELS.values()):
assert lookup(name).name == name
def test_invalid_label():
assert_raises(LookupError, decode, b'\xEF\xBB\xBF\xc3\xa9', 'invalid')
assert_raises(LookupError, encode, 'é', 'invalid')
assert_raises(LookupError, iter_decode, [], 'invalid')
assert_raises(LookupError, iter_encode, [], 'invalid')
assert_raises(LookupError, IncrementalDecoder, 'invalid')
assert_raises(LookupError, IncrementalEncoder, 'invalid')
def test_decode():
assert decode(b'\x80', 'latin1') == '€'
assert decode(b'\x80', lookup('latin1')) == '€'
assert decode(b'\xc3\xa9', 'utf8') == 'é'
assert decode(b'\xc3\xa9', UTF8) == 'é'
assert decode(b'\xc3\xa9', 'ascii') == 'é'
assert decode(b'\xEF\xBB\xBF\xc3\xa9', 'ascii') == 'é' # UTF-8 with BOM
assert decode(b'\xFE\xFF\x00\xe9', 'ascii') == 'é' # UTF-16-BE with BOM
assert decode(b'\xFF\xFE\xe9\x00', 'ascii') == 'é' # UTF-16-LE with BOM
assert decode(b'\xFE\xFF\xe9\x00', 'ascii') == '\ue900'
assert decode(b'\xFF\xFE\x00\xe9', 'ascii') == '\ue900'
assert decode(b'\x00\xe9', 'UTF-16BE') == 'é'
assert decode(b'\xe9\x00', 'UTF-16LE') == 'é'
assert decode(b'\xe9\x00', 'UTF-16') == 'é'
assert decode(b'\xe9\x00', 'UTF-16BE') == '\ue900'
assert decode(b'\x00\xe9', 'UTF-16LE') == '\ue900'
assert decode(b'\x00\xe9', 'UTF-16') == '\ue900'
def test_encode():
assert encode('é', 'latin1') == b'\xe9'
assert encode('é', 'utf8') == b'\xc3\xa9'
assert encode('é', 'utf8') == b'\xc3\xa9'
assert encode('é', 'utf-16') == b'\xe9\x00'
assert encode('é', 'utf-16le') == b'\xe9\x00'
assert encode('é', 'utf-16be') == b'\x00\xe9'
def test_iter_decode():
def iter_decode_to_string(input, fallback_encoding):
output, _encoding = iter_decode(input, fallback_encoding)
return ''.join(output)
assert iter_decode_to_string([], 'latin1') == ''
assert iter_decode_to_string([b''], 'latin1') == ''
assert iter_decode_to_string([b'\xe9'], 'latin1') == 'é'
assert iter_decode_to_string([b'hello'], 'latin1') == 'hello'
assert iter_decode_to_string([b'he', b'llo'], 'latin1') == 'hello'
assert iter_decode_to_string([b'hell', b'o'], 'latin1') == 'hello'
assert iter_decode_to_string([b'\xc3\xa9'], 'latin1') == 'é'
assert iter_decode_to_string([b'\xEF\xBB\xBF\xc3\xa9'], 'latin1') == 'é'
assert iter_decode_to_string([
b'\xEF\xBB\xBF', b'\xc3', b'\xa9'], 'latin1') == 'é'
assert iter_decode_to_string([
b'\xEF\xBB\xBF', b'a', b'\xc3'], 'latin1') == 'a\uFFFD'
assert iter_decode_to_string([
b'', b'\xEF', b'', b'', b'\xBB\xBF\xc3', b'\xa9'], 'latin1') == 'é'
assert iter_decode_to_string([b'\xEF\xBB\xBF'], 'latin1') == ''
assert iter_decode_to_string([b'\xEF\xBB'], 'latin1') == 'ï»'
assert iter_decode_to_string([b'\xFE\xFF\x00\xe9'], 'latin1') == 'é'
assert iter_decode_to_string([b'\xFF\xFE\xe9\x00'], 'latin1') == 'é'
assert iter_decode_to_string([
b'', b'\xFF', b'', b'', b'\xFE\xe9', b'\x00'], 'latin1') == 'é'
assert iter_decode_to_string([
b'', b'h\xe9', b'llo'], 'x-user-defined') == 'h\uF7E9llo'
def test_iter_encode():
assert b''.join(iter_encode([], 'latin1')) == b''
assert b''.join(iter_encode([''], 'latin1')) == b''
assert b''.join(iter_encode(['é'], 'latin1')) == b'\xe9'
assert b''.join(iter_encode(['', 'é', '', ''], 'latin1')) == b'\xe9'
assert b''.join(iter_encode(['', 'é', '', ''], 'utf-16')) == b'\xe9\x00'
assert b''.join(iter_encode(['', 'é', '', ''], 'utf-16le')) == b'\xe9\x00'
assert b''.join(iter_encode(['', 'é', '', ''], 'utf-16be')) == b'\x00\xe9'
assert b''.join(iter_encode([
'', 'h\uF7E9', '', 'llo'], 'x-user-defined')) == b'h\xe9llo'
def test_x_user_defined():
encoded = b'2,\x0c\x0b\x1aO\xd9#\xcb\x0f\xc9\xbbt\xcf\xa8\xca'
decoded = '2,\x0c\x0b\x1aO\uf7d9#\uf7cb\x0f\uf7c9\uf7bbt\uf7cf\uf7a8\uf7ca'
    assert decode(encoded, 'x-user-defined') == decoded
    assert encode(decoded, 'x-user-defined') == encoded
    # ASCII bytes map to themselves under x-user-defined
    encoded = b'aa'
    decoded = 'aa'
    assert decode(encoded, 'x-user-defined') == decoded
    assert encode(decoded, 'x-user-defined') == encoded
| aeroaks/httpProfiler | methods/webencodings/tests.py | Python | mit | 6,184 | 0 |
# Author: Marvin Pinto <me@marvinp.ca>
# Author: Dennis Lutter <lad1337@gmail.com>
# Author: Shawn Conroyd <mongo527@gmail.com>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of Sick Beard.
#
# Sick Beard is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Sick Beard is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.
import urllib
import urllib2
import sickbeard
from sickbeard import logger
from sickbeard.common import notifyStrings, NOTIFY_SNATCH, NOTIFY_DOWNLOAD
from sickbeard.exceptions import ex
API_URL = "https://new.boxcar.io/api/notifications"
class Boxcar2Notifier:
def _sendBoxcar2(self, title, msg, accessToken, sound):
"""
Sends a boxcar2 notification to the address provided
msg: The message to send (unicode)
title: The title of the message
accessToken: The access token to send notification to
returns: True if the message succeeded, False otherwise
"""
# build up the URL and parameters
msg = msg.strip().encode('utf-8')
data = urllib.urlencode({
'user_credentials': accessToken,
'notification[title]': title + " - " + msg,
'notification[long_message]': msg,
'notification[sound]': sound,
'notification[source_name]': "SickBeard"
})
# send the request to boxcar2
try:
req = urllib2.Request(API_URL)
handle = urllib2.urlopen(req, data)
handle.close()
except urllib2.URLError, e:
# FIXME: Python 2.5 hack, it wrongly reports 201 as an error
if hasattr(e, 'code') and e.code == 201:
logger.log(u"BOXCAR2: Notification successful.", logger.MESSAGE)
return True
# if we get an error back that doesn't have an error code then who knows what's really happening
if not hasattr(e, 'code'):
logger.log(u"BOXCAR2: Notification failed." + ex(e), logger.ERROR)
else:
logger.log(u"BOXCAR2: Notification failed. Error code: " + str(e.code), logger.ERROR)
if e.code == 404:
logger.log(u"BOXCAR2: Access token is wrong/not associated to a device.", logger.ERROR)
elif e.code == 401:
logger.log(u"BOXCAR2: Access token not recognized.", logger.ERROR)
elif e.code == 400:
logger.log(u"BOXCAR2: Wrong data sent to boxcar.", logger.ERROR)
elif e.code == 503:
logger.log(u"BOXCAR2: Boxcar server to busy to handle the request at this time.", logger.WARNING)
return False
logger.log(u"BOXCAR2: Notification successful.", logger.MESSAGE)
return True
def _notify(self, title, message, accessToken=None, sound=None, force=False):
"""
Sends a boxcar2 notification based on the provided info or SB config
title: The title of the notification to send
message: The message string to send
accessToken: The access token to send the notification to (optional, defaults to the access token in the config)
force: If True then the notification will be sent even if Boxcar is disabled in the config
"""
# suppress notifications if the notifier is disabled but the notify options are checked
if not sickbeard.USE_BOXCAR2 and not force:
return False
# fill in omitted parameters
if not accessToken:
accessToken = sickbeard.BOXCAR2_ACCESS_TOKEN
if not sound:
sound = sickbeard.BOXCAR2_SOUND
logger.log(u"BOXCAR2: Sending notification for " + message, logger.DEBUG)
return self._sendBoxcar2(title, message, accessToken, sound)
##############################################################################
# Public functions
##############################################################################
def notify_snatch(self, ep_name):
if sickbeard.BOXCAR2_NOTIFY_ONSNATCH:
self._notify(notifyStrings[NOTIFY_SNATCH], ep_name)
def notify_download(self, ep_name):
if sickbeard.BOXCAR2_NOTIFY_ONDOWNLOAD:
self._notify(notifyStrings[NOTIFY_DOWNLOAD], ep_name)
def test_notify(self, accessToken, sound):
return self._notify("Test", "This is a test notification from Sick Beard", accessToken, sound, force=True)
def update_library(self, ep_obj=None):
pass
notifier = Boxcar2Notifier
| imajes/Sick-Beard | sickbeard/notifiers/boxcar2.py | Python | gpl-3.0 | 5,011 | 0.002993 |
###############################################################################
# Name: smalltalk.py #
# Purpose: Define Smalltalk syntax for highlighting and other features #
# Author: Cody Precord <cprecord@editra.org> #
# Copyright: (c) 2007 Cody Precord <staff@editra.org> #
# License: wxWindows License #
###############################################################################
"""
FILE: smalltalk.py
AUTHOR: Cody Precord
@summary: Lexer configuration module for Smalltalk
@todo: more keywords, styling fixes
"""
__author__ = "Cody Precord <cprecord@editra.org>"
__svnid__ = "$Id: _smalltalk.py 68798 2011-08-20 17:17:05Z CJP $"
__revision__ = "$Revision: 68798 $"
#-----------------------------------------------------------------------------#
# Imports
import wx.stc as stc
# Local Imports
import synglob
import syndata
#-----------------------------------------------------------------------------#
#---- Keyword Definitions ----#
# Special Selectors
ST_KEYWORDS = (0, "ifTrue: ifFalse: whileTrue: whileFalse: ifNil: ifNotNil: "
"whileTrue repeat isNil put to at notNil super self "
"true false new not isNil inspect out nil do add for "
"methods methodsFor instanceVariableNames classVariableNames "
"poolDictionaries subclass")
#---- End Keyword Definitions ----#
#---- Syntax Style Specs ----#
SYNTAX_ITEMS = [(stc.STC_ST_ASSIGN, 'operator_style'),
(stc.STC_ST_BINARY, 'operator_style'),
(stc.STC_ST_BOOL, 'keyword_style'),
(stc.STC_ST_CHARACTER, 'char_style'),
(stc.STC_ST_COMMENT, 'comment_style'),
(stc.STC_ST_DEFAULT, 'default_style'),
(stc.STC_ST_GLOBAL, 'global_style'),
(stc.STC_ST_KWSEND, 'keyword_style'),
(stc.STC_ST_NIL, 'keyword_style'),
(stc.STC_ST_NUMBER, 'number_style'),
(stc.STC_ST_RETURN, 'keyword_style'),
(stc.STC_ST_SELF, 'keyword_style'),
(stc.STC_ST_SPECIAL, 'pre_style'),
(stc.STC_ST_SPEC_SEL, 'keyword_style'), # Words in keyword list
(stc.STC_ST_STRING, 'string_style'),
(stc.STC_ST_SUPER, 'class_style'),
(stc.STC_ST_SYMBOL, 'scalar_style')]
#---- Extra Properties ----#
#-----------------------------------------------------------------------------#
class SyntaxData(syndata.SyntaxDataBase):
"""SyntaxData object for Smalltalk"""
def __init__(self, langid):
super(SyntaxData, self).__init__(langid)
# Setup
self.SetLexer(stc.STC_LEX_SMALLTALK)
def GetKeywords(self):
"""Returns Specified Keywords List """
return [ST_KEYWORDS]
def GetSyntaxSpec(self):
"""Syntax Specifications """
return SYNTAX_ITEMS
def GetCommentPattern(self):
"""Returns a list of characters used to comment a block of code """
return [u'\"', u'\"']
#---- Syntax Modules Internal Functions ----#
def KeywordString():
"""Returns the specified Keyword String
@note: not used by most modules
"""
return ST_KEYWORDS[1]
#---- End Syntax Modules Internal Functions ----#
| garrettcap/Bulletproof-Backup | wx/tools/Editra/src/syntax/_smalltalk.py | Python | gpl-2.0 | 3,406 | 0.00411 |
import numdifftools
numdifftools.test() | maniteja123/numdifftools | conda_recipe/run_test.py | Python | bsd-3-clause | 40 | 0.025 |
from django.conf.urls import url
from ..views import (PowerCycleListView, PowerCycleCreateView, PowerCycleDetailView,
PowerCycleUpdateView, PowerCycleDeleteView)
from django.contrib.auth.decorators import login_required
urlpatterns = [
url(r'^create/$', # NOQA
login_required(PowerCycleCreateView.as_view()),
name="power_cycle_create"),
url(r'^(?P<pk>.+)/update/$',
login_required(PowerCycleUpdateView.as_view()),
name="power_cycle_update"),
url(r'^(?P<pk>.+)/delete/$',
login_required(PowerCycleDeleteView.as_view()),
name="power_cycle_delete"),
url(r'^(?P<pk>.+)/$',
PowerCycleDetailView.as_view(),
name="power_cycle_detail"),
url(r'^$',
PowerCycleListView.as_view(),
name="power_cycle_list"),
]
| Hattivat/hypergolic-django | hypergolic/catalog/urls/power_cycle_urls.py | Python | agpl-3.0 | 826 | 0.001211 |
# ***** BEGIN LICENSE BLOCK *****
# Version: MPL 1.1/GPL 2.0/LGPL 2.1
#
# The contents of this file are subject to the Mozilla Public License Version
# 1.1 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
# http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
# for the specific language governing rights and limitations under the
# License.
#
# The Original Code is configman
#
# The Initial Developer of the Original Code is
# Mozilla Foundation
# Portions created by the Initial Developer are Copyright (C) 2011
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# K Lars Lohn, lars@mozilla.com
# Peter Bengtsson, peterbe@mozilla.com
#
# Alternatively, the contents of this file may be used under the terms of
# either the GNU General Public License Version 2 or later (the "GPL"), or
# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
# in which case the provisions of the GPL or the LGPL are applicable instead
# of those above. If you wish to allow use of your version of this file only
# under the terms of either the GPL or the LGPL, and not to allow others to
# use your version of this file under the terms of the MPL, indicate your
# decision by deleting the provisions above and replace them with the notice
# and other provisions required by the GPL or the LGPL. If you do not delete
# the provisions above, a recipient may use your version of this file under
# the terms of any one of the MPL, the GPL or the LGPL.
#
# ***** END LICENSE BLOCK *****
import sys
import re
import datetime
import types
import inspect
import collections
import json
from required_config import RequiredConfig
from namespace import Namespace
from .datetime_util import datetime_from_ISO_string as datetime_converter
from .datetime_util import date_from_ISO_string as date_converter
import datetime_util
#------------------------------------------------------------------------------
def option_value_str(an_option):
"""return an instance of Option's value as a string.
The option instance doesn't actually have to be from the Option class. All
it requires is that the passed option instance has a ``value`` attribute.
"""
if an_option.value is None:
return ''
try:
converter = to_string_converters[type(an_option.value)]
s = converter(an_option.value)
except KeyError:
if not isinstance(an_option.value, basestring):
s = unicode(an_option.value)
else:
s = an_option.value
if an_option.from_string_converter in converters_requiring_quotes:
s = "'''%s'''" % s
return s
#------------------------------------------------------------------------------
def str_dict_keys(a_dict):
"""return a modified dict where all the keys that are anything but str get
converted to str.
E.g.
>>> result = str_dict_keys({u'name': u'Peter', u'age': 99, 1: 2})
>>> # can't compare whole dicts in doctests
>>> result['name']
u'Peter'
>>> result['age']
99
>>> result[1]
2
The reason for this is that in Python <= 2.6.4 doing
``MyClass(**{u'name': u'Peter'})`` would raise a TypeError
Note that only unicode types are converted to str types.
The reason for that is you might have a class that looks like this::
class Option(object):
def __init__(self, foo=None, bar=None, **kwargs):
...
And it's being used like this::
Option(**{u'foo':1, u'bar':2, 3:4})
Then you don't want to change that {3:4} part which becomes part of
`**kwargs` inside the __init__ method.
Using integers as parameter keys is a silly example but the point is that
due to the python 2.6.4 bug only unicode keys are converted to str.
"""
new_dict = {}
for key in a_dict:
if isinstance(key, unicode):
new_dict[str(key)] = a_dict[key]
else:
new_dict[key] = a_dict[key]
return new_dict
#------------------------------------------------------------------------------
def io_converter(input_str):
""" a conversion function for to select stdout, stderr or open a file for
writing"""
if type(input_str) is str:
input_str_lower = input_str.lower()
if input_str_lower == 'stdout':
return sys.stdout
if input_str_lower == 'stderr':
return sys.stderr
return open(input_str, "w")
return input_str
#------------------------------------------------------------------------------
def timedelta_converter(input_str):
"""a conversion function for time deltas"""
if isinstance(input_str, basestring):
days, hours, minutes, seconds = 0, 0, 0, 0
details = input_str.split(':')
if len(details) >= 4:
days = int(details[-4])
if len(details) >= 3:
hours = int(details[-3])
if len(details) >= 2:
minutes = int(details[-2])
if len(details) >= 1:
seconds = int(details[-1])
return datetime.timedelta(days=days,
hours=hours,
minutes=minutes,
seconds=seconds)
raise ValueError(input_str)
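# A minimal usage sketch (hypothetical inputs): strings are read as
# "days:hours:minutes:seconds", with missing leading fields defaulting to zero.
#
#     timedelta_converter('1:2:3:4')   # -> timedelta(days=1, hours=2,
#                                      #              minutes=3, seconds=4)
#     timedelta_converter('90')        # -> timedelta(seconds=90)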
#------------------------------------------------------------------------------
def boolean_converter(input_str):
""" a conversion function for boolean
"""
return input_str.lower() in ("true", "t", "1", "y", "yes")
#------------------------------------------------------------------------------
import __builtin__
_all_named_builtins = dir(__builtin__)
def class_converter(input_str):
""" a conversion that will import a module and class name
"""
if not input_str:
return None
if '.' not in input_str and input_str in _all_named_builtins:
return eval(input_str)
parts = [x.strip() for x in input_str.split('.') if x.strip()]
try:
# first try as a complete module
package = __import__(input_str)
except ImportError:
# it must be a class from a module
if len(parts) == 1:
# since it has only one part, it must be a class from __main__
parts = ('__main__', input_str)
package = __import__('.'.join(parts[:-1]), globals(), locals(), [])
obj = package
for name in parts[1:]:
obj = getattr(obj, name)
return obj
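# A minimal usage sketch (hypothetical inputs): bare builtin names are
# resolved directly, while dotted paths are imported and then traversed
# attribute by attribute.
#
#     class_converter('int')                      # -> the builtin int type
#     class_converter('collections.OrderedDict')  # -> collections.OrderedDict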
#------------------------------------------------------------------------------
def classes_in_namespaces_converter(template_for_namespace="cls%d",
name_of_class_option='cls',
instantiate_classes=False):
"""take a comma delimited list of class names, convert each class name
into an actual class as an option within a numbered namespace. This
function creates a closure over a new function. That new function,
in turn creates a class derived from RequiredConfig. The inner function,
'class_list_converter', populates the InnerClassList with a Namespace for
    each of the classes in the class list.  In addition, it puts each class
    itself into the subordinate Namespace.  The requirement discovery mechanism
    of configman then reads the InnerClassList's required config, pulling in
the namespaces and associated classes within.
For example, if we have a class list like this: "Alpha, Beta", then this
converter will add the following Namespaces and options to the
configuration:
"cls0" - the subordinate Namespace for Alpha
"cls0.cls" - the option containing the class Alpha itself
"cls1" - the subordinate Namespace for Beta
"cls1.cls" - the option containing the class Beta itself
    Optionally, the 'class_list_converter' inner function can imbue the
InnerClassList's subordinate namespaces with aggregates that will
instantiate classes from the class list. This is a convenience to the
programmer who would otherwise have to know ahead of time what the
namespace names were so that the classes could be instantiated within the
context of the correct namespace. Remember the user could completely
change the list of classes at run time, so prediction could be difficult.
"cls0" - the subordinate Namespace for Alpha
"cls0.cls" - the option containing the class Alpha itself
"cls0.cls_instance" - an instance of the class Alpha
"cls1" - the subordinate Namespace for Beta
"cls1.cls" - the option containing the class Beta itself
"cls1.cls_instance" - an instance of the class Beta
parameters:
template_for_namespace - a template for the names of the namespaces
that will contain the classes and their
associated required config options. The
                                 namespaces will be numbered sequentially.  By
                                 default, they will be "cls0", "cls1", etc.
        name_of_class_option - the name to be used for the class option within
                               the nested namespace.  By default, it will choose:
                               "cls0.cls", "cls1.cls", etc.
instantiate_classes - a boolean to determine if there should be an
aggregator added to each namespace that
instantiates each class. If True, then each
Namespace will contain elements for the class, as
well as an aggregator that will instantiate the
class.
"""
#--------------------------------------------------------------------------
def class_list_converter(class_list_str):
"""This function becomes the actual converter used by configman to
take a string and convert it into the nested sequence of Namespaces,
one for each class in the list. It does this by creating a proxy
class stuffed with its own 'required_config' that's dynamically
generated."""
if isinstance(class_list_str, basestring):
class_list = [x.strip() for x in class_list_str.split(',')]
else:
raise TypeError('must be derivative of a basestring')
#======================================================================
class InnerClassList(RequiredConfig):
"""This nested class is a proxy list for the classes. It collects
all the config requirements for the listed classes and places them
each into their own Namespace.
"""
# we're dynamically creating a class here. The following block of
# code is actually adding class level attributes to this new class
required_config = Namespace() # 1st requirement for configman
subordinate_namespace_names = [] # to help the programmer know
# what Namespaces we added
namespace_template = template_for_namespace # save the template
# for future reference
class_option_name = name_of_class_option # save the class's option
# name for the future
# for each class in the class list
for namespace_index, a_class in enumerate(class_list):
# figure out the Namespace name
namespace_name = template_for_namespace % namespace_index
subordinate_namespace_names.append(namespace_name)
# create the new Namespace
required_config[namespace_name] = Namespace()
# add the option for the class itself
required_config[namespace_name].add_option(
name_of_class_option,
#doc=a_class.__doc__ # not helpful if too verbose
default=a_class,
from_string_converter=class_converter
)
if instantiate_classes:
# add an aggregator to instantiate the class
required_config[namespace_name].add_aggregation(
"%s_instance" % name_of_class_option,
lambda c, lc, a: lc[name_of_class_option](lc))
@classmethod
def to_str(cls):
"""this method takes this inner class object and turns it back
into the original string of classnames. This is used
primarily as for the output of the 'help' option"""
return ', '.join(
py_obj_to_str(v[name_of_class_option].value)
for v in cls.get_required_config().values()
if isinstance(v, Namespace))
return InnerClassList # result of class_list_converter
return class_list_converter # result of classes_in_namespaces_converter
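# A usage sketch (the option name, template and class list below are
# hypothetical):
#
#     required_config = Namespace()
#     required_config.add_option(
#         'storage_classes',
#         default='',
#         from_string_converter=classes_in_namespaces_converter(
#             template_for_namespace='storage%d',
#             name_of_class_option='storage_class',
#             instantiate_classes=True)
#     )
#
# Feeding the value "mypackage.Alpha, mypackage.Beta" through configman then
# yields namespaces "storage0" and "storage1", each holding the class under
# "storage_class" and, because instantiate_classes is True, an aggregation
# named "storage_class_instance".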
#------------------------------------------------------------------------------
def regex_converter(input_str):
return re.compile(input_str)
compiled_regexp_type = type(re.compile(r'x'))
#------------------------------------------------------------------------------
from_string_converters = {
int: int,
float: float,
str: str,
unicode: unicode,
bool: boolean_converter,
dict: json.loads,
datetime.datetime: datetime_converter,
datetime.date: date_converter,
datetime.timedelta: timedelta_converter,
type: class_converter,
types.FunctionType: class_converter,
compiled_regexp_type: regex_converter,
}
#------------------------------------------------------------------------------
def py_obj_to_str(a_thing):
if a_thing is None:
return ''
if inspect.ismodule(a_thing):
return a_thing.__name__
if a_thing.__module__ == '__builtin__':
return a_thing.__name__
if a_thing.__module__ == "__main__":
return a_thing.__name__
if hasattr(a_thing, 'to_str'):
return a_thing.to_str()
return "%s.%s" % (a_thing.__module__, a_thing.__name__)
#------------------------------------------------------------------------------
def list_to_str(a_list):
return ', '.join(to_string_converters[type(x)](x) for x in a_list)
#------------------------------------------------------------------------------
to_string_converters = {
int: str,
float: str,
str: str,
unicode: unicode,
list: list_to_str,
tuple: list_to_str,
bool: lambda x: 'True' if x else 'False',
dict: json.dumps,
datetime.datetime: datetime_util.datetime_to_ISO_string,
datetime.date: datetime_util.date_to_ISO_string,
datetime.timedelta: datetime_util.timedelta_to_str,
type: py_obj_to_str,
types.ModuleType: py_obj_to_str,
types.FunctionType: py_obj_to_str,
compiled_regexp_type: lambda x: x.pattern,
}
#------------------------------------------------------------------------------
#converters_requiring_quotes = [eval, eval_to_regex_converter]
converters_requiring_quotes = [eval, regex_converter]
| AdrianGaudebert/configman | configman/converters.py | Python | bsd-3-clause | 15,400 | 0.002013 |
# -*- coding: utf-8 -*-
"""
Various i18n functions.
Helper functions for both the internal translation system
and for TranslateWiki-based translations.
By default messages are assumed to reside in a package called
'scripts.i18n'. In pywikibot 2.0, that package is not packaged
with pywikibot, and pywikibot 2.0 does not have a hard dependency
on any i18n messages. However, there are three user input questions
in pagegenerators which will use i18n messages if they can be loaded.
The default message location may be changed by calling
L{set_message_package} with a package name. The package must contain
an __init__.py, and a message bundle called 'pywikibot' containing
messages. See L{twntranslate} for more information on the messages.
"""
#
# (C) Pywikibot team, 2004-2015
#
# Distributed under the terms of the MIT license.
#
from __future__ import unicode_literals
__version__ = '$Id$'
#
import sys
import re
import locale
import json
import os
import pkgutil
from collections import defaultdict
from pywikibot import Error
from .plural import plural_rules
import pywikibot
from . import config2 as config
if sys.version_info[0] > 2:
basestring = (str, )
PLURAL_PATTERN = r'{{PLURAL:(?:%\()?([^\)]*?)(?:\)d)?\|(.*?)}}'
# Package name for the translation messages. The messages data must loaded
# relative to that package name. In the top of this package should be
# directories named after for each script/message bundle, and each directory
# should contain JSON files called <lang>.json
_messages_package_name = 'scripts.i18n'
# Flag to indicate whether translation messages are available
_messages_available = None
# Cache of translated messages
_cache = defaultdict(dict)
def set_messages_package(package_name):
"""Set the package name where i18n messages are located."""
global _messages_package_name
global _messages_available
_messages_package_name = package_name
_messages_available = None
def messages_available():
"""
Return False if there are no i18n messages available.
To determine if messages are available, it looks for the package name
set using L{set_messages_package} for a message bundle called 'pywikibot'
containing messages.
@rtype: bool
"""
global _messages_available
if _messages_available is not None:
return _messages_available
try:
__import__(_messages_package_name)
except ImportError:
_messages_available = False
return False
_messages_available = True
return True
def _altlang(code):
"""Define fallback languages for particular languages.
If no translation is available to a specified language, translate() will
try each of the specified fallback languages, in order, until it finds
one with a translation, with 'en' and '_default' as a last resort.
For example, if for language 'xx', you want the preference of languages
to be: xx > fr > ru > en, you let this method return ['fr', 'ru'].
This code is used by other translating methods below.
@param code: The language code
@type code: string
@return: language codes
@rtype: list of str
"""
# Akan
if code in ['ak', 'tw']:
return ['ak', 'tw']
# Amharic
if code in ['aa', 'ti']:
return ['am']
# Arab
if code in ['arc', 'arz', 'so']:
return ['ar']
if code == 'kab':
return ['ar', 'fr']
# Bulgarian
if code in ['cu', 'mk']:
return ['bg', 'sr', 'sh']
# Czech
if code in ['cs', 'sk']:
return ['cs', 'sk']
# German
if code in ['bar', 'frr', 'ksh', 'pdc', 'pfl']:
return ['de']
if code == 'lb':
return ['de', 'fr']
if code in ['als', 'gsw']:
return ['als', 'gsw', 'de']
if code == 'nds':
return ['nds-nl', 'de']
if code in ['dsb', 'hsb']:
return ['hsb', 'dsb', 'de']
if code == 'sli':
return ['de', 'pl']
if code == 'rm':
return ['de', 'it']
if code == 'stq':
return ['nds', 'de']
# Greek
if code in ['grc', 'pnt']:
return ['el']
# Esperanto
if code in ['io', 'nov']:
return ['eo']
# Spanish
if code in ['an', 'arn', 'ast', 'ay', 'ca', 'ext', 'lad', 'nah', 'nv', 'qu',
'yua']:
return ['es']
if code in ['gl', 'gn']:
return ['es', 'pt']
if code == 'eu':
return ['es', 'fr']
if code == 'cbk-zam':
return ['es', 'tl']
# Estonian
if code in ['fiu-vro', 'vro']:
return ['fiu-vro', 'vro', 'et']
if code == 'liv':
return ['et', 'lv']
# Persian (Farsi)
if code == 'ps':
return ['fa']
if code in ['glk', 'mzn']:
return ['glk', 'mzn', 'fa', 'ar']
# Finnish
if code == 'vep':
return ['fi', 'ru']
if code == 'fit':
return ['fi', 'sv']
# French
if code in ['bm', 'br', 'ht', 'kg', 'ln', 'mg', 'nrm', 'pcd',
'rw', 'sg', 'ty', 'wa']:
return ['fr']
if code == 'oc':
return ['fr', 'ca', 'es']
if code in ['co', 'frp']:
return ['fr', 'it']
# Hindi
if code in ['sa']:
return ['hi']
if code in ['ne', 'new']:
return ['ne', 'new', 'hi']
if code in ['bh', 'bho']:
return ['bh', 'bho']
# Indonesian and Malay
if code in ['ace', 'bug', 'bjn', 'id', 'jv', 'ms', 'su']:
return ['id', 'ms', 'jv']
if code == 'map-bms':
return ['jv', 'id', 'ms']
# Inuit languages
if code in ['ik', 'iu']:
return ['iu', 'kl']
if code == 'kl':
return ['da', 'iu', 'no', 'nb']
# Italian
if code in ['eml', 'fur', 'lij', 'lmo', 'nap', 'pms', 'roa-tara', 'sc',
'scn', 'vec']:
return ['it']
# Lithuanian
if code in ['bat-smg', 'sgs']:
return ['bat-smg', 'sgs', 'lt']
# Latvian
if code == 'ltg':
return ['lv']
# Dutch
if code in ['af', 'fy', 'li', 'pap', 'srn', 'vls', 'zea']:
return ['nl']
    if code == 'nds-nl':
return ['nds', 'nl']
# Polish
if code in ['csb', 'szl']:
return ['pl']
# Portuguese
if code in ['fab', 'mwl', 'tet']:
return ['pt']
# Romanian
if code in ['roa-rup', 'rup']:
return ['roa-rup', 'rup', 'ro']
if code == 'mo':
return ['ro']
# Russian and Belarusian
if code in ['ab', 'av', 'ba', 'bxr', 'ce', 'cv', 'inh', 'kk', 'koi', 'krc',
'kv', 'ky', 'lbe', 'lez', 'mdf', 'mhr', 'mn', 'mrj', 'myv',
'os', 'sah', 'tg', 'udm', 'uk', 'xal']:
return ['ru']
if code in ['kbd', 'ady']:
return ['kbd', 'ady', 'ru']
if code == 'tt':
return ['tt-cyrl', 'ru']
if code in ['be', 'be-x-old', 'be-tarask']:
return ['be', 'be-x-old', 'be-tarask', 'ru']
if code == 'kaa':
return ['uz', 'ru']
# Serbocroatian
if code in ['bs', 'hr', 'sh']:
return ['sh', 'hr', 'bs', 'sr', 'sr-el']
if code == 'sr':
return ['sr-el', 'sh', 'hr', 'bs']
# Tagalog
if code in ['bcl', 'ceb', 'ilo', 'pag', 'pam', 'war']:
return ['tl']
# Turkish and Kurdish
if code in ['diq', 'ku']:
return ['ku', 'ku-latn', 'tr']
if code == 'gag':
return ['tr']
if code == 'ckb':
return ['ku']
# Ukrainian
if code in ['crh', 'crh-latn']:
return ['crh', 'crh-latn', 'uk', 'ru']
if code in ['rue']:
return ['uk', 'ru']
# Chinese
if code in ['zh-classical', 'lzh', 'minnan', 'zh-min-nan', 'nan', 'zh-tw',
'zh', 'zh-hans']:
return ['zh', 'zh-hans', 'zh-tw', 'zh-cn', 'zh-classical', 'lzh']
if code in ['cdo', 'gan', 'hak', 'ii', 'wuu', 'za', 'zh-classical', 'lzh',
'zh-cn', 'zh-yue', 'yue']:
        return ['zh', 'zh-hans', 'zh-cn', 'zh-tw', 'zh-classical', 'lzh']
# Scandinavian languages
if code in ['da', 'sv']:
return ['da', 'no', 'nb', 'sv', 'nn']
if code in ['fo', 'is']:
return ['da', 'no', 'nb', 'nn', 'sv']
if code == 'nn':
return ['no', 'nb', 'sv', 'da']
if code in ['no', 'nb']:
return ['no', 'nb', 'da', 'nn', 'sv']
if code == 'se':
return ['sv', 'no', 'nb', 'nn', 'fi']
# Other languages
if code in ['bi', 'tpi']:
return ['bi', 'tpi']
if code == 'yi':
return ['he', 'de']
if code in ['ia', 'ie']:
return ['ia', 'la', 'it', 'fr', 'es']
if code == 'xmf':
return ['ka']
if code in ['nso', 'st']:
return ['st', 'nso']
if code in ['kj', 'ng']:
return ['kj', 'ng']
if code in ['meu', 'hmo']:
return ['meu', 'hmo']
    if code == 'as':
return ['bn']
# Default value
return []
class TranslationError(Error, ImportError):
"""Raised when no correct translation could be found."""
# Inherits from ImportError, as this exception is now used
# where previously an ImportError would have been raised,
# and may have been caught by scripts as such.
pass
def _get_translation(lang, twtitle):
"""
Return message of certain twtitle if exists.
For internal use, don't use it directly.
"""
if twtitle in _cache[lang]:
return _cache[lang][twtitle]
message_bundle = twtitle.split('-')[0]
trans_text = None
filename = '%s/%s.json' % (message_bundle, lang)
try:
trans_text = pkgutil.get_data(
_messages_package_name, filename).decode('utf-8')
except (OSError, IOError): # file open can cause several exceptions
_cache[lang][twtitle] = None
return
transdict = json.loads(trans_text)
_cache[lang].update(transdict)
try:
return transdict[twtitle]
except KeyError:
return
def _extract_plural(code, message, parameters):
"""Check for the plural variants in message and replace them.
@param message: the message to be replaced
@type message: unicode string
@param parameters: plural parameters passed from other methods
@type parameters: int, basestring, tuple, list, dict
"""
plural_items = re.findall(PLURAL_PATTERN, message)
if plural_items: # we found PLURAL patterns, process it
if len(plural_items) > 1 and isinstance(parameters, (tuple, list)) and \
len(plural_items) != len(parameters):
raise ValueError("Length of parameter does not match PLURAL "
"occurrences.")
i = 0
for selector, variants in plural_items:
if isinstance(parameters, dict):
num = int(parameters[selector])
elif isinstance(parameters, basestring):
num = int(parameters)
elif isinstance(parameters, (tuple, list)):
num = int(parameters[i])
i += 1
else:
num = parameters
# TODO: check against plural_rules[code]['nplurals']
try:
index = plural_rules[code]['plural'](num)
except KeyError:
index = plural_rules['_default']['plural'](num)
except TypeError:
# we got an int, not a function
index = plural_rules[code]['plural']
repl = variants.split('|')[index]
message = re.sub(PLURAL_PATTERN, repl, message, count=1)
return message
DEFAULT_FALLBACK = ('_default', )
def translate(code, xdict, parameters=None, fallback=False):
"""Return the most appropriate translation from a translation dict.
Given a language code and a dictionary, returns the dictionary's value for
key 'code' if this key exists; otherwise tries to return a value for an
alternative language that is most applicable to use on the wiki in
language 'code' except fallback is False.
The language itself is always checked first, then languages that
have been defined to be alternatives, and finally English. If none of
    the options gives a result, we just take one language from xdict, which may
    not always be the same. When fallback is iterable it'll return None if no
code applies (instead of returning one).
For PLURAL support have a look at the twntranslate method
@param code: The language code
@type code: string or Site object
@param xdict: dictionary with language codes as keys or extended dictionary
with family names as keys containing language dictionaries or
a single (unicode) string. May contain PLURAL tags as
described in twntranslate
@type xdict: dict, string, unicode
@param parameters: For passing (plural) parameters
@type parameters: dict, string, unicode, int
@param fallback: Try an alternate language code. If it's iterable it'll
also try those entries and choose the first match.
@type fallback: boolean or iterable
"""
family = pywikibot.config.family
# If a site is given instead of a code, use its language
if hasattr(code, 'code'):
family = code.family.name
code = code.code
# Check whether xdict has multiple projects
if isinstance(xdict, dict):
if family in xdict:
xdict = xdict[family]
elif 'wikipedia' in xdict:
xdict = xdict['wikipedia']
# Get the translated string
if not isinstance(xdict, dict):
trans = xdict
elif not xdict:
trans = None
else:
codes = [code]
if fallback is True:
codes += _altlang(code) + ['_default', 'en']
elif fallback is not False:
codes += list(fallback)
for code in codes:
if code in xdict:
trans = xdict[code]
break
else:
if fallback is not True:
# this shouldn't simply return "any one" code but when fallback
# was True before 65518573d2b0, it did just that. When False it
# did just return None. It's now also returning None in the new
# iterable mode.
return
code = list(xdict.keys())[0]
trans = xdict[code]
if trans is None:
return # return None if we have no translation found
if parameters is None:
return trans
# else we check for PLURAL variants
trans = _extract_plural(code, trans, parameters)
if parameters:
try:
return trans % parameters
except (KeyError, TypeError):
# parameter is for PLURAL variants only, don't change the string
pass
return trans
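# A minimal usage sketch (the dictionary below is hypothetical):
#
#     msg = {'en': u'English text', 'ar': u'Arabic text'}
#     translate('arz', msg, fallback=True)   # returns the 'ar' entry
#
# 'arz' itself is not a key of msg, so the fallback chain from _altlang
# (['ar'], then '_default' and 'en') is consulted and 'ar' matches first.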
def twtranslate(code, twtitle, parameters=None, fallback=True):
"""
Translate a message.
The translations are retrieved from json files in messages_package_name.
fallback parameter must be True for i18n and False for L10N or testing
purposes.
@param code: The language code
@param twtitle: The TranslateWiki string title, in <package>-<key> format
@param parameters: For passing parameters.
@param fallback: Try an alternate language code
@type fallback: boolean
"""
if not messages_available():
raise TranslationError(
'Unable to load messages package %s for bundle %s'
'\nIt can happen due to lack of i18n submodule or files. '
'Read https://mediawiki.org/wiki/PWB/i18n'
% (_messages_package_name, twtitle))
code_needed = False
# If a site is given instead of a code, use its language
if hasattr(code, 'code'):
lang = code.code
# check whether we need the language code back
elif isinstance(code, list):
lang = code.pop()
code_needed = True
else:
lang = code
# There are two possible failure modes: the translation dict might not have
# the language altogether, or a specific key could be untranslated. Both
# modes are caught with the KeyError.
langs = [lang]
if fallback:
langs += _altlang(lang) + ['en']
for alt in langs:
trans = _get_translation(alt, twtitle)
if trans:
break
else:
raise TranslationError(
'No English translation has been defined for TranslateWiki key'
' %r\nIt can happen due to lack of i18n submodule or files. '
'Read https://mediawiki.org/wiki/PWB/i18n' % twtitle)
# send the language code back via the given list
if code_needed:
code.append(alt)
if parameters:
return trans % parameters
else:
return trans
# Maybe this function should be merged with twtranslate
def twntranslate(code, twtitle, parameters=None):
r"""Translate a message with plural support.
Support is implemented like in MediaWiki extension. If the TranslateWiki
message contains a plural tag inside which looks like::
{{PLURAL:<number>|<variant1>|<variant2>[|<variantn>]}}
it takes that variant calculated by the plural_rules depending on the number
value. Multiple plurals are allowed.
    As an example, if we had several json dictionaries in the test folder like:
en.json:
{
"test-plural": "Bot: Changing %(num)s {{PLURAL:%(num)d|page|pages}}.",
}
fr.json:
{
"test-plural": "Robot: Changer %(descr)s {{PLURAL:num|une page|quelques pages}}.",
}
and so on.
>>> from pywikibot import i18n
>>> i18n.set_messages_package('tests.i18n')
>>> # use a number
>>> str(i18n.twntranslate('en', 'test-plural', 0) % {'num': 'no'})
'Bot: Changing no pages.'
>>> # use a string
>>> str(i18n.twntranslate('en', 'test-plural', '1') % {'num': 'one'})
'Bot: Changing one page.'
>>> # use a dictionary
>>> str(i18n.twntranslate('en', 'test-plural', {'num':2}))
'Bot: Changing 2 pages.'
>>> # use additional format strings
>>> str(i18n.twntranslate('fr', 'test-plural', {'num': 1, 'descr': 'seulement'}))
'Robot: Changer seulement une page.'
>>> # use format strings also outside
>>> str(i18n.twntranslate('fr', 'test-plural', 10) % {'descr': 'seulement'})
'Robot: Changer seulement quelques pages.'
The translations are retrieved from i18n.<package>, based on the callers
import table.
@param code: The language code
@param twtitle: The TranslateWiki string title, in <package>-<key> format
@param parameters: For passing (plural) parameters.
"""
# If a site is given instead of a code, use its language
if hasattr(code, 'code'):
code = code.code
# we send the code via list and get the alternate code back
code = [code]
trans = twtranslate(code, twtitle)
# get the alternate language code modified by twtranslate
lang = code.pop()
# check for PLURAL variants
trans = _extract_plural(lang, trans, parameters)
# we always have a dict for replacement of translatewiki messages
if parameters and isinstance(parameters, dict):
try:
return trans % parameters
except KeyError:
# parameter is for PLURAL variants only, don't change the string
pass
return trans
def twhas_key(code, twtitle):
"""
Check if a message has a translation in the specified language code.
The translations are retrieved from i18n.<package>, based on the callers
import table.
No code fallback is made.
@param code: The language code
@param twtitle: The TranslateWiki string title, in <package>-<key> format
"""
# If a site is given instead of a code, use its language
if hasattr(code, 'code'):
code = code.code
transdict = _get_translation(code, twtitle)
if transdict is None:
return False
return True
def twget_keys(twtitle):
"""
Return all language codes for a special message.
@param twtitle: The TranslateWiki string title, in <package>-<key> format
"""
# obtain the directory containing all the json files for this package
package = twtitle.split("-")[0]
mod = __import__(_messages_package_name, fromlist=[str('__file__')])
pathname = os.path.join(os.path.dirname(mod.__file__), package)
# build a list of languages in that directory
langs = [filename.partition('.')[0]
for filename in sorted(os.listdir(pathname))
if filename.endswith('.json')]
    # exclude languages that do not have this specific message in that package
# i.e. an incomplete set of translated messages.
return [lang for lang in langs
if lang != 'qqq' and
_get_translation(lang, twtitle)]
def input(twtitle, parameters=None, password=False, fallback_prompt=None):
"""
Ask the user a question, return the user's answer.
The prompt message is retrieved via L{twtranslate} and either uses the
config variable 'userinterface_lang' or the default locale as the language
code.
@param twtitle: The TranslateWiki string title, in <package>-<key> format
@param parameters: The values which will be applied to the translated text
@param password: Hides the user's input (for password entry)
@param fallback_prompt: The English prompt if i18n is not available.
@rtype: unicode string
"""
if not messages_available():
if not fallback_prompt:
raise TranslationError(
'Unable to load messages package %s for bundle %s'
% (_messages_package_name, twtitle))
else:
prompt = fallback_prompt
else:
code = config.userinterface_lang or \
locale.getdefaultlocale()[0].split('_')[0]
prompt = twtranslate(code, twtitle, parameters)
return pywikibot.input(prompt, password)
| emijrp/pywikibot-core | pywikibot/i18n.py | Python | mit | 21,745 | 0.000368 |
#!/usr/bin/python
"""
*************************************************
* @Project: Self Balance
* @Platform: Raspberry PI 2 B+
* @Description: Motor module
* DC Motor with gearbox/encoder
* Motor driver VNH2SP30
* @Owner: Guilherme Chinellato
* @Email: guilhermechinellato@gmail.com
*************************************************
"""
import RPi.GPIO as GPIO
import time
from Motion.constants import *
from Utils.gpio_mapping import *
from Utils.traces.trace import *
class Motor():
def __init__(self, name, pinPWM, pinCW, pinCCW, debug=0):
self.debug = debug
self.name = name
self.pinPWM = pinPWM
self.pinCW = pinCW
self.pinCCW = pinCCW
#Set up BCM GPIO numbering
GPIO.setwarnings(False)
GPIO.setmode(GPIO.BCM)
#Set GPIO as output
GPIO.setup(pinPWM, GPIO.OUT)
GPIO.setup(pinCW, GPIO.OUT)
GPIO.setup(pinCCW, GPIO.OUT)
GPIO.output(pinCW, False)
GPIO.output(pinCCW, False)
#Set GPIO as PWM output
self._motorPWM = GPIO.PWM(pinPWM, PWM_FREQ)
logging.info(("Motor " + str(name) + " module initialized"))
def start(self):
'''Start PWM (stopped)'''
self._motorPWM.start(0)
if (self.debug & MODULE_MOTION_MOTOR):
logging.debug(("Motor %s started" % (self.name)))
def stop(self):
'''Stop motor (speed to zero), it is not necessary to restart the motor'''
self._motorPWM.ChangeDutyCycle(0)
if (self.debug & MODULE_MOTION_MOTOR):
logging.debug(("Motor %s stopepd" % (self.name)))
def shutdown(self):
'''Disable motor, it is not necessary to restart the motor'''
self._motorPWM.stop()
GPIO.cleanup()
if (self.debug & MODULE_MOTION_MOTOR):
logging.debug(("Motor %s is down" % (self.name)))
def setSpeed(self, direction="", pwm=0):
'''Set motor speed'''
if direction == "CW":
GPIO.output(self.pinCW, True)
GPIO.output(self.pinCCW, False)
elif direction == "CCW":
GPIO.output(self.pinCW, False)
GPIO.output(self.pinCCW, True)
else:
GPIO.output(self.pinCW, False)
GPIO.output(self.pinCCW, False)
self._motorPWM.ChangeDutyCycle(pwm)
if (self.debug & MODULE_MOTION_MOTOR):
logging.debug(("Motor %s: Direction %s and Speed %d" % (self.name, direction, pwm)))
def TestMotor():
try:
setVerbosity("debug")
motorA = Motor("Left", MA_PWM_GPIO, MA_CLOCKWISE_GPIO, MA_ANTICLOCKWISE_GPIO, MODULE_MOTION_MOTOR)
motorB = Motor("Right", MB_PWM_GPIO, MB_CLOCKWISE_GPIO, MB_ANTICLOCKWISE_GPIO, MODULE_MOTION_MOTOR)
LP = 0.1
print "Start motor"
motorA.start()
motorB.start()
while True:
v = float((input("Inser PWM duty cycle: ")))
motorA.setSpeed(direction="CCW", pwm=v)
motorB.setSpeed(direction="CCW", pwm=v)
#motorA.setSpeed(direction="CW", pwm=10)
#motorB.setSpeed(direction="CW", pwm=10)
#time.sleep(1000)
'''for i in range(100):
print "Set speed CW: " + str(i)
motorA.setSpeed(direction="CW", pwm=i)
motorB.setSpeed(direction="CW", pwm=i)
time.sleep(LP)
for i in range(100):
print "Set speed CCW: " + str(i)
motorA.setSpeed(direction="CCW", pwm=i)
motorB.setSpeed(direction="CCW", pwm=i)
time.sleep(LP) '''
print "Stop motor"
motorA.setSpeed()
motorB.setSpeed()
motorA.stop()
motorB.stop()
except KeyboardInterrupt:
print "Shutdown motor"
motorA.shutdown()
motorB.shutdown()
if __name__ == '__main__':
TestMotor()
| gchinellato/Self-Balance-Robot | nfs-server/modules/Motion/Motor/motor.py | Python | gpl-3.0 | 3,971 | 0.007555 |
import ssl
import sys
from tornado import web, httpserver, ioloop, process, autoreload
from DIRAC import gLogger, S_OK, S_ERROR
from DIRAC.Core.Utilities.ObjectLoader import ObjectLoader
from RESTDIRAC.RESTSystem.Base.RESTHandler import RESTHandler
from RESTDIRAC.ConfigurationSystem.Client.Helpers import RESTConf
class RESTApp( object ):
def __init__( self ):
self.__handlers = {}
def _logRequest( self, handler ):
status = handler.get_status()
if status < 400:
logm = gLogger.notice
elif status < 500:
logm = gLogger.warn
else:
logm = gLogger.error
request_time = 1000.0 * handler.request.request_time()
logm( "%d %s %.2fms" % ( status, handler._request_summary(), request_time ) )
def __reloadAppCB( self ):
gLogger.notice( "\n !!!!!! Reloading web app...\n" )
def bootstrap( self ):
gLogger.always( "\n === Bootstrapping REST Server === \n" )
ol = ObjectLoader( [ 'DIRAC', 'RESTDIRAC' ] )
result = ol.getObjects( "RESTSystem.API", parentClass = RESTHandler, recurse = True )
if not result[ 'OK' ]:
return result
self.__handlers = result[ 'Value' ]
if not self.__handlers:
return S_ERROR( "No handlers found" )
self.__routes = [ ( self.__handlers[ k ].getRoute(), self.__handlers[k] ) for k in self.__handlers if self.__handlers[ k ].getRoute() ]
gLogger.info( "Routes found:" )
for t in sorted( self.__routes ):
gLogger.info( " - %s : %s" % ( t[0], t[1].__name__ ) )
balancer = RESTConf.balancer()
kw = dict( debug = RESTConf.debug(), log_function = self._logRequest )
    if balancer and RESTConf.numProcesses() not in ( 0, 1 ):
process.fork_processes( RESTConf.numProcesses(), max_restarts = 0 )
kw[ 'debug' ] = False
if kw[ 'debug' ]:
gLogger.always( "Starting in debug mode" )
self.__app = web.Application( self.__routes, **kw )
port = RESTConf.port()
if balancer:
gLogger.notice( "Configuring REST HTTP service for balancer %s on port %s" % ( balancer, port ) )
self.__sslops = False
else:
gLogger.notice( "Configuring REST HTTPS service on port %s" % port )
self.__sslops = dict( certfile = RESTConf.cert(),
keyfile = RESTConf.key(),
cert_reqs = ssl.CERT_OPTIONAL,
ca_certs = RESTConf.generateCAFile() )
self.__httpSrv = httpserver.HTTPServer( self.__app, ssl_options = self.__sslops )
self.__httpSrv.listen( port )
return S_OK()
def run( self ):
port = RESTConf.port()
if self.__sslops:
url = "https://0.0.0.0:%s" % port
else:
url = "http://0.0.0.0:%s" % port
gLogger.always( "Starting REST server on %s" % url )
autoreload.add_reload_hook( self.__reloadAppCB )
ioloop.IOLoop.instance().start()
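# A minimal startup sketch (hypothetical entry point). bootstrap() returns a
# DIRAC-style result dictionary, so check 'OK' before entering the IO loop:
#
#     app = RESTApp()
#     result = app.bootstrap()
#     if result['OK']:
#         app.run()
#     else:
#         gLogger.fatal(result['Message'])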
| DIRACGrid/RESTDIRAC | RESTSystem/private/RESTApp.py | Python | gpl-3.0 | 2,840 | 0.044366 |
"""
The Tornado Framework
By Ali Pesaranghader
University of Ottawa, Ontario, Canada
E-mail: apesaran -at- uottawa -dot- ca / alipsgh -at- gmail -dot- com
---
*** The Page Hinkley (PH) Method Implementation ***
Paper: Page, Ewan S. "Continuous inspection schemes."
Published in: Biometrika 41.1/2 (1954): 100-115.
URL: http://www.jstor.org/stable/2333009
"""
from dictionary.tornado_dictionary import TornadoDic
from drift_detection.detector import SuperDetector
class PH(SuperDetector):
"""The Page Hinkley (PH) drift detection method class."""
DETECTOR_NAME = TornadoDic.PH
def __init__(self, min_instance=30, delta=0.005, lambda_=50, alpha=1 - 0.0001):
super().__init__()
self.MINIMUM_NUM_INSTANCES = min_instance
self.m_n = 1
self.x_mean = 0.0
self.sum = 0.0
self.delta = delta
self.lambda_ = lambda_
self.alpha = alpha
def run(self, pr):
pr = 1 if pr is False else 0
warning_status = False
drift_status = False
# 1. UPDATING STATS
self.x_mean = self.x_mean + (pr - self.x_mean) / self.m_n
self.sum = self.alpha * self.sum + (pr - self.x_mean - self.delta)
self.m_n += 1
# 2. UPDATING WARNING AND DRIFT STATUSES
if self.m_n >= self.MINIMUM_NUM_INSTANCES:
if self.sum > self.lambda_:
drift_status = True
return warning_status, drift_status
def reset(self):
super().reset()
self.m_n = 1
self.x_mean = 0.0
self.sum = 0.0
def get_settings(self):
return [str(self.MINIMUM_NUM_INSTANCES) + "." + str(self.delta) + "." +
str(self.lambda_) + "." + str(self.alpha),
"$n_{min}$:" + str(self.MINIMUM_NUM_INSTANCES) + ", " +
"$\delta$:" + str(self.delta).upper() + ", " +
"$\lambda$:" + str(self.lambda_).upper() + ", " +
"$\\alpha$:" + str(self.alpha).upper()]
| alipsgh/tornado | drift_detection/page_hinkley.py | Python | mit | 2,052 | 0.001462 |
"""
Development specific settings for troupon project.
"""
from .base import *
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'troupon',
'USER': os.getenv('DB_USER'),
'PASSWORD': os.getenv('DB_PASSWORD'),
'HOST': '127.0.0.1',
'PORT': '5432',
}
} | andela/troupon | troupon/troupon/settings/development.py | Python | mit | 420 | 0.002381 |
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
import textwrap
from buildbot import config
from buildbot.db import buildrequests
from buildbot.db import builds
from buildbot.db import buildsets
from buildbot.db import buildslaves
from buildbot.db import changes
from buildbot.db import enginestrategy
from buildbot.db import model
from buildbot.db import pool
from buildbot.db import schedulers
from buildbot.db import sourcestamps
from buildbot.db import sourcestampsets
from buildbot.db import state
from buildbot.db import users
from twisted.application import internet
from twisted.application import service
from twisted.internet import defer
from twisted.python import log
class DatabaseNotReadyError(Exception):
pass
upgrade_message = textwrap.dedent("""\
The Buildmaster database needs to be upgraded before this version of
buildbot can run. Use the following command-line
buildbot upgrade-master path/to/master
to upgrade the database, and try starting the buildmaster again. You may
want to make a backup of your buildmaster before doing so.
""").strip()
class DBConnector(config.ReconfigurableServiceMixin, service.MultiService):
# The connection between Buildbot and its backend database. This is
# generally accessible as master.db, but is also used during upgrades.
#
# Most of the interesting operations available via the connector are
# implemented in connector components, available as attributes of this
# object, and listed below.
# Period, in seconds, of the cleanup task. This master will perform
# periodic cleanup actions on this schedule.
CLEANUP_PERIOD = 3600
def __init__(self, master, basedir):
service.MultiService.__init__(self)
self.setName('db')
self.master = master
self.basedir = basedir
# not configured yet - we don't build an engine until the first
# reconfig
self.configured_url = None
# set up components
self._engine = None # set up in reconfigService
self.pool = None # set up in reconfigService
self.model = model.Model(self)
self.changes = changes.ChangesConnectorComponent(self)
self.schedulers = schedulers.SchedulersConnectorComponent(self)
self.sourcestamps = sourcestamps.SourceStampsConnectorComponent(self)
self.sourcestampsets = sourcestampsets.SourceStampSetsConnectorComponent(self)
self.buildsets = buildsets.BuildsetsConnectorComponent(self)
self.buildrequests = buildrequests.BuildRequestsConnectorComponent(self)
self.state = state.StateConnectorComponent(self)
self.builds = builds.BuildsConnectorComponent(self)
self.buildslaves = buildslaves.BuildslavesConnectorComponent(self)
self.users = users.UsersConnectorComponent(self)
self.cleanup_timer = internet.TimerService(self.CLEANUP_PERIOD,
self._doCleanup)
self.cleanup_timer.setServiceParent(self)
def setup(self, check_version=True, verbose=True):
db_url = self.configured_url = self.master.config.db['db_url']
log.msg("Setting up database with URL %r" % (db_url,))
# set up the engine and pool
self._engine = enginestrategy.create_engine(db_url,
basedir=self.basedir)
self.pool = pool.DBThreadPool(self._engine, verbose=verbose)
# make sure the db is up to date, unless specifically asked not to
if check_version:
d = self.model.is_current()
def check_current(res):
if not res:
for l in upgrade_message.split('\n'):
log.msg(l)
raise DatabaseNotReadyError()
d.addCallback(check_current)
else:
d = defer.succeed(None)
return d
def reconfigService(self, new_config):
# double-check -- the master ensures this in config checks
assert self.configured_url == new_config.db['db_url']
return config.ReconfigurableServiceMixin.reconfigService(self,
new_config)
def _doCleanup(self):
"""
Perform any periodic database cleanup tasks.
@returns: Deferred
"""
# pass on this if we're not configured yet
if not self.configured_url:
return
d = self.changes.pruneChanges(self.master.config.changeHorizon)
d.addErrback(log.err, 'while pruning changes')
return d
| mitya57/debian-buildbot | buildbot/db/connector.py | Python | gpl-2.0 | 5,291 | 0.000756 |
import ninjag
from ninjag.tk.ioTK import read_all
def test():
f_input = "input/in5.yaml"
f_answer = "output/out5.ninja"
f_solution = "solution/sol5.ninja"
ninjag.main(f_answer, [f_input])
answer = read_all(f_answer)
solution = read_all(f_solution)
assert answer == solution
| yuhangwang/ninjag-python | test/frontend/build_dep/test_5.py | Python | mit | 304 | 0 |
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Keras SavedModel deserialization."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import re
import types
from tensorflow.python.eager import context
from tensorflow.python.eager import function as defun
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_spec
from tensorflow.python.keras import backend
from tensorflow.python.keras import regularizers
from tensorflow.python.keras.engine import input_spec
from tensorflow.python.keras.saving import saving_utils
from tensorflow.python.keras.saving.saved_model import constants
from tensorflow.python.keras.saving.saved_model import json_utils
from tensorflow.python.keras.saving.saved_model import utils
from tensorflow.python.keras.saving.saved_model.serialized_attributes import CommonEndpoints
from tensorflow.python.keras.utils import generic_utils
from tensorflow.python.keras.utils import metrics_utils
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.saved_model import load as tf_load
from tensorflow.python.saved_model import nested_structure_coder
from tensorflow.python.saved_model import revived_types
from tensorflow.python.training.tracking import base as trackable
from tensorflow.python.training.tracking.tracking import delete_tracking
from tensorflow.python.util import compat
from tensorflow.python.util import nest
from tensorflow.python.util import object_identity
from tensorflow.python.util.lazy_loader import LazyLoader
# To avoid circular dependencies between keras/engine and keras/saving,
# code in keras/saving must delay imports.
# TODO(b/134426265): Switch back to single-quotes to match the rest of the file
# once the issue with copybara is fixed.
# pylint:disable=g-inconsistent-quotes
models_lib = LazyLoader("models_lib", globals(),
"tensorflow.python.keras.models")
base_layer = LazyLoader(
"base_layer", globals(),
"tensorflow.python.keras.engine.base_layer")
layers_module = LazyLoader(
"layers_module", globals(),
"tensorflow.python.keras.layers")
input_layer = LazyLoader(
"input_layer", globals(),
"tensorflow.python.keras.engine.input_layer")
network_lib = LazyLoader(
"network_lib", globals(),
"tensorflow.python.keras.engine.network")
training_lib = LazyLoader(
"training_lib", globals(),
"tensorflow.python.keras.engine.training")
training_lib_v1 = LazyLoader(
"training_lib_v1", globals(),
"tensorflow.python.keras.engine.training_v1")
metrics = LazyLoader("metrics", globals(),
"tensorflow.python.keras.metrics")
recurrent = LazyLoader(
"recurrent", globals(),
"tensorflow.python.keras.layers.recurrent")
# pylint:enable=g-inconsistent-quotes
PUBLIC_ATTRIBUTES = CommonEndpoints.all_functions.union(
CommonEndpoints.all_checkpointable_objects)
PUBLIC_ATTRIBUTES.add(constants.KERAS_ATTR)
KERAS_OBJECT_IDENTIFIERS = (
'_tf_keras_layer', '_tf_keras_input_layer', '_tf_keras_network',
'_tf_keras_model', '_tf_keras_sequential', '_tf_keras_metric',
'_tf_keras_rnn_layer')
def load(path, compile=True): # pylint: disable=redefined-builtin
"""Loads Keras objects from a SavedModel.
Any Keras layer or model saved to the SavedModel will be loaded back
as Keras objects. Other objects are loaded as regular trackable objects (same
as `tf.saved_model.load`).
Currently, Keras saving/loading only retains the Keras object's weights,
losses, and call function.
The loaded model can be re-compiled, but the original optimizer, compiled loss
functions, and metrics are not retained. This is temporary, and `model.save`
will soon be able to serialize compiled models.
Args:
path: Path to SavedModel.
compile: If true, compile the model after loading it.
Returns:
Object loaded from SavedModel.
"""
# TODO(kathywu): Add saving/loading of optimizer, compiled losses and metrics.
# TODO(kathywu): Add code to load from objects that contain all endpoints
model = tf_load.load_internal(path, loader_cls=KerasObjectLoader)
# pylint: disable=protected-access
if isinstance(model, training_lib.Model) and compile:
# TODO(kathywu): Use compiled objects from SavedModel, instead of
# creating new objects from the training config.
training_config = model._serialized_attributes['metadata'].get(
'training_config', None)
if training_config is not None:
model.compile(**saving_utils.compile_args_from_training_config(
training_config))
else:
logging.warning('No training configuration found in save file, so the '
'model was *not* compiled. Compile it manually.')
# pylint: enable=protected-access
# Force variables and resources to initialize.
if not context.executing_eagerly():
sess = backend.get_session() # Variables are initialized by this call.
sess.run(ops.get_collection(ops.GraphKeys.TABLE_INITIALIZERS))
return model
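# A minimal usage sketch (the path below is hypothetical); this mirrors what
# `tf.keras.models.load_model` ends up doing for a SavedModel directory:
#
#     model = load('/tmp/my_saved_model', compile=True)
#     predictions = model.predict(...)   # weights, losses and call fn restored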
def _is_graph_network(layer):
"""Determines whether the layer is a graph network."""
# pylint: disable=protected-access
if isinstance(layer, RevivedNetwork):
return False
elif isinstance(layer, network_lib.Network):
return (layer._is_graph_network or
isinstance(layer, models_lib.Sequential))
return False
class KerasObjectLoader(tf_load.Loader):
"""Loader that recreates Keras objects (e.g. layers, models).
Layers and models are revived from either the config or SavedModel following
these rules:
1. If object is a graph network (i.e. Sequential or Functional) then it will
be initialized using the structure from the config only after the children
layers have been created. Graph networks must be initialized with inputs
and outputs, so all child layers must be created beforehand.
2. If object's config exists and the class can be found, then revive from
config.
3. Object may have already been created if its parent was revived from config.
In this case, do nothing.
4. If nothing of the above applies, compose the various artifacts from the
SavedModel to create a subclassed layer or model. At this time, custom
metrics are not supported.
"""
def __init__(self, *args, **kwargs):
# Maps node id -> (node, revive setter function)
# Nodes recreated from the config may generate other nodes. This list
# records all nodes that were generated directly/indirectly from the config,
# so that they do not get recreated multiple times.
self._nodes_recreated_from_config = {}
self._all_nodes_recreated_from_config = (
object_identity.ObjectIdentityWeakSet())
# Store all node ids that have already been traversed when tracking nodes
# that were recreated from the config.
self._traversed_nodes_from_config = []
    # Maps model id -> (blank model obj, list of child layers or their node ids)
# This tracks all layers in functional and sequential models. These models
# are only reconstructed after all of their child layers have been created.
self.model_layer_dependencies = {}
self._models_to_reconstruct = []
super(KerasObjectLoader, self).__init__(*args, **kwargs)
# Now that the node object has been fully loaded, and the checkpoint has
# been restored, the object no longer needs to track objects added from
# SerializedAttributes. (Note that saving a training checkpoint still
# functions correctly, because layers and variables are tracked separately
# by the Layer object.)
# TODO(kathywu): Instead of outright deleting these nodes (which would
# make restoring from a different checkpoint tricky), mark them as extra
# dependencies that are OK to overwrite.
for node in self._nodes:
if not isinstance(node, base_layer.Layer):
continue
for name in PUBLIC_ATTRIBUTES:
delete_tracking(node, name)
def _load_all(self):
"""Reconstruct the object graph from the SavedModel."""
# Load layer and model objects from either config or SavedModel. The objects
# loaded from config may create variables / other objects during
# initialization. These are recorded in `_nodes_recreated_from_config`.
self._layer_nodes = self._load_layers()
# Load all other nodes and functions.
super(KerasObjectLoader, self)._load_all()
# Finish setting up layers and models. See function docstring for more info.
self._finalize_objects()
@property
def _expect_partial_checkpoint(self):
return True
def _recreate(self, proto, node_id):
"""Creates a Python object from a SavedObject protocol buffer."""
if node_id in self._layer_nodes:
return self._layer_nodes[node_id]
if node_id in self._nodes_recreated_from_config:
obj, setter = self._nodes_recreated_from_config[node_id]
# Overwrite variable names with the ones saved in the SavedModel.
if proto.WhichOneof('kind') == 'variable' and proto.variable.name:
obj._handle_name = proto.variable.name + ':0' # pylint: disable=protected-access
else:
obj, setter = super(KerasObjectLoader, self)._recreate(proto, node_id)
return obj, setter
def _add_children_recreated_from_config(self, obj, proto, node_id):
"""Recursively records objects recreated from config."""
# pylint: disable=protected-access
if node_id in self._traversed_nodes_from_config:
return
self._traversed_nodes_from_config.append(node_id)
obj._maybe_initialize_trackable()
if isinstance(obj, base_layer.Layer) and not obj.built:
metadata = json_utils.decode(proto.user_object.metadata)
self._try_build_layer(obj, node_id, metadata.get('build_input_shape'))
# Create list of all possible children
children = []
# Look for direct children
for reference in proto.children:
obj_child = obj._lookup_dependency(reference.local_name)
children.append((obj_child, reference.node_id))
# Add metrics that may have been added to the layer._metrics list.
# This is stored in the SavedModel as layer.keras_api.layer_metrics in
# SavedModels created after Tf 2.2.
metric_list_node_id = self._search_for_child_node(
node_id, [constants.KERAS_ATTR, 'layer_metrics'], raise_error=False)
if metric_list_node_id is not None and hasattr(obj, '_metrics'):
obj_metrics = {m.name: m for m in obj._metrics}
for reference in self._proto.nodes[metric_list_node_id].children:
metric = obj_metrics.get(reference.local_name)
if metric is not None:
children.append((metric, reference.node_id))
for (obj_child, child_id) in children:
child_proto = self._proto.nodes[child_id]
if not isinstance(obj_child, trackable.Trackable):
continue
if (child_proto.user_object.identifier in
revived_types.registered_identifiers()):
setter = revived_types.get_setter(child_proto.user_object)
elif obj_child._object_identifier in KERAS_OBJECT_IDENTIFIERS:
setter = _revive_setter
else:
setter = setattr
# pylint: enable=protected-access
if (child_id in self._nodes_recreated_from_config and
self._nodes_recreated_from_config[child_id][0] is not obj_child):
# This means that the same trackable object is referenced by two
# different objects that were recreated from the config.
logging.warn('Looks like there is an object (perhaps variable or layer)'
' that is shared between different layers/models. This '
                     'may cause issues when restoring the variable values. '
'Object: {}'.format(obj_child))
self._nodes_recreated_from_config[child_id] = (
obj_child, self._config_node_setter(setter))
self._all_nodes_recreated_from_config.add(obj_child)
self._add_children_recreated_from_config(
obj_child, child_proto, child_id)
def _load_layers(self):
layers = {}
# Load metrics after models and layers, since it's likely that models
# and layers will create the metric when initialized (this avoids wasting
# time by creating objects multiple times).
metric_list = []
for node_id, proto in enumerate(self._proto.nodes):
if (proto.WhichOneof('kind') != 'user_object' or
proto.user_object.identifier not in KERAS_OBJECT_IDENTIFIERS):
continue
if proto.user_object.identifier == '_tf_keras_metric':
metric_list.append((node_id, proto))
continue
layers[node_id] = self._load_layer(proto.user_object, node_id)
for node_id, proto in metric_list:
layers[node_id] = self._load_layer(proto.user_object, node_id)
return layers
def _load_layer(self, proto, node_id):
"""Load a single layer from a SavedUserObject proto."""
metadata = json_utils.decode(proto.metadata)
# If node was already created
if node_id in self._nodes_recreated_from_config:
node, setter = self._nodes_recreated_from_config[node_id]
# Revive setter requires the object to have a `_serialized_attributes`
# property. Add it here.
_maybe_add_serialized_attributes(node, metadata)
config = metadata.get('config')
if _is_graph_network(node) and generic_utils.validate_config(config):
self.model_layer_dependencies[node_id] = (
node, self._get_child_layer_node_ids(node_id, node.name))
return node, setter
# Detect whether this object can be revived from the config. If not, then
# revive from the SavedModel instead.
obj, setter = self._revive_from_config(proto.identifier, metadata, node_id)
if obj is None:
obj, setter = revive_custom_object(proto.identifier, metadata)
# Add an attribute that stores the extra functions/objects saved in the
# SavedModel. Most of these functions/objects are ignored, but some are
# used later in the loading process (e.g. the list of regularization
# losses, or the training config of compiled models).
_maybe_add_serialized_attributes(obj, metadata)
return obj, setter
def _revive_from_config(self, identifier, metadata, node_id):
"""Revives a layer/model from config, or returns None."""
if identifier == '_tf_keras_metric':
obj = self._revive_metric_from_config(metadata, node_id)
else:
obj = (
self._revive_graph_network(metadata, node_id) or
self._revive_layer_from_config(metadata, node_id))
if obj is None:
return None, None
setter = self._config_node_setter(_revive_setter)
self._nodes_recreated_from_config[node_id] = obj, setter
self._all_nodes_recreated_from_config.add(obj)
self._add_children_recreated_from_config(
obj, self._proto.nodes[node_id], node_id)
return obj, setter
def _revive_graph_network(self, metadata, node_id):
"""Revives a graph network from config."""
class_name = compat.as_str(metadata['class_name'])
config = metadata.get('config')
# Determine whether the metadata contains information for reviving a
# functional or Sequential model.
model_is_functional_or_sequential = (
metadata.get('is_graph_network', False) or
metadata['class_name'] == 'Sequential')
if not (generic_utils.validate_config(config) and
model_is_functional_or_sequential):
return None # Revive as custom model.
# Revive functional and sequential models as blank model objects for now (
# must be initialized to enable setattr tracking and attribute caching).
# Reconstruction of the network is deferred until all of the model's layers
# have been revived.
if class_name == 'Sequential':
model = models_lib.Sequential(name=config['name'])
else:
model = models_lib.Model(name=config['name'])
# Record this model and its layers. This will later be used to reconstruct
# the model.
layers = self._get_child_layer_node_ids(node_id, model.name)
self.model_layer_dependencies[node_id] = (model, layers)
return model
def _revive_layer_from_config(self, metadata, node_id):
"""Revives a layer from config, or returns None if infeasible."""
# Check that the following requirements are met for reviving from config:
# 1. Object can be deserialized from config.
# 2. If the object needs to be built, then the build input shape can be
# found.
class_name = metadata.get('class_name')
config = metadata.get('config')
if not generic_utils.validate_config(config):
return None
try:
obj = layers_module.deserialize(
generic_utils.serialize_keras_class_and_config(class_name, config))
except ValueError:
return None
    # Use the dtype, name, and trainable status. Oftentimes these are not
    # specified in custom configs, so retrieve their values from the metadata.
# pylint: disable=protected-access
obj._name = metadata['name']
if metadata.get('trainable') is not None:
obj.trainable = metadata['trainable']
if metadata.get('dtype') is not None:
obj._set_dtype_policy(metadata['dtype'])
if metadata.get('stateful') is not None:
obj.stateful = metadata['stateful']
# pylint: enable=protected-access
build_input_shape = metadata.get('build_input_shape')
built = self._try_build_layer(obj, node_id, build_input_shape)
if not built:
# If the layer cannot be built, revive a custom layer instead.
return None
return obj
def _revive_metric_from_config(self, metadata, node_id):
class_name = compat.as_str(metadata['class_name'])
config = metadata.get('config')
if not generic_utils.validate_config(config):
return None
try:
obj = metrics.deserialize(
generic_utils.serialize_keras_class_and_config(class_name, config))
except ValueError:
return None
build_input_shape = metadata.get('build_input_shape')
if build_input_shape is not None and hasattr(obj, '_build'):
obj._build(build_input_shape) # pylint: disable=protected-access
return obj
def _try_build_layer(self, obj, node_id, build_input_shape):
"""Attempts to build the layer."""
if obj.built or hasattr(obj.build, '_is_default'):
obj.built = True
return True
if build_input_shape is None:
build_input_shape = self._infer_inputs(node_id, convert_to_shapes=True)
if build_input_shape is not None:
obj.build(build_input_shape)
base_layer.Layer.build(obj, build_input_shape)
return True
return False
def _load_edges(self):
"""Add edges for all nodes that are not waiting on initialization."""
for node_id, proto in enumerate(self._proto.nodes):
if node_id not in self.model_layer_dependencies:
self._add_object_graph_edges(proto, node_id)
def _finalize_objects(self):
"""Finish setting up Keras objects.
This function is executed after all objects and functions have been created.
Call functions and losses are attached to each layer, and once all layers
have been fully set up, graph networks are initialized.
Subclassed models that are revived from the SavedModel are treated like
layers, and have their call/loss functions attached here.
"""
# Finish setting up layers and subclassed models. This step attaches call
# functions and losses to each object, and sets model inputs/outputs.
layers_revived_from_config = []
layers_revived_from_saved_model = []
for node_id, node in enumerate(self._nodes):
if (not isinstance(node, base_layer.Layer) or
# Don't finalize models until all layers have finished loading.
node_id in self.model_layer_dependencies):
continue
self._unblock_model_reconstruction(node_id, node)
if isinstance(node, input_layer.InputLayer):
continue
elif isinstance(node, metrics.Metric):
continue
if node_id in self._nodes_recreated_from_config:
layers_revived_from_config.append(node)
else:
layers_revived_from_saved_model.append(node)
_finalize_saved_model_layers(layers_revived_from_saved_model)
_finalize_config_layers(layers_revived_from_config)
# Initialize graph networks, now that layer dependencies have been resolved.
self._reconstruct_all_models()
def _unblock_model_reconstruction(self, layer_id, layer):
"""Removes layer from blocking model reconstruction."""
for model_id, v in self.model_layer_dependencies.items():
_, layers = v
if layer_id not in layers:
continue
layers[layers.index(layer_id)] = layer
if all(isinstance(x, base_layer.Layer) for x in layers):
self._models_to_reconstruct.append(model_id)
def _reconstruct_all_models(self):
all_initialized_models = set()
while self._models_to_reconstruct:
model_id = self._models_to_reconstruct.pop(0)
all_initialized_models.add(model_id)
model, layers = self.model_layer_dependencies[model_id]
self._reconstruct_model(model_id, model, layers)
self._add_object_graph_edges(self._proto.nodes[model_id], model_id)
_finalize_config_layers([model])
if all_initialized_models != set(self.model_layer_dependencies.keys()):
# This should not happen.
uninitialized_model_ids = (
set(self.model_layer_dependencies.keys()) - all_initialized_models)
uninitialized_model_names = [
self.model_layer_dependencies[model_id][0].name
for model_id in uninitialized_model_ids]
raise ValueError('Error when loading from SavedModel -- the following '
'models could not be initialized: {}'
.format(uninitialized_model_names))
def _reconstruct_model(self, model_id, model, layers):
config = json_utils.decode(
self._proto.nodes[model_id].user_object.metadata)['config']
if isinstance(model, models_lib.Sequential):
if config['layers'][0]['class_name'] != 'InputLayer':
if 'batch_input_shape' in config['layers'][0]['config']:
batch_input_shape = config['layers'][0]['config']['batch_input_shape']
layers.insert(0, input_layer.InputLayer(
input_shape=batch_input_shape[1:],
batch_size=batch_input_shape[0],
dtype=layers[0].dtype,
name=layers[0].name + '_input'))
model.__init__(layers, name=config['name'])
if not model.inputs:
first_layer = self._get_child_layer_node_ids(model_id, model.name)[0]
input_specs = self._infer_inputs(first_layer)
input_shapes = self._infer_inputs(first_layer, convert_to_shapes=True)
model._set_inputs(input_specs) # pylint: disable=protected-access
if not model.built and not isinstance(input_specs, dict):
model.build(input_shapes)
else:
(inputs, outputs, created_layers) = network_lib.reconstruct_from_config(
config, created_layers={layer.name: layer for layer in layers})
model.__init__(inputs, outputs, name=config['name'])
network_lib.connect_ancillary_layers(model, created_layers)
# Set model dtype and trainable status.
_set_network_attributes_from_metadata(model)
# Unblock models that are dependent on this model.
self._unblock_model_reconstruction(model_id, model)
def _get_child_layer_node_ids(self, node_id, name):
"""Returns the node ids of the children layers of a node."""
# Retrieve the node id of layer.keras_api.layers.
layer_list = self._search_for_child_node(
node_id, [constants.KERAS_ATTR, 'layers'], name)
return [node.node_id for node in self._proto.nodes[layer_list].children]
def _search_for_child_node(
self, parent_id, path_to_child, debugging_name=None, raise_error=True):
"""Returns node id of child node.
A helper method for traversing the object graph proto.
As an example, say that the object graph proto in the SavedModel contains an
object with the following child and grandchild attributes:
`parent.child_a.child_b`
This method can be used to retrieve the node id of `child_b` using the
parent's node id by calling:
`_search_for_child_node(parent_id, ['child_a', 'child_b'])`.
Args:
parent_id: node id of parent node
path_to_child: list of children names.
debugging_name: the name to print out when raising an error.
raise_error: Whether to raise an error if the child isn't found.
Returns:
node_id of child, or None if child isn't found.
Raises:
ValueError: if child isn't found and raise_error is True.
"""
if not path_to_child:
return parent_id
for child in self._proto.nodes[parent_id].children:
if child.local_name == path_to_child[0]:
return self._search_for_child_node(child.node_id, path_to_child[1:],
debugging_name, raise_error)
if raise_error:
raise ValueError(
'Error when loading {}: could not find attribute {}.\n'
'Most likely this object was serialized incorrectly.'
.format(debugging_name or path_to_child[0], path_to_child[0]))
else:
return None
def _infer_inputs(self, layer_node_id, convert_to_shapes=False):
"""Infers input shape of layer from SavedModel functions."""
coder = nested_structure_coder.StructureCoder()
call_fn_id = self._search_for_child_node(
layer_node_id, ['call_and_return_all_conditional_losses'], None,
raise_error=False)
if call_fn_id is None:
return None
concrete_functions = (
self._proto.nodes[call_fn_id].function.concrete_functions)
if not concrete_functions:
return None
call_fn_name = concrete_functions[0]
call_fn_proto = self._proto.concrete_functions[call_fn_name]
structured_input_signature = coder.decode_proto(
call_fn_proto.canonicalized_input_signature)
inputs = structured_input_signature[0][0]
if convert_to_shapes:
return nest.map_structure(lambda spec: spec.shape, inputs)
else:
return inputs
def _config_node_setter(self, setter):
"""Creates edges for nodes that are recreated from config."""
def setattr_wrapper(obj, name, value):
# Avoid overwriting attributes of objects recreated from the config.
if obj._lookup_dependency(name) is None: # pylint: disable=protected-access
setter(obj, name, value)
return setattr_wrapper
def _finalize_saved_model_layers(layers):
"""Runs the final steps of loading Keras Layers from SavedModel."""
# pylint: disable=protected-access
# 1. Set up call functions for all layers (skip this step for Sequential and
# Functional models).
for layer in layers:
layer.built = True
if hasattr(_get_keras_attr(layer), 'call_and_return_conditional_losses'):
layer.call = utils.use_wrapped_call(
layer, _get_keras_attr(layer).call_and_return_conditional_losses,
return_method=True)
layer._init_call_fn_args()
for layer in layers:
# 2. Set model inputs and outputs.
if isinstance(layer, RevivedNetwork):
_set_network_attributes_from_metadata(layer)
call_fn = _get_keras_attr(layer).call_and_return_conditional_losses
if call_fn.input_signature is None:
inputs = infer_inputs_from_restored_call_function(call_fn)
else:
inputs = call_fn.input_signature[0]
layer._set_inputs(inputs)
# 3. Add losses that aren't generated by the layer.call function.
_restore_layer_unconditional_losses(layer)
_restore_layer_activation_loss(layer)
# 4. Restore metrics list
_restore_layer_metrics(layer)
# pylint: enable=protected-access
def _finalize_config_layers(layers):
"""Runs the final steps of loading Keras Layers from config."""
for layer in layers:
# It is assumed that layers define their unconditional losses after being
# recreated from the config and built. The exceptions to this
# are Functional and Sequential models, which only store conditional losses
# (losses dependent on the inputs) in the config. Unconditional losses like
# weight regularization must be revived from the SavedModel.
if _is_graph_network(layer):
_restore_layer_unconditional_losses(layer)
# Some layers, like Dense, record their activation loss function in the
# config. However, not all layers do this, so the activation loss may be
# missing when restored from the config/hdf5.
# TODO(kathywu): Investigate ways to improve the config to ensure consistent
# loading behavior between HDF5 and SavedModel.
_restore_layer_activation_loss(layer)
# Restore metrics list.
_restore_layer_metrics(layer)
# Restore RNN layer states
if (isinstance(layer, recurrent.RNN) and
layer.stateful and
hasattr(_get_keras_attr(layer), 'states')):
layer.states = getattr(_get_keras_attr(layer), 'states', None)
for variable in nest.flatten(layer.states):
backend.track_variable(variable)
def _finalize_metric(metric):
metric.update_state = types.MethodType(metrics_utils.update_state_wrapper(
metric.keras_api.update_state), metric)
metric.result = metric.keras_api.result
def _restore_layer_unconditional_losses(layer):
"""Restore unconditional losses from SavedModel."""
if hasattr(_get_keras_attr(layer), 'layer_regularization_losses'):
losses = getattr(_get_keras_attr(layer), 'layer_regularization_losses', [])
else:
# Some earlier SavedModels may not have layer_regularization_losses
# serialized separately. Fall back to using the regularization_losses
# list if it does not exist.
losses = layer._serialized_attributes.get('regularization_losses', []) # pylint: disable=protected-access
for loss in losses:
layer.add_loss(loss)
def _restore_layer_activation_loss(layer):
"""Restore actiation loss from SavedModel."""
# Use wrapped activity regularizer function if the layer's activity
# regularizer wasn't created during initialization.
activity_regularizer = getattr(_get_keras_attr(layer),
'activity_regularizer_fn', None)
if activity_regularizer and not layer.activity_regularizer:
try:
layer.activity_regularizer = activity_regularizer
except AttributeError:
# This may happen if a layer wrapper is saved with an activity
# regularizer. The wrapper object's activity regularizer is unsettable.
pass
def revive_custom_object(identifier, metadata):
"""Revives object from SavedModel."""
if ops.executing_eagerly_outside_functions():
model_class = training_lib.Model
else:
model_class = training_lib_v1.Model
revived_classes = {
'_tf_keras_layer': (RevivedLayer, base_layer.Layer),
'_tf_keras_input_layer': (RevivedInputLayer, input_layer.InputLayer),
'_tf_keras_network': (RevivedNetwork, network_lib.Network),
'_tf_keras_model': (RevivedNetwork, model_class),
'_tf_keras_sequential': (RevivedNetwork, models_lib.Sequential),
}
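  # For example, a saved custom layer whose class_name is "MyLayer" (a
  # hypothetical name) and whose identifier is '_tf_keras_layer' is revived
  # below as a dynamically created class, roughly:
  #   type('MyLayer', (RevivedLayer, base_layer.Layer), {})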
parent_classes = revived_classes.get(identifier, None)
if parent_classes is not None:
parent_classes = revived_classes[identifier]
revived_cls = type(
compat.as_str(metadata['class_name']), parent_classes, {})
return revived_cls._init_from_metadata(metadata) # pylint: disable=protected-access
else:
raise ValueError('Unable to restore custom object of type {} currently. '
                     'Please make sure that the layer implements `get_config` '
'and `from_config` when saving. In addition, please use '
'the `custom_objects` arg when calling `load_model()`.'
.format(identifier))
def _restore_layer_metrics(layer):
metrics_list = getattr(_get_keras_attr(layer), 'layer_metrics', {})
layer_metrics = {m.name: m for m in layer._metrics} # pylint: disable=protected-access
for name, metric in metrics_list.items():
if name not in layer_metrics:
# Metrics may be added during initialization/building of custom layers.
layer._metrics.append(metric) # pylint: disable=protected-access
# TODO(kathywu): Centrally define keys and functions for both serialization and
# deserialization.
class RevivedLayer(object):
"""Keras layer loaded from a SavedModel."""
@classmethod
def _init_from_metadata(cls, metadata):
"""Create revived layer from metadata stored in the SavedModel proto."""
init_args = dict(
name=metadata['name'],
trainable=metadata['trainable'])
if metadata.get('dtype') is not None:
init_args['dtype'] = metadata['dtype']
if metadata.get('batch_input_shape') is not None:
init_args['batch_input_shape'] = metadata['batch_input_shape']
revived_obj = cls(**init_args)
with trackable.no_automatic_dependency_tracking_scope(revived_obj):
# pylint:disable=protected-access
revived_obj._expects_training_arg = metadata['expects_training_arg']
config = metadata.get('config')
if generic_utils.validate_config(config):
revived_obj._config = config
if metadata.get('input_spec') is not None:
revived_obj.input_spec = recursively_deserialize_keras_object(
metadata['input_spec'],
module_objects={'InputSpec': input_spec.InputSpec})
if metadata.get('activity_regularizer') is not None:
revived_obj.activity_regularizer = regularizers.deserialize(
metadata['activity_regularizer'])
if metadata.get('_is_feature_layer') is not None:
revived_obj._is_feature_layer = metadata['_is_feature_layer']
if metadata.get('stateful') is not None:
revived_obj.stateful = metadata['stateful']
# pylint:enable=protected-access
return revived_obj, _revive_setter
@property
def keras_api(self):
return self._serialized_attributes.get(constants.KERAS_ATTR, None)
def get_config(self):
if hasattr(self, '_config'):
return self._config
else:
raise NotImplementedError
def _revive_setter(layer, name, value):
"""Setter function that saves some attributes to separate dictionary."""
# Many attributes in the SavedModel conflict with properties defined in
# Layer and Model. Save these attributes to a separate dictionary.
if name in PUBLIC_ATTRIBUTES:
# pylint: disable=protected-access
if isinstance(value, trackable.Trackable):
layer._track_trackable(value, name=name)
layer._serialized_attributes[name] = value
# pylint: enable=protected-access
elif (isinstance(layer, network_lib.Network) and
re.match(r'^layer(_with_weights)?-[\d+]', name) is not None):
# Edges named "layer-n" or "layer_with_weights-n", which are tracked in
# network._track_layers, should not be added as an attribute.
pass
elif getattr(layer, name, None) is not None:
# Don't overwrite already defined attributes.
pass
else:
setattr(layer, name, value)
class RevivedInputLayer(object):
"""InputLayer loaded from a SavedModel."""
@classmethod
def _init_from_metadata(cls, metadata):
"""Revives the saved InputLayer from the Metadata."""
init_args = dict(
name=metadata['name'],
dtype=metadata['dtype'],
sparse=metadata['sparse'],
ragged=metadata['ragged'],
batch_input_shape=metadata['batch_input_shape'])
revived_obj = cls(**init_args)
with trackable.no_automatic_dependency_tracking_scope(revived_obj):
revived_obj._config = metadata['config'] # pylint:disable=protected-access
return revived_obj, setattr
def get_config(self):
return self._config
def recursively_deserialize_keras_object(config, module_objects=None):
"""Deserialize Keras object from a nested structure."""
if isinstance(config, dict):
if 'class_name' in config:
return generic_utils.deserialize_keras_object(
config, module_objects=module_objects)
else:
return {key: recursively_deserialize_keras_object(config[key],
module_objects)
for key in config}
if isinstance(config, (tuple, list)):
return [recursively_deserialize_keras_object(x, module_objects)
for x in config]
else:
raise ValueError('Unable to decode config: {}'.format(config))
def infer_inputs_from_restored_call_function(fn):
"""Returns TensorSpec of inputs from a restored call function.
Args:
fn: Restored layer call function. It is assumed that the inputs are entirely
in the first argument.
Returns:
TensorSpec of call function inputs.
"""
def common_spec(x, y):
return tensor_spec.TensorSpec(defun.common_shape(x.shape, y.shape),
x.dtype, x.name)
spec = fn.concrete_functions[0].structured_input_signature[0][0]
for concrete in fn.concrete_functions[1:]:
spec2 = concrete.structured_input_signature[0][0]
spec = nest.map_structure(common_spec, spec, spec2)
return spec
class RevivedNetwork(RevivedLayer):
"""Keras network of layers loaded from a SavedModel."""
@classmethod
def _init_from_metadata(cls, metadata):
"""Create revived network from metadata stored in the SavedModel proto."""
revived_obj = cls(name=metadata['name'])
    # Store attributes revived from SerializedAttributes in an un-tracked
# dictionary. The attributes are the ones listed in CommonEndpoints or
# "keras_api" for keras-specific attributes.
with trackable.no_automatic_dependency_tracking_scope(revived_obj):
# pylint:disable=protected-access
revived_obj._expects_training_arg = metadata['expects_training_arg']
config = metadata.get('config')
if generic_utils.validate_config(config):
revived_obj._config = config
if metadata.get('activity_regularizer') is not None:
revived_obj.activity_regularizer = regularizers.deserialize(
metadata['activity_regularizer'])
# pylint:enable=protected-access
return revived_obj, _revive_setter # pylint:disable=protected-access
def _set_network_attributes_from_metadata(revived_obj):
"""Sets attributes recorded in the metadata."""
with trackable.no_automatic_dependency_tracking_scope(revived_obj):
# pylint:disable=protected-access
metadata = revived_obj._serialized_attributes['metadata']
if metadata.get('dtype') is not None:
revived_obj._set_dtype_policy(metadata['dtype'])
revived_obj.trainable = metadata['trainable']
# pylint:enable=protected-access
def _maybe_add_serialized_attributes(layer, metadata):
  # Store attributes revived from SerializedAttributes in an un-tracked
# dictionary. The attributes are the ones listed in CommonEndpoints or
# "keras_api" for keras-specific attributes.
if not hasattr(layer, '_serialized_attributes'):
with trackable.no_automatic_dependency_tracking_scope(layer):
layer._serialized_attributes = {'metadata': metadata} # pylint: disable=protected-access
def _get_keras_attr(layer):
return getattr(layer, '_serialized_attributes', {}).get(constants.KERAS_ATTR,
None)
| gunan/tensorflow | tensorflow/python/keras/saving/saved_model/load.py | Python | apache-2.0 | 40,001 | 0.006875 |
#!/usr/bin/env python2
###############################################################################
#
# Set a baseline for all benchmarks using numpy's serial matrix multiplication
#
# Copyright (C) 2015, Jonathan Gillett
# All rights reserved.
#
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
import numpy as np
from time import time
from docopt import docopt
from helpers import gen_matrix, usage, schema
from schema import SchemaError
if __name__ == '__main__':
args = docopt(usage)
try:
args = schema.validate(args)
except SchemaError as e:
exit(e)
# Generate the dynamic matrices for the test
dim, dtype, mtype = args['DIM'], args['--dtype'], args['--mtype']
A = gen_matrix(dim, dim, dtype, mtype)
B = gen_matrix(dim, dim, dtype, mtype)
# Calculate the execution time for the baseline
start = time()
C = np.dot(A, B)
end = time()
print "%0.3f" % (end-start,)
| gnu-user/mcsc-6030-project | codes/benchmarks/baseline.py | Python | gpl-3.0 | 1,605 | 0 |
#!/usr/bin/env python
"""
basketPInfo data procesors (rrdtool output)
Rafal Zawadzki <bluszcz@bluszcz.net>
BSD License (license.txt)
"""
import sys
def exit_failure():
" Nice info on failure "
print "usage: %s int\n" % sys.argv[0]
print "int should be 2 (humidity) or 3 (temperature)"
sys.exit(-1)
if len(sys.argv)!=2:
exit_failure()
ARG = int(sys.argv[1])
if ARG not in (2, 3):
exit_failure()
FILENAME = "/home/pi/logs/temphum.txt"
HANDLER = open(FILENAME)
def transpose_data(data):
" Parses data "
return [ll.lstrip('\t') for ll in data.strip().split(',')]
for line in (transpose_data(l) for l in HANDLER.xreadlines()):
if len(line)>1:
try:
print 'rrdtool update temp.rrd %s:%s' % (line[0], line[ARG])
        except (IndexError, KeyError):
            # Skip malformed lines that do not have enough fields.
pass
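# Worked example (hypothetical log line; the second field is whatever else the
# logger records):
#   "1416754800,\tfoo,\t55.0,\t21.5"
# transpose_data() yields ["1416754800", "foo", "55.0", "21.5"], so running
# the script with ARG == 3 (temperature) prints:
#   rrdtool update temp.rrd 1416754800:21.5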
| bluszcz/basketpinfo | python/process_to_rrdtool.py | Python | bsd-3-clause | 812 | 0.011084 |
# -*- coding: utf-8 -*-
# Copyright(C) 2019 Sylvie Ye
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
from io import BytesIO
from PIL import Image, ImageFilter
import random
from weboob.tools.captcha.virtkeyboard import SimpleVirtualKeyboard
from weboob.browser.pages import JsonPage
from weboob.browser.filters.json import Dict
class INGVirtKeyboard(SimpleVirtualKeyboard):
# from parent
tile_margin = 10
convert = 'RGB'
# for children
safe_tile_margin = 10
small_img_size = (15, 14)
alter_img_params = {
'radius': 2,
'percent': 95,
'threshold': 3,
'limit_pixel': 200
}
# for matching_symbols_coords, indexes are cases place like this
# --- --- --- --- ---
# |0| |1| |2| |3| |4|
# --- --- --- --- ---
# --- --- --- --- ---
# |5| |6| |7| |8| |9|
# --- --- --- --- ---
matching_symbols_coords = {
'0': (3, 3, 93, 91),
'1': (99, 3, 189, 91),
'2': (196, 3, 286, 91),
'3': (293, 3, 383, 91),
'4': (390, 3, 480, 91),
'5': (3, 98, 93, 186),
'6': (99, 98, 189, 186),
'7': (196, 98, 286, 186),
'8': (293, 98, 383, 186),
'9': (390, 98, 480, 186),
}
symbols = {
'0': ('7b4989b431e631ec79df5d71aecb1a47','e2522e1f7476ad6430219a73b10799b0', 'f7db285c5c742c3a348e332c0e9f7f3e',),
'1': ('9f1b03aa9a6f9789714c38eb90a43a11', '86bc0e7e1173472928e746db874b38c3',),
'2': ('3a7d1ba32f4326a02f717f71262ba02b', 'afc2a00289ba9e362c4e9333c14a574a',),
'3': ('203bfd122f474eb9c5c278eeda01bed4', 'c1daa556a1eff1fd18817dbef39792f8',),
'4': ('c09b323e5a80a195d9cb0c3000f3d7ec', 'f020eaf7cdffefec065d3b2801ed73e2', '5e194b0aae3b8f02ebbf9cdec5c37239',),
'5': ('1749dc3f2e302cd3562a0558755ab030', 'b64163e3f5f7d83ff1baad8c4d1bc37b',),
'6': ('0888a7dc9085fcf09d56363ac253a54a', 'e269686d10f95678caf995de6834f74b', '8c505dad47cf6029921fca5fb4b0bc8d',),
'7': ('75aaa903b8277b82c458c3540208a009', 'e97b0c0e01d77dd480b8a5f5c138a268',),
'8': ('f5fa36d16f55b72ba988eb87fa1ed753', '118a52a6a480b5db5eabb0ea26196db3',),
'9': ('62f91d10650583cb6146d25bb9ac161d', 'fd81675aa1c26cbf5bb6c9f1bcdbbdf9',),
}
def __init__(self, file, cols, rows, browser):
# use matching_symbols_coords because margins between tiles are not equals
super(INGVirtKeyboard, self).__init__(file=file, cols=cols, rows=rows, matching_symbols_coords=self.matching_symbols_coords, browser=browser)
def process_tiles(self):
for tile in self.tiles:
# format tile object like:
# `tile.original_img`: original tile image size
# `tile.coords`: original tile image coords
# `tile.image`: resized and altered image tile
# `tile.md5`: image tile resized hash
tile.original_img = tile.image
tile.image = tile.image.resize(self.small_img_size, resample=Image.BILINEAR)
# convert to monochrome image
tile.image = tile.image.convert('L')
# See ImageFilter.UnsharpMask from Pillow
tile.image = tile.image.filter(ImageFilter.UnsharpMask(
radius=self.alter_img_params['radius'],
percent=self.alter_img_params['percent'],
threshold=self.alter_img_params['threshold'])
)
tile.image = Image.eval(tile.image, lambda px: 0 if px <= self.alter_img_params['limit_pixel'] else 255)
def cut_tiles(self, tile_margin=None):
assert self.tiles, 'There are no tiles to process'
super(INGVirtKeyboard, self).cut_tiles(tile_margin)
# alter tile
self.process_tiles()
def password_tiles_coord(self, password):
password_tiles = []
for digit in password:
for tile in self.tiles:
if tile.md5 in self.symbols[digit]:
password_tiles.append(tile)
break
else:
# Dump file only when the symbol is not found
self.dump_tiles(self.path)
raise Exception("Symbol '%s' not found; all symbol hashes are available in %s"
% (digit, self.path))
formatted_password = []
for tile in password_tiles:
formatted_password.append([
random.uniform(tile.coords[0], tile.coords[2]),
random.uniform(tile.coords[1], tile.coords[3]),
])
return formatted_password
class LoginPage(JsonPage):
@property
def is_logged(self):
return 'firstName' in self.doc
def get_password_coord(self, img, password):
assert 'pinPositions' in self.doc, 'Virtualkeyboard position has failed'
assert 'keyPadUrl' in self.doc, 'Virtualkeyboard image url is missing'
pin_position = Dict('pinPositions')(self.doc)
image = BytesIO(img)
vk = INGVirtKeyboard(image, cols=5, rows=2, browser=self.browser)
password_random_coords = vk.password_tiles_coord(password)
# pin positions (website side) start at 1, our positions start at 0
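        # e.g. if pinPositions were [1, 3, 6], the site wants the 1st, 3rd and
        # 6th password digits, so the randomised in-tile coordinates computed
        # above are returned at 0-based indices 0, 2 and 5.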
return [password_random_coords[index-1] for index in pin_position]
| vicnet/weboob | modules/ing/api/login.py | Python | lgpl-3.0 | 5,888 | 0.002887 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': "Product Matrix",
'summary': """
Technical module: Matrix Implementation
""",
'description': """
Please refer to Sale Matrix or Purchase Matrix for the use of this module.
""",
'category': 'Sales/Sales',
'version': '1.0',
'depends': ['account'],
# Account dependency for section_and_note widget.
'data': [
'views/matrix_templates.xml',
],
'demo': [
'data/product_matrix_demo.xml',
],
'assets': {
'web.assets_backend': [
'product_matrix/static/src/js/section_and_note_widget.js',
'product_matrix/static/src/scss/product_matrix.scss',
],
'web.assets_qweb': [
'product_matrix/static/src/xml/**/*',
],
},
'license': 'LGPL-3',
}
| jeremiahyan/odoo | addons/product_matrix/__manifest__.py | Python | gpl-3.0 | 888 | 0 |
from SimPEG import Mesh, Regularization, Maps, Utils, EM
from SimPEG.EM.Static import DC
import numpy as np
import matplotlib.pyplot as plt
#%matplotlib inline
import copy
#import pandas as pd
#from scipy.sparse import csr_matrix, spdiags, dia_matrix,diags
#from scipy.sparse.linalg import spsolve
from scipy.stats import norm,multivariate_normal
import sys
path ="../pymatsolver/"
path = "../../../Documents/pymatsolver/"
sys.path.append(path)
from pymatsolver import PardisoSolver
#from scipy.interpolate import LinearNDInterpolator, interp1d
#from sklearn.mixture import GaussianMixture
from SimPEG import DataMisfit, Regularization, Optimization, InvProblem, Directives, Inversion
import SimPEG
import scipy.sparse as sp
import os
import glob
#Remove older results
files = glob.glob('./*.npz')
for f in files:
os.remove(f)
#2D model
csx, csy, csz = 0.25,0.25,0.25
# Number of core cells in each direction
ncx, ncz = 123,41
# Number of padding cells to add in each direction
npad = 12
# Vectors of cell lengthts in each direction
hx = [(csx,npad, -1.5),(csx,ncx),(csx,npad, 1.5)]
hz= [(csz,npad,-1.5),(csz,ncz)]
# Create mesh
mesh = Mesh.TensorMesh([hx, hz],x0="CN")
# Map mesh coordinates from local to UTM coordiantes
#mesh.x0[2] = mesh.x0[2]-mesh.vectorCCz[-npad-1]
mesh.x0[1] = mesh.x0[1]+csz/2.
#mesh.x0[0] = mesh.x0[0]+csx/2.
#mesh.plotImage(np.ones(mesh.nC)*np.nan, grid=True)
#mesh.plotImage(np.ones(mesh.nC)*np.nan, grid=True)
#plt.gca().set_xlim([-20,20])
#plt.gca().set_ylim([-15,0])
#mesh.plotGrid()
#plt.gca().set_aspect('equal')
#plt.show()
print "Mesh Size: ", mesh.nC
#Model Creation
lnsig_air = 1e-8;
x0,z0, r0 = -6., -4., 3.
x1,z1, r1 = 6., -4., 3.
ln_sigback = -5.
ln_sigc = -3.
ln_sigr = -7.
noisemean = 0.
noisevar = 0.0
overburden_extent = 0.
ln_over = -4.
#m = (lnsig_background)*np.ones(mesh.nC);
#mu =np.ones(mesh.nC);
mtrue = ln_sigback*np.ones(mesh.nC) + norm(noisemean,noisevar).rvs(mesh.nC)
overb = (mesh.gridCC[:,1] >-overburden_extent) & (mesh.gridCC[:,1]<=0)
mtrue[overb] = ln_over*np.ones_like(mtrue[overb])+ norm(noisemean,noisevar).rvs(np.prod((mtrue[overb]).shape))
csph = (np.sqrt((mesh.gridCC[:,1]-z0)**2.+(mesh.gridCC[:,0]-x0)**2.))< r0
mtrue[csph] = ln_sigc*np.ones_like(mtrue[csph]) + norm(noisemean,noisevar).rvs(np.prod((mtrue[csph]).shape))
#Define the sphere limit
rsph = (np.sqrt((mesh.gridCC[:,1]-z1)**2.+(mesh.gridCC[:,0]-x1)**2.))< r1
mtrue[rsph] = ln_sigr*np.ones_like(mtrue[rsph]) + norm(noisemean,noisevar).rvs(np.prod((mtrue[rsph]).shape))
mtrue = Utils.mkvc(mtrue);
mesh.plotGrid()
plt.gca().set_xlim([-10,10])
plt.gca().set_ylim([-10,0])
xyzlim = np.r_[[[-10.,10.],[-10.,1.]]]
actind, meshCore = Utils.meshutils.ExtractCoreMesh(xyzlim,mesh)
plt.hist(mtrue[actind],bins =50,normed=True);
fig0 = plt.figure()
ax0 = fig0.add_subplot(111)
mm = meshCore.plotImage(mtrue[actind],ax = ax0)
plt.colorbar(mm[0])
ax0.set_aspect("equal")
#plt.show()
#Gradient array 1 2D
srclist = []
nSrc = 23
lines = 1
ylines = np.r_[0.]
xlines = np.r_[0.]
z = 0.
#xline
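# The loops below build the 2D gradient-style source list: for the first 12
# sources the A electrode stays at x = -13 m while B sweeps to the right; for
# the remaining sources A sweeps to the right while B stays at x = +13 m.
# Every source is measured on the same 12 fixed surface receiver dipoles
# spanning x = -12 m to +12 m.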
for k in range(lines):
for i in range(nSrc):
if i<=11:
locA = np.r_[-14.+1., z]
locB = np.r_[-8.+2.*i-1., z]
#M = np.c_[np.arange(-12.,-12+2*(i+1),2),np.ones(i+1)*z]
#N = np.c_[np.arange(-10.,-10+2*(i+1),2),np.ones(i+1)*z]
M = np.c_[np.arange(-12.,10+1,2),np.ones(12)*z]
N = np.c_[np.arange(-10.,12+1,2),np.ones(12)*z]
rx = DC.Rx.Dipole(M,N)
src= DC.Src.Dipole([rx],locA,locB)
srclist.append(src)
#print locA,locB,"\n",[M,N],"\n"
#rx = DC.Rx.Dipole(-M,-N)
#src= DC.Src.Dipole([rx],-locA,-locB)
#srclist.append(src)
#print -locA,-locB,"\n",[-M,-N],"\n"
else:
locA = np.r_[-14.+2*(i-11)+1., z]
locB = np.r_[14.-1.,z]
#M = np.c_[np.arange(locA[0]+1.,12.,2),np.ones(nSrc-i)*z]
#N = np.c_[np.arange(locA[0]+3.,14.,2),np.ones(nSrc-i)*z]
M = np.c_[np.arange(-12.,10+1,2),np.ones(12)*z]
N = np.c_[np.arange(-10.,12+1,2),np.ones(12)*z]
rx = DC.Rx.Dipole(M,N)
src= DC.Src.Dipole([rx],locA,locB)
srclist.append(src)
#print "line2",locA,locB,"\n",[M,N],"\n"
#rx = DC.Rx.Dipole(-M,-N)
#src= DC.Src.Dipole([rx],-locA,-locB)
#srclist.append(src)
mapping = Maps.ExpMap(mesh)
survey = DC.Survey(srclist)
problem = DC.Problem3D_CC(mesh, sigmaMap=mapping)
problem.pair(survey)
problem.Solver = PardisoSolver
survey.dobs = survey.dpred(mtrue)
survey.std = 0.05*np.ones_like(survey.dobs)
survey.eps = 1e-5*np.linalg.norm(survey.dobs)
print '# of data: ', survey.dobs.shape
class SimultaneousSrc(DC.Src.BaseSrc):
"""
Dipole source
"""
QW = None
Q = None
W = None
def __init__(self, rxList,Q,W, **kwargs):
SimPEG.Survey.BaseSrc.__init__(self, rxList, **kwargs)
def eval(self, prob):
return self.QW
class SimultaneousRx(DC.Rx.BaseRx):
"""
SimultaneousRx receiver
"""
def __init__(self, locs, rxType='phi', **kwargs):
# We may not need this ...
SimPEG.Survey.BaseRx.__init__(self, locs, rxType)
@property
def nD(self):
"""Number of data in the receiver."""
return self.locs.shape[0]
# Not sure why ...
# return int(self.locs[0].size / 2)
def getP(self, mesh, Gloc):
return self.locs
P = []
M = np.c_[np.arange(-12.,10+1,2),np.ones(12)*z]
N = np.c_[np.arange(-10.,12+1,2),np.ones(12)*z]
rx = DC.Rx.Dipole(M,N)
P = rx.getP(mesh,'CC')
#Update W Inversion
nsubSrc = 5
m0 = (-5.)*np.ones(mapping.nP);
miter = m0
n_its = 50
InnerIt = 3
dmisfitsub = []
dmisfitall = []
#beta schedule
beta = 1.
betalist = [beta]
coolingFactor = 2.
coolingRate = 3
W = np.random.randint(0, high=2, size=[survey.nSrc,nsubSrc])*2-1
print W
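# Each column of W is a Rademacher vector (entries drawn uniformly from
# {-1, +1}), so Q.dot(W) inside the loop combines the nSrc individual
# right-hand sides into nsubSrc random +/-1 simultaneous super-sources.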
dmisAll = DataMisfit.l2_DataMisfit(survey)
dmisfitall.append(dmisAll.eval(m0)/survey.nD)
print "Starting Model Dmisfit compared to full dataset: ",dmisAll.eval(m0)/survey.nD
print "Check misfit with true model: ",dmisAll.eval(mtrue)/survey.nD
for it in range(n_its):
problem.unpair()
problem.pair(survey)
Q = problem.getRHS()
sub = problem.getRHS().dot(W)
rx_r = SimultaneousRx(locs=P)
srcList_r = []
for isrc in range(sub.shape[1]):
src_r = SimultaneousSrc([rx_r], Q=Q[:,isrc],W=W[:,isrc],QW =Q.dot(W)[:,isrc])
srcList_r.append(src_r)
survey_r = DC.Survey(srcList_r)
problem.unpair()
problem.pair(survey_r)
d = survey_r.dpred(mtrue)
survey_r.dobs = d
survey_r.std = np.ones_like(d)*0.05
survey_r.eps = 1e-5*np.linalg.norm(survey_r.dobs)
print '# of data: ', survey_r.dobs.shape
regmesh = mesh;
dmis = DataMisfit.l2_DataMisfit(survey_r)
reg = Regularization.Tikhonov(regmesh)#,mapping = mapping)#,indActive=actind)
reg.mref = m0
opt = Optimization.InexactGaussNewton(maxIter=1,tolX=1e-6)
opt.remember('xc')
invProb = InvProblem.BaseInvProblem(dmis, reg, opt)
#beta = Directives.BetaEstimate_ByEig(beta0= 10.,beta0_ratio=1e0)
reg.alpha_s = 1e-6;
invProb.beta = beta
#betaSched = Directives.BetaSchedule(coolingFactor=5, coolingRate=2)
#sav0 = Directives.SaveEveryIteration()
#sav1 = Directives.SaveModelEveryIteration()
#sav2 = Directives.SaveOutputDictEveryIteration()
inv = Inversion.BaseInversion(invProb)#, directiveList=[sav2])#[beta,betaSched])#sav0,sav1,
msimple = inv.run(miter);
beta = invProb.beta
if np.mod(it+1,coolingRate) ==0:
beta = beta/coolingFactor
betalist.append(beta)
miter = copy.deepcopy(msimple)
dmisfitsub.append(dmis.eval(msimple)/survey_r.nD)
print "Dmisfit compared to sub dataset: ",dmis.eval(msimple)/survey_r.nD
print "Check misfit with true model: ",dmis.eval(mtrue)/survey_r.nD
problem.unpair()
problem.pair(survey)
dmisAll = DataMisfit.l2_DataMisfit(survey)
dmisfitall.append(dmisAll.eval(msimple)/survey.nD)
print "Dmisfit compared to full dataset: ",dmisAll.eval(msimple)/survey.nD
print "Check misfit with true model: ",dmisAll.eval(mtrue)/survey.nD
if np.mod(it+1,InnerIt) ==0:
W = np.random.randint(0, high=2, size=[survey.nSrc,nsubSrc])*2-1
print 'update W'
#mm = mesh.plotImage(miter)
#plt.colorbar(mm[0])
#plt.gca().set_xlim([-10.,10.])
#plt.gca().set_ylim([-10.,0.])
np.save('./dmisfitsub.npy',dmisfitsub)
np.save('./dmisfitall.npy',dmisfitall)
np.save('./beta.npy',betalist)
np.save('./finalresult',msimple)
#plt.show()
| thast/EOSC513 | DC/SimultaneousSources/Update_W_each_3it_5s_rademacher/Update_W_each_3it_5s_rademacher.py | Python | mit | 8,698 | 0.022189 |
#!/usr/bin/env python
# hot_drinks.py
# Copyright (C) ContinuumBridge Limited, 2014-2015 - All Rights Reserved
# Written by Peter Claydon
#
# Default values:
config = {
"hot_drinks": True,
"name": "A Human Being",
"alert": True,
"ignore_time": 120,
"window": 360,
"threshold": 10,
"daily_report_time": "02:00",
"data_send_delay": 1
}
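# Detection sketch (inferred from HotDrinks.onChange below): a hot drink is
# counted when every monitored binary sensor has fired within "window"
# seconds of the kettle's power reading exceeding "threshold"; kettle-on
# events within "ignore_time" seconds of the kettle switching off are
# ignored, and daily totals are reported at "daily_report_time".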
import sys
import os.path
import time
from cbcommslib import CbApp, CbClient
from cbconfig import *
import requests
import json
from twisted.internet import reactor
import smtplib
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from cbutils import nicetime
#from cbutils import timeCorrect
# Can be removed after all bridges are at a version that supports timeCorrect()
def timeCorrect():
if time.time() < 32000000:
return False
else:
return True
CONFIG_FILE = CB_CONFIG_DIR + "hot_drinks.config"
STATE_FILE = CB_CONFIG_DIR + "hot_drinks.state"
CID = "CID164" # Client ID
class HotDrinks():
def __init__(self):
self.bridge_id = "unconfigured"
self.kettleOn = False
self.kettleOffTime = 0
self.s = []
self.waiting = False
self.triggered = False
self.power = None
self.binary = []
self.sensorOnTimes = {}
self.counts = {
"drinksInDay": 0,
"kettlesInDay": 0
}
def initIDs(self, bridge_id, idToName):
self.idToName = idToName
self.bridge_id = bridge_id
self.startMonitor()
def addSensor(self, characteristic, sensorID):
if characteristic == "power":
self.power = sensorID
elif characteristic == "binary":
self.binary.append(sensorID)
self.sensorOnTimes[sensorID] = 0
self.cbLog("debug", "addSensor, sensorOnTimes: " + str(self.sensorOnTimes))
def monitor(self):
        try:
            now = time.time()  # timestamp used for both daily data points
            values = {
"name": self.bridge_id + "/hot_drinks_in_day",
"points": [[int(now*1000), self.counts["drinksInDay"]]]
}
self.storeValues(values)
values = {
"name": self.bridge_id + "/kettles_in_day",
"points": [[int(now*1000), self.counts["kettlesInDay"]]]
}
self.storeValues(values)
self.counts["drinksInDay"] = 0
self.counts["kettlesInDay"] = 0
self.startMonitor()
except Exception as ex:
self.cbLog("warning", "monitor failed. Exception. Type: " + str(type(ex)) + "exception: " + str(ex.args))
def startMonitor(self):
try:
if not timeCorrect():
reactor.callLater(60, self.startMonitor)
now = time.strftime("%Y %b %d %H:%M", time.localtime()).split()
now[3] = config["daily_report_time"]
midnight_e = time.mktime(time.strptime(" ".join(now), "%Y %b %d %H:%M")) + 86400
wait = midnight_e - time.time() + 60
self.cbLog("debug", "monitor set for " + str(int(wait)) + " seconds")
reactor.callLater(wait, self.monitor)
except Exception as ex:
self.cbLog("warning", "startMonitor failed. Exception. Type: " + str(type(ex)) + "exception: " + str(ex.args))
def loadMonitor(self):
try:
if os.path.isfile(STATE_FILE):
with open(STATE_FILE, 'r') as f:
self.counts = json.load(f)
self.cbLog("debug", "Loaded saved counts: " + str(self.counts))
except Exception as ex:
self.cbLog("warning", "Problem loading stored counts. Exception. Type: " + str(type(ex)) + "exception: " + str(ex.args))
finally:
try:
os.remove(STATE_FILE)
except Exception as ex:
self.cbLog("debug", "Cannot remove stored counts file. Exception. Type: " + str(type(ex)) + "exception: " + str(ex.args))
def saveMonitor(self):
try:
with open(STATE_FILE, 'w') as f:
json.dump(self.counts, f)
self.cbLog("info", "Saved counts")
except Exception as ex:
self.cbLog("warning", "Problem saving counts. Type: " + str(type(ex)) + "exception: " + str(ex.args))
def onChange(self, sensor, timeStamp, value):
try:
#self.cbLog("debug", "onChange. sensor: " + self.idToName[sensor] + ", value: " + str(value) + ", time: " + nicetime(timeStamp) + ", kettleOn: " + str(self.kettleOn))
if not timeCorrect():
self.cbLog("info", "Data not processed as time is not correct")
return
if sensor == self.power:
if value > config["threshold"] and not self.kettleOn:
if timeStamp - self.kettleOffTime > config["ignore_time"]:
self.sensorOnTimes[sensor] = timeStamp
self.kettleOn = True
self.cbLog("debug", "kettle on")
values = {
"name": self.bridge_id + "/kettle",
"points": [[int(timeStamp*1000), 1]]
}
self.storeValues(values)
self.counts["kettlesInDay"] += 1
self.cbLog("debug", "kettlesInDay: " + str(self.counts["kettlesInDay"]))
elif value < config["threshold"] and self.kettleOn:
self.kettleOn = False
self.triggered = False
self.kettleOffTime = timeStamp
self.cbLog("debug", "kettle off")
elif sensor in self.binary and value == "on":
self.sensorOnTimes[sensor] = timeStamp
now = time.time()
trigger = True
#self.cbLog("debug", "onChange, sensorOnTimes: " + str(self.sensorOnTimes))
for t in self.sensorOnTimes:
if now - self.sensorOnTimes[t] > config["window"]:
trigger = False
if trigger and not self.triggered:
self.cbLog("debug", "triggered")
self.triggered = True
self.counts["drinksInDay"] += 1
self.cbLog("debug", "drinksInDay: " + str(self.counts["drinksInDay"]))
if config["alert"]:
msg = {"m": "alert",
"a": "Hot drinks being made by " + config["name"] + " at " + nicetime(now),
"t": now
}
self.client.send(msg)
self.cbLog("debug", "msg send to client: " + str(json.dumps(msg, indent=4)))
values = {
"name": self.bridge_id + "/hot_drinks",
"points": [[int(now*1000), 1]]
}
self.storeValues(values)
except Exception as ex:
self.cbLog("warning", "HotDrinks onChange encountered problems. Exception: " + str(type(ex)) + str(ex.args))
def sendValues(self):
msg = {"m": "data",
"d": self.s
}
self.cbLog("debug", "sendValues. Sending: " + str(json.dumps(msg, indent=4)))
self.client.send(msg)
self.s = []
self.waiting = False
def storeValues(self, values):
self.s.append(values)
if not self.waiting:
self.waiting = True
reactor.callLater(config["data_send_delay"], self.sendValues)
class App(CbApp):
def __init__(self, argv):
self.appClass = "monitor"
self.state = "stopped"
self.status = "ok"
self.devices = []
self.devServices = []
self.idToName = {}
self.hotDrinks = HotDrinks()
#CbApp.__init__ MUST be called
CbApp.__init__(self, argv)
def setState(self, action):
if action == "clear_error":
self.state = "running"
else:
self.state = action
msg = {"id": self.id,
"status": "state",
"state": self.state}
self.sendManagerMessage(msg)
def onStop(self):
self.hotDrinks.saveMonitor()
self.client.save()
def onConcMessage(self, message):
#self.cbLog("debug", "onConcMessage, message: " + str(json.dumps(message, indent=4)))
if "status" in message:
if message["status"] == "ready":
# Do this after we have established communications with the concentrator
msg = {
"m": "req_config",
"d": self.id
}
self.client.send(msg)
self.client.receive(message)
def onClientMessage(self, message):
self.cbLog("debug", "onClientMessage, message: " + str(json.dumps(message, indent=4)))
global config
if "config" in message:
if "warning" in message["config"]:
self.cbLog("warning", "onClientMessage: " + str(json.dumps(message["config"], indent=4)))
else:
try:
newConfig = message["config"]
copyConfig = config.copy()
copyConfig.update(newConfig)
if copyConfig != config or not os.path.isfile(CONFIG_FILE):
self.cbLog("debug", "onClientMessage. Updating config from client message")
config = copyConfig.copy()
with open(CONFIG_FILE, 'w') as f:
json.dump(config, f)
self.cbLog("info", "Config updated")
self.readLocalConfig()
# With a new config, send init message to all connected adaptors
for i in self.adtInstances:
init = {
"id": self.id,
"appClass": self.appClass,
"request": "init"
}
self.sendMessage(init, i)
except Exception as ex:
self.cbLog("warning", "onClientMessage, could not write to file. Type: " + str(type(ex)) + ", exception: " + str(ex.args))
def onAdaptorData(self, message):
#self.cbLog("debug", "onAdaptorData, message: " + str(json.dumps(message, indent=4)))
if message["characteristic"] == "binary_sensor" or message["characteristic"] == "power":
self.hotDrinks.onChange(message["id"], message["timeStamp"], message["data"])
def onAdaptorService(self, message):
#self.cbLog("debug", "onAdaptorService, message: " + str(json.dumps(message, indent=4)))
if self.state == "starting":
self.setState("running")
self.devServices.append(message)
serviceReq = []
power = False
        binary = False
for p in message["service"]:
if p["characteristic"] == "power":
power = True
self.hotDrinks.addSensor("power", message["id"])
elif p["characteristic"] == "binary_sensor":
binary = True
self.hotDrinks.addSensor("binary", message["id"])
if power:
serviceReq.append({"characteristic": "power", "interval": 0})
elif binary:
serviceReq.append({"characteristic": "binary_sensor", "interval": 0})
msg = {"id": self.id,
"request": "service",
"service": serviceReq}
self.sendMessage(msg, message["id"])
#self.cbLog("debug", "onAdaptorService, response: " + str(json.dumps(msg, indent=4)))
def readLocalConfig(self):
global config
try:
with open(CONFIG_FILE, 'r') as f:
newConfig = json.load(f)
self.cbLog("debug", "Read local config")
config.update(newConfig)
except Exception as ex:
self.cbLog("warning", "Local config does not exist or file is corrupt. Exception: " + str(type(ex)) + str(ex.args))
self.cbLog("debug", "Config: " + str(json.dumps(config, indent=4)))
def onConfigureMessage(self, managerConfig):
self.readLocalConfig()
idToName2 = {}
for adaptor in managerConfig["adaptors"]:
adtID = adaptor["id"]
if adtID not in self.devices:
# Because managerConfigure may be re-called if devices are added
name = adaptor["name"]
friendly_name = adaptor["friendly_name"]
self.cbLog("debug", "managerConfigure app. Adaptor id: " + adtID + " name: " + name + " friendly_name: " + friendly_name)
idToName2[adtID] = friendly_name
self.idToName[adtID] = friendly_name.replace(" ", "_")
self.devices.append(adtID)
self.client = CbClient(self.id, CID, 10)
self.client.onClientMessage = self.onClientMessage
self.client.sendMessage = self.sendMessage
self.client.cbLog = self.cbLog
self.client.loadSaved()
self.hotDrinks.cbLog = self.cbLog
self.hotDrinks.client = self.client
self.hotDrinks.initIDs(self.bridge_id, self.idToName)
self.hotDrinks.loadMonitor()
self.setState("starting")
if __name__ == '__main__':
App(sys.argv)
| ContinuumBridge/hot_drinks_app | hot_drinks.py | Python | mit | 13,490 | 0.005263 |
# Copyright (C) 2005-2010 MISG/ICTI/EIA-FR
# See LICENSE for details.
"""
Factories for AMQ clients, Thrift clients and SMAC Clients and servers.
@author: Jonathan Stoppani <jonathan.stoppani@edu.hefr.ch>
"""
import weakref
from twisted.internet.protocol import ReconnectingClientFactory
from twisted.internet import defer, error
from txamqp.protocol import AMQClient
from txamqp.contrib.thrift.client import ThriftTwistedDelegate
from txamqp.queue import TimeoutDeferredQueue, Closed
from txamqp.contrib.thrift.transport import TwistedAMQPTransport
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol
from smac.python import log
from smac.amqp.models import Exchange, Queue, IAddress
from smac.conf import topology
from smac.modules import utils
class SMACServerFactory(object):
iprot_factory = TBinaryProtocol.TBinaryProtocolFactory()
oprot_factory = TBinaryProtocol.TBinaryProtocolFactory()
def __init__(self, client, channel=None):
self.client = client
self.channel = channel or 1
if client.check_0_8():
self.reply_to = "reply to"
else:
self.reply_to = "reply-to"
@defer.inlineCallbacks
def build_server(self, delegate, processor, handler, address, queues=None, standalone=True):
processor_name = processor.__name__
log.debug("Creating new server for {0} with ID {1}".format(
processor_name, address.instance))
address = IAddress(address)
if not queues:
queues = topology.queues
if isinstance(self.channel, int):
channel = yield self.client.channel(self.channel)
yield channel.channel_open()
else:
# Assume it's already open!
channel = self.channel
deferreds = []
# Declare all exchanges
exchanges = {}
for k, e in topology.exchanges.iteritems():
e = Exchange(channel, **e)
e.format_name(**dict(address))
e.declare()
exchanges[k] = e
self.responses = Exchange(channel, **topology.exchanges['responses'])
# Declare all queues
qs = []
for q in queues:
q = q.copy()
bindings = q.pop('bindings')
q = Queue(channel, **q)
q.format_name(**dict(address))
q.declare()
deferreds += [q.bind(exchanges[e], k.format(**dict(address))) for e, k in bindings]
qs.append(q)
# Wait for declarations and bindings
yield defer.DeferredList(deferreds)
log.debug("All queues and needed exchanges declared and bound, start listening")
tags = []
for queue in qs:
tag = yield queue.consume()
tags.append(tag)
@defer.inlineCallbacks
def destroy(ref):
log.debug("Server for {0} garbage collected, removing " \
"subscriptions".format(processor_name))
try:
yield defer.DeferredList([channel.basic_cancel(t) for t in tags])
except Exception as e:
pass
if not standalone:
handler = weakref.proxy(handler, destroy)
processor = processor.Processor(handler)
for tag in tags:
queue = yield self.client.queue(tag)
self.get_next_message(channel, queue, processor, delegate)
def parse_message(self, msg, channel, queue, processor, delegate):
tag = msg.delivery_tag
try:
sender = msg.content[self.reply_to]
except KeyError:
sender = None
transport_in = TTransport.TMemoryBuffer(msg.content.body)
transport_out = TwistedAMQPTransport(channel, str(self.responses), sender)
iprot = self.iprot_factory.getProtocol(transport_in)
oprot = self.oprot_factory.getProtocol(transport_out)
d = processor.process(iprot, oprot)
d.addErrback(delegate.processing_error)
channel.basic_ack(tag, True)
self.get_next_message(channel, queue, processor, delegate)
def get_next_message(self, channel, queue, processor, delegate):
d = queue.get()
d.addCallback(self.parse_message, channel, queue, processor, delegate)
d.addErrback(self.catch_closed_queue, delegate)
d.addErrback(delegate.queue_error)
def catch_closed_queue(self, failure, delegate):
failure.trap(Closed)
delegate.queue_closed(failure)
class SMACClientFactory(object):
iprot_factory = TBinaryProtocol.TBinaryProtocolFactory()
oprot_factory = TBinaryProtocol.TBinaryProtocolFactory()
def __init__(self, client, channel=None):
self.client = client
self.client_lock = defer.DeferredLock()
self.clients = {}
if client.check_0_8():
self.reply_to = "reply to"
else:
self.reply_to = "reply-to"
self.channel = channel or 1
@defer.inlineCallbacks
def build_client(self, address, service=None, distribution=None, cache=True):
yield self.client_lock.acquire()
try:
address = IAddress(address)
if not service:
service = utils.get_module_from_address(address)
service_name = service.__name__ + address.routing_key
distribution = distribution or address.distribution
if not distribution:
raise ValueError("The distribution mode was not defined and " \
"could not be inferred from the address.")
key = (service, address.routing_key, distribution)
try:
client = self.clients[key]
except KeyError:
log.debug("Creating new client for {0} with routing key {1} and distribution {2}".format(
service.__name__, address.routing_key, distribution))
if isinstance(self.channel, int):
channel = yield self.client.channel(self.channel)
yield channel.channel_open()
else:
# Assume it's already open!
channel = self.channel
response_exchange = Exchange(channel, **topology.exchanges['responses'])
response_queue = Queue(channel, exclusive=True, auto_delete=True)
yield response_queue.declare()
yield response_queue.bind(response_exchange)
consumer_tag = yield response_queue.consume()
service_exchange = Exchange(channel, **topology.exchanges[distribution])
service_exchange.format_name(**dict(address))
yield service_exchange.declare()
amqp_transport = TwistedAMQPTransport(channel, str(service_exchange),
address.routing_key, service_name,
str(response_queue), self.reply_to)
client = service.Client(amqp_transport, self.oprot_factory)
client.address = address
client.factory = self
if cache:
weak_client = client
self.clients[key] = client
else:
@defer.inlineCallbacks
def destroy(ref):
log.debug("Client for {0} garbage collected, removing " \
"subscriptions".format(service_name))
try:
yield channel.basic_cancel(consumer_tag)
except Exception as e:
pass
weak_client = weakref.proxy(client, destroy)
queue = yield self.client.queue(consumer_tag)
self.get_next_message(channel, queue, weak_client)
queue = yield self.client.get_return_queue(service_name)
self.get_next_unroutable_message(channel, queue, weak_client)
else:
log.debug("Using cached client for {0} with routing key {1} and distribution {2}".format(
service.__name__, address.routing_key, distribution))
finally:
self.client_lock.release()
defer.returnValue(client)
def parse_message(self, msg, channel, queue, client):
tag = msg.delivery_tag
transport = TTransport.TMemoryBuffer(msg.content.body)
iprot = self.iprot_factory.getProtocol(transport)
(fname, mtype, rseqid) = iprot.readMessageBegin()
if rseqid not in client._reqs:
log.warn('Missing rseqid! fname = %r, rseqid = %s, mtype = %r, routing key = %r, client = %r, msg.content.body = %r' % (fname, rseqid, mtype, msg.routing_key, client, msg.content.body))
method = getattr(client, 'recv_' + fname)
method(iprot, mtype, rseqid)
channel.basic_ack(tag, True)
self.get_next_message(channel, queue, client)
def unrouteable_message(self, msg, channel, queue, client):
transport = TTransport.TMemoryBuffer(msg.content.body)
iprot = self.iprot_factory.getProtocol(transport)
(fname, mtype, rseqid) = iprot.readMessageBegin()
try:
d = client._reqs.pop(rseqid)
except KeyError:
# KeyError will occur if the remote Thrift method is oneway,
# since there is no outstanding local request deferred for
# oneway calls.
pass
else:
            type = TTransport.TTransportException.NOT_OPEN
msg = 'Unrouteable message, routing key = %r calling function %r' % (msg.routing_key, fname)
d.errback(TTransport.TTransportException(type, msg))
self.get_next_unroutable_message(channel, queue, client)
def get_next_unroutable_message(self, channel, queue, client):
d = queue.get()
d.addCallback(self.unrouteable_message, channel, queue, client)
d.addErrback(self.catch_closed_queue)
d.addErrback(self.handle_queue_error)
def get_next_message(self, channel, queue, client):
d = queue.get()
d.addCallback(self.parse_message, channel, queue, client)
d.addErrback(self.catch_closed_queue)
d.addErrback(self.handle_queue_error)
def catch_closed_queue(self, failure):
failure.trap(Closed)
self.handle_closed_queue(failure)
def handle_queue_error(self, failure):
log.err("Error in queue")
log.err(failure)
pass
def handle_closed_queue(self, failure):
log.debug("Queue closed")
class ThriftAMQClient(AMQClient, object):
def __init__(self, *args, **kwargs):
super(ThriftAMQClient, self).__init__(*args, **kwargs)
self.return_queues_lock = defer.DeferredLock()
self.return_queues = {}
@defer.inlineCallbacks
def get_return_queue(self, key):
yield self.return_queues_lock.acquire()
try:
try:
q = self.return_queues[key]
except KeyError:
q = TimeoutDeferredQueue()
self.return_queues[key] = q
finally:
self.return_queues_lock.release()
defer.returnValue(q)
thriftBasicReturnQueue = get_return_queue # compatibility with
# ThriftTwistedDelegate
class AMQClientFactory(ReconnectingClientFactory, object):
"""
Factory for AMQP connections intended to be used by thrift clients.
Overriding the C{protocol} property with a more general C{AMQClient} class
should allow a more generic use of the factory.
"""
protocol = ThriftAMQClient
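    # Hypothetical sketch of the generalisation mentioned in the docstring --
    # reuse the factory with the plain txAMQP client instead of the
    # Thrift-aware one:
    #
    #   class PlainAMQClientFactory(AMQClientFactory):
    #       protocol = AMQClient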
def __init__(self, spec, vhost):
self.spec = spec
self.vhost = vhost
self.closed = False
def buildProtocol(self, _):
client = self.protocol(ThriftTwistedDelegate(), self.vhost, self.spec)
client.factory = self
return client
def clientConnectionLost(self, connector, reason):
if self.closed:
log.info("Connection to the AMQP broker closed.")
return
log.error('Connection to AMQP broker lost. Reason {0}'.format(reason))
super(AMQClientFactory, self).clientConnectionLost(connector, reason)
def clientConnectionFailed(self, connector, reason):
log.error('Connection to AMQP broker failed. Reason {0}'.format(reason))
super(AMQClientFactory, self).clientConnectionFailed(connector, reason)
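# --- Illustrative sketch (not part of the original module) ------------------
# The AMQClientFactory docstring says that overriding the ``protocol``
# attribute should allow a more generic use of the factory. A minimal,
# hypothetical variant could look like the classes below; the names are made
# up for illustration only and are not part of this codebase.
class _ExampleVerboseAMQClient(ThriftAMQClient):
    """Hypothetical client subclass (could add logging, metrics, etc.)."""
    pass
class _ExampleAMQClientFactory(AMQClientFactory):
    protocol = _ExampleVerboseAMQClient
# Usage would mirror the base factory, e.g.:
#   factory = _ExampleAMQClientFactory(spec, vhost)
#   reactor.connectTCP(host, port, factory)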
| SMAC/corelib | smac/amqp/protocol.py | Python | gpl-3.0 | 13,170 | 0.008276 |
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
pass
def backwards(self, orm):
pass
models = {
}
complete_apps = ['recipeBox'] | CWVanderReyden/originalMyHomeNet | recipeBox/migrations/0001_initial.py | Python | gpl-3.0 | 353 | 0.005666 |
#!/usr/bin/env python
#
# Copyright 2004,2005,2007,2010,2012,2013 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from gnuradio import gr, gr_unittest, blocks
import os
class test_stream_mux (gr_unittest.TestCase):
def setUp (self):
os.environ['GR_CONF_CONTROLPORT_ON'] = 'False'
self.tb = gr.top_block ()
def tearDown (self):
self.tb = None
def help_stream_2ff(self, N, stream_sizes):
v0 = blocks.vector_source_f(N*[1,], False)
v1 = blocks.vector_source_f(N*[2,], False)
mux = blocks.stream_mux(gr.sizeof_float, stream_sizes)
dst = blocks.vector_sink_f ()
self.tb.connect (v0, (mux,0))
self.tb.connect (v1, (mux,1))
self.tb.connect (mux, dst)
self.tb.run ()
return dst.data ()
def help_stream_ramp_2ff(self, N, stream_sizes):
r1 = range(N)
r2 = range(N)
r2.reverse()
v0 = blocks.vector_source_f(r1, False)
v1 = blocks.vector_source_f(r2, False)
mux = blocks.stream_mux(gr.sizeof_float, stream_sizes)
dst = blocks.vector_sink_f ()
self.tb.connect (v0, (mux,0))
self.tb.connect (v1, (mux,1))
self.tb.connect (mux, dst)
self.tb.run ()
return dst.data ()
def test_stream_2NN_ff(self):
N = 40
stream_sizes = [10, 10]
result_data = self.help_stream_2ff(N, stream_sizes)
exp_data = (1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0,
1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0,
1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0,
1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0)
self.assertEqual (exp_data, result_data)
def test_stream_ramp_2NN_ff(self):
N = 40
stream_sizes = [10, 10]
result_data = self.help_stream_ramp_2ff(N, stream_sizes)
exp_data = ( 0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0,
39.0, 38.0, 37.0, 36.0, 35.0, 34.0, 33.0, 32.0, 31.0, 30.0,
10.0, 11.0, 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0,
29.0, 28.0, 27.0, 26.0, 25.0, 24.0, 23.0, 22.0, 21.0, 20.0,
20.0, 21.0, 22.0, 23.0, 24.0, 25.0, 26.0, 27.0, 28.0, 29.0,
19.0, 18.0, 17.0, 16.0, 15.0, 14.0, 13.0, 12.0, 11.0, 10.0,
30.0, 31.0, 32.0, 33.0, 34.0, 35.0, 36.0, 37.0, 38.0, 39.0,
9.0, 8.0, 7.0, 6.0, 5.0, 4.0, 3.0, 2.0, 1.0, 0.0)
self.assertEqual (exp_data, result_data)
def test_stream_2NM_ff(self):
N = 40
stream_sizes = [7, 9]
self.help_stream_2ff(N, stream_sizes)
result_data = self.help_stream_2ff(N, stream_sizes)
exp_data = (1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0,
1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0,
1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0,
1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0,
1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
2.0, 2.0, 2.0, 2.0)
self.assertEqual (exp_data, result_data)
def test_stream_2MN_ff(self):
N = 37
stream_sizes = [7, 9]
self.help_stream_2ff(N, stream_sizes)
result_data = self.help_stream_2ff(N, stream_sizes)
exp_data = (1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0,
1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0,
1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0,
1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0,
1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
2.0)
self.assertEqual (exp_data, result_data)
def test_stream_2N0_ff(self):
N = 30
stream_sizes = [7, 0]
self.help_stream_2ff(N, stream_sizes)
result_data = self.help_stream_2ff(N, stream_sizes)
exp_data = (1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
1.0, 1.0)
self.assertEqual (exp_data, result_data)
def test_stream_20N_ff(self):
N = 30
stream_sizes = [0, 9]
self.help_stream_2ff(N, stream_sizes)
result_data = self.help_stream_2ff(N, stream_sizes)
exp_data = (2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0,
2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0,
2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0,
2.0, 2.0, 2.0)
self.assertEqual (exp_data, result_data)
if __name__ == '__main__':
gr_unittest.run(test_stream_mux, "test_stream_mux.xml")
| trondeau/gnuradio-old | gr-blocks/python/blocks/qa_stream_mux.py | Python | gpl-3.0 | 6,241 | 0.005929 |
# -*- coding: utf-8 -*-
# Copyright 2015 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from functools import wraps
import math
import random
import time
from gcs_client import errors as errors
def is_complete(f):
@wraps(f)
def wrapped(self, *args, **kwargs):
attributes = getattr(self, '_required_attributes') or []
for attribute in attributes:
if not getattr(self, attribute, None):
raise Exception('%(func_name)s needs %(attr)s to be set.' %
{'func_name': f.__name__, 'attr': attribute})
return f(self, *args, **kwargs)
return wrapped
# Generate default codes to retry from transient HTTP errors
DEFAULT_RETRY_CODES = tuple(
code for code, (cls_name, cls) in errors.http_errors.items()
if cls is errors.Transient)
class RetryParams(object):
"""Truncated Exponential Backoff configuration class.
This configuration is used to provide truncated exponential backoff retries
for communications.
The algorithm requires 4 arguments: max retries, initial delay, max backoff
wait time and backoff factor.
    As long as we have pending retries we will wait
        (backoff_factor ^ (n-1)) * initial delay
    where n is the retry number.
    This wait is used as long as it does not exceed the max backoff wait time;
    otherwise the max backoff wait time is used instead.
We'll add a random wait time to this delay to help avoid cases where many
clients get synchronized by some situation and all retry at once, sending
requests in synchronized waves.
For example with default values of max_retries=5, initial_delay=1,
max_backoff=32 and backoff_factor=2
- 1st failure: 1 second + random delay [ (2^(1-1)) * 1 ]
- 2nd failure: 2 seconds + random delay [ (2^(2-1)) * 1 ]
- 3rd failure: 4 seconds + random delay [ (2^(3-1)) * 1 ]
- 4th failure: 8 seconds + random delay [ (2^(4-1)) * 1 ]
- 5th failure: 16 seconds + random delay [ (2^(5-1)) * 1 ]
- 6th failure: Fail operation
"""
def __init__(self, max_retries=5, initial_delay=1, max_backoff=32,
backoff_factor=2, randomize=True):
"""Initialize retry configuration.
:param max_retries: Maximum number of retries before giving up.
:type max_retries: int
:param initial_delay: Seconds to wait for the first retry.
:type initial_delay: int or float
:param max_backoff: Maximum number of seconds to wait between retries.
:type max_backoff: int or float
:param backoff_factor: Base to use for the power used to calculate the
delay for the backoff.
:type backoff_factor: int or float
:param randomize: Whether to use randomization of the delay time to
avoid synchronized waves.
:type randomize: bool
"""
self.max_retries = max_retries
self.initial_delay = initial_delay
self.max_backoff = max_backoff
self.backoff_factor = backoff_factor
self.randomize = randomize
@classmethod
def get_default(cls):
"""Return default configuration (simpleton patern)."""
if not hasattr(cls, 'default'):
cls.default = cls()
return cls.default
@classmethod
def set_default(cls, *args, **kwargs):
"""Set default retry configuration.
        Method accepts a RetryParams instance or the same arguments as the
__init__ method.
"""
default = cls.get_default()
# For RetryParams argument copy dictionary to default instance so all
# references to the default configuration will have new values.
if len(args) == 1 and isinstance(args[0], RetryParams):
default.__dict__.update(args[0].__dict__)
# For individual arguments call __init__ method on default instance
else:
default.__init__(*args, **kwargs)
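# Illustrative sketch (not part of the original module): how callers might
# change the module-wide defaults used by RetryParams.get_default(). Both
# forms below are equivalent; they are left as comments because calling
# set_default() at import time would change behaviour for every user.
#
#   RetryParams.set_default(max_retries=3, initial_delay=2)
#   RetryParams.set_default(RetryParams(max_retries=3, initial_delay=2))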
def retry(param='_retry_params', error_codes=DEFAULT_RETRY_CODES):
"""Truncated Exponential Backoff decorator.
There are multiple ways to use this decorator:
@retry
def my_func(self):
In this case we will try to use `self._retry_params` and if that's not
available we'll use default retry configuration and retry on
DEFAULT_RETRY_CODES status codes.
@retry('_retry_cfg')
def my_func(self):
In this case we will try to use `self._retry_cfg` and if that's
not available we'll use default retry configuration and retry on
DEFAULT_RETRY_CODES status codes.
@retry(RetryParams(5, 1, 32, 2, False))
def my_func(self):
In this case we will use a specific retry configuration and retry on
DEFAULT_RETRY_CODES status codes.
@retry('_retry_cfg', [408, 504])
def my_func(self):
In this case we will try to use `self._retry_cfg` and if that's
not available we'll use default retry configuration and retry only on
timeout status codes.
@retry(RetryParams(5, 1, 32, 2, False), [408, 504])
def my_func(self):
In this case we will use a specific retry configuration and retry only
on timeout status codes.
@retry(error_codes=[408, 504])
def my_func(self):
In this case we will try to use `self._retry_params` and if that's not
available we'll use default retry configuration and retry only on
timeout status codes.
    If we pass None as the retry parameter, or the value of the attribute on
    the instance is None, we will not do any retries.
"""
def _retry(f):
@wraps(f)
def wrapped(self, *args, **kwargs):
# If retry configuration is none or a RetryParams instance, use it
if isinstance(param, (type(None), RetryParams)):
retry_params = param
# If it's an attribute name try to retrieve it
else:
retry_params = getattr(self, param, RetryParams.get_default())
delay = 0
random_delay = 0
n = 0 # Retry number
while True:
try:
result = f(self, *args, **kwargs)
return result
except errors.Http as exc:
if (not retry_params or n >= retry_params.max_retries or
exc.code not in error_codes):
raise exc
n += 1
# If we haven't reached maximum backoff yet calculate new delay
if delay < retry_params.max_backoff:
backoff = (math.pow(retry_params.backoff_factor, n-1) *
retry_params.initial_delay)
delay = min(retry_params.max_backoff, backoff)
if retry_params.randomize:
random_delay = random.random() * retry_params.initial_delay
time.sleep(delay + random_delay)
return wrapped
# If no argument has been used
if callable(param):
f, param = param, '_retry_params'
return _retry(f)
return _retry
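# --- Illustrative usage sketch (not part of the original module) ------------
# A hypothetical class combining RetryParams with the retry decorator. The
# attribute name '_retry_params' is the decorator's default lookup target;
# everything below is an example, not public gcs_client API.
class _ExampleRetryingClient(object):
    def __init__(self, retry_params=None):
        self._retry_params = retry_params or RetryParams.get_default()
    @retry
    def fetch(self):
        # A real method would perform an HTTP request here; errors.Http
        # exceptions with transient status codes would trigger retries.
        return 'ok'
    @retry('_retry_params', error_codes=(408, 504))
    def fetch_retrying_timeouts_only(self):
        return 'ok'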
| Akrog/gcs-client | gcs_client/common.py | Python | apache-2.0 | 7,705 | 0 |
# Copyright 2018 SAS Project Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Inter-SAS Duplicate Grants removal.
This is a subset of the pre-IAP reference model which implements inter-SAS
duplicate CBSD removal. If a CBSD has registered with multiple SASs then the
CBSD is removed from the FAD objects of the respective SASs.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import logging
from collections import defaultdict
def interSasDuplicateGrantPurgeReferenceModel(sas_uut_fad, sas_test_harness_fads):
""" Removes CBSDs with grants from more than one SAS from FAD objects.
Checks if a CBSD is registered with more than one SAS and removes the CBSD from
all the FAD objects of all SASs (SAS UUT and SAS Test Harnesses).
Args:
sas_uut_fad: A |FullActivityDump| object containing the FAD records of SAS UUT.
sas_test_harness_fads: A list of |FullActivityDump| objects containing the FAD records
from SAS test harnesses.
"""
# Get all the CBSD Reference ID of all CBSDs from UUT and SAS test Harness FAD objects
cbsd_id_counts = defaultdict(int)
for cbsd in sas_uut_fad.getCbsdRecords():
cbsd_id_counts[cbsd['id']] += 1
for fad in sas_test_harness_fads:
for cbsd in fad.getCbsdRecords():
cbsd_id_counts[cbsd['id']] += 1
# Iterate through the UUT CBSD list and keep only the non duplicate CBSDs
cbsds_to_keep = []
for cbsd in sas_uut_fad.getCbsdRecords():
if cbsd_id_counts[cbsd['id']] == 1:
cbsds_to_keep.append(cbsd)
logging.info('CBSDs to keep in SAS UUT: %s', cbsds_to_keep)
sas_uut_fad.setCbsdRecords(cbsds_to_keep)
# Iterate through the test harness CBSD list and keep only the non duplicate CBSDs
for fad in sas_test_harness_fads:
cbsds_to_keep = []
for cbsd in fad.getCbsdRecords():
if cbsd_id_counts[cbsd['id']] == 1:
cbsds_to_keep.append(cbsd)
logging.info('CBSDs to keep in SAS TH: %s', cbsds_to_keep)
fad.setCbsdRecords(cbsds_to_keep)
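# --- Illustrative sketch (not part of the reference implementation) ---------
# A minimal stand-in FAD object is used below purely to demonstrate the purge
# behaviour; the real test harness passes FullActivityDump instances instead.
class _ExampleFad(object):
  def __init__(self, cbsds):
    self._cbsds = cbsds
  def getCbsdRecords(self):
    return self._cbsds
  def setCbsdRecords(self, cbsds):
    self._cbsds = cbsds
def _examplePurge():
  sas_uut = _ExampleFad([{'id': 'cbsd/A'}, {'id': 'cbsd/B'}])
  sas_th = _ExampleFad([{'id': 'cbsd/B'}, {'id': 'cbsd/C'}])
  interSasDuplicateGrantPurgeReferenceModel(sas_uut, [sas_th])
  # 'cbsd/B' was registered with both SASs, so only A and C remain.
  assert [c['id'] for c in sas_uut.getCbsdRecords()] == ['cbsd/A']
  assert [c['id'] for c in sas_th.getCbsdRecords()] == ['cbsd/C']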
| Wireless-Innovation-Forum/Spectrum-Access-System | src/harness/reference_models/pre_iap_filtering/inter_sas_duplicate_grant.py | Python | apache-2.0 | 2,583 | 0.00813 |
import asyncio
import email.utils
import json
import sys
from cgi import parse_header
from collections import namedtuple
from http.cookies import SimpleCookie
from urllib.parse import parse_qs, unquote, urlunparse
from httptools import parse_url
from sanic.exceptions import InvalidUsage
from sanic.log import error_logger, logger
try:
from ujson import loads as json_loads
except ImportError:
if sys.version_info[:2] == (3, 5):
def json_loads(data):
# on Python 3.5 json.loads only supports str not bytes
return json.loads(data.decode())
else:
json_loads = json.loads
DEFAULT_HTTP_CONTENT_TYPE = "application/octet-stream"
# HTTP/1.1: https://www.w3.org/Protocols/rfc2616/rfc2616-sec7.html#sec7.2.1
# > If the media type remains unknown, the recipient SHOULD treat it
# > as type "application/octet-stream"
class RequestParameters(dict):
"""Hosts a dict with lists as values where get returns the first
value of the list and getlist returns the whole shebang
"""
def get(self, name, default=None):
"""Return the first value, either the default or actual"""
return super().get(name, [default])[0]
def getlist(self, name, default=None):
"""Return the entire list"""
return super().get(name, default)
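# Illustrative sketch (not part of the original module): the behaviour the
# RequestParameters docstring describes, shown with literal values.
def _example_request_parameters():
    args = RequestParameters({"ids": ["1", "2", "3"]})
    assert args.get("ids") == "1"                  # first value of the list
    assert args.getlist("ids") == ["1", "2", "3"]  # the whole list
    assert args.get("missing", "x") == "x"
    assert args.getlist("missing") is None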
class StreamBuffer:
def __init__(self, buffer_size=100):
self._queue = asyncio.Queue(buffer_size)
async def read(self):
""" Stop reading when gets None """
payload = await self._queue.get()
self._queue.task_done()
return payload
async def put(self, payload):
await self._queue.put(payload)
def is_full(self):
return self._queue.full()
class Request(dict):
"""Properties of an HTTP request such as URL, headers, etc."""
__slots__ = (
"__weakref__",
"_cookies",
"_ip",
"_parsed_url",
"_port",
"_remote_addr",
"_socket",
"app",
"body",
"endpoint",
"headers",
"method",
"parsed_args",
"parsed_files",
"parsed_form",
"parsed_json",
"raw_url",
"stream",
"transport",
"uri_template",
"version",
)
def __init__(self, url_bytes, headers, version, method, transport):
self.raw_url = url_bytes
# TODO: Content-Encoding detection
self._parsed_url = parse_url(url_bytes)
self.app = None
self.headers = headers
self.version = version
self.method = method
self.transport = transport
# Init but do not inhale
self.body_init()
self.parsed_json = None
self.parsed_form = None
self.parsed_files = None
self.parsed_args = None
self.uri_template = None
self._cookies = None
self.stream = None
self.endpoint = None
def __repr__(self):
return "<{0}: {1} {2}>".format(
self.__class__.__name__, self.method, self.path
)
def __bool__(self):
if self.transport:
return True
return False
def body_init(self):
self.body = []
def body_push(self, data):
self.body.append(data)
def body_finish(self):
self.body = b"".join(self.body)
@property
def json(self):
if self.parsed_json is None:
self.load_json()
return self.parsed_json
def load_json(self, loads=json_loads):
try:
self.parsed_json = loads(self.body)
except Exception:
if not self.body:
return None
raise InvalidUsage("Failed when parsing body as json")
return self.parsed_json
@property
def token(self):
"""Attempt to return the auth header token.
:return: token related to request
"""
prefixes = ("Bearer", "Token")
auth_header = self.headers.get("Authorization")
if auth_header is not None:
for prefix in prefixes:
if prefix in auth_header:
return auth_header.partition(prefix)[-1].strip()
return auth_header
@property
def form(self):
if self.parsed_form is None:
self.parsed_form = RequestParameters()
self.parsed_files = RequestParameters()
content_type = self.headers.get(
"Content-Type", DEFAULT_HTTP_CONTENT_TYPE
)
content_type, parameters = parse_header(content_type)
try:
if content_type == "application/x-www-form-urlencoded":
self.parsed_form = RequestParameters(
parse_qs(self.body.decode("utf-8"))
)
elif content_type == "multipart/form-data":
# TODO: Stream this instead of reading to/from memory
boundary = parameters["boundary"].encode("utf-8")
self.parsed_form, self.parsed_files = parse_multipart_form(
self.body, boundary
)
except Exception:
error_logger.exception("Failed when parsing form")
return self.parsed_form
@property
def files(self):
if self.parsed_files is None:
self.form # compute form to get files
return self.parsed_files
@property
def args(self):
if self.parsed_args is None:
if self.query_string:
self.parsed_args = RequestParameters(
parse_qs(self.query_string)
)
else:
self.parsed_args = RequestParameters()
return self.parsed_args
@property
def raw_args(self):
return {k: v[0] for k, v in self.args.items()}
@property
def cookies(self):
if self._cookies is None:
cookie = self.headers.get("Cookie")
if cookie is not None:
cookies = SimpleCookie()
cookies.load(cookie)
self._cookies = {
name: cookie.value for name, cookie in cookies.items()
}
else:
self._cookies = {}
return self._cookies
@property
def ip(self):
if not hasattr(self, "_socket"):
self._get_address()
return self._ip
@property
def port(self):
if not hasattr(self, "_socket"):
self._get_address()
return self._port
@property
def socket(self):
if not hasattr(self, "_socket"):
self._get_address()
return self._socket
def _get_address(self):
self._socket = self.transport.get_extra_info("peername") or (
None,
None,
)
self._ip = self._socket[0]
self._port = self._socket[1]
@property
def remote_addr(self):
"""Attempt to return the original client ip based on X-Forwarded-For.
:return: original client ip.
"""
if not hasattr(self, "_remote_addr"):
forwarded_for = self.headers.get("X-Forwarded-For", "").split(",")
remote_addrs = [
addr
for addr in [addr.strip() for addr in forwarded_for]
if addr
]
if len(remote_addrs) > 0:
self._remote_addr = remote_addrs[0]
else:
self._remote_addr = ""
return self._remote_addr
@property
def scheme(self):
if (
self.app.websocket_enabled
and self.headers.get("upgrade") == "websocket"
):
scheme = "ws"
else:
scheme = "http"
if self.transport.get_extra_info("sslcontext"):
scheme += "s"
return scheme
@property
def host(self):
# it appears that httptools doesn't return the host
# so pull it from the headers
return self.headers.get("Host", "")
@property
def content_type(self):
return self.headers.get("Content-Type", DEFAULT_HTTP_CONTENT_TYPE)
@property
def match_info(self):
"""return matched info after resolving route"""
return self.app.router.get(self)[2]
@property
def path(self):
return self._parsed_url.path.decode("utf-8")
@property
def query_string(self):
if self._parsed_url.query:
return self._parsed_url.query.decode("utf-8")
else:
return ""
@property
def url(self):
return urlunparse(
(self.scheme, self.host, self.path, None, self.query_string, None)
)
File = namedtuple("File", ["type", "body", "name"])
def parse_multipart_form(body, boundary):
"""Parse a request body and returns fields and files
:param body: bytes request body
:param boundary: bytes multipart boundary
:return: fields (RequestParameters), files (RequestParameters)
"""
files = RequestParameters()
fields = RequestParameters()
form_parts = body.split(boundary)
for form_part in form_parts[1:-1]:
file_name = None
content_type = "text/plain"
content_charset = "utf-8"
field_name = None
line_index = 2
line_end_index = 0
while not line_end_index == -1:
line_end_index = form_part.find(b"\r\n", line_index)
form_line = form_part[line_index:line_end_index].decode("utf-8")
line_index = line_end_index + 2
if not form_line:
break
colon_index = form_line.index(":")
form_header_field = form_line[0:colon_index].lower()
form_header_value, form_parameters = parse_header(
form_line[colon_index + 2 :]
)
if form_header_field == "content-disposition":
field_name = form_parameters.get("name")
file_name = form_parameters.get("filename")
# non-ASCII filenames in RFC2231, "filename*" format
if file_name is None and form_parameters.get("filename*"):
encoding, _, value = email.utils.decode_rfc2231(
form_parameters["filename*"]
)
file_name = unquote(value, encoding=encoding)
elif form_header_field == "content-type":
content_type = form_header_value
content_charset = form_parameters.get("charset", "utf-8")
if field_name:
post_data = form_part[line_index:-4]
if file_name is None:
value = post_data.decode(content_charset)
if field_name in fields:
fields[field_name].append(value)
else:
fields[field_name] = [value]
else:
form_file = File(
type=content_type, name=file_name, body=post_data
)
if field_name in files:
files[field_name].append(form_file)
else:
files[field_name] = [form_file]
else:
logger.debug(
"Form-data field does not have a 'name' parameter "
"in the Content-Disposition header"
)
return fields, files
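# --- Illustrative sketch (not part of the original module) ------------------
# parse_multipart_form() takes the raw body bytes plus the boundary bytes from
# the Content-Type header (without the leading dashes). The values below are
# made up for illustration.
def _example_parse_multipart_form():
    boundary = b"boundary123"
    body = (
        b"--boundary123\r\n"
        b'Content-Disposition: form-data; name="greeting"\r\n'
        b"\r\n"
        b"hello\r\n"
        b"--boundary123--\r\n"
    )
    fields, files = parse_multipart_form(body, boundary)
    assert fields.get("greeting") == "hello"
    assert files == {}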
| lixxu/sanic | sanic/request.py | Python | mit | 11,420 | 0.000088 |
from django.contrib.auth.tokens import PasswordResetTokenGenerator
from django.utils import six
class AccountActivationTokenGenerator(PasswordResetTokenGenerator):
def _make_hash_value(self, user, timestamp):
return (six.text_type(user.pk) + six.text_type(timestamp)) + six.text_type(user.is_active)
account_activation_token = AccountActivationTokenGenerator() | srijannnd/Login-and-Register-App-in-Django | simplesocial/accounts/tokens.py | Python | mit | 375 | 0.008 |
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
import sys
import traceback
from StringIO import StringIO
import re
import datetime
from urllib import urlencode
from collections import defaultdict
from django import http
from django.contrib.auth.decorators import login_required
from django.contrib.auth.views import redirect_to_login
from django.db import transaction
from django.core.urlresolvers import reverse
from django.conf import settings
from django.shortcuts import redirect, get_object_or_404
from django.contrib import messages
from django.db.models import Q
from django.template import Context, loader
from django.core.mail import get_connection, EmailMessage
from django.core.validators import validate_email
from django.core.exceptions import ValidationError
from django.shortcuts import render
from django.views.decorators.http import require_POST
from django.contrib.sites.models import RequestSite
from django.core.cache import cache
from django.db.models import Min, Count
import vobject
from .models import Entry, Hours, BlacklistedUser, FollowingUser, UserKey
from pto.apps.users.models import UserProfile, User
from pto.apps.users.utils import ldap_lookup
from .utils import parse_datetime, DatetimeParseError
from .utils.countrytotals import UnrecognizedCountryError, get_country_totals
import utils
import forms
from .decorators import json_view
from .csv_export import UnicodeWriter as CSVUnicodeWriter
def valid_email(value):
try:
validate_email(value)
return True
except ValidationError:
return False
def handler500(request):
data = {}
if settings.TRACEBACKS_ON_500:
err_type, err_value, err_traceback = sys.exc_info()
out = StringIO()
traceback.print_exc(file=out)
traceback_formatted = out.getvalue()
data['err_type'] = err_type
data['err_value'] = err_value
data['err_traceback'] = traceback_formatted
data['_report_traceback'] = True
else:
data['_report_traceback'] = False
return render(request, '500.html', data, status=500)
def home(request): # aka dashboard
data = {}
data['mobile'] = request.MOBILE # thank you django-mobility (see settings)
if data['mobile']:
# unless an explicit cookie it set, redirect to /mobile/
if not request.COOKIES.get('no-mobile', False):
return redirect(reverse('mobile.home'))
# now do what the login_required would usually do
if not request.user.is_authenticated():
path = request.get_full_path()
return redirect_to_login(path)
data['page_title'] = "Dashboard"
profile = request.user.get_profile()
if profile and profile.country in ('GB', 'FR', 'DE'):
first_day = 1 # 1=Monday
else:
first_day = 0 # default to 0=Sunday
data['first_day'] = first_day
if 'all-rightnow' in request.GET:
MAX_RIGHT_NOWS = 9999
else:
MAX_RIGHT_NOWS = 20
## Commented out whilst we decide whether to keep it at all
#right_nows, right_now_users = get_right_nows()
#data['right_nows'] = right_nows
#data['right_now_users'] = right_now_users
#if len(right_now_users) > MAX_RIGHT_NOWS:
# data['right_now_too_many'] = (len(data['right_now_users'])
# - MAX_RIGHT_NOWS)
# data['right_now_users'] = data['right_now_users'][:MAX_RIGHT_NOWS]
#else:
# data['right_now_too_many'] = None
data.update(get_taken_info(request.user))
data['calendar_url'] = _get_user_calendar_url(request)
cache_key = 'recently_created_%s' % request.user.pk
recently_created = cache.get(cache_key)
if recently_created:
data['recently_created'] = recently_created
cache.delete(cache_key)
return render(request, 'dates/home.html', data)
def _get_user_calendar_url(request):
user_key, __ = UserKey.objects.get_or_create(user=request.user)
base_url = '%s://%s' % (request.is_secure() and 'https' or 'http',
RequestSite(request).domain)
return base_url + reverse('dates.calendar_vcal', args=(user_key.key,))
def get_taken_info(user):
data = {}
profile = user.get_profile()
if profile.country:
data['country'] = profile.country
try:
data['country_totals'] = get_country_totals(profile.country)
except UnrecognizedCountryError:
data['unrecognized_country'] = True
today = datetime.date.today()
start_date = datetime.date(today.year, 1, 1)
last_date = datetime.date(today.year + 1, 1, 1)
from django.db.models import Sum
qs = Entry.objects.filter(
user=user,
start__gte=start_date,
end__lt=last_date
)
agg = qs.aggregate(Sum('total_hours'))
total_hours = agg['total_hours__sum']
if total_hours is None:
total_hours = 0
data['taken'] = _friendly_format_hours(total_hours)
return data
def _friendly_format_hours(total_hours):
days = 1.0 * total_hours / settings.WORK_DAY
hours = total_hours % settings.WORK_DAY
if not total_hours:
return '0 days'
elif total_hours < settings.WORK_DAY:
return '%s hours' % total_hours
elif total_hours == settings.WORK_DAY:
return '1 day'
else:
if not hours:
return '%d days' % days
else:
return '%s days' % days
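# Illustrative values for _friendly_format_hours(), assuming the usual
# settings.WORK_DAY == 8 (an assumption, not verified here):
#   _friendly_format_hours(4)  -> '4 hours'
#   _friendly_format_hours(8)  -> '1 day'
#   _friendly_format_hours(12) -> '1.5 days'
#   _friendly_format_hours(16) -> '2 days'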
def get_right_nows():
right_now_users = []
right_nows = defaultdict(list)
_today = datetime.date.today()
for entry in (Entry.objects
.filter(start__lte=_today,
end__gte=_today,
total_hours__gte=0)
.order_by('user__first_name',
'user__last_name',
'user__username')):
if entry.user not in right_now_users:
right_now_users.append(entry.user)
left = (entry.end - _today).days + 1
right_nows[entry.user].append((left, entry))
return right_nows, right_now_users
def get_upcomings(max_days=14):
users = []
upcoming = defaultdict(list)
today = datetime.date.today()
max_future = today + datetime.timedelta(days=max_days)
for entry in (Entry.objects
.filter(start__gt=today,
start__lt=max_future,
total_hours__gte=0)
.order_by('user__first_name',
'user__last_name',
'user__username')):
if entry.user not in users:
users.append(entry.user)
days = (entry.start - today).days + 1
upcoming[entry.user].append((days, entry))
return upcoming, users
def make_entry_title(entry, this_user, include_details=True):
if entry.user != this_user:
if entry.user.first_name:
title = '%s %s - ' % (entry.user.first_name,
entry.user.last_name)
else:
title = '%s - ' % entry.user.username
else:
title = ''
days = 0
for hour in Hours.objects.filter(entry=entry):
if hour.hours == 8:
days += 1
elif hour.hours == 4:
days += 0.5
if days > 1:
if int(days) == days:
title += '%d days' % days
else:
title += '%s days' % days
if Hours.objects.filter(entry=entry, birthday=True).exists():
title += ' (includes birthday)'
elif (days == 1 and entry.total_hours == 0 and
Hours.objects.filter(entry=entry, birthday=True)):
title += 'Birthday!'
elif days == 1 and entry.total_hours == 8:
title += '1 day'
else:
title += '%s hours' % entry.total_hours
if entry.details:
if days == 1:
max_length = 20
else:
max_length = 40
if include_details:
title += ', '
if len(entry.details) > max_length:
title += entry.details[:max_length] + '...'
else:
title += entry.details
return title
@json_view
def calendar_events(request):
if not request.user.is_authenticated():
return http.HttpResponseForbidden('Must be logged in')
if not request.GET.get('start'):
return http.HttpResponseBadRequest('Argument start missing')
if not request.GET.get('end'):
return http.HttpResponseBadRequest('Argument end missing')
try:
start = parse_datetime(request.GET['start'])
except DatetimeParseError:
return http.HttpResponseBadRequest('Invalid start')
try:
end = parse_datetime(request.GET['end'])
except DatetimeParseError:
return http.HttpResponseBadRequest('Invalid end')
entries = []
COLORS = ("#EAA228", "#c5b47f", "#579575", "#839557", "#958c12",
"#953579", "#4b5de4", "#d8b83f", "#ff5800", "#0085cc",
"#c747a3", "#cddf54", "#FBD178", "#26B4E3", "#bd70c7")
user_ids = [request.user.pk]
colors = {}
colors_fullnames = []
colors[request.user.pk] = None
colors_fullnames.append((request.user.pk, 'Me myself and I', '#3366CC'))
for i, user_ in enumerate(get_observed_users(request.user, max_depth=2)):
user_ids.append(user_.pk)
colors[user_.pk] = COLORS[i]
full_name = user_.get_full_name()
if not full_name:
full_name = user_.username
colors_fullnames.append((
user_.pk,
full_name,
colors[user_.pk]
))
_managers = {}
def can_see_details(user):
if request.user.is_superuser:
return True
if request.user.pk == user.pk:
return True
if user.pk not in _managers:
_profile = user.get_profile()
_manager = None
if _profile and _profile.manager_user:
_manager = _profile.manager_user.pk
_managers[user.pk] = _manager
return _managers[user.pk] == request.user.pk
visible_user_ids = set()
for entry in (Entry.objects
.filter(user__in=user_ids,
total_hours__gte=0,
total_hours__isnull=False)
.select_related('user')
.exclude(Q(end__lt=start) | Q(start__gt=end))):
visible_user_ids.add(entry.user.pk)
entries.append({
'id': entry.pk,
'title': make_entry_title(entry, request.user,
include_details=can_see_details(entry.user)),
'start': entry.start.strftime('%Y-%m-%d'),
'end': entry.end.strftime('%Y-%m-%d'),
'color': colors[entry.user.pk],
'mine': entry.user.pk == request.user.pk,
})
colors = [dict(name=x, color=y) for (pk, x, y) in colors_fullnames
if pk in visible_user_ids]
return {'events': entries, 'colors': colors}
def get_minions(user, depth=1, max_depth=2):
minions = []
for minion in (UserProfile.objects.filter(manager_user=user)
.select_related('manager_user')
.order_by('manager_user')):
minions.append(minion.user)
if depth < max_depth:
minions.extend(get_minions(minion.user,
depth=depth + 1,
max_depth=max_depth))
return minions
def get_siblings(user):
profile = user.get_profile()
if not profile.manager_user:
return []
users = []
for profile in (UserProfile.objects
.filter(manager_user=profile.manager_user)
.exclude(pk=user.pk)
.select_related('user')):
users.append(profile.user)
return users
def get_followed_users(user):
users = []
for each in (FollowingUser.objects
.filter(follower=user)
.select_related('following')):
users.append(each.following)
return users
def get_observed_users(this_user, depth=1, max_depth=2):
users = []
def is_blacklisted(user):
return (BlacklistedUser.objects
.filter(observer=this_user, observable=user)
.exists())
for user in get_minions(this_user, depth=depth, max_depth=max_depth):
if user not in users:
if not is_blacklisted(user):
users.append(user)
for user in get_siblings(this_user):
if user not in users:
if not is_blacklisted(user):
users.append(user)
profile = this_user.get_profile()
manager = profile.manager_user
if manager and manager not in users:
if not is_blacklisted(manager):
users.append(manager)
for user in get_followed_users(this_user):
if user not in users:
users.append(user)
return users
@transaction.commit_on_success
@login_required
def notify(request):
data = {}
data['page_title'] = "Notify about new vacation"
if request.method == 'POST':
form = forms.AddForm(request.user, data=request.POST)
if form.is_valid():
start = form.cleaned_data['start']
end = form.cleaned_data['end']
details = form.cleaned_data['details'].strip()
notify = form.cleaned_data['notify']
entry = Entry.objects.create(
user=request.user,
start=start,
end=end,
details=details,
)
clean_unfinished_entries(entry)
messages.info(request, 'Entry added, now specify hours')
url = reverse('dates.hours', args=[entry.pk])
request.session['notify_extra'] = notify
return redirect(url)
else:
initial = {}
if request.GET.get('start'):
try:
initial['start'] = parse_datetime(request.GET['start'])
except DatetimeParseError:
pass
if request.GET.get('end'):
try:
initial['end'] = parse_datetime(request.GET['end'])
except DatetimeParseError:
pass
form = forms.AddForm(request.user, initial=initial)
profile = request.user.get_profile()
manager = None
if profile and profile.manager:
manager = ldap_lookup.fetch_user_details(profile.manager)
data['hr_managers'] = [x.user for x in
(UserProfile.objects
.filter(hr_manager=True)
.select_related('user'))]
data['manager'] = manager
data['all_managers'] = [x for x in data['hr_managers'] if x]
if manager:
data['all_managers'].append(manager)
data['form'] = form
return render(request, 'dates/notify.html', data)
@transaction.commit_on_success
@login_required
def cancel_notify(request):
Entry.objects.filter(user=request.user, total_hours__isnull=True).delete()
return redirect(reverse('dates.home'))
def clean_unfinished_entries(good_entry):
# delete all entries that don't have total_hours and touch on the
# same dates as this good one
bad_entries = (Entry.objects
.filter(user=good_entry.user,
total_hours__isnull=True)
.exclude(pk=good_entry.pk))
for entry in bad_entries:
entry.delete()
@transaction.commit_on_success
@login_required
def hours(request, pk):
data = {}
entry = get_object_or_404(Entry, pk=pk)
if entry.user != request.user:
if not (request.user.is_staff or request.user.is_superuser):
return http.HttpResponseForbidden('insufficient access')
if request.method == 'POST':
form = forms.HoursForm(entry, data=request.POST)
if form.is_valid():
total_hours, is_edit = save_entry_hours(entry, form)
extra_users = request.session.get('notify_extra', '')
extra_users = [x.strip() for x
in extra_users.split(';')
if x.strip()]
success, email_addresses = send_email_notification(
entry,
extra_users,
is_edit=is_edit,
)
assert success
#messages.info(request,
# '%s hours of vacation logged.' % total_hours
#)
recently_created = make_entry_title(entry, request.user)
cache_key = 'recently_created_%s' % request.user.pk
cache.set(cache_key, recently_created, 60)
url = reverse('dates.emails_sent', args=[entry.pk])
url += '?' + urlencode({'e': email_addresses}, True)
return redirect(url)
else:
initial = {}
for date in utils.get_weekday_dates(entry.start, entry.end):
try:
#hours_ = Hours.objects.get(entry=entry, date=date)
hours_ = Hours.objects.get(date=date, entry__user=entry.user)
initial[date.strftime('d-%Y%m%d')] = hours_.hours
except Hours.DoesNotExist:
initial[date.strftime('d-%Y%m%d')] = settings.WORK_DAY
form = forms.HoursForm(entry, initial=initial)
data['form'] = form
if entry.total_hours:
data['total_hours'] = entry.total_hours
else:
total_days = 0
for date in utils.get_weekday_dates(entry.start, entry.end):
try:
hours_ = Hours.objects.get(entry=entry, date=date)
print hours_.hours
if hours_.hours == settings.WORK_DAY:
total_days += 1
elif hours_.hours:
total_days += .5
except Hours.DoesNotExist:
total_days += 1
data['total_days'] = total_days
notify = request.session.get('notify_extra', [])
data['notify'] = notify
return render(request, 'dates/hours.html', data)
def save_entry_hours(entry, form):
assert form.is_valid()
total_hours = 0
for date in utils.get_weekday_dates(entry.start, entry.end):
hours = int(form.cleaned_data[date.strftime('d-%Y%m%d')])
birthday = False
if hours == -1:
birthday = True
hours = 0
assert hours >= 0 and hours <= settings.WORK_DAY, hours
try:
hours_ = Hours.objects.get(entry__user=entry.user,
date=date)
if hours_.hours:
# this nullifies the previous entry on this date
reverse_entry = Entry.objects.create(
user=hours_.entry.user,
start=date,
end=date,
details=hours_.entry.details,
total_hours=hours_.hours * -1,
)
Hours.objects.create(
entry=reverse_entry,
hours=hours_.hours * -1,
date=date,
)
#hours_.hours = hours # nasty stuff!
#hours_.birthday = birthday
#hours_.save()
except Hours.DoesNotExist:
# nothing to credit
pass
Hours.objects.create(
entry=entry,
hours=hours,
date=date,
birthday=birthday,
)
total_hours += hours
#raise NotImplementedError
is_edit = entry.total_hours is not None
#if entry.total_hours is not None:
entry.total_hours = total_hours
entry.save()
return total_hours, is_edit
def send_email_notification(entry, extra_users, is_edit=False):
email_addresses = []
for profile in (UserProfile.objects
.filter(hr_manager=True,
user__email__isnull=False)):
email_addresses.append(profile.user.email)
profile = entry.user.get_profile()
if profile and profile.manager:
manager = ldap_lookup.fetch_user_details(profile.manager)
if manager.get('mail'):
email_addresses.append(manager['mail'])
if extra_users:
email_addresses.extend(extra_users)
email_addresses = list(set(email_addresses)) # get rid of dupes
if not email_addresses:
email_addresses = [settings.FALLBACK_TO_ADDRESS]
if is_edit:
subject = settings.EMAIL_SUBJECT_EDIT
else:
subject = settings.EMAIL_SUBJECT
subject = subject % dict(
first_name=entry.user.first_name,
last_name=entry.user.last_name,
username=entry.user.username,
email=entry.user.email,
)
message = template = loader.get_template('dates/notification.txt')
context = {
'entry': entry,
'user': entry.user,
'is_edit': is_edit,
'settings': settings,
'start_date': entry.start.strftime(settings.DEFAULT_DATE_FORMAT),
}
body = template.render(Context(context)).strip()
connection = get_connection()
message = EmailMessage(
subject=subject,
body=body,
from_email=entry.user.email,
to=email_addresses,
cc=entry.user.email and [entry.user.email] or None,
connection=connection
)
success = message.send()
return success, email_addresses
@login_required
def emails_sent(request, pk):
data = {}
entry = get_object_or_404(Entry, pk=pk)
if entry.user != request.user:
if not (request.user.is_staff or request.user.is_superuser):
return http.HttpResponseForbidden('insufficient access')
emails = request.REQUEST.getlist('e')
if isinstance(emails, basestring):
emails = [emails]
data['emails'] = emails
data['emailed_users'] = []
for email in emails:
record = ldap_lookup.fetch_user_details(email)
if record:
data['emailed_users'].append(record)
else:
data['emailed_users'].append(email)
show_fireworks = not request.COOKIES.get('no_fw', False)
data['show_fireworks'] = show_fireworks
return render(request, 'dates/emails_sent.html', data)
@login_required
def list_(request):
data = {}
form = forms.ListFilterForm(date_format='%d %B %Y',
data=request.GET)
if form.is_valid():
data['filters'] = form.cleaned_data
data['today'] = datetime.date.today()
entries_base = Entry.objects.all()
try:
data['first_date'] = entries_base.order_by('start')[0].start
data['last_date'] = entries_base.order_by('-end')[0].end
data['first_filed_date'] = (entries_base
.order_by('add_date')[0]
.add_date)
except IndexError:
# first run, not so important
data['first_date'] = datetime.date(2000, 1, 1)
data['last_date'] = datetime.date(2000, 1, 1)
data['first_filed_date'] = datetime.date(2000, 1, 1)
data['form'] = form
data['query_string'] = request.META.get('QUERY_STRING')
return render(request, 'dates/list.html', data)
@login_required
def list_csv(request):
entries = get_entries_from_request(request.GET)
response = http.HttpResponse(mimetype='text/csv')
writer = CSVUnicodeWriter(response)
writer.writerow((
'ID',
'EMAIL',
'FIRST NAME',
'LAST NAME',
'ADDED',
'START',
'END',
'DAYS',
'DETAILS',
'CITY',
'COUNTRY',
'START DATE',
))
profiles = {} # basic memoization
for entry in entries:
if entry.user.pk not in profiles:
profiles[entry.user.pk] = entry.user.get_profile()
profile = profiles[entry.user.pk]
writer.writerow((
str(entry.pk),
entry.user.email,
entry.user.first_name,
entry.user.last_name,
entry.add_date.strftime('%Y-%m-%d'),
entry.start.strftime('%Y-%m-%d'),
entry.end.strftime('%Y-%m-%d'),
str(entry.total_days),
entry.details,
profile.city,
profile.country,
(profile.start_date and
profile.start_date.strftime('%Y-%m-%d') or ''),
))
return response
@json_view
@login_required
def list_json(request):
entries = get_entries_from_request(request.GET)
_managers = {}
def can_see_details(user):
if request.user.is_superuser:
return True
if request.user.pk == user.pk:
return True
if user.pk not in _managers:
_profile = user.get_profile()
_manager = None
if _profile and _profile.manager_user:
_manager = _profile.manager_user.pk
_managers[user.pk] = _manager
return _managers[user.pk] == request.user.pk
data = []
profiles = {}
for entry in entries:
if entry.user.pk not in profiles:
profiles[entry.user.pk] = entry.user.get_profile()
profile = profiles[entry.user.pk]
if entry.total_hours < 0:
details = '*automatic edit*'
elif can_see_details(entry.user):
details = entry.details
else:
details = ''
row = [entry.user.email,
entry.user.first_name,
entry.user.last_name,
entry.add_date.strftime('%Y-%m-%d'),
entry.total_days,
entry.start.strftime('%Y-%m-%d'),
entry.end.strftime('%Y-%m-%d'),
profile.city,
profile.country,
details,
#edit_link,
#hours_link
]
data.append(row)
return {'aaData': data}
def get_entries_from_request(data):
form = forms.ListFilterForm(date_format='%d %B %Y', data=data)
if not form.is_valid():
return Entry.objects.none()
fdata = form.cleaned_data
entries = (Entry.objects.exclude(total_hours=None)
.select_related('user'))
if fdata.get('date_from'):
entries = entries.filter(end__gte=fdata.get('date_from'))
if fdata.get('date_to'):
entries = entries.filter(start__lte=fdata.get('date_to'))
if fdata.get('date_filed_from'):
entries = entries.filter(
add_date__gte=fdata.get('date_filed_from'))
if fdata.get('date_filed_to'):
entries = entries.filter(
add_date__lt=fdata.get('date_filed_to') +
datetime.timedelta(days=1))
if fdata.get('name'):
name = fdata['name'].strip()
if valid_email(name):
entries = entries.filter(user__email__iexact=name)
else:
entries = entries.filter(
Q(user__first_name__istartswith=name.split()[0]) |
Q(user__last_name__iendswith=name.split()[-1])
)
if fdata.get('country'):
country = fdata['country'].strip()
_users = UserProfile.objects.filter(country=country).values('user_id')
entries = entries.filter(user__id__in=_users)
return entries
@login_required
def following(request):
data = {}
observed = []
_followed = get_followed_users(request.user)
_minions_1 = get_minions(request.user, depth=1, max_depth=1)
_minions_2 = get_minions(request.user, depth=1, max_depth=2)
_manager = request.user.get_profile().manager_user
for user in sorted(get_observed_users(request.user, max_depth=2),
lambda x, y: cmp(x.first_name.lower(),
y.first_name.lower())):
if user in _minions_1:
reason = 'direct manager of'
elif user in _minions_2:
reason = 'indirect manager of'
elif user == _manager:
reason = 'your manager'
elif user in _followed:
reason = 'curious'
else:
reason = 'teammate'
observed.append((user, reason))
not_observed = (BlacklistedUser.objects
.filter(observer=request.user)
.order_by('observable__first_name'))
data['observed'] = observed
data['not_observed'] = [x.observable for x in not_observed]
return render(request, 'dates/following.html', data)
@json_view
@login_required
@transaction.commit_on_success
@require_POST
def save_following(request):
search = request.POST.get('search')
if not search:
return http.HttpResponseBadRequest('Missing search')
if (-1 < search.rfind('<') < search.rfind('@') < search.rfind('>')):
try:
email = re.findall('<([\w\.\-]+@[\w\.\-]+)>', search)[0]
email = email.strip()
validate_email(email)
except (ValidationError, IndexError):
email = None
elif search.isdigit():
try:
email = User.objects.get(pk=search).email
except User.DoesNotExist:
email = None # will deal with this later
else:
found = []
result = ldap_lookup.search_users(search, 30, autocomplete=True)
for each in result:
try:
found.append(User.objects.get(email__iexact=each['mail']))
except User.DoesNotExist:
pass
if len(found) > 1:
return http.HttpResponseBadRequest('More than one user found')
elif not found:
return http.HttpResponseBadRequest('No user found')
else:
email = found[0].email
# if no email is found in the search, it's an error
if not email:
return http.HttpResponseBadRequest('No email found')
try:
user = User.objects.get(email__iexact=email)
except User.DoesNotExist:
return http.HttpResponseBadRequest('No user by that email found')
FollowingUser.objects.get_or_create(
follower=request.user,
following=user,
)
# find a reason why we're following this user
_minions_1 = get_minions(request.user, depth=1, max_depth=1)
_minions_2 = get_minions(request.user, depth=1, max_depth=2)
if user in _minions_1:
reason = 'direct manager of'
elif user in _minions_2:
reason = 'indirect manager of'
elif user == request.user.get_profile().manager_user:
reason = 'your manager'
elif (request.user.get_profile().manager_user
and user in _minions_1):
reason = 'teammate'
else:
reason = 'curious'
name = ('%s %s' % (user.first_name,
user.last_name)).strip()
if not name:
name = user.username
data = {
'id': user.pk,
'name': name,
'reason': reason,
}
return data
@json_view
@login_required
@transaction.commit_on_success
@require_POST
def save_unfollowing(request):
remove = request.POST.get('remove')
try:
user = User.objects.get(pk=remove)
except (ValueError, User.DoesNotExist):
return http.HttpResponseBadRequest('Invalid user ID')
for f in (FollowingUser.objects
.filter(follower=request.user, following=user)):
f.delete()
data = {}
if user in get_observed_users(request.user, max_depth=2):
# if not blacklisted, this user will automatically re-appear
BlacklistedUser.objects.get_or_create(
observer=request.user,
observable=user
)
data['id'] = user.pk
name = ('%s %s' % (user.first_name,
user.last_name)).strip()
if not name:
name = user.username
data['name'] = name
return data
def calendar_vcal(request, key):
base_url = '%s://%s' % (request.is_secure() and 'https' or 'http',
RequestSite(request).domain)
home_url = base_url + '/'
cal = vobject.iCalendar()
cal.add('x-wr-calname').value = 'Mozilla Vacation'
try:
user = UserKey.objects.get(key=key).user
except UserKey.DoesNotExist:
# instead of raising a HTTP error, respond a calendar
# that urges the user to update the stale URL
event = cal.add('vevent')
event.add('summary').value = (
"Calendar expired. Visit %s#calendarurl to get the "
"new calendar URL" % home_url
)
today = datetime.date.today()
event.add('dtstart').value = today
event.add('dtend').value = today
event.add('url').value = '%s#calendarurl' % (home_url,)
event.add('description').value = ("The calendar you used has expired "
"and is no longer associated with any user")
return _render_vcalendar(cal, key)
# always start on the first of this month
today = datetime.date.today()
#first = datetime.date(today.year, today.month, 1)
user_ids = [user.pk]
for user_ in get_observed_users(user, max_depth=2):
user_ids.append(user_.pk)
entries = (Entry.objects
.filter(user__in=user_ids,
total_hours__gte=0,
total_hours__isnull=False,
end__gte=today)
.select_related('user')
)
_list_base_url = base_url + reverse('dates.list')
def make_list_url(entry):
name = entry.user.get_full_name()
if not name:
name = entry.user.username
data = {
'date_from': entry.start.strftime('%d %B %Y'),
'date_to': entry.end.strftime('%d %B %Y'),
'name': name
}
return _list_base_url + '?' + urlencode(data, True)
for entry in entries:
event = cal.add('vevent')
event.add('summary').value = '%s Vacation' % make_entry_title(entry, user,
include_details=False)
event.add('dtstart').value = entry.start
event.add('dtend').value = entry.end
#url = (home_url + '?cal_y=%d&cal_m=%d' %
# (slot.date.year, slot.date.month))
event.add('url').value = make_list_url(entry)
#event.add('description').value = entry.details
event.add('description').value = "Log in to see the details"
return _render_vcalendar(cal, key)
def _render_vcalendar(cal, key):
#return http.HttpResponse(cal.serialize(),
# mimetype='text/plain;charset=utf-8'
# )
resp = http.HttpResponse(cal.serialize(),
mimetype='text/calendar;charset=utf-8'
)
filename = '%s.ics' % (key,)
resp['Content-Disposition'] = 'inline; filename="%s"' % filename
return resp
@login_required
@transaction.commit_on_success
def reset_calendar_url(request):
for each in UserKey.objects.filter(user=request.user):
each.delete()
return redirect(reverse('dates.home') + '#calendarurl')
@login_required
def about_calendar_url(request):
data = {}
data['calendar_url'] = _get_user_calendar_url(request)
return render(request, 'dates/about-calendar-url.html', data)
@login_required
def duplicate_report(request):
data = {
'filter_errors': None,
}
if request.method == 'POST':
raise NotImplementedError
else:
form = forms.DuplicateReportFilterForm(date_format='%d %B %Y',
data=request.GET)
user = request.user
filter_ = dict(user=user)
if form.is_valid():
if form.cleaned_data['user']:
user = form.cleaned_data['user']
if user != request.user:
if not (request.user.is_superuser
or request.user.is_staff):
if user != request.user:
return http.HttpResponse(
"Only available for admins")
filter_['user'] = user
if form.cleaned_data['since']:
filter_['start__gte'] = form.cleaned_data['since']
data['since'] = form.cleaned_data['since']
else:
data['filter_errors'] = form.errors
data['first_date'] = (Entry.objects
.filter(user=user)
.aggregate(Min('start'))
['start__min'])
start_dates = (Entry.objects
.filter(**filter_)
.values("start")
.annotate(Count("start"))
.order_by('-start__count'))
groups = []
for each in start_dates:
if each['start__count'] <= 1:
break
entries = Entry.objects.filter(user=user, start=each['start'])
details = [x.details for x in entries]
note = "Probably not a mistake"
if len(set(details)) == 1:
note = ("Probably a duplicate! "
"The details are the same for each entry")
else:
note = "Possibly not a duplicate since the details different"
groups.append((entries, note))
data['groups'] = groups
if 'since' not in data:
data['since'] = data['first_date']
return render(request, 'dates/duplicate-report.html', data)
| mozilla/pto | pto/apps/dates/views.py | Python | mpl-2.0 | 37,380 | 0.00099 |
from django.test import TestCase
from .utils import is_holiday
from datetime import date, timedelta
class HolidaysTests(TestCase):
longMessage = True
fixtures = ['demo']
def fullYearTest(self, group, year, holidays):
it = date(year, 1, 1)
end = date(year, 12, 31)
delta = timedelta(days=1)
calc_holidays = []
while it <= end:
if is_holiday(group, it):
calc_holidays.append(it)
it += delta
self.assertEquals(calc_holidays, holidays)
def testPortugal2015(self):
self.fullYearTest(
'PT',
2015,
[
date(2015, 1, 1),
date(2015, 4, 3),
date(2015, 4, 5),
date(2015, 4, 25),
date(2015, 5, 1),
date(2015, 6, 10),
date(2015, 8, 15),
date(2015, 12, 8),
date(2015, 12, 25),
],
)
def testPortugalPorto2015(self):
self.fullYearTest(
'PT-PRT',
2015,
[
date(2015, 1, 1),
date(2015, 4, 3),
date(2015, 4, 5),
date(2015, 4, 25),
date(2015, 5, 1),
date(2015, 6, 10),
date(2015, 6, 24),
date(2015, 8, 15),
date(2015, 12, 8),
date(2015, 12, 25),
],
)
| fopina/django-holidays | holidays/tests.py | Python | mit | 1,476 | 0 |
import logging
import sys
import traceback
from collections import namedtuple
import numpy as np
import pandas as pd
from scipy.stats import chisquare
from . import categorizer as cat
from . import draw
from .ipf.ipf import calculate_constraints
from .ipu.ipu import household_weights
logger = logging.getLogger("synthpop")
FitQuality = namedtuple(
'FitQuality',
('people_chisq', 'people_p'))
BlockGroupID = namedtuple(
'BlockGroupID', ('state', 'county', 'tract', 'block_group'))
def enable_logging():
handler = logging.StreamHandler(stream=sys.stdout)
logger.addHandler(handler)
logger.setLevel(logging.DEBUG)
def synthesize(h_marg, p_marg, h_jd, p_jd, h_pums, p_pums,
marginal_zero_sub=.01, jd_zero_sub=.001, hh_index_start=0):
# this is the zero marginal problem
h_marg = h_marg.replace(0, marginal_zero_sub)
p_marg = p_marg.replace(0, marginal_zero_sub)
# zero cell problem
h_jd.frequency = h_jd.frequency.replace(0, jd_zero_sub)
p_jd.frequency = p_jd.frequency.replace(0, jd_zero_sub)
# ipf for households
logger.info("Running ipf for households")
h_constraint, _ = calculate_constraints(h_marg, h_jd.frequency)
h_constraint.index = h_jd.cat_id
logger.debug("Household constraint")
logger.debug(h_constraint)
logger.debug(h_constraint.sum())
# ipf for persons
logger.info("Running ipf for persons")
p_constraint, _ = calculate_constraints(p_marg, p_jd.frequency)
p_constraint.index = p_jd.cat_id
logger.debug("Person constraint")
logger.debug(p_constraint)
logger.debug(p_constraint.sum())
# make frequency tables that the ipu expects
household_freq, person_freq = cat.frequency_tables(p_pums, h_pums,
p_jd.cat_id,
h_jd.cat_id)
# do the ipu to match person marginals
logger.info("Running ipu")
import time
t1 = time.time()
best_weights, fit_quality, iterations = household_weights(household_freq,
person_freq,
h_constraint,
p_constraint)
logger.info("Time to run ipu: %.3fs" % (time.time()-t1))
logger.debug("IPU weights:")
logger.debug(best_weights.describe())
logger.debug(best_weights.sum())
logger.debug("Fit quality:")
logger.debug(fit_quality)
logger.debug("Number of iterations:")
logger.debug(iterations)
num_households = int(h_marg.groupby(level=0).sum().mean())
print "Drawing %d households" % num_households
best_chisq = np.inf
return draw.draw_households(
num_households, h_pums, p_pums, household_freq, h_constraint,
p_constraint, best_weights, hh_index_start=hh_index_start)
def synthesize_all(recipe, num_geogs=None, indexes=None,
marginal_zero_sub=.01, jd_zero_sub=.001):
"""
Parameters
----------
write_households_csv, write_persons_csv : str
Name of households and persons csv file to write.
Pass None to return these rather than write.
Returns
-------
households, people : pandas.DataFrame
Only returns these if `write_households_csv` and `write_persons_csv`
are None.
fit_quality : dict of FitQuality
Keys are geographic IDs, values are namedtuples with attributes
``.household_chisq``, ``household_p``, ``people_chisq``,
and ``people_p``.
"""
print "Synthesizing at geog level: '{}' (number of geographies is {})".\
format(recipe.get_geography_name(), recipe.get_num_geographies())
if indexes is None:
indexes = recipe.get_available_geography_ids()
hh_list = []
people_list = []
cnt = 0
fit_quality = {}
hh_index_start = 0
# TODO will parallelization work here?
for geog_id in indexes:
print "Synthesizing geog id:\n", geog_id
h_marg = recipe.get_household_marginal_for_geography(geog_id)
logger.debug("Household marginal")
logger.debug(h_marg)
p_marg = recipe.get_person_marginal_for_geography(geog_id)
logger.debug("Person marginal")
logger.debug(p_marg)
h_pums, h_jd = recipe.\
get_household_joint_dist_for_geography(geog_id)
logger.debug("Household joint distribution")
logger.debug(h_jd)
p_pums, p_jd = recipe.get_person_joint_dist_for_geography(geog_id)
logger.debug("Person joint distribution")
logger.debug(p_jd)
try:
households, people, people_chisq, people_p = \
synthesize(
h_marg, p_marg, h_jd, p_jd, h_pums, p_pums,
marginal_zero_sub=marginal_zero_sub, jd_zero_sub=jd_zero_sub,
hh_index_start=hh_index_start)
if not recipe.write_households(geog_id, households):
hh_list.append(households)
if not recipe.write_persons(geog_id, people):
people_list.append(people)
key = tuple(geog_id.values)
# key = BlockGroupID(
# geog_id['state'], geog_id['county'], geog_id['tract'],
# geog_id['block group'])
fit_quality[key] = FitQuality(people_chisq, people_p)
cnt += 1
if len(households) > 0:
hh_index_start = households.index.values[-1] + 1
if num_geogs is not None and cnt >= num_geogs:
break
except Exception as e:
print "Exception caught: ", sys.exc_info()[0]
print traceback.format_exc()
# continue
return (pd.concat(hh_list) if len(hh_list) > 0 else None,
pd.concat(people_list, ignore_index=True) if len(people_list) > 0 else None,
fit_quality)
| sfcta/synthpop | synthpop/synthesizer.py | Python | bsd-3-clause | 6,015 | 0.001829 |
from __future__ import unicode_literals
from rest_framework import viewsets
from rest_framework import permissions
from videos.api.serializers import video as video_serializers
from videos.models import Video
class VideoViewSet(viewsets.ModelViewSet):
permission_classes = [permissions.DjangoModelPermissionsOrAnonReadOnly]
def get_queryset(self):
queryset = Video.objects.all()
if self.request.method not in permissions.SAFE_METHODS:
if self.request.user.is_authenticated:
queryset = queryset.filter_owner(user=self.request.user)
else:
return queryset.none()
return queryset
def get_serializer_class(self):
if self.request.method in permissions.SAFE_METHODS:
return video_serializers.RetrieveSerializer
return video_serializers.DefaultSerializer
def get_serializer_context(self):
context = super(VideoViewSet, self).get_serializer_context()
if self.request.method not in permissions.SAFE_METHODS \
and not self.request.user.is_superuser:
context['exclude'] = ('sites', )
return context
def perform_create(self, serializer):
serializer.save(owner=self.request.user)
| sdeleeuw/contagement | videos/api/viewsets/video.py | Python | gpl-3.0 | 1,272 | 0 |
#hw 1/ task8/ Sergei Shybkoi
t = (1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 'a', 'b', 'c')
print "Set:",t
print "Each third element:"
print t[2::3]
print t[-1*len(t)+2:-1:3] | pybursa/homeworks | s_shybkoy/hw1/hw1_task8_ShybkoiSergei.py | Python | gpl-2.0 | 162 | 0.018519 |
# -*- coding: utf-8 -*-
from django import forms
from django.core.exceptions import ImproperlyConfigured
from django.db.models import fields
from modeltranslation import settings as mt_settings
from modeltranslation.utils import (
get_language, build_localized_fieldname, build_localized_verbose_name, resolution_order)
from modeltranslation.widgets import ClearableWidgetWrapper
SUPPORTED_FIELDS = (
fields.CharField,
# Above implies also CommaSeparatedIntegerField, EmailField, FilePathField, SlugField
# and URLField as they are subclasses of CharField.
fields.TextField,
fields.IntegerField,
# Above implies also BigIntegerField, SmallIntegerField, PositiveIntegerField and
# PositiveSmallIntegerField, as they are subclasses of IntegerField.
fields.BooleanField,
fields.NullBooleanField,
fields.FloatField,
fields.DecimalField,
fields.IPAddressField,
fields.DateField,
fields.DateTimeField,
fields.TimeField,
fields.files.FileField,
fields.files.ImageField,
fields.related.ForeignKey,
# Above implies also OneToOneField
)
try:
SUPPORTED_FIELDS += (fields.GenericIPAddressField,) # Django 1.4+ only
except AttributeError:
pass
class NONE:
"""
Used for fallback options when they are not provided (``None`` can be
given as a fallback or undefined value) or to mark that a nullable value
is not yet known and needs to be computed (e.g. field default).
"""
pass
def create_translation_field(model, field_name, lang, empty_value):
"""
Translation field factory. Returns a ``TranslationField`` based on a
fieldname and a language.
The list of supported fields can be extended by defining a tuple of field
names in the projects settings.py like this::
MODELTRANSLATION_CUSTOM_FIELDS = ('MyField', 'MyOtherField',)
If the class is neither a subclass of fields in ``SUPPORTED_FIELDS``, nor
in ``CUSTOM_FIELDS`` an ``ImproperlyConfigured`` exception will be raised.
"""
if empty_value not in ('', 'both', None, NONE):
raise ImproperlyConfigured('%s is not a valid empty_value.' % empty_value)
field = model._meta.get_field(field_name)
cls_name = field.__class__.__name__
if not (isinstance(field, SUPPORTED_FIELDS) or cls_name in mt_settings.CUSTOM_FIELDS):
raise ImproperlyConfigured(
'%s is not supported by modeltranslation.' % cls_name)
translation_class = field_factory(field.__class__)
return translation_class(translated_field=field, language=lang, empty_value=empty_value)
def field_factory(baseclass):
class TranslationFieldSpecific(TranslationField, baseclass):
pass
# Reflect baseclass name of returned subclass
TranslationFieldSpecific.__name__ = 'Translation%s' % baseclass.__name__
return TranslationFieldSpecific
class TranslationField(object):
"""
The translation field functions as a proxy to the original field which is
wrapped.
For every field defined in the model's ``TranslationOptions`` localized
versions of that field are added to the model depending on the languages
given in ``settings.LANGUAGES``.
If for example there is a model ``News`` with a field ``title`` which is
registered for translation and the ``settings.LANGUAGES`` contains the
``de`` and ``en`` languages, the fields ``title_de`` and ``title_en`` will
be added to the model class. These fields are realized using this
descriptor.
    The translation field needs to know which language it contains; therefore,
    that needs to be specified when the field is created.
"""
def __init__(self, translated_field, language, empty_value, *args, **kwargs):
# Update the dict of this field with the content of the original one
# This might be a bit radical?! Seems to work though...
self.__dict__.update(translated_field.__dict__)
# Store the originally wrapped field for later
self.translated_field = translated_field
self.language = language
self.empty_value = empty_value
if empty_value is NONE:
self.empty_value = None if translated_field.null else ''
        # Translations are always optional (for now - maybe add some parameters
# to the translation options for configuring this)
if not isinstance(self, fields.BooleanField):
# TODO: Do we really want to enforce null *at all*? Shouldn't this
# better honour the null setting of the translated field?
self.null = True
self.blank = True
# Adjust the name of this field to reflect the language
self.attname = build_localized_fieldname(self.translated_field.name, self.language)
self.name = self.attname
# Copy the verbose name and append a language suffix
# (will show up e.g. in the admin).
self.verbose_name = build_localized_verbose_name(translated_field.verbose_name, language)
# ForeignKey support - rewrite related_name
if self.rel and self.related and not self.rel.is_hidden():
import copy
current = self.related.get_accessor_name()
self.rel = copy.copy(self.rel) # Since fields cannot share the same rel object.
# self.related doesn't need to be copied, as it will be recreated in
# ``RelatedField.do_related_class``
if self.rel.related_name is None:
# For implicit related_name use different query field name
loc_related_query_name = build_localized_fieldname(
self.related_query_name(), self.language)
self.related_query_name = lambda: loc_related_query_name
self.rel.related_name = build_localized_fieldname(current, self.language)
self.rel.field = self # Django 1.6
if hasattr(self.rel.to._meta, '_related_objects_cache'):
del self.rel.to._meta._related_objects_cache
# Django 1.5 changed definition of __hash__ for fields to be fine with hash requirements.
# It spoiled our machinery, since TranslationField has the same creation_counter as its
# original field and fields didn't get added to sets.
    # So here we override __eq__ and __hash__ to fix the issue while remaining consistent with
# http://docs.python.org/2.7/reference/datamodel.html#object.__hash__
def __eq__(self, other):
if isinstance(other, fields.Field):
return (self.creation_counter == other.creation_counter and
self.language == getattr(other, 'language', None))
return super(TranslationField, self).__eq__(other)
def __ne__(self, other):
return not self.__eq__(other)
def __hash__(self):
return hash((self.creation_counter, self.language))
def get_attname_column(self):
attname = self.get_attname()
if self.translated_field.db_column:
column = build_localized_fieldname(self.translated_field.db_column, self.language)
else:
column = attname
return attname, column
def formfield(self, *args, **kwargs):
"""
Returns proper formfield, according to empty_values setting
(only for ``forms.CharField`` subclasses).
There are 3 different formfields:
- CharField that stores all empty values as empty strings;
- NullCharField that stores all empty values as None (Null);
- NullableField that can store both None and empty string.
By default, if no empty_values was specified in model's translation options,
NullCharField would be used if the original field is nullable, CharField otherwise.
This can be overridden by setting empty_values to '' or None.
Setting 'both' will result in NullableField being used.
Textual widgets (subclassing ``TextInput`` or ``Textarea``) used for
nullable fields are enriched with a clear checkbox, allowing ``None``
values to be preserved rather than saved as empty strings.
        The somewhat surprising behaviour of ``forms.CharField`` is documented
        as a "won't fix": https://code.djangoproject.com/ticket/9590.
"""
formfield = super(TranslationField, self).formfield(*args, **kwargs)
if isinstance(formfield, forms.CharField):
if self.empty_value is None:
from modeltranslation.forms import NullCharField
form_class = formfield.__class__
kwargs['form_class'] = type(
'Null%s' % form_class.__name__, (NullCharField, form_class), {})
formfield = super(TranslationField, self).formfield(*args, **kwargs)
elif self.empty_value == 'both':
from modeltranslation.forms import NullableField
form_class = formfield.__class__
kwargs['form_class'] = type(
'Nullable%s' % form_class.__name__, (NullableField, form_class), {})
formfield = super(TranslationField, self).formfield(*args, **kwargs)
if isinstance(formfield.widget, (forms.TextInput, forms.Textarea)):
formfield.widget = ClearableWidgetWrapper(formfield.widget)
return formfield
def save_form_data(self, instance, data, check=True):
# Allow 3rd-party apps forms to be saved using only translated field name.
# When translated field (e.g. 'name') is specified and translation field (e.g. 'name_en')
# not, we assume that form was saved without knowledge of modeltranslation and we make
# things right:
        # Translated field is saved first, setting the respective translation field value. Then
# translation field is being saved without value - and we handle this here (only for
# active language).
# Questionable fields are stored in special variable, which is later handled by clean_fields
# method on the model.
if check and self.language == get_language() and getattr(instance, self.name) and not data:
if not hasattr(instance, '_mt_form_pending_clear'):
instance._mt_form_pending_clear = {}
instance._mt_form_pending_clear[self.name] = data
else:
super(TranslationField, self).save_form_data(instance, data)
def south_field_triple(self):
"""
Returns a suitable description of this field for South.
"""
# We'll just introspect the _actual_ field.
from south.modelsinspector import introspector
try:
# Check if the field provides its own 'field_class':
field_class = self.translated_field.south_field_triple()[0]
except AttributeError:
field_class = '%s.%s' % (self.translated_field.__class__.__module__,
self.translated_field.__class__.__name__)
args, kwargs = introspector(self)
# That's our definition!
return (field_class, args, kwargs)
class TranslationFieldDescriptor(object):
"""
A descriptor used for the original translated field.
"""
def __init__(self, field, fallback_languages=None, fallback_value=NONE,
fallback_undefined=NONE):
"""
Stores fallback options and the original field, so we know it's name
and default.
"""
self.field = field
self.fallback_languages = fallback_languages
self.fallback_value = fallback_value
self.fallback_undefined = fallback_undefined
def __set__(self, instance, value):
"""
Updates the translation field for the current language.
"""
if getattr(instance, '_mt_init', False):
# When assignment takes place in model instance constructor, don't set value.
# This is essential for only/defer to work, but I think it's sensible anyway.
return
loc_field_name = build_localized_fieldname(self.field.name, get_language())
setattr(instance, loc_field_name, value)
def meaningful_value(self, val, undefined):
"""
Check if val is considered non-empty.
"""
if isinstance(val, fields.files.FieldFile):
return val.name and not (
isinstance(undefined, fields.files.FieldFile) and val == undefined)
return val is not None and val != undefined
def __get__(self, instance, owner):
"""
Returns value from the translation field for the current language, or
        value for another language according to the fallback languages, or the
custom fallback value, or field's default value.
"""
if instance is None:
return self
default = NONE
undefined = self.fallback_undefined
if undefined is NONE:
default = self.field.get_default()
undefined = default
langs = resolution_order(get_language(), self.fallback_languages)
for lang in langs:
loc_field_name = build_localized_fieldname(self.field.name, lang)
val = getattr(instance, loc_field_name, None)
if self.meaningful_value(val, undefined):
return val
if mt_settings.ENABLE_FALLBACKS and self.fallback_value is not NONE:
return self.fallback_value
else:
if default is NONE:
default = self.field.get_default()
# Some fields like FileField behave strange, as their get_default() doesn't return
# instance of attr_class, but rather None or ''.
# Normally this case is handled in the descriptor, but since we have overridden it, we
# must mock it up.
if (isinstance(self.field, fields.files.FileField) and
not isinstance(default, self.field.attr_class)):
return self.field.attr_class(instance, self.field, default)
return default
class TranslatedRelationIdDescriptor(object):
"""
A descriptor used for the original '_id' attribute of a translated
ForeignKey field.
"""
def __init__(self, field_name, fallback_languages):
self.field_name = field_name # The name of the original field (excluding '_id')
self.fallback_languages = fallback_languages
def __set__(self, instance, value):
lang = get_language()
loc_field_name = build_localized_fieldname(self.field_name, lang)
# Localized field name with '_id'
loc_attname = instance._meta.get_field(loc_field_name).get_attname()
setattr(instance, loc_attname, value)
def __get__(self, instance, owner):
if instance is None:
return self
langs = resolution_order(get_language(), self.fallback_languages)
for lang in langs:
loc_field_name = build_localized_fieldname(self.field_name, lang)
# Localized field name with '_id'
loc_attname = instance._meta.get_field(loc_field_name).get_attname()
val = getattr(instance, loc_attname, None)
if val is not None:
return val
return None
class LanguageCacheSingleObjectDescriptor(object):
"""
A Mixin for RelatedObjectDescriptors which use current language in cache lookups.
"""
accessor = None # needs to be set on instance
@property
def cache_name(self):
lang = get_language()
cache = build_localized_fieldname(self.accessor, lang)
return "_%s_cache" % cache
| yaroslavprogrammer/django-modeltranslation | modeltranslation/fields.py | Python | bsd-3-clause | 15,602 | 0.002628 |
#!/usr/bin/env python
# Copyright (c) 2006 Damien Miller <djm@mindrot.org>
#
# Permission to use, copy, modify, and distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
# $Id$
import editdist
import unittest
import random
test_vectors = (
( 'abc', 'abc', 0 ),
( 'abc', 'ab', 1 ),
( 'abc', 'abcd', 1 ),
( 'abc', 'bc', 1 ),
( 'abc', 'a', 2 ),
( 'abc', '', 3 ),
( '', '', 0 ),
( 'abc', 'acx', 2 ),
( 'abc', 'acxx', 3 ),
( 'abc', 'bcd', 2 ),
( 'a' * 1000, 'a' * 1000, 0 ),
( 'a' * 1000, 'b' * 1000, 1000),
)
def randstring(l):
a = "abcdefghijklmnopqrstuvwxyz"
r = ""
for i in range(0, l):
r += a[random.randint(0, len(a) - 1)]
return r
class TestRadix(unittest.TestCase):
def test_00__test_vectors(self):
for a, b, score in test_vectors:
self.assertEqual(editdist.distance(a, b), score)
def test_01__reversed_test_vectors(self):
for b, a, score in test_vectors:
self.assertEqual(editdist.distance(a, b), score)
def test_02__fuzz(self):
for i in range(0, 32) + range(128, 1024, 128):
for j in range(0, 32):
a = randstring(i)
b = randstring(j)
dist = editdist.distance(a, b)
self.assert_(dist >= 0)
def main():
unittest.main()
if __name__ == '__main__':
main()
| com4/py-editdist | test.py | Python | isc | 1,856 | 0.033405 |
#!/usr/bin/env python3
#pylint: disable=missing-docstring
#* This file is part of the MOOSE framework
#* https://www.mooseframework.org
#*
#* All rights reserved, see COPYRIGHT for full restrictions
#* https://github.com/idaholab/moose/blob/master/COPYRIGHT
#*
#* Licensed under LGPL 2.1, please see LICENSE for details
#* https://www.gnu.org/licenses/lgpl-2.1.html
import vtk
import chigger
camera = vtk.vtkCamera()
camera.SetViewUp(0.0105, 0.1507, 0.9885)
camera.SetPosition(15.6131, -0.3930, 0.0186)
camera.SetFocalPoint(0.0000, 0.0000, 0.1250)
reader = chigger.exodus.ExodusReader('../input/mug_blocks_out.e')
mug = chigger.exodus.ExodusResult(reader, block=[76], representation='points', camera=camera, color=[0,1,0])
window = chigger.RenderWindow(mug, size=[300,300], test=True)
window.update();window.resetCamera() #TODO: This is needed to make results render correctly, not sure why
window.write('points.png')
window.start()
| nuclear-wizard/moose | python/chigger/tests/wireframe/points.py | Python | lgpl-2.1 | 936 | 0.019231 |
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A wrapper for the gyp_main that ensures the appropriate include directories
are brought in.
"""
import os
import shlex
import sys
import vs_toolchain_wrapper
script_dir = os.path.dirname(os.path.realpath(__file__))
syzygy_src = os.path.abspath(os.path.join(script_dir, os.pardir, os.pardir))
sys.path.insert(0, os.path.join(syzygy_src, 'tools', 'gyp', 'pylib'))
import gyp
def apply_gyp_environment_from_file(file_path):
"""Reads in a *.gyp_env file and applies the valid keys to os.environ."""
if not os.path.exists(file_path):
return False
with open(file_path, 'rU') as f:
file_contents = f.read()
try:
file_data = eval(file_contents, {'__builtins__': None}, None)
except SyntaxError, e:
e.filename = os.path.abspath(file_path)
raise
supported_vars = (
'GYP_DEFINES',
'GYP_GENERATOR_FLAGS',
'GYP_GENERATORS',
'GYP_MSVS_VERSION',
)
for var in supported_vars:
file_val = file_data.get(var)
if file_val:
if var in os.environ:
print 'INFO: Environment value for "%s" overrides value in %s.' % (
var, os.path.abspath(file_path)
)
else:
os.environ[var] = file_val
return True
def get_output_directory():
"""Returns the output directory that GYP will use."""
# Handle generator flags from the environment.
genflags = shlex.split(os.environ.get('GYP_GENERATOR_FLAGS', ''))
needle = 'output_dir='
for item in genflags:
if item.startswith(needle):
return item[len(needle):]
return 'out'
def apply_syzygy_gyp_env(syzygy_src_path):
if 'SKIP_SYZYGY_GYP_ENV' not in os.environ:
# Update the environment based on syzygy.gyp_env
path = os.path.join(syzygy_src_path, 'syzygy.gyp_env')
applied_env_from_file = apply_gyp_environment_from_file(path)
if (not applied_env_from_file or not os.environ.get('GYP_GENERATORS')):
# Default to ninja if no generator has explicitly been set.
os.environ['GYP_GENERATORS'] = 'ninja'
if (not applied_env_from_file or not os.environ.get('GYP_MSVS_VERSION')):
os.environ['GYP_MSVS_VERSION'] = '2015'
if __name__ == '__main__':
# Get the path of the root 'src' directory.
self_dir = os.path.abspath(os.path.dirname(__file__))
src_dir = os.path.abspath(os.path.join(self_dir, '..', '..'))
apply_syzygy_gyp_env(src_dir)
assert os.environ.get('GYP_GENERATORS')
if os.environ.get('GYP_GENERATORS') == 'msvs':
print 'ERROR: The \'msvs\' configuration isn\'t supported anymore.'
sys.exit(1)
# Setup the VS toolchain.
vs_runtime_dll_dirs = \
vs_toolchain_wrapper.SetEnvironmentAndGetRuntimeDllDirs()
gyp_rc = gyp.main(sys.argv[1:])
# Copy the VS runtime DLLs to the build directories.
if vs_runtime_dll_dirs:
x64_runtime, x86_runtime = vs_runtime_dll_dirs
vs_toolchain_wrapper.CopyVsRuntimeDlls(
os.path.join(src_dir, get_output_directory()),
(x86_runtime, x64_runtime))
sys.exit(gyp_rc)
| google/syzygy | syzygy/build/gyp_main.py | Python | apache-2.0 | 3,560 | 0.009551 |
import logging
import re
import time
import os
from autotest.client.shared import error, utils
from virttest import utils_misc, env_process
@error.context_aware
def run_watchdog(test, params, env):
"""
Configure watchdog, crash the guest and check if watchdog_action occurs.
Test Step:
1. see every function step
Params:
:param test: QEMU test object.
:param params: Dictionary with test parameters.
:param env: Dictionary with the test environment.
"""
timeout = int(params.get("login_timeout", '360'))
relogin_timeout = int(params.get("relogin_timeout", '240'))
watchdog_device_type = params.get("watchdog_device_type", "i6300esb")
watchdog_action = params.get("watchdog_action", "reset")
trigger_cmd = params.get("trigger_cmd", "echo c > /dev/watchdog")
# internal function
def _watchdog_device_check(session, watchdog_device):
"""
        Check that the watchdog device has been found and initialized
        successfully; raise an error if not.
        """
        # when using ib700, its driver needs to be modprobed manually.
if watchdog_device == "ib700":
session.cmd("modprobe ib700wdt")
        # when the WDT is i6300esb, check the PCI info
if watchdog_device == "i6300esb":
error.context("checking pci info to ensure have WDT device",
logging.info)
o = session.cmd_output("lspci")
if o:
wdt_pci_info = re.findall(".*6300ESB Watchdog Timer", o)
if not wdt_pci_info:
raise error.TestFail("Can find watchdog pci")
logging.info("Found watchdog pci device : %s" % wdt_pci_info)
# checking watchdog init info using dmesg
error.context("Checking watchdog init info using dmesg", logging.info)
dmesg_info = params.get("dmesg_info", "(i6300ESB|ib700wdt).*init")
(s, o) = session.cmd_status_output(
"dmesg | grep -i '%s' " % dmesg_info)
if s != 0:
error_msg = "Wactchdog device '%s' initialization failed "
raise error.TestError(error_msg % watchdog_device)
logging.info("Watchdog device '%s' add and init successfully"
% watchdog_device)
logging.debug("Init info : '%s'" % o)
def _trigger_watchdog(session, trigger_cmd=None):
"""
Trigger watchdog action
Params:
@session: guest connect session.
        @trigger_cmd: command used to trigger the watchdog
"""
if trigger_cmd is not None:
error.context("Trigger Watchdog action using:'%s'." % trigger_cmd,
logging.info)
session.sendline(trigger_cmd)
def _action_check(session, watchdog_action):
"""
        Check whether or not the watchdog action occurred; raise an error if
        it did not.
"""
# when watchdog action is pause, shutdown, reset, poweroff
        # the vm session will become unresponsive
response_timeout = int(params.get("response_timeout", '240'))
error.context("Check whether or not watchdog action '%s' take effect"
% watchdog_action, logging.info)
if not utils_misc.wait_for(lambda: not session.is_responsive(),
response_timeout, 0, 1):
if watchdog_action == "none" or watchdog_action == "debug":
logging.info("OK, the guest session is responsive still")
else:
                raise error.TestFail(
                    "Oops, seems action '%s' takes no effect, "
                    "guest is still responsive" % watchdog_action)
        # when action is poweroff or shutdown (without the no-shutdown option), the vm
        # will die and qemu will exit.
        # Otherwise the vm monitor remains responsive and can report the vm status.
if (watchdog_action == "poweroff" or (watchdog_action == "shutdown"
and params.get("disable_shutdown") != "yes")):
if not utils_misc.wait_for(lambda: vm.is_dead(),
response_timeout, 0, 1):
                raise error.TestFail(
                    "Oops, seems '%s' action takes no effect, "
                    "guest is still alive!" % watchdog_action)
else:
if watchdog_action == "pause":
f_param = "paused"
elif watchdog_action == "shutdown":
f_param = "shutdown"
else:
f_param = "running"
if not utils_misc.wait_for(
lambda: vm.monitor.verify_status(f_param),
response_timeout, 0, 1):
logging.debug("Monitor status is:%s" % vm.monitor.get_status())
                raise error.TestFail(
                    "Oops, seems action '%s' takes no effect, "
                    "wrong monitor status!" % watchdog_action)
        # when the action is reset, we need to be able to relogin to the guest.
if watchdog_action == "reset":
logging.info("Try to login the guest after reboot")
vm.wait_for_login(timeout=relogin_timeout)
logging.info("Watchdog action '%s' come into effect." %
watchdog_action)
# test case
def check_watchdog_support():
"""
        Check that the host qemu-kvm supports the watchdog device.
        Test Step:
        1. Send qemu command 'qemu -watchdog ?'
        2. Check the watchdog types that the host supports.
"""
qemu_binary = utils_misc.get_qemu_binary(params)
watchdog_type_check = params.get(
"watchdog_type_check", " -watchdog '?'")
qemu_cmd = qemu_binary + watchdog_type_check
        # check the watchdog types that the host supports.
        error.context("Checking whether or not the host supports WDT '%s'"
% watchdog_device_type, logging.info)
watchdog_device = utils.system_output("%s 2>&1" % qemu_cmd,
retain_output=True)
if watchdog_device:
if re.findall(watchdog_device_type, watchdog_device, re.I):
logging.info("The host support '%s' type watchdog device" %
watchdog_device_type)
else:
raise error.TestFail("Host not support watchdog device type %s "
% watchdog_device_type)
logging.info("The host support watchdog device type is: '%s'"
% watchdog_device)
else:
raise error.TestFail("No watchdog device support in the host!")
def guest_boot_with_watchdog():
"""
        Check that the guest can boot with the watchdog device
        Test Step:
        1. Boot guest with watchdog device
        2. Check that the watchdog device has been initialized successfully in the guest
"""
_watchdog_device_check(session, watchdog_device_type)
def watchdog_action_test():
"""
Watchdog action test
Test Step:
1. Boot guest with watchdog device
        2. Check that the watchdog device has been initialized successfully in the guest
        3. Trigger the watchdog action by writing to /dev/watchdog
        4. Ensure watchdog_action takes effect.
"""
_watchdog_device_check(session, watchdog_device_type)
_trigger_watchdog(session, trigger_cmd)
_action_check(session, watchdog_action)
def magic_close_support():
"""
Magic close the watchdog action.
Test Step:
1. Boot guest with watchdog device
        2. Check that the watchdog device has been initialized successfully in the guest
        3. Inside guest, trigger watchdog action
        4. Inside guest, before heartbeat expires, close this action (magic close)
        5. Wait for the heartbeat timeout and check that the watchdog action is deactivated.
"""
response_timeout = int(params.get("response_timeout", '240'))
magic_cmd = params.get("magic_close_cmd", "echo V > /dev/watchdog")
_watchdog_device_check(session, watchdog_device_type)
_trigger_watchdog(session, trigger_cmd)
# magic close
error.context("Magic close is start", logging.info)
_trigger_watchdog(session, magic_cmd)
if utils_misc.wait_for(lambda: not session.is_responsive(),
response_timeout, 0, 1):
error_msg = "Oops,Watchdog action take effect, magic close FAILED"
raise error.TestFail(error_msg)
logging.info("Magic close take effect.")
def migration_when_wdt_timeout():
"""
Migration when WDT timeout
Test Step:
1. Boot guest with watchdog device
        2. Check that the watchdog device has been initialized successfully in the guest
        3. Start VM with watchdog device, action reset|poweroff|pause
        4. Inside RHEL guest, trigger watchdog
        5. Before WDT timeout, do vm migration
        6. After migration, check that the watchdog action takes effect
"""
mig_timeout = float(params.get("mig_timeout", "3600"))
mig_protocol = params.get("migration_protocol", "tcp")
mig_cancel_delay = int(params.get("mig_cancel") == "yes") * 2
_watchdog_device_check(session, watchdog_device_type)
_trigger_watchdog(session, trigger_cmd)
error.context("Do migration(protocol:%s),Watchdog have been triggered."
% mig_protocol, logging.info)
vm.migrate(mig_timeout, mig_protocol, mig_cancel_delay)
_action_check(session, watchdog_action)
def hotplug_unplug_watchdog_device():
"""
Hotplug/unplug watchdog device
Test Step:
1. Start VM with "-watchdog-action pause" CLI option
2. Add WDT via monitor
3. Trigger watchdog action in guest
4. Remove WDT device through monitor cmd "device_del"
        5. Resume and relogin the guest, check that the device has been removed.
"""
session = vm.wait_for_login(timeout=timeout)
o = session.cmd_output("lspci")
if o:
wdt_pci_info = re.findall(".*6300ESB Watchdog Timer", o)
if wdt_pci_info:
raise error.TestFail("Can find watchdog pci")
plug_watchdog_device = params.get("plug_watchdog_device", "i6300esb")
watchdog_device_add = ("device_add driver=%s, id=%s"
% (plug_watchdog_device, "watchdog"))
watchdog_device_del = ("device_del id=%s" % "watchdog")
error.context("Hotplug watchdog device '%s'" % plug_watchdog_device,
logging.info)
vm.monitor.send_args_cmd(watchdog_device_add)
# wait watchdog device init
time.sleep(5)
_watchdog_device_check(session, plug_watchdog_device)
_trigger_watchdog(session, trigger_cmd)
_action_check(session, watchdog_action)
error.context("Hot unplug watchdog device", logging.info)
vm.monitor.send_args_cmd(watchdog_device_del)
error.context("Resume the guest, check the WDT have been removed",
logging.info)
vm.resume()
session = vm.wait_for_login(timeout=timeout)
o = session.cmd_output("lspci")
if o:
wdt_pci_info = re.findall(".*6300ESB Watchdog Timer", o)
if wdt_pci_info:
raise error.TestFail("Oops, find watchdog pci, unplug failed")
logging.info("The WDT remove successfully")
# main procedure
test_type = params.get("test_type")
error.context("'%s' test starting ... " % test_type, logging.info)
error.context("Boot VM with WDT(Device:'%s', Action:'%s'),and try to login"
% (watchdog_device_type, watchdog_action), logging.info)
params["start_vm"] = "yes"
env_process.preprocess_vm(test, params, env, params.get("main_vm"))
vm = env.get_vm(params["main_vm"])
session = vm.wait_for_login(timeout=timeout)
if (test_type in locals()):
test_running = locals()[test_type]
test_running()
else:
raise error.TestError("Oops test %s doesn't exist, have a check please."
% test_type)
| spiceqa/virt-test | qemu/tests/watchdog.py | Python | gpl-2.0 | 12,256 | 0.000408 |
def findBestShift(wordList, text):
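    """
    Find the shift that decrypts the most valid words in a Caesar-ciphered text.
    wordList: list of valid words
    text: string to decode
    Tries every shift from 0 to 25, decodes each word with applyShift and
    counts valid words with isWord (helpers assumed to be defined elsewhere
    in this problem set), and returns the shift yielding the most valid words.
    """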
text = "".join((char if char.isalpha() else " ") for char in text).split()
max_valid = 0
best_shift = 0
for shift in range(26):
num_valid = 0
for word in text:
plaintext = applyShift(word, shift)
if isWord(wordList, plaintext):
num_valid += 1
if num_valid > max_valid:
max_valid = num_valid
best_shift = shift
return best_shift
| iharsh234/MIT6.00x | pset6-P2-FindBestShift.py | Python | mit | 475 | 0 |
##
## This file is part of the libsigrokdecode project.
##
## Copyright (C) 2018 Steve R <steversig@virginmedia.com>
##
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program; if not, see <http://www.gnu.org/licenses/>.
##
import sigrokdecode as srd
bitvals = ('0', '1', 'f', 'U')
def decode_bit(edges):
# Datasheet says long pulse is 3 times short pulse.
lmin = 2 # long min multiplier
lmax = 5 # long max multiplier
eqmin = 0.5 # equal min multiplier
eqmax = 1.5 # equal max multiplier
if ( # 0 -___-___
(edges[1] >= edges[0] * lmin and edges[1] <= edges[0] * lmax) and
(edges[2] >= edges[0] * eqmin and edges[2] <= edges[0] * eqmax) and
(edges[3] >= edges[0] * lmin and edges[3] <= edges[0] * lmax)):
return '0'
elif ( # 1 ---_---_
(edges[0] >= edges[1] * lmin and edges[0] <= edges[1] * lmax) and
(edges[0] >= edges[2] * eqmin and edges[0] <= edges[2] * eqmax) and
(edges[0] >= edges[3] * lmin and edges[0] <= edges[3] * lmax)):
return '1'
elif ( # float ---_-___
(edges[1] >= edges[0] * lmin and edges[1] <= edges[0] * lmax) and
(edges[2] >= edges[0] * lmin and edges[2] <= edges[0]* lmax) and
(edges[3] >= edges[0] * eqmin and edges[3] <= edges[0] * eqmax)):
return 'f'
else:
return 'U'
def pinlabels(bit_count):
if bit_count <= 6:
return 'A%i' % (bit_count - 1)
else:
return 'A%i/D%i' % (bit_count - 1, 12 - bit_count)
def decode_model(model, bits):
if model == 'maplin_l95ar':
address = 'Addr' # Address pins A0 to A5
for i in range(0, 6):
address += ' %i:' % (i + 1) + ('on' if bits[i][0] == '0' else 'off')
button = 'Button'
# Button pins A6/D5 to A11/D0
if bits[6][0] == '0' and bits[11][0] == '0':
button += ' A ON/OFF'
elif bits[7][0] == '0' and bits[11][0] == '0':
button += ' B ON/OFF'
elif bits[9][0] == '0' and bits[11][0] == '0':
button += ' C ON/OFF'
elif bits[8][0] == '0' and bits[11][0] == '0':
button += ' D ON/OFF'
else:
button += ' Unknown'
return ['%s' % address, bits[0][1], bits[5][2], \
'%s' % button, bits[6][1], bits[11][2]]
class Decoder(srd.Decoder):
api_version = 3
id = 'rc_encode'
name = 'RC encode'
longname = 'Remote control encoder'
desc = 'PT2262/HX2262/SC5262 remote control encoder protocol.'
license = 'gplv2+'
inputs = ['logic']
outputs = []
tags = ['IC', 'IR']
channels = (
{'id': 'data', 'name': 'Data', 'desc': 'Data line'},
)
annotations = (
('bit-0', 'Bit 0'),
('bit-1', 'Bit 1'),
('bit-f', 'Bit f'),
('bit-U', 'Bit U'),
('bit-sync', 'Bit sync'),
('pin', 'Pin'),
('code-word-addr', 'Code word address'),
('code-word-data', 'Code word data'),
)
annotation_rows = (
('bits', 'Bits', (0, 1, 2, 3, 4)),
('pins', 'Pins', (5,)),
('code-words', 'Code words', (6, 7)),
)
options = (
{'id': 'remote', 'desc': 'Remote', 'default': 'none',
'values': ('none', 'maplin_l95ar')},
)
def __init__(self):
self.reset()
def reset(self):
self.samplenumber_last = None
self.pulses = []
self.bits = []
self.labels = []
self.bit_count = 0
self.ss = None
self.es = None
self.state = 'IDLE'
def start(self):
self.out_ann = self.register(srd.OUTPUT_ANN)
self.model = self.options['remote']
def putx(self, data):
self.put(self.ss, self.es, self.out_ann, data)
def decode(self):
while True:
pin = self.wait({0: 'e'})
self.state = 'DECODING'
if not self.samplenumber_last: # Set counters to start of signal.
self.samplenumber_last = self.samplenum
self.ss = self.samplenum
continue
if self.bit_count < 12: # Decode A0 to A11.
self.bit_count += 1
for i in range(0, 4): # Get four pulses for each bit.
if i > 0:
pin = self.wait({0: 'e'}) # Get next 3 edges.
samples = self.samplenum - self.samplenumber_last
self.pulses.append(samples) # Save the pulse width.
self.samplenumber_last = self.samplenum
self.es = self.samplenum
self.bits.append([decode_bit(self.pulses), self.ss,
self.es]) # Save states and times.
idx = bitvals.index(decode_bit(self.pulses))
self.putx([idx, [decode_bit(self.pulses)]]) # Write decoded bit.
self.putx([5, [pinlabels(self.bit_count)]]) # Write pin labels.
self.pulses = []
self.ss = self.samplenum
else:
if self.model != 'none':
self.labels = decode_model(self.model, self.bits)
self.put(self.labels[1], self.labels[2], self.out_ann,
[6, [self.labels[0]]]) # Write model decode.
self.put(self.labels[4], self.labels[5], self.out_ann,
[7, [self.labels[3]]]) # Write model decode.
samples = self.samplenum - self.samplenumber_last
pin = self.wait({'skip': 8 * samples}) # Wait for end of sync bit.
self.es = self.samplenum
self.putx([4, ['Sync']]) # Write sync label.
self.reset() # Reset and wait for next set of pulses.
self.state = 'DECODE_TIMEOUT'
if not self.state == 'DECODE_TIMEOUT':
self.samplenumber_last = self.samplenum
| StefanBruens/libsigrokdecode | decoders/rc_encode/pd.py | Python | gpl-3.0 | 6,428 | 0.007156 |
#!/usr/bin/env python
# pylint: disable=R0903
#
# A library that provides a Python interface to the Telegram Bot API
# Copyright (C) 2015-2016
# Leandro Toledo de Souza <devs@python-telegram-bot.org>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser Public License for more details.
#
# You should have received a copy of the GNU Lesser Public License
# along with this program. If not, see [http://www.gnu.org/licenses/].
"""This module contains a object that represents a Telegram
Message Parse Modes."""
class ParseMode(object):
"""This object represents a Telegram Message Parse Modes."""
MARKDOWN = 'Markdown'
HTML = 'HTML'
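    # Minimal usage sketch (assumes a configured Bot instance and chat_id,
    # neither of which is part of this module):
    #   bot.sendMessage(chat_id=chat_id, text='*bold* text',
    #                   parse_mode=ParseMode.MARKDOWN)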
| franciscod/python-telegram-bot | telegram/parsemode.py | Python | gpl-2.0 | 1,054 | 0 |
#! /usr/bin/env python
from openturns import *
from math import *
TESTPREAMBLE()
RandomGenerator.SetSeed(0)
try:
    # Instantiate one distribution object
dim = 1
meanPoint = NumericalPoint(dim, 1.0)
meanPoint[0] = 0.5
sigma = NumericalPoint(dim, 1.0)
sigma[0] = 2.0
R = CorrelationMatrix(dim)
distribution1 = Normal(meanPoint, sigma, R)
    # Instantiate another distribution object
meanPoint[0] = -1.5
sigma[0] = 4.0
distribution2 = Normal(meanPoint, sigma, R)
# Test for sampling
size = 2000
nBars = 20
sample1 = distribution1.getSample(size)
sample2 = distribution2.getSample(size)
# Construct histograms
epsilon = 0.1
min1 = sample1.getMin()[0]
max1 = sample1.getMax()[0] + epsilon
min2 = sample2.getMin()[0]
max2 = sample2.getMax()[0] + epsilon
tmp = NumericalPoint(2)
tmp[0] = (max1 - min1) / nBars
data1 = NumericalSample(nBars, tmp)
tmp[0] = (max2 - min2) / nBars
data2 = NumericalSample(nBars, tmp)
for i in range(size):
index = long(floor((sample1[i, 0] - min1) / (max1 - min1) * nBars))
data1[index, 1] += 1
index = long(floor((sample2[i, 0] - min2) / (max2 - min2) * nBars))
data2[index, 1] += 1
# Create an empty graph
myGraph = Graph("Some barplots", "y", "frequency", True, "topleft")
# Create the first barplot
myBarPlot1 = BarPlot(data1, min1, "blue", "shaded", "dashed", "histogram1")
# Then, draw it
myGraph.add(myBarPlot1)
myGraph.draw("Graph_BarPlot_a_OT", 640, 480)
# Check that the correct files have been generated by computing their
# checksum
# Create the second barplot
myBarPlot2 = BarPlot(data2, min2, "red", "solid", "solid", "histogram2")
# Add it to the graph and draw everything
myGraph.add(myBarPlot2)
myGraph.draw("Graph_BarPlot_b_OT", 640, 480)
except:
import sys
print "t_BarPlot_std.py", sys.exc_type, sys.exc_value
| sofianehaddad/ot-svn | python/test/t_BarPlot_std.py | Python | mit | 1,975 | 0.000506 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('core', '0009_auto_20150729_1745'),
]
operations = [
migrations.AlterField(
model_name='areasoltura',
name='cpf',
field=models.CharField(max_length=11, verbose_name='CPF', null=True, blank=True),
)
]
| igor-rodrigues01/casv | casv/core/migrations/0010_auto_20150804_1030.py | Python | agpl-3.0 | 443 | 0.002257 |
import pyaf.Bench.TS_datasets as tsds
import tests.artificial.process_artificial_dataset as art
art.process_dataset(N = 32 , FREQ = 'D', seed = 0, trendtype = "PolyTrend", cycle_length = 7, transform = "Anscombe", sigma = 0.0, exog_count = 0, ar_order = 0); | antoinecarme/pyaf | tests/artificial/transf_Anscombe/trend_PolyTrend/cycle_7/ar_/test_artificial_32_Anscombe_PolyTrend_7__0.py | Python | bsd-3-clause | 261 | 0.088123 |
#
# Copyright (c) 2008-2015 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response
from nssrc.com.citrix.netscaler.nitro.service.options import options
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util
class crvserver_crpolicy_binding(base_resource) :
""" Binding class showing the crpolicy that can be bound to crvserver.
"""
def __init__(self) :
self._policyname = ""
self._priority = 0
self._hits = 0
self._name = ""
self._targetvserver = ""
self.___count = 0
@property
def priority(self) :
ur"""The priority for the policy.
"""
try :
return self._priority
except Exception as e:
raise e
@priority.setter
def priority(self, priority) :
ur"""The priority for the policy.
"""
try :
self._priority = priority
except Exception as e:
raise e
@property
def policyname(self) :
ur"""Policies bound to this vserver.
"""
try :
return self._policyname
except Exception as e:
raise e
@policyname.setter
def policyname(self, policyname) :
ur"""Policies bound to this vserver.
"""
try :
self._policyname = policyname
except Exception as e:
raise e
@property
def name(self) :
ur"""Name of the cache redirection virtual server to which to bind the cache redirection policy.<br/>Minimum length = 1.
"""
try :
return self._name
except Exception as e:
raise e
@name.setter
def name(self, name) :
ur"""Name of the cache redirection virtual server to which to bind the cache redirection policy.<br/>Minimum length = 1
"""
try :
self._name = name
except Exception as e:
raise e
@property
def targetvserver(self) :
ur"""Name of the virtual server to which content is forwarded. Applicable only if the policy is a map policy and the cache redirection virtual server is of type REVERSE.
"""
try :
return self._targetvserver
except Exception as e:
raise e
@targetvserver.setter
def targetvserver(self, targetvserver) :
ur"""Name of the virtual server to which content is forwarded. Applicable only if the policy is a map policy and the cache redirection virtual server is of type REVERSE.
"""
try :
self._targetvserver = targetvserver
except Exception as e:
raise e
@property
def hits(self) :
ur"""Number of hits.
"""
try :
return self._hits
except Exception as e:
raise e
def _get_nitro_response(self, service, response) :
ur""" converts nitro response into object and returns the object array in case of get request.
"""
try :
result = service.payload_formatter.string_to_resource(crvserver_crpolicy_binding_response, response, self.__class__.__name__)
if(result.errorcode != 0) :
if (result.errorcode == 444) :
service.clear_session(self)
if result.severity :
if (result.severity == "ERROR") :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
else :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
return result.crvserver_crpolicy_binding
except Exception as e :
raise e
def _get_object_name(self) :
ur""" Returns the value of object identifier argument
"""
try :
if self.name is not None :
return str(self.name)
return None
except Exception as e :
raise e
@classmethod
def add(cls, client, resource) :
try :
if resource and type(resource) is not list :
updateresource = crvserver_crpolicy_binding()
updateresource.name = resource.name
updateresource.policyname = resource.policyname
updateresource.targetvserver = resource.targetvserver
updateresource.priority = resource.priority
return updateresource.update_resource(client)
else :
if resource and len(resource) > 0 :
updateresources = [crvserver_crpolicy_binding() for _ in range(len(resource))]
for i in range(len(resource)) :
updateresources[i].name = resource[i].name
updateresources[i].policyname = resource[i].policyname
updateresources[i].targetvserver = resource[i].targetvserver
updateresources[i].priority = resource[i].priority
return cls.update_bulk_request(client, updateresources)
except Exception as e :
raise e
@classmethod
def delete(cls, client, resource) :
try :
if resource and type(resource) is not list :
deleteresource = crvserver_crpolicy_binding()
deleteresource.name = resource.name
deleteresource.policyname = resource.policyname
return deleteresource.delete_resource(client)
else :
if resource and len(resource) > 0 :
deleteresources = [crvserver_crpolicy_binding() for _ in range(len(resource))]
for i in range(len(resource)) :
deleteresources[i].name = resource[i].name
deleteresources[i].policyname = resource[i].policyname
return cls.delete_bulk_request(client, deleteresources)
except Exception as e :
raise e
@classmethod
def get(cls, service, name) :
ur""" Use this API to fetch crvserver_crpolicy_binding resources.
"""
try :
obj = crvserver_crpolicy_binding()
obj.name = name
response = obj.get_resources(service)
return response
except Exception as e:
raise e
@classmethod
def get_filtered(cls, service, name, filter_) :
ur""" Use this API to fetch filtered set of crvserver_crpolicy_binding resources.
Filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
"""
try :
obj = crvserver_crpolicy_binding()
obj.name = name
option_ = options()
option_.filter = filter_
response = obj.getfiltered(service, option_)
return response
except Exception as e:
raise e
@classmethod
def count(cls, service, name) :
ur""" Use this API to count crvserver_crpolicy_binding resources configued on NetScaler.
"""
try :
obj = crvserver_crpolicy_binding()
obj.name = name
option_ = options()
option_.count = True
response = obj.get_resources(service, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e:
raise e
@classmethod
def count_filtered(cls, service, name, filter_) :
ur""" Use this API to count the filtered set of crvserver_crpolicy_binding resources.
Filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
"""
try :
obj = crvserver_crpolicy_binding()
obj.name = name
option_ = options()
option_.count = True
option_.filter = filter_
response = obj.getfiltered(service, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e:
raise e
class crvserver_crpolicy_binding_response(base_response) :
def __init__(self, length=1) :
self.crvserver_crpolicy_binding = []
self.errorcode = 0
self.message = ""
self.severity = ""
self.sessionid = ""
self.crvserver_crpolicy_binding = [crvserver_crpolicy_binding() for _ in range(length)]
| benfinke/ns_python | nssrc/com/citrix/netscaler/nitro/resource/config/cr/crvserver_crpolicy_binding.py | Python | apache-2.0 | 7,634 | 0.037595 |
__author__ = 'sfaci'
"""
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
""" | OSAlt/secret-santa | santa_lib/__init__.py | Python | mit | 559 | 0.003578 |
"""
Test notifiers
"""
import unittest
from sickchill.oldbeard import db
from sickchill.oldbeard.notifiers.emailnotify import Notifier as EmailNotifier
from sickchill.oldbeard.notifiers.prowl import Notifier as ProwlNotifier
from sickchill.tv import TVEpisode, TVShow
from sickchill.views.home import Home
from tests import test_lib as test
# noinspection PyProtectedMember
class NotifierTests(test.SickChillTestDBCase):
"""
Test notifiers
"""
@classmethod
def setUpClass(cls):
num_legacy_shows = 3
num_shows = 3
num_episodes_per_show = 5
cls.mydb = db.DBConnection()
cls.legacy_shows = []
cls.shows = []
# Per-show-notifications were originally added for email notifications only. To add
# this feature to other notifiers, it was necessary to alter the way text is stored in
# one of the DB columns. Therefore, to test properly, we must create some shows that
# store emails in the old method (legacy method) and then other shows that will use
# the new method.
for show_counter in range(100, 100 + num_legacy_shows):
show = TVShow(1, show_counter)
show.name = "Show " + str(show_counter)
show.episodes = []
for episode_counter in range(0, num_episodes_per_show):
episode = TVEpisode(show, test.SEASON, episode_counter)
episode.name = "Episode " + str(episode_counter + 1)
episode.quality = "SDTV"
show.episodes.append(episode)
show.saveToDB()
cls.legacy_shows.append(show)
for show_counter in range(200, 200 + num_shows):
show = TVShow(1, show_counter)
show.name = "Show " + str(show_counter)
show.episodes = []
for episode_counter in range(0, num_episodes_per_show):
episode = TVEpisode(show, test.SEASON, episode_counter)
episode.name = "Episode " + str(episode_counter + 1)
episode.quality = "SDTV"
show.episodes.append(episode)
show.saveToDB()
cls.shows.append(show)
def setUp(self):
"""
Set up tests
"""
self._debug_spew("\n\r")
@unittest.skip('Not yet implemented')
def test_boxcar(self):
"""
Test boxcar notifications
"""
pass
@unittest.skip('Cannot call directly without a request')
def test_email(self):
"""
Test email notifications
"""
email_notifier = EmailNotifier()
# Per-show-email notifications were added early on and utilized a different format than the other notifiers.
# Therefore, to test properly (and ensure backwards compatibility), this routine will test shows that use
# both the old and the new storage methodology
legacy_test_emails = "email-1@address.com,email2@address.org,email_3@address.tv"
test_emails = "email-4@address.com,email5@address.org,email_6@address.tv"
for show in self.legacy_shows:
showid = self._get_showid_by_showname(show.show_name)
self.mydb.action("UPDATE tv_shows SET notify_list = ? WHERE show_id = ?", [legacy_test_emails, showid])
for show in self.shows:
showid = self._get_showid_by_showname(show.show_name)
Home.saveShowNotifyList(show=showid, emails=test_emails)
# Now, iterate through all shows using the email list generation routines that are used in the notifier proper
shows = self.legacy_shows + self.shows
for show in shows:
for episode in show.episodes:
ep_name = episode._format_pattern('%SN - %Sx%0E - %EN - ') + episode.quality
show_name = email_notifier._parseEp(ep_name)
recipients = email_notifier._generate_recipients(show_name)
self._debug_spew("- Email Notifications for " + show.name + " (episode: " + episode.name + ") will be sent to:")
for email in recipients:
self._debug_spew("-- " + email.strip())
self._debug_spew("\n\r")
return True
@unittest.skip('Not yet implemented')
def test_emby(self):
"""
Test emby notifications
"""
pass
@unittest.skip('Not yet implemented')
def test_freemobile(self):
"""
Test freemobile notifications
"""
pass
@unittest.skip('Not yet implemented')
def test_growl(self):
"""
Test growl notifications
"""
pass
@unittest.skip('Not yet implemented')
def test_kodi(self):
"""
Test kodi notifications
"""
pass
@unittest.skip('Not yet implemented')
def test_libnotify(self):
"""
Test libnotify notifications
"""
pass
@unittest.skip('Not yet implemented')
def test_nma(self):
"""
Test nma notifications
"""
pass
@unittest.skip('Not yet implemented')
def test_nmj(self):
"""
Test nmj notifications
"""
pass
@unittest.skip('Not yet implemented')
def test_nmjv2(self):
"""
Test nmjv2 notifications
"""
pass
@unittest.skip('Not yet implemented')
def test_plex(self):
"""
Test plex notifications
"""
pass
@unittest.skip('Cannot call directly without a request')
def test_prowl(self):
"""
Test prowl notifications
"""
prowl_notifier = ProwlNotifier()
# Prowl per-show-notifications only utilize the new methodology for storage; therefore, the list of legacy_shows
# will not be altered (to preserve backwards compatibility testing)
test_prowl_apis = "11111111111111111111,22222222222222222222"
for show in self.shows:
showid = self._get_showid_by_showname(show.show_name)
Home.saveShowNotifyList(show=showid, prowlAPIs=test_prowl_apis)
# Now, iterate through all shows using the Prowl API generation routines that are used in the notifier proper
for show in self.shows:
for episode in show.episodes:
ep_name = episode._format_pattern('%SN - %Sx%0E - %EN - ') + episode.quality
show_name = prowl_notifier._parse_episode(ep_name)
recipients = prowl_notifier._generate_recipients(show_name)
self._debug_spew("- Prowl Notifications for " + show.name + " (episode: " + episode.name + ") will be sent to:")
for api in recipients:
self._debug_spew("-- " + api.strip())
self._debug_spew("\n\r")
return True
@unittest.skip('Not yet implemented')
def test_pushalot(self):
"""
Test pushalot notifications
"""
pass
@unittest.skip('Not yet implemented')
def test_pushbullet(self):
"""
Test pushbullet notifications
"""
pass
@unittest.skip('Not yet implemented')
def test_pushover(self):
"""
Test pushover notifications
"""
pass
@unittest.skip('Not yet implemented')
def test_pytivo(self):
"""
Test pytivo notifications
"""
pass
@unittest.skip('Not yet implemented')
def test_synoindex(self):
"""
Test synoindex notifications
"""
pass
@unittest.skip('Not yet implemented')
def test_synologynotifier(self):
"""
Test synologynotifier notifications
"""
pass
@unittest.skip('Not yet implemented')
def test_trakt(self):
"""
Test trakt notifications
"""
pass
@unittest.skip('Not yet implemented')
def test_tweet(self):
"""
Test tweet notifications
"""
pass
@unittest.skip('Not yet implemented')
def test_twilio(self):
"""
Test twilio notifications
"""
pass
@staticmethod
def _debug_spew(text):
"""
Spew text notifications
:param text: to spew
:return:
"""
if __name__ == '__main__' and text is not None:
print(text)
def _get_showid_by_showname(self, showname):
"""
Get show ID by show name
:param showname:
:return:
"""
if showname is not None:
rows = self.mydb.select("SELECT show_id FROM tv_shows WHERE show_name = ?", [showname])
if len(rows) == 1:
return rows[0]['show_id']
return -1
if __name__ == '__main__':
print("==================")
print("STARTING - NOTIFIER TESTS")
print("==================")
print("######################################################################")
SUITE = unittest.TestLoader().loadTestsFromTestCase(NotifierTests)
unittest.TextTestRunner(verbosity=2).run(SUITE)
| Vagab0nd/SiCKRAGE | tests/notifier_tests.py | Python | gpl-3.0 | 9,061 | 0.002097 |
## A script for extracting info about the patients used in the analysis
## Load necessary modules
from rpy2 import robjects as ro
import numpy as np
import os
ro.r('library(survival)')
import re
##This call will only work if you are running python from the command line.
##If you are not running from the command line, manually type in your paths.
BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
f=open(os.path.join(BASE_DIR,'tcga_data','COAD','clinical','nationwidechildrens.org_clinical_follow_up_v1.0_coad.txt'))
##get the column indexes needed
columns=f.readline().split('\t')
patient_column=columns.index('bcr_patient_barcode')
alive_column=columns.index('last_contact_days_to')
death_column=columns.index('death_days_to')
f.readline()
f.readline()
data=[i.split('\t') for i in f]
## A patient can be listed multiple times in the file. The most recent listing (furthest down in the file) contains the most recent
## follow-up data. This code checks if the patient has already been loaded into the list, and if so, takes the more recent data.
## This required an empty value in the list initialization.
## Data is: [[Patient ID, time(days), Vital status],[Patient ID, time(days), Vital status],...]
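## e.g. one (hypothetical) entry after parsing: ['TCGA-AA-3556', 365, 'Alive']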
clinical1=[['','','']]
for i in data:
if clinical1[-1][0]==i[patient_column]:
if re.search('^[0-9]+$',i[death_column]):
clinical1[-1]=[i[patient_column],int(i[death_column]),'Dead']
elif re.search('^[0-9]+$',i[alive_column]):
clinical1[-1]=[i[patient_column],int(i[alive_column]),'Alive']
else:
pass
else:
if re.search('^[0-9]+$',i[death_column]):
clinical1.append([i[patient_column],int(i[death_column]),'Dead'])
elif re.search('^[0-9]+$',i[alive_column]):
clinical1.append([i[patient_column],int(i[alive_column]),'Alive'])
else:
pass
## Removing the empty value.
clinical=clinical1[1:]
## Sex and age information were taken from the "clinical_patient" file. A dictionary was created for sex.
more_clinical={}
sex_dict={}
sex_dict['MALE']=0
sex_dict['FEMALE']=1
## The "clinical_patient" file can also contain patients not listed in the follow_up files.
## In these cases the clinical data for these patients gets appended to a new clinical list.
f=open(os.path.join(BASE_DIR,'tcga_data','COAD','clinical','nationwidechildrens.org_clinical_patient_coad.txt'))
##get the column indexes needed
columns=f.readline().split('\t')
sex_column=columns.index('gender')
age_column=columns.index('age_at_initial_pathologic_diagnosis')
patient_column=columns.index('bcr_patient_barcode')
alive_column=columns.index('last_contact_days_to')
death_column=columns.index('death_days_to')
f.readline()
f.readline()
clinical4=[]
data=[i.split('\t') for i in f]
for i in data:
try:
more_clinical[i[patient_column]]=[0,sex_dict[i[sex_column]],int(i[age_column])]
if re.search('^[0-9]+$',i[death_column]):
clinical4.append([i[patient_column],int(i[death_column]),'Dead'])
elif re.search('^[0-9]+$',i[alive_column]):
clinical4.append([i[patient_column],int(i[alive_column]),'Alive'])
else:
pass
except:
pass
new_clinical=[]
##It is possible that the clinical data in the clinical_patient file is more up to date than the follow_up files
##All the clinical data is merged checking which data is the most up to date
for i in clinical4:
if i[0] not in [j[0] for j in clinical]:
new_clinical.append(i)
else:
if i[1]<=clinical[[j[0] for j in clinical].index(i[0])][1]:
new_clinical.append(clinical[[j[0] for j in clinical].index(i[0])])
else:
new_clinical.append(i)
##also do the reverse since clinical can contain patients not included in clinical4
for i in clinical:
if i[0] not in [j[0] for j in new_clinical]:
new_clinical.append(i)
## only patients who had a follow up time greater than 0 days are included in the analysis
clinical=[i for i in new_clinical if i[1]>0]
final_clinical=[]
## A new list containing both follow up times and sex and age is constructed.
## Only patients with sex and age information are included.
## Data is [[Patient ID, time (days), vital status, 0, sex, age at diagnosis],...]
for i in clinical:
if i[0] in more_clinical:
final_clinical.append(i+more_clinical[i[0]])
##In a separate script I parsed the mitranscriptome.expr.counts.tsv file and extracted the COAD patient and expression values.
##From this file I will load the expression data.
##There are duplicated transcripts and the possibility of a patient having multiple sequencing files.
f=open(os.path.join(BASE_DIR,'tcga_data','COAD','lncrna','COAD.txt'))
##patient list is at the top of the file
patients=f.readline().strip().split()
##create a dictionary mapping patient to all of their lncrna expression data
patient_dict={}
for index, i in enumerate(patients):
patient_dict[i[:12]]=''
##find which patients have complete clinical data, order the data, and average data if necessary
##it's possible there are expression data for patients without clinical data, and clinical data without expression data
##create a new clinical list called clinical_and_files for consistency with previous scripts
clinical_and_files=[]
for i in final_clinical:
if i[0] in patient_dict:
clinical_and_files.append(i)
##print average age at diagnosis
age=np.mean([i[5] for i in clinical_and_files])
##print number of males
males=len([i for i in clinical_and_files if i[4]==0])
##print number of females
females=len([i for i in clinical_and_files if i[4]==1])
##to get the median survival we need to call survfit from r
##prepare variables for R
ro.globalenv['times']=ro.IntVector([i[1] for i in clinical_and_files])
##need to create a dummy variable group
ro.globalenv['group']=ro.IntVector([0 for i in clinical_and_files])
##need a vector for deaths
death_dic={}
death_dic['Alive']=0
death_dic['Dead']=1
ro.globalenv['died']=ro.IntVector([death_dic[i[2]] for i in clinical_and_files])
res=ro.r('survfit(Surv(times,died) ~ as.factor(group))')
#the number of events(deaths) is the fourth column of the output
deaths=str(res).split('\n')[-2].strip().split()[3]
#the median survival time is the fifth column of the output
median=str(res).split('\n')[-2].strip().split()[4]
##write data to a file
f=open('patient_info.txt','w')
f.write('Average Age')
f.write('\t')
f.write('Males')
f.write('\t')
f.write('Females')
f.write('\t')
f.write('Deaths')
f.write('\t')
f.write('Median Survival')
f.write('\n')
f.write(str(age))
f.write('\t')
f.write(str(males))
f.write('\t')
f.write(str(females))
f.write('\t')
f.write(deaths)
f.write('\t')
f.write(median)
f.close()
| OmnesRes/onco_lnc | lncrna/cox/COAD/patient_info.py | Python | mit | 6,839 | 0.021787 |
# Copyright (c) 2018 Ultimaker B.V.
# Uranium is released under the terms of the LGPLv3 or higher.
from typing import Optional, Dict, Any
class PluginObject:
"""Base class for objects that can be provided by a plugin.
This class should be inherited by any class that can be provided
by a plugin. Its only function is to serve as a mapping between
the plugin and the object.
"""
    def __init__(self, *args, **kwargs) -> None:
self._plugin_id = None # type: Optional[str]
self._version = None # type: Optional[str]
self._metadata = {} # type: Dict[str, Any]
self._name = None # type: Optional[str]
# This returns a globally unique id for this plugin object.
    # It prefixes its set name (which should be locally unique, e.g. within the plugin) with the plugin_id,
    # making it globally unique.
def getId(self) -> str:
result = self.getPluginId()
if self._name:
result += "_%s" % self._name
return result
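    # For example (hypothetical ids): if the plugin was loaded with plugin id "MyPlugin" and this
    # object's name was set to "MyTool", getId() returns "MyPlugin_MyTool"; with no name set it
    # returns just "MyPlugin".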
def setPluginId(self, plugin_id: str) -> None:
self._plugin_id = plugin_id
# The metadata of the plugin is set at the moment it is loaded.
def setMetaData(self, metadata: Dict[str, Any]) -> None:
self._metadata = metadata
def getMetaData(self) -> Dict[str, Any]:
return self._metadata
def getPluginId(self) -> str:
if not self._plugin_id:
raise ValueError("The plugin ID needs to be set before the plugin can be used")
return self._plugin_id
def setVersion(self, version: str) -> None:
self._version = version
def getVersion(self) -> str:
if not self._version:
raise ValueError("The plugin version needs to be set before the plugin can be used")
return self._version
| Ultimaker/Uranium | UM/PluginObject.py | Python | lgpl-3.0 | 1,812 | 0.002208 |
from cherrypy.process.plugins import PIDFile
import argparse
import cherrypy
from PressUI.cherrypy.PressConfig import PressConfig
import PressUI.cherrypy.PressProduction as PressProduction
parser = argparse.ArgumentParser()
parser.add_argument(
'--production',
help = 'Run app in production mode',
action = 'store_true',
)
parser.add_argument(
    '--port',
    help = 'Run app on this port (defaults to %(default)d)',
    type = int,
    default = 8080,
)
parser.add_argument(
'config',
help = 'Path to config file for this app',
)
def quickstart(app, app_name, fun_callback = None):
args = parser.parse_args()
PressConfig.init(args.config)
if fun_callback is not None:
fun_callback()
cherrypy.config.update({
'server.socket_port': args.port,
'server.socket_host': '127.0.0.1',
'tools.gzip.on': True,
})
if args.production:
cherrypy.config.update({
'environment': 'production',
'tools.proxy.on': True,
'log.access_file': '/tmp/{}.access.log'.format(app_name),
'log.error_file': '/tmp/{}.error.log'.format(app_name),
})
PIDFile(
cherrypy.engine,
'/tmp/{}.pid'.format(app_name),
).subscribe()
PressProduction.set_production(True)
cherrypy.quickstart(app())
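# A minimal usage sketch (hypothetical app class and import path; adjust the import to wherever
# this module lives in your installation):
#
#     import cherrypy
#     from PressUI.cherrypy import server
#
#     class Hello(object):
#         @cherrypy.expose
#         def index(self):
#             return 'hello'
#
#     server.quickstart(Hello, 'hello_app')
#
# The positional config argument and the optional --production/--port flags are then read from
# the command line by quickstart() itself.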
| maarons/pressui | cherrypy/server.py | Python | mit | 1,340 | 0.009701 |
# copyright: (c) 2012 by Hansel Dunlop.
# license: ISC, see LICENSE for more details.
#
# secret keys for virtual server providers
# Users of this package will need to add their
# keys to this file for it to work
#
AWS_ACCESS_KEY_ID = ''
AWS_SECRET_ACCESS_KEY = ''
RACKSPACE_USERNAME = ''
RACKSPACE_API_KEY = ''
KEY_FILENAME = ''
| aychedee/kubrick | kubrick/secrets.py | Python | isc | 333 | 0 |
from django import template as template_
from django.conf import settings
from django.utils.safestring import mark_safe
from extras.plugins import PluginTemplateExtension
from extras.registry import registry
register = template_.Library()
def _get_registered_content(obj, method, template_context):
"""
Given an object and a PluginTemplateExtension method name and the template context, return all the
registered content for the object's model.
"""
html = ''
context = {
'object': obj,
'request': template_context['request'],
'settings': template_context['settings'],
'csrf_token': template_context['csrf_token'],
'perms': template_context['perms'],
}
model_name = obj._meta.label_lower
template_extensions = registry['plugin_template_extensions'].get(model_name, [])
for template_extension in template_extensions:
# If the class has not overridden the specified method, we can skip it (because we know it
# will raise NotImplementedError).
if getattr(template_extension, method) == getattr(PluginTemplateExtension, method):
continue
# Update context with plugin-specific configuration parameters
plugin_name = template_extension.__module__.split('.')[0]
context['config'] = settings.PLUGINS_CONFIG.get(plugin_name, {})
# Call the method to render content
instance = template_extension(context)
content = getattr(instance, method)()
html += content
return mark_safe(html)
@register.simple_tag(takes_context=True)
def plugin_buttons(context, obj):
"""
Render all buttons registered by plugins
"""
return _get_registered_content(obj, 'buttons', context)
@register.simple_tag(takes_context=True)
def plugin_left_page(context, obj):
"""
Render all left page content registered by plugins
"""
return _get_registered_content(obj, 'left_page', context)
@register.simple_tag(takes_context=True)
def plugin_right_page(context, obj):
"""
Render all right page content registered by plugins
"""
return _get_registered_content(obj, 'right_page', context)
@register.simple_tag(takes_context=True)
def plugin_full_width_page(context, obj):
"""
Render all full width page content registered by plugins
"""
return _get_registered_content(obj, 'full_width_page', context)
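# A usage sketch of these tags in a plugin-aware template (assuming this template tag library is
# loaded under its module name, "plugins"):
#
#     {% load plugins %}
#     {% plugin_buttons object %}
#     {% plugin_left_page object %}
#     {% plugin_right_page object %}
#     {% plugin_full_width_page object %}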
| digitalocean/netbox | netbox/extras/templatetags/plugins.py | Python | apache-2.0 | 2,412 | 0.001658 |
# -*- coding: utf-8 -*-
"""
TeleMir development version with a fake acquisition device
Run in a terminal:
python examples/test_osc_receive.py
"""
from pyacq import StreamHandler, FakeMultiSignals
from pyacq.gui import Oscilloscope, Oscilloscope_f, TimeFreq, TimeFreq2
from TeleMir.gui import Topoplot, KurtosisGraphics, freqBandsGraphics, spaceShipLauncher, Topoplot_imp
from TeleMir.gui import ScanningOscilloscope,SpectrumGraphics
from TeleMir.analyses import TransmitFeatures
#from TeleMir.example import test_osc_receive
import msgpack
#~ import gevent
#~ import zmq.green as zmq
from PyQt4 import QtCore,QtGui
#from multiprocessing import Process
import zmq
import msgpack
import time
import numpy as np
import os
def teleMir_CB():
streamhandler = StreamHandler()
# Configure and start
#~ dev = FakeMultiSignals(streamhandler = streamhandler)
#~ dev.configure( #name = 'Test dev',
#~ nb_channel = 14,
#~ sampling_rate =128.,
#~ buffer_length = 10.,
#~ packet_size = 1,
#~ )
#~ dev.initialize()
#~ dev.start()
filename = '/home/ran/Projets/pyacq_emotiv_recording/alex/Emotiv Systems Pty Ltd #SN201105160008860.raw'
#filename = '/home/ran/Projets/pyacq_emotiv_recording/caro/Emotiv Systems Pty Ltd #SN201105160008860.raw'
#filename = '/home/mini/pyacq_emotiv_recording/simple_blink/Emotiv Systems Pty Ltd #SN201105160008860.raw'
filenameImp = '/home/ran/Projets/EEG_recordings/anneLise/Emotiv Systems Pty Ltd #SN200709276578911.raw'
filenameXY = '/home/ran/Projets/EEG_recordings/anneLise/Emotiv Systems Pty Ltd #SN200709276578912.raw'
precomputed = np.fromfile(filename , dtype = np.float32).reshape(-1, 14).transpose()
precomputedImp = np.fromfile(filenameImp , dtype = np.float32).reshape(-1, 14).transpose()
precomputedXY = np.fromfile(filenameXY , dtype = np.float32).reshape(-1, 2).transpose()
# Configure and start signal
dev = FakeMultiSignals(streamhandler = streamhandler)
dev.configure( #name = 'Test dev',
nb_channel = 14,
sampling_rate =128.,
buffer_length = 30.,
packet_size = 1,
precomputed = precomputed,
)
dev.initialize()
dev.start()
#~ # Configure and start imp
#~ devImp = FakeMultiSignals(streamhandler = streamhandler)
#~ devImp.configure( #name = 'Test dev',
#~ nb_channel = 14,
#~ sampling_rate =128.,
#~ buffer_length = 30.,
#~ packet_size = 1,
#~ precomputed = precomputedImp,
#~ )
#~ devImp.initialize()
#~ devImp.start()
# Configure and start gyroXY
devXY = FakeMultiSignals(streamhandler = streamhandler)
devXY.configure( #name = 'Test dev',
nb_channel = 2,
sampling_rate =128.,
buffer_length = 30.,
packet_size = 1,
precomputed = precomputedXY,
)
devXY.initialize()
devXY.start()
## Configure and start output stream (for extracted feature)
fout = TransmitFeatures(streamhandler = streamhandler)
fout.configure( #name = 'Test fout',
nb_channel = 14, # np.array([1:5])
nb_feature = 6,
nb_pts = 128,
sampling_rate =10.,
buffer_length = 10.,
packet_size = 1,
)
fout.initialize(stream_in = dev.streams[0], stream_xy = devXY.streams[0])
fout.start()
#Osc server
#p = Process(target=., args=('bob',))
#color = 'summer'
    # Blue
#color = 'jet'
    # Red
color = 'hot'
    # green/yellow
#color = 'summer'
app = QtGui.QApplication([])
# Impedances
w_imp=Topoplot_imp(stream = dev.streams[0], type_Topo= 'imp')
w_imp.show()
# freqbands
w_sp_bd=freqBandsGraphics(stream = dev.streams[0], interval_length = 3., channels = [12])
w_sp_bd.run()
# signal
w_oscilo=Oscilloscope(stream = dev.streams[0])
w_oscilo.show()
w_oscilo.set_params(xsize = 10, mode = 'scroll')
w_oscilo.auto_gain_and_offset(mode = 2)
w_oscilo.gain_zoom(100)
#w_oscilo.set_params(colors = 'jet')
select_chan = np.ones(14, dtype = bool)
w_oscilo.automatic_color(cmap_name = 'jet', selected = select_chan)
    # parameters
w_feat1=Oscilloscope_f(stream = fout.streams[0])
w_feat1.show()
w_feat1.set_params(colormap = color)
#w_feat1.auto_gain_and_offset(mode = 1)
#w_feat1.set_params(xsize = 10, mode = 'scroll')
#~ select_feat = np.ones(6, dtype = bool)
#~ # print select
#~ #w_oscilo.set_params(colormap = 'automn', selected = select)
#~ w_feat1.automatic_color(cmap_name = 'jet', selected = select_feat)
w_feat1.showFullScreen()
w_feat1.set_params(xsize = 10, mode = 'scroll')
#~ select_feat = np.ones(4, dtype = bool)
#~ w_feat1.automatic_color(cmap_name = 'jet', selected = select_feat)
    # topography
w_topo=Topoplot(stream = dev.streams[0], type_Topo= 'topo')
w_topo.show()
    # time-frequency 1
w_Tf=TimeFreq(stream = dev.streams[0])
w_Tf.show()
w_Tf.set_params(xsize = 10)
w_Tf.change_param_tfr(f_stop = 45, f0 = 1)
w_Tf.set_params(colormap = color)
#w_Tf.clim_changed(20)
#w_Tf.change_param_channel(clim = 20)
    # time-frequency 2
w_Tf2=TimeFreq2(stream = dev.streams[0])
w_Tf2.show()
w_Tf2.set_params(xsize = 10)
w_Tf2.change_param_tfr(f_stop = 45, f0 = 1)
w_Tf2.set_params(colormap = color)
# kurtosis
#w_ku=KurtosisGraphics(stream = dev.streams[0], interval_length = 1.)
#w_ku.run()
    ## Much less smooth
    # Spectrum
#~ w_sp=SpectrumGraphics(dev.streams[0],3.,channels=[11,12])
#~ w_sp.run()
w1 = spaceShipLauncher(dev.streams[0])
w1.run()
w1.showFullScreen()
app.exec_()
    # Stop and release the devices
    fout.stop()
    fout.close()
    print 'here'
    dev.stop()
    dev.close()
    print 'here'
    devXY.stop()
    devXY.close()
    print 'here'
    # devImp was never started (its setup is commented out above), so do not stop it here.
    #~ devImp.stop()
    #~ devImp.close()
if __name__ == '__main__':
teleMir_CB()
| Hemisphere-Project/Telemir-DatabitMe | Telemir-EEG/TeleMir_171013/Fake_TeleMir_CB.py | Python | gpl-2.0 | 6,881 | 0.034443 |
"""
This file re-creates the major DFXML classes with an emphasis on type safety, serializability, and de-serializability.
With this module, reading disk images or DFXML files is done with the parse or iterparse functions. Writing DFXML files can be done with the DFXMLObject.print_dfxml function.
"""
__version__ = "0.4.5"
#Remaining roadmap to 1.0.0:
# * Documentation.
# * User testing.
# * Compatibility with the DFXML schema, version >1.1.1.
import logging
import re
import copy
import xml.etree.ElementTree as ET
import subprocess
import dfxml
import os
import sys
import struct
_logger = logging.getLogger(os.path.basename(__file__))
#Contains: (namespace, local name) qualified XML element name pairs
_warned_elements = set([])
_warned_byterun_attribs = set([])
#Contains: Unexpected 'facet' values on byte_runs elements.
_warned_byterun_facets = set([])
#Issue some log statements only once per program invocation.
_nagged_alloc = False
_warned_byterun_badtypecomp = False
XMLNS_REGXML = "http://www.forensicswiki.org/wiki/RegXML"
def _ET_tostring(e):
"""Between Python 2 and 3, there are some differences in the ElementTree library's tostring() behavior. One, the method balks at the "unicode" encoding in 2. Two, in 2, the XML prototype's output with every invocation. This method serves as a wrapper to deal with those issues."""
if sys.version_info[0] < 3:
tmp = ET.tostring(e, encoding="UTF-8")
if tmp[0:2] == "<?":
#Trim away first line; it's an XML prototype. This only appears in Python 2's ElementTree output.
return tmp[ tmp.find("?>\n")+3 : ]
else:
return tmp
else:
return ET.tostring(e, encoding="unicode")
def _boolcast(val):
"""Takes Boolean values, and 0 or 1 in string or integer form, and casts them all to Boolean. Preserves nulls. Balks at everything else."""
if val is None:
return None
if val in [True, False]:
return val
_val = val
if val in ["0", "1"]:
_val = int(val)
if _val in [0, 1]:
return _val == 1
_logger.debug("val = " + repr(val))
raise ValueError("Received a not-straightforwardly-Boolean value. Expected some form of 0, 1, True, or False.")
def _bytecast(val):
"""Casts a value as a byte string. If a character string, assumes a UTF-8 encoding."""
if val is None:
return None
if isinstance(val, bytes):
return val
return _strcast(val).encode("utf-8")
def _intcast(val):
"""Casts input integer or string to integer. Preserves nulls. Balks at everything else."""
if val is None:
return None
if isinstance(val, int):
return val
if isinstance(val, str):
if val[0] == "-":
if val[1:].isdigit():
return int(val)
else:
if val.isdigit():
return int(val)
_logger.debug("val = " + repr(val))
raise ValueError("Received a non-int-castable value. Expected an integer or an integer as a string.")
def _read_differential_annotations(annodict, element, annoset):
"""
Uses the shorthand-to-attribute mappings of annodict to translate attributes of element into annoset.
"""
#_logger.debug("annoset, before: %r." % annoset)
#Start with inverting the dictionary
_d = { annodict[k].replace("delta:",""):k for k in annodict }
#_logger.debug("Inverted dictionary: _d = %r" % _d)
for attr in element.attrib:
#_logger.debug("Looking for differential annotations: %r" % element.attrib)
(ns, an) = _qsplit(attr)
if an in _d and ns == dfxml.XMLNS_DELTA:
#_logger.debug("Found; adding %r." % _d[an])
annoset.add(_d[an])
#_logger.debug("annoset, after: %r." % annoset)
def _qsplit(tagname):
"""Requires string input. Returns namespace and local tag name as a pair. I could've sworn this was a basic implementation gimme, but ET.QName ain't it."""
_typecheck(tagname, str)
if tagname[0] == "{":
i = tagname.rfind("}")
return ( tagname[1:i], tagname[i+1:] )
else:
return (None, tagname)
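# For example, _qsplit("{http://www.forensicswiki.org/wiki/RegXML}hive") returns
# ("http://www.forensicswiki.org/wiki/RegXML", "hive"), while _qsplit("hive") returns (None, "hive").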
def _strcast(val):
if val is None:
return None
return str(val)
def _typecheck(obj, classinfo):
if not isinstance(obj, classinfo):
_logger.info("obj = " + repr(obj))
if isinstance(classinfo, tuple):
raise TypeError("Expecting object to be one of the types %r." % (classinfo,))
else:
raise TypeError("Expecting object to be of type %r." % classinfo)
class DFXMLObject(object):
def __init__(self, *args, **kwargs):
self.command_line = kwargs.get("command_line")
self.version = kwargs.get("version")
self.sources = kwargs.get("sources", [])
self.dc = kwargs.get("dc", dict())
self.externals = kwargs.get("externals", OtherNSElementList())
self._namespaces = dict()
self._volumes = []
self._files = []
input_volumes = kwargs.get("volumes") or []
input_files = kwargs.get("files") or []
for v in input_volumes:
self.append(v)
for f in input_files:
self.append(f)
#Add default namespaces
self.add_namespace("", dfxml.XMLNS_DFXML)
self.add_namespace("dc", dfxml.XMLNS_DC)
def __iter__(self):
"""Yields all VolumeObjects, recursively their FileObjects, and the FileObjects directly attached to this DFXMLObject, in that order."""
for v in self._volumes:
yield v
for f in v:
yield f
for f in self._files:
yield f
def add_namespace(self, prefix, url):
self._namespaces[prefix] = url
ET.register_namespace(prefix, url)
def append(self, value):
if isinstance(value, VolumeObject):
self._volumes.append(value)
elif isinstance(value, FileObject):
self._files.append(value)
else:
_logger.debug("value = %r" % value)
raise TypeError("Expecting a VolumeObject or a FileObject. Got instead this type: %r." % type(value))
def iter_namespaces(self):
"""Yields (prefix, url) pairs of each namespace registered in this DFXMLObject."""
for prefix in self._namespaces:
yield (prefix, self._namespaces[prefix])
def populate_from_Element(self, e):
if "version" in e.attrib:
self.version = e.attrib["version"]
for ce in e.findall(".//*"):
(cns, cln) = _qsplit(ce.tag)
if cln == "command_line":
self.command_line = ce.text
elif cln == "image_filename":
self.sources.append(ce.text)
elif cns not in [dfxml.XMLNS_DFXML, ""]:
#Put all non-DFXML-namespace elements into the externals list.
self.externals.append(ce)
def print_dfxml(self, output_fh=sys.stdout):
"""Memory-efficient DFXML document printer. However, it assumes the whole element tree is already constructed."""
pe = self.to_partial_Element()
dfxml_wrapper = _ET_tostring(pe)
dfxml_foot = "</dfxml>"
#Check for an empty element
if dfxml_wrapper.strip()[-3:] == " />":
dfxml_head = dfxml_wrapper.strip()[:-3] + ">"
elif dfxml_wrapper.strip()[-2:] == "/>":
dfxml_head = dfxml_wrapper.strip()[:-2] + ">"
else:
dfxml_head = dfxml_wrapper.strip()[:-len(dfxml_foot)]
output_fh.write("""<?xml version="1.0"?>\n""")
output_fh.write(dfxml_head)
output_fh.write("\n")
_logger.debug("Writing %d volume objects." % len(self._volumes))
for v in self._volumes:
v.print_dfxml(output_fh)
output_fh.write("\n")
_logger.debug("Writing %d file objects." % len(self._files))
for f in self._files:
e = f.to_Element()
output_fh.write(_ET_tostring(e))
output_fh.write("\n")
output_fh.write(dfxml_foot)
output_fh.write("\n")
def to_Element(self):
outel = self.to_partial_Element()
for e in self.externals:
outel.append(e)
for v in self._volumes:
tmpel = v.to_Element()
outel.append(tmpel)
for f in self._files:
tmpel = f.to_Element()
outel.append(tmpel)
return outel
def to_dfxml(self):
"""Serializes the entire DFXML document tree into a string. Then returns that string. RAM-intensive. Most will want to use print_dfxml() instead"""
return _ET_tostring(self.to_Element())
def to_partial_Element(self):
outel = ET.Element("dfxml")
tmpel0 = ET.Element("metadata")
for key in sorted(self.dc):
_typecheck(key, str)
if ":" in key:
raise ValueError("Dublin Core key-value entries should have keys without the colon character. If this causes an interesting namespace issue for you, please report it as a bug.")
tmpel1 = ET.Element("dc:" + key)
tmpel1.text = self.dc[key]
tmpel0.append(tmpel1)
outel.append(tmpel0)
if self.command_line:
tmpel0 = ET.Element("creator")
tmpel1 = ET.Element("execution_environment")
tmpel2 = ET.Element("command_line")
tmpel2.text = self.command_line
tmpel1.append(tmpel2)
tmpel0.append(tmpel1)
outel.append(tmpel0)
if len(self.sources) > 0:
tmpel0 = ET.Element("source")
for source in self.sources:
tmpel1 = ET.Element("image_filename")
tmpel1.text = source
tmpel0.append(tmpel1)
outel.append(tmpel0)
if self.version:
outel.attrib["version"] = self.version
#Apparently, namespace setting is only available with the write() function, which is memory-impractical for significant uses of DFXML.
#Ref: http://docs.python.org/3.3/library/xml.etree.elementtree.html#xml.etree.ElementTree.ElementTree.write
for prefix in self._namespaces:
attrib_name = "xmlns"
if prefix != "":
attrib_name += ":" + prefix
outel.attrib[attrib_name] = self._namespaces[prefix]
return outel
@property
def command_line(self):
return self._command_line
@command_line.setter
def command_line(self, value):
self._command_line = _strcast(value)
@property
def dc(self):
"""The Dublin Core dictionary of key-value pairs for this document. Typically, "type" is "Hash List", or "Disk Image". Keys should be strings not containing colons, values should be strings. If this causes an issue for you, please report it as a bug."""
return self._dc
@dc.setter
def dc(self, value):
_typecheck(value, dict)
self._dc = value
@property
def externals(self):
"""(This property behaves the same as FileObject.externals.)"""
return self._externals
@externals.setter
def externals(self, val):
_typecheck(val, OtherNSElementList)
self._externals = val
@property
def files(self):
"""List of file objects directly attached to this DFXMLObject. No setter for now."""
return self._files
@property
def namespaces(self):
raise AttributeError("The namespaces dictionary should not be directly accessed; instead, use .iter_namespaces().")
@property
def sources(self):
return self._sources
@sources.setter
def sources(self, value):
if not value is None:
_typecheck(value, list)
self._sources = value
@property
def version(self):
return self._version
@version.setter
def version(self, value):
self._version = _strcast(value)
@property
def volumes(self):
"""List of volume objects directly attached to this DFXMLObject. No setter for now."""
return self._volumes
class RegXMLObject(object):
def __init__(self, *args, **kwargs):
self.command_line = kwargs.get("command_line")
self.interpreter = kwargs.get("interpreter")
self.metadata = kwargs.get("metadata")
self.program = kwargs.get("program")
self.program_version = kwargs.get("program_version")
self.sources = kwargs.get("sources", [])
self.version = kwargs.get("version")
self._hives = []
self._cells = []
self._namespaces = dict()
input_hives = kwargs.get("hives") or [] # In case kwargs["hives"] = None.
input_cells = kwargs.get("cells") or []
for hive in input_hives:
self.append(hive)
for cell in input_cells:
            self.append(cell)
#Add default namespaces
#TODO This will cause a problem when the Objects bindings are used for a DFXML document and RegXML document in the same program.
self.add_namespace("", XMLNS_REGXML)
def __iter__(self):
"""Yields all HiveObjects, recursively their CellObjects, and the CellObjects directly attached to this RegXMLObject, in that order."""
for h in self._hives:
yield h
for c in h:
yield c
for c in self._cells:
yield c
def add_namespace(self, prefix, url):
self._namespaces[prefix] = url
ET.register_namespace(prefix, url)
def append(self, value):
if isinstance(value, HiveObject):
self._hives.append(value)
elif isinstance(value, CellObject):
self._cells.append(value)
else:
_logger.debug("value = %r" % value)
raise TypeError("Expecting a HiveObject or a CellObject. Got instead this type: %r." % type(value))
def print_regxml(self, output_fh=sys.stdout):
"""Serializes and prints the entire object, without constructing the whole tree."""
regxml_wrapper = _ET_tostring(self.to_partial_Element())
#_logger.debug("regxml_wrapper = %r." % regxml_wrapper)
regxml_foot = "</regxml>"
#Check for an empty element
if regxml_wrapper.strip()[-3:] == " />":
regxml_head = regxml_wrapper.strip()[:-3] + ">"
elif regxml_wrapper.strip()[-2:] == "/>":
regxml_head = regxml_wrapper.strip()[:-2] + ">"
else:
regxml_head = regxml_wrapper.strip()[:-len(regxml_foot)]
output_fh.write(regxml_head)
output_fh.write("\n")
for hive in self._hives:
hive.print_regxml(output_fh)
output_fh.write(regxml_foot)
output_fh.write("\n")
def to_Element(self):
outel = self.to_partial_Element()
for hive in self._hives:
tmpel = hive.to_Element()
outel.append(tmpel)
for cell in self._cells:
tmpel = cell.to_Element()
outel.append(tmpel)
return outel
def to_partial_Element(self):
"""
Creates the wrapping RegXML element. No hives, no cells. Saves on creating an entire Element tree in memory.
"""
outel = ET.Element("regxml")
if self.version:
outel.attrib["version"] = self.version
if self.program or self.program_version:
tmpel0 = ET.Element("creator")
if self.program:
tmpel1 = ET.Element("program")
tmpel1.text = self.program
tmpel0.append(tmpel1)
if self.program_version:
tmpel1 = ET.Element("version")
tmpel1.text = self.program_version
tmpel0.append(tmpel1)
outel.append(tmpel0)
if self.command_line:
tmpel0 = ET.Element("execution_environment")
if self.interpreter:
tmpel1 = ET.Element("interpreter")
tmpel1.text = self.interpreter
tmpel1 = ET.Element("command_line")
tmpel1.text = self.command_line
tmpel0.append(tmpel1)
#TODO Note libraries used at run-time
outel.append(tmpel0)
if len(self.sources) > 0:
tmpel0 = ET.Element("source")
for source in self.sources:
tmpel1 = ET.Element("image_filename")
tmpel1.text = source
tmpel0.append(tmpel1)
outel.append(tmpel0)
#Apparently, namespace setting is only available with the write() function, which is memory-impractical for significant uses of RegXML.
#Ref: http://docs.python.org/3.3/library/xml.etree.elementtree.html#xml.etree.ElementTree.ElementTree.write
for prefix in self._namespaces:
attrib_name = "xmlns"
if prefix != "":
attrib_name += ":" + prefix
outel.attrib[attrib_name] = self._namespaces[prefix]
return outel
def to_regxml(self):
"""Serializes the entire RegXML document tree into a string. Returns that string. RAM-intensive. Most will want to use print_regxml() instead."""
return _ET_tostring(self.to_Element())
class VolumeObject(object):
_all_properties = set([
"annos",
"allocated_only",
"block_count",
"block_size",
"byte_runs",
"externals",
"first_block",
"ftype",
"ftype_str",
"last_block",
"partition_offset",
"original_volume",
"sector_size"
])
_diff_attr_names = {
"new":"delta:new_volume",
"deleted":"delta:deleted_volume",
"modified":"delta:modified_volume",
"matched":"delta:matched"
}
#TODO There may be need in the future to compare the annotations as well. It complicates make_differential_dfxml too much for now.
_incomparable_properties = set([
"annos"
])
def __init__(self, *args, **kwargs):
self._files = []
self._annos = set()
self._diffs = set()
for prop in VolumeObject._all_properties:
if prop in ["annos", "files"]:
continue
elif prop == "externals":
setattr(self, prop, kwargs.get(prop, OtherNSElementList()))
else:
setattr(self, prop, kwargs.get(prop))
def __iter__(self):
"""Yields all FileObjects directly attached to this VolumeObject."""
for f in self._files:
yield f
def __repr__(self):
parts = []
for prop in VolumeObject._all_properties:
#Skip outputting the files list.
if prop == "files":
continue
val = getattr(self, prop)
if not val is None:
parts.append("%s=%r" % (prop, val))
return "VolumeObject(" + ", ".join(parts) + ")"
def append(self, value):
_typecheck(value, FileObject)
self._files.append(value)
def compare_to_original(self):
self._diffs = self.compare_to_other(self.original_volume, True)
def compare_to_other(self, other, ignore_original=False):
"""Returns a set of all the properties found to differ."""
_typecheck(other, VolumeObject)
diffs = set()
for prop in VolumeObject._all_properties:
if prop in VolumeObject._incomparable_properties:
continue
if ignore_original and prop == "original_volume":
continue
#_logger.debug("getattr(self, %r) = %r" % (prop, getattr(self, prop)))
#_logger.debug("getattr(other, %r) = %r" % (prop, getattr(other, prop)))
#Allow file system type to be case-insensitive
if prop == "ftype_str":
o = getattr(other, prop)
if o: o = o.lower()
s = getattr(self, prop)
if s: s = s.lower()
if s != o:
diffs.add(prop)
else:
if getattr(self, prop) != getattr(other, prop):
diffs.add(prop)
return diffs
def populate_from_Element(self, e):
global _warned_elements
_typecheck(e, (ET.Element, ET.ElementTree))
#_logger.debug("e = %r" % e)
#Read differential annotations
_read_differential_annotations(VolumeObject._diff_attr_names, e, self.annos)
#Split into namespace and tagname
(ns, tn) = _qsplit(e.tag)
assert tn in ["volume", "original_volume"]
#Look through direct-child elements to populate run array
for ce in e.findall("./*"):
#_logger.debug("ce = %r" % ce)
(cns, ctn) = _qsplit(ce.tag)
#_logger.debug("cns = %r" % cns)
#_logger.debug("ctn = %r" % ctn)
if ctn == "byte_runs":
self.byte_runs = ByteRuns()
self.byte_runs.populate_from_Element(ce)
elif ctn == "original_volume":
self.original_volume = VolumeObject()
self.original_volume.populate_from_Element(ce)
elif ctn in VolumeObject._all_properties:
#_logger.debug("ce.text = %r" % ce.text)
setattr(self, ctn, ce.text)
#_logger.debug("getattr(self, %r) = %r" % (ctn, getattr(self, ctn)))
elif cns not in [dfxml.XMLNS_DFXML, ""]:
#Put all non-DFXML-namespace elements into the externals list.
self.externals.append(ce)
else:
if (cns, ctn) not in _warned_elements:
_warned_elements.add((cns, ctn))
_logger.warning("Unsure what to do with this element in a VolumeObject: %r" % ce)
def print_dfxml(self, output_fh=sys.stdout):
pe = self.to_partial_Element()
dfxml_wrapper = _ET_tostring(pe)
if len(pe) == 0 and len(self._files) == 0:
output_fh.write(dfxml_wrapper)
return
dfxml_foot = "</volume>"
#Deal with an empty element being printed as <elem/>
if len(pe) == 0:
replaced_dfxml_wrapper = dfxml_wrapper.replace(" />", ">")
dfxml_head = replaced_dfxml_wrapper
else:
dfxml_head = dfxml_wrapper.strip()[:-len(dfxml_foot)]
output_fh.write(dfxml_head)
output_fh.write("\n")
_logger.debug("Writing %d file objects for this volume." % len(self._files))
for f in self._files:
e = f.to_Element()
output_fh.write(_ET_tostring(e))
output_fh.write("\n")
output_fh.write(dfxml_foot)
output_fh.write("\n")
def to_Element(self):
outel = self.to_partial_Element()
for e in self.externals:
outel.append(e)
for f in self._files:
tmpel = f.to_Element()
outel.append(tmpel)
return outel
def to_partial_Element(self):
"""Returns the volume element with its properties, except for the child fileobjects. Properties are appended in DFXML schema order."""
outel = ET.Element("volume")
annos_whittle_set = copy.deepcopy(self.annos)
diffs_whittle_set = copy.deepcopy(self.diffs)
#Add differential annotations
for annodiff in VolumeObject._diff_attr_names:
if annodiff in annos_whittle_set:
outel.attrib[VolumeObject._diff_attr_names[annodiff]] = "1"
annos_whittle_set.remove(annodiff)
if len(annos_whittle_set) > 0:
_logger.warning("Failed to export some differential annotations: %r." % annos_whittle_set)
if self.byte_runs:
outel.append(self.byte_runs.to_Element())
def _append_el(prop, value):
tmpel = ET.Element(prop)
_keep = False
if not value is None:
tmpel.text = str(value)
_keep = True
if prop in self.diffs:
tmpel.attrib["delta:changed_property"] = "1"
diffs_whittle_set.remove(prop)
_keep = True
if _keep:
outel.append(tmpel)
def _append_str(prop):
value = getattr(self, prop)
_append_el(prop, value)
def _append_bool(prop):
value = getattr(self, prop)
if not value is None:
value = "1" if value else "0"
_append_el(prop, value)
for prop in [
"partition_offset",
"sector_size",
"block_size",
"ftype",
"ftype_str",
"block_count",
"first_block",
"last_block"
]:
_append_str(prop)
#Output the one Boolean property
_append_bool("allocated_only")
#Output the original volume's properties
if not self.original_volume is None or "original_volume" in diffs_whittle_set:
#Skip FileObject list, if any
if self.original_volume is None:
tmpel = ET.Element("delta:original_volume")
else:
tmpel = self.original_volume.to_partial_Element()
tmpel.tag = "delta:original_volume"
if "original_volume" in diffs_whittle_set:
tmpel.attrib["delta:changed_property"] = "1"
outel.append(tmpel)
if len(diffs_whittle_set) > 0:
_logger.warning("Did not annotate all of the differing properties of this volume. Remaining properties: %r." % diffs_whittle_set)
return outel
@property
def allocated_only(self):
return self._allocated_only
@allocated_only.setter
def allocated_only(self, val):
self._allocated_only = _boolcast(val)
@property
def annos(self):
"""Set of differential annotations. Expected members are the keys of this class's _diff_attr_names dictionary."""
return self._annos
@annos.setter
def annos(self, val):
_typecheck(val, set)
self._annos = val
@property
def block_count(self):
return self._block_count
@block_count.setter
def block_count(self, val):
self._block_count = _intcast(val)
@property
def block_size(self):
return self._block_size
@block_size.setter
def block_size(self, val):
self._block_size = _intcast(val)
@property
def diffs(self):
return self._diffs
@property
def externals(self):
"""(This property behaves the same as FileObject.externals.)"""
return self._externals
@externals.setter
def externals(self, val):
_typecheck(val, OtherNSElementList)
self._externals = val
@property
def first_block(self):
return self._first_block
@first_block.setter
def first_block(self, val):
self._first_block = _intcast(val)
@property
def ftype(self):
return self._ftype
@ftype.setter
def ftype(self, val):
self._ftype = _intcast(val)
@property
def ftype_str(self):
return self._ftype_str
@ftype_str.setter
def ftype_str(self, val):
self._ftype_str = _strcast(val)
@property
def last_block(self):
return self._last_block
@last_block.setter
def last_block(self, val):
self._last_block = _intcast(val)
@property
def original_volume(self):
return self._original_volume
@original_volume.setter
def original_volume(self, val):
if not val is None:
_typecheck(val, VolumeObject)
self._original_volume= val
@property
def partition_offset(self):
return self._partition_offset
@partition_offset.setter
def partition_offset(self, val):
self._partition_offset = _intcast(val)
@property
def sector_size(self):
return self._sector_size
@sector_size.setter
def sector_size(self, val):
self._sector_size = _intcast(val)
class HiveObject(object):
_all_properties = set([
"annos",
"mtime",
"filename",
"original_fileobject",
"original_hive"
])
_diff_attr_names = {
"new":"delta:new_hive",
"deleted":"delta:deleted_hive",
"modified":"delta:modified_hive",
"matched":"delta:matched"
}
_incomparable_properties = set([
"annos"
])
def __init__(self, *args, **kwargs):
self._cells = []
self._annos = set()
self._diffs = set()
for prop in HiveObject._all_properties:
if prop in ["annos", "cells"]:
continue
setattr(self, prop, kwargs.get(prop))
def __iter__(self):
"""Yields all CellObjects directly attached to this HiveObject."""
for c in self._cells:
yield c
def append(self, value):
_typecheck(value, CellObject)
self._cells.append(value)
def compare_to_original(self):
self._diffs = self.compare_to_other(self.original_hive, True)
def compare_to_other(self, other, ignore_original=False):
"""Returns a set of all the properties found to differ."""
_typecheck(other, HiveObject)
diffs = set()
for prop in HiveObject._all_properties:
if prop in HiveObject._incomparable_properties:
continue
if ignore_original and prop == "original_hive":
continue
#Allow file system type to be case-insensitive
if getattr(self, prop) != getattr(other, prop):
diffs.add(prop)
return diffs
def print_regxml(self, output_fh=sys.stdout):
pe = self.to_partial_Element()
xml_wrapper = _ET_tostring(pe)
xml_foot = "</hive>"
#Check for an empty element
if xml_wrapper.strip()[-3:] == " />":
xml_head = xml_wrapper.strip()[:-3] + ">"
elif xml_wrapper.strip()[-2:] == "/>":
xml_head = xml_wrapper.strip()[:-2] + ">"
else:
xml_head = xml_wrapper.strip()[:-len(xml_foot)]
output_fh.write(xml_head)
output_fh.write("\n")
for cell in self._cells:
output_fh.write(cell.to_regxml())
output_fh.write("\n")
output_fh.write(xml_foot)
output_fh.write("\n")
def to_Element(self):
outel = self.to_partial_Element()
for cell in self._cells:
tmpel = cell.to_Element()
outel.append(tmpel)
return outel
def to_partial_Element(self):
outel = ET.Element("hive")
if self.filename:
tmpel = ET.Element("filename")
tmpel.text = self.filename
outel.append(tmpel)
if self.mtime:
tmpel = self.mtime.to_Element()
outel.append(tmpel)
if self.original_fileobject:
tmpel = self.original_fileobject.to_Element()
#NOTE: "delta" namespace intentionally omitted.
tmpel.tag = "original_fileobject"
outel.append(tmpel)
return outel
@property
def annos(self):
"""Set of differential annotations. Expected members are the keys of this class's _diff_attr_names dictionary."""
return self._annos
@annos.setter
def annos(self, val):
_typecheck(val, set)
self._annos = val
@property
def filename(self):
"""Path of the hive file within the parent file system."""
return self._filename
@filename.setter
def filename(self, val):
self._filename = _strcast(val)
@property
def mtime(self):
return self._mtime
@mtime.setter
def mtime(self, val):
if val is None:
self._mtime = None
elif isinstance(val, TimestampObject):
self._mtime = val
else:
checked_val = TimestampObject(val, name="mtime")
self._mtime = checked_val
@property
def original_fileobject(self):
return self._original_fileobject
@original_fileobject.setter
def original_fileobject(self, val):
if not val is None:
_typecheck(val, FileObject)
self._original_fileobject = val
@property
def original_hive(self):
return self._original_hive
@original_hive.setter
def original_hive(self, val):
if not val is None:
_typecheck(val, HiveObject)
self._original_hive = val
class ByteRun(object):
_all_properties = set([
"img_offset",
"fs_offset",
"file_offset",
"fill",
"len",
"type",
"uncompressed_len",
"sha1", # TL: Added sha1 property
"md5", # TL: Added md5 property
"entropy" # TL: Added entropy property
])
def __init__(self, *args, **kwargs):
for prop in ByteRun._all_properties:
setattr(self, prop, kwargs.get(prop))
def __add__(self, other):
"""
Joins two ByteRun objects into a single run if possible. Returns a new object of the concatenation if successful, None if not.
"""
_typecheck(other, ByteRun)
#Don't glom fills of different values
if self.fill != other.fill:
return None
#Don't glom typed byte runs (particularly since type has been observed to be 'resident')
if self.type != other.type:
return None
#Don't glom compressed runs
if not self.uncompressed_len is None or not other.uncompressed_len is None:
return None
if None in [self.len, other.len]:
return None
for prop in ["img_offset", "fs_offset", "file_offset"]:
if None in [getattr(self, prop), getattr(other, prop)]:
continue
if getattr(self, prop) + self.len == getattr(other, prop):
retval = copy.deepcopy(self)
retval.len += other.len
return retval
return None
    def __eq__(self, other):
        global _warned_byterun_badtypecomp
        #Check type
if other is None:
return False
if not isinstance(other, ByteRun):
if not _warned_byterun_badtypecomp:
_logger.warning("A ByteRun comparison was called against a non-ByteRun object: " + repr(other) + ".")
_warned_byterun_badtypecomp = True
return False
#Check values
return \
self.img_offset == other.img_offset and \
self.fs_offset == other.fs_offset and \
self.file_offset == other.file_offset and \
self.fill == other.fill and \
self.len == other.len and \
self.type == other.type and \
self.uncompressed_len == other.uncompressed_len
def __ne__(self, other):
return not self.__eq__(other)
def __repr__(self):
parts = []
for prop in ByteRun._all_properties:
val = getattr(self, prop)
if not val is None:
parts.append("%s=%r" % (prop, val))
return "ByteRun(" + ", ".join(parts) + ")"
def populate_from_Element(self, e):
_typecheck(e, (ET.Element, ET.ElementTree))
#Split into namespace and tagname
(ns, tn) = _qsplit(e.tag)
# TL: Added "run" to check (for fiwalk-0.6.3.exe)
assert (tn == "byte_run" or tn == "run")
copied_attrib = copy.deepcopy(e.attrib)
#Populate run properties from element attributes
for prop in ByteRun._all_properties:
if prop in copied_attrib:
val = copied_attrib.get(prop)
if not val is None:
setattr(self, prop, val)
del copied_attrib[prop]
#Note remaining properties
for prop in copied_attrib:
if prop not in _warned_byterun_attribs:
_warned_byterun_attribs.add(prop)
_logger.warning("No instructions present for processing this attribute found on a byte run: %r." % prop)
# TL: Quick fix to read in block hashes for analysis
# Need to revisit in future for better error checking
for ce in e.findall("./*"):
(cns, ctn) = _qsplit(ce.tag)
if ctn == "hashdigest":
setattr(self, "type", ce.attrib["type"])
if ce.attrib["type"] == "md5" or ce.attrib["type"] == "MD5":
setattr(self, "md5", ce.text)
elif ce.attrib["type"] == "sha1":
setattr(self, "md5", ce.text)
def to_Element(self):
outel = ET.Element("byte_run")
# TL: Added support to append a child hashdigest element
def _append_hash(name, value):
            if not value is None:
tmpel = ET.Element("hashdigest")
tmpel.attrib["type"] = name
if not value is None:
tmpel.text = value
#_anno_hash(tmpel) # TL: Not anticipating annotated hashes, so removed
outel.append(tmpel)
for prop in ByteRun._all_properties:
val = getattr(self, prop)
#Skip null properties
if val is None:
continue
# TL: Added support to populate a child hashdigest element
if prop == "md5":
_append_hash("MD5", self.md5)
continue
elif prop in ["md5", "MD5", "sha1", "SHA1", "type"]:
continue
elif isinstance(val, bytes):
outel.attrib[prop] = str(struct.unpack("b", val)[0])
else:
outel.attrib[prop] = str(val)
return outel
# TL: Added sha1 property setter and getter
@property
def sha1(self):
return self._sha1
@sha1.setter
def sha1(self, val):
self._sha1 = _strcast(val)
# TL: Added md5 property setter and getter
@property
def md5(self):
return self._md5
@md5.setter
def md5(self, val):
self._md5 = _strcast(val)
# TL: Added entropy property setter and getter
@property
def entropy(self):
return self._entropy
@entropy.setter
def entropy(self, val):
self._entropy = _strcast(val)
@property
def file_offset(self):
return self._file_offset
@file_offset.setter
def file_offset(self, val):
self._file_offset = _intcast(val)
@property
def fill(self):
"""
        At the moment, the fill value is assumed to be a single byte. The value you receive from this property will be None or a byte. Setting fill to the string "0" will return the null byte when retrieved later.
For now, setting to any digital string (e.g. "41") will return a byte representing the integer casting string (e.g. the number 41), but this is subject to change pending some discussion.
"""
return self._fill
@fill.setter
def fill(self, val):
if val is None:
self._fill = val
elif val == "0":
self._fill = b'\x00'
elif isinstance(val, bytes):
if len(val) != 1:
raise NotImplementedError("Received a %d-length fill byte string for a byte run. Only 1-byte fill strings are accepted for now, pending further discussion.")
self._fill = val
elif isinstance(val, int):
#This is the easiest way between Python 2 and 3. int.to_bytes would be better, but that is only in >=3.2.
self._fill = struct.pack("b", val)
elif isinstance(val, str) and val.isdigit():
#Recurse, changing type
self.fill = int(val)
@property
def fs_offset(self):
return self._fs_offset
@fs_offset.setter
def fs_offset(self, val):
self._fs_offset = _intcast(val)
@property
def img_offset(self):
return self._img_offset
@img_offset.setter
def img_offset(self, val):
self._img_offset = _intcast(val)
@property
def len(self):
return self._len
@len.setter
def len(self, val):
self._len = _intcast(val)
@property
def type(self):
return self._type
@type.setter
def type(self, val):
self._type = _strcast(val)
@property
def uncompressed_len(self):
return self._uncompressed_len
@uncompressed_len.setter
def uncompressed_len(self, val):
self._uncompressed_len = _intcast(val)
class ByteRuns(object):
"""
A list-like object for ByteRun objects.
"""
#Must define these methods to adhere to the list protocol:
#__len__
#__getitem__
#__setitem__
#__delitem__
#__iter__
#append
#
#Refs:
#http://www.rafekettler.com/magicmethods.html
#http://stackoverflow.com/a/8841520
_facet_values = [None, "data", "inode", "name"]
def __init__(self, run_list=None, **kwargs):
self._facet = kwargs.get("facet")
self._listdata = []
if isinstance(run_list, list):
for run in run_list:
self.append(run)
def __delitem__(self, key):
del self._listdata[key]
def __eq__(self, other):
"""Compares the byte run lists and the facet (allowing a null facet to match "data")."""
#Check type
if other is None:
return False
_typecheck(other, ByteRuns)
if self.facet != other.facet:
if set([self.facet, other.facet]) != set([None, "data"]):
return False
if len(self) != len(other):
#_logger.debug("len(self) = %d" % len(self))
#_logger.debug("len(other) = %d" % len(other))
return False
for (sbr_index, sbr) in enumerate(self):
obr = other[sbr_index]
#_logger.debug("sbr_index = %d" % sbr_index)
#_logger.debug("sbr = %r" % sbr)
#_logger.debug("obr = %r" % obr)
if sbr != obr:
return False
return True
def __getitem__(self, key):
return self._listdata.__getitem__(key)
def __iter__(self):
return iter(self._listdata)
def __len__(self):
return self._listdata.__len__()
def __ne__(self, other):
return not self.__eq__(other)
def __repr__(self):
parts = []
for run in self:
parts.append(repr(run))
maybe_facet = ""
if self.facet:
maybe_facet = "facet=%r, " % self.facet
return "ByteRuns(" + maybe_facet + "run_list=[" + ", ".join(parts) + "])"
def __setitem__(self, key, value):
_typecheck(value, ByteRun)
self._listdata[key] = value
def append(self, value):
"""
Appends a ByteRun object to this container's list.
"""
_typecheck(value, ByteRun)
self._listdata.append(value)
def glom(self, value):
"""
Appends a ByteRun object to this container's list, after attempting to join the run with the last run already stored.
"""
_typecheck(value, ByteRun)
if len(self._listdata) == 0:
self.append(value)
else:
last_run = self._listdata[-1]
maybe_new_run = last_run + value
if maybe_new_run is None:
self.append(value)
else:
self._listdata[-1] = maybe_new_run
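    # A small sketch of glom() coalescing contiguous runs (hypothetical offsets):
    #
    #     brs = ByteRuns()
    #     brs.glom(ByteRun(img_offset=0, len=512))
    #     brs.glom(ByteRun(img_offset=512, len=512))
    #     # len(brs) == 1 and brs[0].len == 1024, because the second run starts where the first ends.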
def iter_contents(self, raw_image, buffer_size=1048576, sector_size=512, errlog=None, statlog=None):
"""
Generator. Yields contents, as byte strings one block at a time, given a backing raw image path. Relies on The SleuthKit's img_cat, so contents can be extracted from any disk image type that TSK supports.
@param buffer_size The maximum size of the byte strings yielded.
@param sector_size The size of a disk sector in the raw image. Required by img_cat.
"""
if not isinstance(raw_image, str):
raise TypeError("iter_contents needs the string path to the image file. Received: %r." % raw_image)
stderr_fh = None
if not errlog is None:
stderr_fh = open(errlog, "wb")
status_fh = None
if not statlog is None:
status_fh = open(errlog, "wb")
#The exit status of the last img_cat.
last_status = None
try:
for run in self:
if run.len is None:
raise AttributeError("Byte runs can't be extracted if a run length is undefined.")
len_to_read = run.len
#If we have a fill character, just pump out that character
if not run.fill is None and len(run.fill) > 0:
while len_to_read > 0:
#This multiplication and slice should handle multi-byte fill characters, in case that ever comes up.
yield (run.fill * buffer_size)[ : min(len_to_read, buffer_size)]
len_to_read -= buffer_size
#Next byte run
continue
if run.img_offset is None:
raise AttributeError("Byte runs can't be extracted if missing a fill character and image offset.")
import platform
if platform.system() == "Windows":
cwd = "sleuthkit-4.1.3-win32" + os.sep + "bin" + os.sep
cmd = [cwd + "img_cat.exe"]
else:
cmd = ["img_cat"]
cmd.append("-b")
cmd.append(str(sector_size))
cmd.append("-s")
cmd.append(str(run.img_offset//sector_size))
cmd.append("-e")
cmd.append(str( (run.img_offset + run.len)//sector_size))
cmd.append(raw_image)
if platform.system() == "Windows":
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=stderr_fh, cwd=cwd)
else:
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=stderr_fh)
#Do the buffered read
while len_to_read > 0:
buffer_data = p.stdout.read(buffer_size)
yield_data = buffer_data[ : min(len_to_read, buffer_size)]
if len(yield_data) > 0:
yield yield_data
else:
#Let the subprocess terminate so we can see the exit status
p.wait()
last_status = p.returncode
if last_status != 0:
raise subprocess.CalledProcessError(last_status, " ".join(cmd), "img_cat failed.")
len_to_read -= buffer_size
except Exception as e:
#Cleanup in an exception
if not stderr_fh is None:
stderr_fh.close()
if not status_fh is None:
if isinstance(e, subprocess.CalledProcessError):
status_fh.write(e.returncode)
else:
status_fh.write("1")
status_fh.close()
raise e
#Cleanup when all's gone well.
if not status_fh is None:
if not last_status is None:
status_fh.write(last_status)
status_fh.close()
if not stderr_fh is None:
stderr_fh.close()
def populate_from_Element(self, e):
_typecheck(e, (ET.Element, ET.ElementTree))
#Split into namespace and tagname
(ns, tn) = _qsplit(e.tag)
# TL: Added "byte runs" (with space) to check (for fiwalk-0.6.3.exe)
assert (tn == "byte_runs" or tn == "byte runs")
if "facet" in e.attrib:
self.facet = e.attrib["facet"]
#Look through direct-child elements to populate run array
for ce in e.findall("./*"):
(cns, ctn) = _qsplit(ce.tag)
if ctn == "byte_run" or ctn == "run":
nbr = ByteRun()
nbr.populate_from_Element(ce)
self.append(nbr)
def to_Element(self):
outel = ET.Element("byte_runs")
for run in self:
tmpel = run.to_Element()
outel.append(tmpel)
if self.facet:
outel.attrib["facet"] = self.facet
return outel
@property
def facet(self):
"""Expected to be null, "data", "inode", or "name". See FileObject.data_brs, FileObject.inode_brs, and FileObject.name_brs."""
return self._facet
@facet.setter
def facet(self, val):
if not val is None:
_typecheck(val, str)
if val not in ByteRuns._facet_values:
raise ValueError("A ByteRuns facet must be one of these: %r. Received: %r." % (ByteRuns._facet_values, val))
self._facet = val
re_precision = re.compile(r"(?P<num>\d+)(?P<unit>(|m|n)s|d)?")
class TimestampObject(object):
"""
Encodes the "dftime" type. Wraps around dfxml.dftime, closely enough that this might just get folded into that class.
TimestampObjects implement a vs-null comparison workaround as in the SAS family of products: Null, for ordering purposes, is considered to be a value less than negative infinity.
"""
timestamp_name_list = ["mtime", "atime", "ctime", "crtime", "dtime", "bkup_time"]
def __init__(self, *args, **kwargs):
self.name = kwargs.get("name")
self.prec = kwargs.get("prec")
#_logger.debug("type(args) = %r" % type(args))
#_logger.debug("args = %r" % (args,))
if len(args) == 0:
self.time = None
elif len(args) == 1:
self.time = args[0]
else:
raise ValueError("Unexpected arguments. Whole args tuple: %r." % (args,))
self._timestamp = None
def __eq__(self, other):
#Check type
if other is None:
return False
_typecheck(other, TimestampObject)
if self.name != other.name:
return False
if self.prec != other.prec:
return False
if self.time != other.time:
return False
return True
def __ge__(self, other):
"""Note: The semantics here and in other ordering functions are that "Null" is a value less than negative infinity."""
if other is None:
            return True
else:
self._comparison_sanity_check(other)
return self.time.__ge__(other.time)
def __gt__(self, other):
"""Note: The semantics here and in other ordering functions are that "Null" is a value less than negative infinity."""
if other is None:
            return True
else:
self._comparison_sanity_check(other)
return self.time.__gt__(other.time)
def __le__(self, other):
"""Note: The semantics here and in other ordering functions are that "Null" is a value less than negative infinity."""
if other is None:
return True
else:
self._comparison_sanity_check(other)
return self.time.__le__(other.time)
def __lt__(self, other):
"""Note: The semantics here and in other ordering functions are that "Null" is a value less than negative infinity."""
if other is None:
return True
else:
self._comparison_sanity_check(other)
return self.time.__lt__(other.time)
def __ne__(self, other):
return not self.__eq__(other)
def __repr__(self):
parts = []
if self.name:
parts.append("name=%r" % self.name)
if self.prec:
parts.append("prec=%r" % (self.prec,))
if self.time:
parts.append("%r" % self.time)
return "TimestampObject(" + ", ".join(parts) + ")"
def __str__(self):
if self.time:
return str(self.time)
else:
return self.__repr__()
def _comparison_sanity_check(self, other):
if None in (self.time, other.time):
raise ValueError("Can't compare TimestampObjects: %r, %r." % self, other)
def populate_from_Element(self, e):
_typecheck(e, (ET.Element, ET.ElementTree))
if "prec" in e.attrib:
self.prec = e.attrib["prec"]
self.time = e.text
(ns, tn) = _qsplit(e.tag)
self.name = tn
def to_Element(self):
_typecheck(self.name, str)
outel = ET.Element(self.name)
if self.prec:
outel.attrib["prec"] = "%d%s" % self.prec
if self.time:
outel.text = str(self.time)
return outel
@property
def name(self):
"""The type of timestamp - modified (mtime), accessed (atime), etc."""
return self._name
@name.setter
def name(self, value):
if not value is None:
if not value in TimestampObject.timestamp_name_list:
raise ValueError("The timestamp name must be in this list: %r. Received: %r." % (TimestampObject.timestamp_name_list, value))
self._name = value
@property
def prec(self):
"""
A pair (resolution, unit); unit is a second (s), millisecond (ms), nanosecond (ns), or day (d). The default unit is "s". Can be passed as a string (e.g. "100ns") or as a 2-tuple.
"""
return self._prec
@prec.setter
def prec(self, value):
if value is None:
self._prec = None
return self._prec
elif isinstance(value, tuple) and \
len(value) == 2 and \
isinstance(value[0], int) and \
isinstance(value[1], str):
self._prec = value
return self._prec
m = re_precision.match(value)
md = m.groupdict()
tup = (int(md["num"]), md.get("unit") or "s")
#_logger.debug("tup = %r" % (tup,))
self._prec = tup
@property
def time(self):
"""
The actual timestamp. A dfxml.dftime object. This class might be superfluous and end up collapsing into that...
"""
return self._time
@time.setter
def time(self, value):
if value is None:
self._time = None
else:
checked_value = dfxml.dftime(value)
#_logger.debug("checked_value.timestamp() = %r" % checked_value.timestamp())
self._time = checked_value
#Propagate timestamp value to other formats
self._timestamp = self._time.timestamp()
@property
def timestamp(self):
"""A Unix floating-point timestamp, as time.mktime returns. Currently, there is no setter for this property."""
return self._timestamp
class FileObject(object):
"""
This class provides property accesses, an XML serializer (ElementTree-based), and a deserializer.
The properties interface is NOT function calls, but simple accesses. That is, the old _fileobject_ style:
assert isinstance(fi, dfxml.fileobject)
fi.mtime()
is now replaced with:
assert isinstance(fi, Objects.FileObject)
fi.mtime
"""
_all_properties = set([
"alloc",
"alloc_inode",
"alloc_name",
"annos",
"app_name", # TL: Added app_name property
"app_state", # TL: Added app_state property
"atime",
"basename", # TL: Added basename property
"basename_norm", # TL: Added basename_norm property
"bkup_time",
"byte_runs",
"compressed",
"crtime",
"ctime",
"data_brs",
"dtime",
"error",
"externals",
"filename",
"filename_norm", # TL: Added filename_norm property
"filesize",
"gid",
"id",
"inode",
"inode_brs",
"link_target",
"libmagic",
"md5",
"meta_type",
"mode",
"mtime",
"name_brs",
"name_type",
"nlink",
"original_fileobject",
"orphan",
"orphan_name", # TL: Added orphan_name property
"parent_object",
"partition",
"seq",
"sha1",
"uid",
"unalloc",
"unused",
"used"
])
_br_facet_to_property = {
"data":"data_brs",
"inode":"inode_brs",
"name":"name_brs"
}
#TODO There may be need in the future to compare the annotations as well. It complicates make_differential_dfxml too much for now.
_incomparable_properties = set([
"annos",
"byte_runs",
"id",
#"unalloc", TL: Removed this property
"unused"
])
_diff_attr_names = {
"new":"delta:new_file",
"deleted":"delta:deleted_file",
"renamed":"delta:renamed_file",
"changed":"delta:changed_file",
"modified":"delta:modified_file",
"matched":"delta:matched",
"matched_soft":"delta:matched_soft" # TL: Added soft match delta
}
def __init__(self, *args, **kwargs):
#Prime all the properties
for prop in FileObject._all_properties:
if prop == "annos":
continue
elif prop == "externals":
setattr(self, prop, kwargs.get(prop, OtherNSElementList()))
else:
setattr(self, prop, kwargs.get(prop))
self._annos = set()
self._diffs = set()
def __eq__(self, other):
if other is None:
return False
_typecheck(other, FileObject)
for prop in FileObject._all_properties:
if prop in FileObject._incomparable_properties:
continue
if getattr(self, prop) != getattr(other, prop):
return False
return True
def __ne__(self, other):
return not self.__eq__(other)
def __repr__(self):
parts = []
for prop in sorted(FileObject._all_properties):
#Save data byte runs for the end, as their lists can get really long.
if prop not in ["byte_runs", "data_brs"]:
value = getattr(self, prop)
if not value is None:
parts.append("%s=%r" % (prop, value))
if self.data_brs:
parts.append("data_brs=%r" % self.byte_runs)
return "FileObject(" + ", ".join(parts) + ")"
def compare_to_original(self):
self._diffs = self.compare_to_other(self.original_fileobject, True)
def compare_to_other(self, other, ignore_original=False, ignore_properties=set(), check_properties=set()):
_typecheck(other, FileObject)
diffs = set()
# TL: Added support to specify a set of poperties to compare
if check_properties:
print("HERE")
for propname in check_properties:
if propname in FileObject._incomparable_properties:
continue
if ignore_original and propname == "original_fileobject":
continue
# TL: Added ignore_properties check
# Can pass a set() of properties to ignore
# e.g., {"filename", "sha1"}
if propname in ignore_properties:
continue
oval = getattr(other, propname)
sval = getattr(self, propname)
if oval is None and sval is None:
continue
if oval != sval:
#_logger.debug("propname, oval, sval: %r, %r, %r" % (propname, oval, sval))
diffs.add(propname)
else:
for propname in FileObject._all_properties:
if propname in FileObject._incomparable_properties:
continue
if ignore_original and propname == "original_fileobject":
continue
# TL: Added ignore_properties check
# Can pass a set() of properties to ignore
# e.g., {"filename", "sha1"}
if propname in ignore_properties:
continue
oval = getattr(other, propname)
sval = getattr(self, propname)
if oval is None and sval is None:
continue
if oval != sval:
#_logger.debug("propname, oval, sval: %r, %r, %r" % (propname, oval, sval))
diffs.add(propname)
return diffs
def extract_facet(self, facet, image_path=None, buffer_size=1048576, partition_offset=None, sector_size=512, errlog=None, statlog=None, icat_threshold = 268435456):
"""
Generator. Extracts the facet with a SleuthKit tool, yielding chunks of the data.
@param buffer_size The facet data is yielded in chunks of at most this parameter's size. Default 1MiB.
@param partition_offset The offset of the file's containing partition, in bytes. Needed for icat. If not given, the FileObject's VolumeObject will be used. If that's also absent, icat can't be used, and img_cat will instead be tried as a fallback (which means byte runs must be in the DFXML).
@param icat_threshold icat incurs extensive, non-sequential IO overhead to walk the filesystem to reach the facet's byte runs. img_cat can be called on each byte run reported in the DFXML file, but on fragmented files this incurs overhead in process spawning. Facets larger than this threshold are extracted with icat. Default 256MiB. Force icat by setting this to -1; force img_cat with infinity (float("inf")).
"""
_image_path = image_path
if _image_path is None:
raise ValueError("The backing image path must be supplied.")
_partition_offset = partition_offset
if _partition_offset is None:
if self.volume_object:
_partition_offset = self.volume_object.partition_offset
#Try using icat; needs inode number and volume offset. We're additionally requiring the filesize be known.
#TODO The icat needs a little more experimentation.
if False and facet == "content" and \
not self.filesize is None and \
self.filesize >= icat_threshold and \
not self.inode is None and \
not _partition_offset is None:
_logger.debug("Extracting with icat: %r." % self)
#Set up logging if desired
stderr_fh = sys.stderr
if not errlog is None:
stderr_fh = open(errlog, "wb")
status_fh = None
if not statlog is None:
status_fh = open(errlog, "w")
#Set up icat process
cmd = ["icat"]
cmd.append("-b")
cmd.append(str(sector_size))
cmd.append("-o")
cmd.append(str(self.volume_object.partition_offset//sector_size))
if not self.volume_object.ftype_str is None:
cmd.append("-f")
cmd.append(self.volume_object.ftype_str)
cmd.append(image_path)
cmd.append(str(self.inode))
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=stderr_fh)
#Do a buffered read
len_to_read = self.filesize
while len_to_read > 0:
buffer_data = p.stdout.read(buffer_size)
yield_data = buffer_data[ : min(len_to_read, buffer_size)]
if len(yield_data) > 0:
yield yield_data
else:
#Let the subprocess terminate so we can see the exit status
p.wait()
last_status = p.returncode
#Log the status if requested
if not status_fh is None:
status_fh.write(str(last_status))
#Act on a bad status
if last_status != 0:
raise subprocess.CalledProcessError(last_status, " ".join(cmd), "icat failed.")
len_to_read -= buffer_size
#Clean up file handles
if status_fh: status_fh.close()
if stderr_fh: stderr_fh.close()
elif not self.byte_runs is None:
for chunk in self.byte_runs.iter_contents(_image_path, buffer_size, sector_size, errlog, statlog):
yield chunk
def is_allocated(self):
"""Collapse potentially-partial allocation information into a yes, no, or unknown answer."""
if self.alloc_inode == True and self.alloc_name == True:
return True
if self.alloc_inode is None and self.alloc_name is None:
if self.alloc is None:
return None
else:
return self.alloc
#Partial allocation information at this point is assumed False. In some file systems, like FAT, we only need one of alloc_inode and alloc_name for allocation status. Guidelines on which should win out haven't been set yet, though, so wait on this.
return False
def populate_from_Element(self, e):
"""Populates this FileObject's properties from an ElementTree Element. The Element need not be retained."""
global _warned_elements
_typecheck(e, (ET.Element, ET.ElementTree))
#_logger.debug("FileObject.populate_from_Element(%r)" % e)
#Split into namespace and tagname
(ns, tn) = _qsplit(e.tag)
assert tn in ["fileobject", "original_fileobject", "parent_object"]
#Map "delta:" attributes of <fileobject>s into the self.annos set
#_logger.debug("self.annos, before: %r." % self.annos)
_read_differential_annotations(FileObject._diff_attr_names, e, self.annos)
#_logger.debug("self.annos, after: %r." % self.annos)
#Look through direct-child elements for other properties
for ce in e.findall("./*"):
(cns, ctn) = _qsplit(ce.tag)
#_logger.debug("Populating from child element: %r." % ce.tag)
#Inherit any marked changes
for attr in ce.attrib:
#_logger.debug("Inspecting attr for diff. annos: %r." % attr)
(ns, an) = _qsplit(attr)
if an == "changed_property" and ns == dfxml.XMLNS_DELTA:
#_logger.debug("Identified changed property: %r." % ctn)
#TODO There may be a more elegant way of handling the hashes and any other attribute-dependent element-to-property mapping. Probably involving XPath.
if ctn == "hashdigest":
if "type" not in ce.attrib:
raise AttributeError("Attribute 'type' not found. Every hashdigest element should have a 'type' attribute to identify the hash type.")
self.diffs.add(ce.attrib["type"].lower())
elif ctn == "byte_runs":
facet = ce.attrib.get("facet")
prop = FileObject._br_facet_to_property.get(facet, "data_brs")
self.diffs.add(prop)
else:
self.diffs.add(ctn)
# TL: Added "byte runs" to check (for old fiwalk-0.6.3.exe)
if ctn == "byte_runs" or ctn == "byte runs":
#byte_runs might be for file contents, the inode/MFT entry, or the directory entry naming the file. Use the facet attribute to determine which. If facet is absent, assume they're data byte runs.
if "facet" in ce.attrib:
if ce.attrib["facet"] not in FileObject._br_facet_to_property:
if not ce.attrib["facet"] in _warned_byterun_facets:
_warned_byterun_facets.add(ce.attrib["facet"])
_logger.warning("byte_runs facet %r was unexpected. Will not interpret this element.")
else:
brs = ByteRuns()
brs.populate_from_Element(ce)
brs.facet = ce.attrib["facet"]
setattr(self, FileObject._br_facet_to_property[brs.facet], brs)
else:
self.byte_runs = ByteRuns()
self.byte_runs.populate_from_Element(ce)
elif ctn == "hashdigest":
if ce.attrib["type"].lower() == "md5":
self.md5 = ce.text
elif ce.attrib["type"].lower() == "sha1":
self.sha1 = ce.text
elif ctn == "original_fileobject":
self.original_fileobject = FileObject()
self.original_fileobject.populate_from_Element(ce)
elif ctn == "parent_object":
self.parent_object = FileObject()
self.parent_object.populate_from_Element(ce)
elif ctn in ["atime", "bkup_time", "crtime", "ctime", "dtime", "mtime"]:
setattr(self, ctn, TimestampObject())
getattr(self, ctn).populate_from_Element(ce)
elif ctn in FileObject._all_properties:
setattr(self, ctn, ce.text)
elif cns not in [dfxml.XMLNS_DFXML, ""]:
#Put all non-DFXML-namespace elements into the externals list.
self.externals.append(ce)
else:
if (cns, ctn) not in _warned_elements:
_warned_elements.add((cns, ctn))
_logger.warning("Uncertain what to do with this element: %r" % ce)
def populate_from_stat(self, s):
"""Populates FileObject fields from a stat() call."""
import os
_typecheck(s, os.stat_result)
self.mode = s.st_mode
self.inode = s.st_ino
self.nlink = s.st_nlink
self.uid = s.st_uid
self.gid = s.st_gid
self.filesize = s.st_size
#s.st_dev is ignored for now.
if "st_mtime" in dir(s):
self.mtime = s.st_mtime
if "st_atime" in dir(s):
self.atime = s.st_atime
if "st_ctime" in dir(s):
self.ctime = s.st_ctime
if "st_birthtime" in dir(s):
self.crtime = s.st_birthtime
def to_Element(self):
"""Creates an ElementTree Element with elements in DFXML schema order."""
outel = ET.Element("fileobject")
annos_whittle_set = copy.deepcopy(self.annos)
diffs_whittle_set = copy.deepcopy(self.diffs)
for annodiff in FileObject._diff_attr_names:
if annodiff in annos_whittle_set:
outel.attrib[FileObject._diff_attr_names[annodiff]] = "1"
annos_whittle_set.remove(annodiff)
if len(annos_whittle_set) > 0:
_logger.warning("Failed to export some differential annotations: %r." % annos_whittle_set)
def _anno_change(el):
if el.tag in self.diffs:
el.attrib["delta:changed_property"] = "1"
diffs_whittle_set.remove(el.tag)
def _anno_hash(el):
if el.attrib["type"] in self.diffs:
el.attrib["delta:changed_property"] = "1"
diffs_whittle_set.remove(el.attrib["type"])
def _anno_byte_runs(el):
if "facet" in el.attrib:
prop = FileObject._br_facet_to_property[el.attrib["facet"]]
else:
prop = "data_brs"
if prop in self.diffs:
el.attrib["delta:changed_property"] = "1"
#_logger.debug("diffs_whittle_set = %r." % diffs_whittle_set)
diffs_whittle_set.remove(prop)
#Recall that Element text must be a string
def _append_str(name, value):
"""Note that empty elements should be created if the element was removed."""
if not value is None or name in diffs_whittle_set:
tmpel = ET.Element(name)
if not value is None:
tmpel.text = str(value)
_anno_change(tmpel)
outel.append(tmpel)
def _append_time(name, value):
"""Note that empty elements should be created if the element was removed."""
if not value is None or name in diffs_whittle_set:
if not value is None and value.time:
tmpel = value.to_Element()
else:
tmpel = ET.Element(name)
_anno_change(tmpel)
outel.append(tmpel)
def _append_bool(name, value):
"""Note that empty elements should be created if the element was removed."""
if not value is None or name in diffs_whittle_set:
tmpel = ET.Element(name)
if not value is None:
tmpel.text = str(1 if value else 0)
_anno_change(tmpel)
outel.append(tmpel)
_using_facets = False
def _append_byte_runs(name, value):
"""The complicated part here is setting the "data" facet on the byte runs, because we assume that no facet definitions means that for this file, there's only the one byte_runs list for data."""
#_logger.debug("_append_byte_runs(%r, %r)" % (name, value))
if value or name in diffs_whittle_set:
if value:
tmpel = value.to_Element()
if "facet" in tmpel.attrib:
_using_facets = True
else:
tmpel = ET.Element("byte_runs")
propname_to_facet = {
"data_brs": "data",
"inode_brs": "inode",
"name_brs": "name"
}
if name in propname_to_facet:
_using_facets = True
tmpel.attrib["facet"] = propname_to_facet[name]
elif _using_facets:
tmpel.attrib["facet"] = propname_to_facet["data_brs"]
_anno_byte_runs(tmpel)
outel.append(tmpel)
def _append_externals():
for e in self.externals:
outel.append(e)
def _append_object(name, value, namespace_prefix=None):
"""name must be the name of a property that has a to_Element() method. namespace_prefix will be prepended as-is to the element tag."""
obj = value
if obj or name in diffs_whittle_set:
if obj:
tmpel = obj.to_Element()
else:
tmpel = ET.Element(name)
#Set the tag name here for properties like parent_object, a FileObject without being wholly a FileObject.
if namespace_prefix:
tmpel.tag = namespace_prefix + name
else:
tmpel.tag = name
_anno_change(tmpel)
outel.append(tmpel)
def _append_hash(name, value):
if not value is None or name in diffs_whittle_set:
tmpel = ET.Element("hashdigest")
tmpel.attrib["type"] = name
if not value is None:
tmpel.text = value
_anno_hash(tmpel)
outel.append(tmpel)
#The parent object is a one-off. Duplicating the whole parent is wasteful, so create a shadow object that just outputs the important bits.
if not self.parent_object is None:
parent_object_shadow = FileObject()
parent_object_shadow.inode = self.parent_object.inode
_append_object("parent_object", parent_object_shadow)
_append_str("filename", self.filename)
_append_str("filename_norm", self.filename_norm) # TL: Added filename_norm to XML out
_append_str("basename", self.basename) # TL: Added basename to XML out
_append_str("basename_norm", self.basename_norm) # TL: Added basename_norm to XML out
_append_str("error", self.error)
_append_str("partition", self.partition)
_append_str("id", self.id)
_append_str("name_type", self.name_type)
_append_str("filesize", self.filesize)
#TODO Define a better flag for if we're going to output <alloc> elements.
if self.alloc_name is None and self.alloc_inode is None:
_append_bool("alloc", self.alloc)
else:
_append_bool("alloc_inode", self.alloc_inode)
_append_bool("alloc_name", self.alloc_name)
_append_bool("used", self.used)
_append_bool("orphan", self.orphan)
_append_str("orphan_name", self.orphan_name) # TL: Added orphan_name to XML out
_append_bool("compressed", self.compressed)
_append_str("inode", self.inode)
_append_str("meta_type", self.meta_type)
_append_str("mode", self.mode)
_append_str("nlink", self.nlink)
_append_str("uid", self.uid)
_append_str("gid", self.gid)
_append_time("mtime", self.mtime)
_append_time("ctime", self.ctime)
_append_time("atime", self.atime)
_append_time("crtime", self.crtime)
_append_str("seq", self.seq)
_append_time("dtime", self.dtime)
_append_time("bkup_time", self.bkup_time)
_append_str("link_target", self.link_target)
_append_str("libmagic", self.libmagic)
_append_externals()
_append_byte_runs("inode_brs", self.inode_brs)
_append_byte_runs("name_brs", self.name_brs)
_append_byte_runs("data_brs", self.data_brs)
_append_hash("md5", self.md5)
_append_hash("sha1", self.sha1)
_append_object("original_fileobject", self.original_fileobject, "delta:")
# TL: Added the following object to print XML elements
_append_str("app_name", self.app_name) # TL: Added app_name to XML out
_append_str("app_state", self.app_state) # TL: Added app_state to XML out
if len(diffs_whittle_set) > 0:
_logger.warning("Did not annotate all of the differing properties of this file. Remaining properties: %r." % diffs_whittle_set)
return outel
def to_dfxml(self):
return _ET_tostring(self.to_Element())
@property
def alloc(self):
"""Note that setting .alloc will affect the value of .unalloc, and vice versa. The last one to set wins."""
global _nagged_alloc
if not _nagged_alloc:
#alloc isn't deprecated yet.
#_logger.warning("The FileObject.alloc property is deprecated. Use .alloc_inode and/or .alloc_name instead. .alloc is proxied as True if alloc_inode and alloc_name are both True.")
_nagged_alloc = True
if self.alloc_inode and self.alloc_name:
return True
else:
return self._alloc
@alloc.setter
def alloc(self, val):
self._alloc = _boolcast(val)
if not self._alloc is None:
self._unalloc = not self._alloc
@property
def alloc_inode(self):
return self._alloc_inode
@alloc_inode.setter
def alloc_inode(self, val):
self._alloc_inode = _boolcast(val)
@property
def alloc_name(self):
return self._alloc_name
@alloc_name.setter
def alloc_name(self, val):
self._alloc_name = _boolcast(val)
@property
def annos(self):
"""Set of differential annotations. Expected members are the keys of this class's _diff_attr_names dictionary."""
return self._annos
@annos.setter
def annos(self, val):
_typecheck(val, set)
self._annos = val
# TL: Added app_name property getter
@property
def app_name(self):
return self._app_name
# TL: Added app_name property setter
@app_name.setter
def app_name(self, val):
self._app_name = _strcast(val)
# TL: Added app_state property getter
@property
def app_state(self):
return self._app_state
# TL: Added app_state property setter
@app_state.setter
def app_state(self, val):
self._app_state = _strcast(val)
@property
def atime(self):
return self._atime
@atime.setter
def atime(self, val):
if val is None:
self._atime = None
elif isinstance(val, TimestampObject):
self._atime = val
else:
checked_val = TimestampObject(val, name="atime")
self._atime = checked_val
# TL: Added basename property getter
@property
def basename(self):
return self._basename
# TL: Added basename property setter
@basename.setter
def basename(self, val):
self._basename = _strcast(val)
# TL: Added basename_norm property getter
@property
def basename_norm(self):
return self._basename_norm
# TL: Added basename_norm property setter
@basename_norm.setter
def basename_norm(self, val):
self._basename_norm = _strcast(val)
@property
def bkup_time(self):
return self._bkup_time
@bkup_time.setter
def bkup_time(self, val):
if val is None:
self._bkup_time = None
elif isinstance(val, TimestampObject):
self._bkup_time = val
else:
checked_val = TimestampObject(val, name="bkup_time")
self._bkup_time = checked_val
@property
def byte_runs(self):
"""This property is now a synonym for the data byte runs (.data_brs)."""
return self.data_brs
@byte_runs.setter
def byte_runs(self, val):
self.data_brs = val
@property
def compressed(self):
return self._compressed
@compressed.setter
def compressed(self, val):
self._compressed = _boolcast(val)
@property
def ctime(self):
return self._ctime
@ctime.setter
def ctime(self, val):
if val is None:
self._ctime = None
elif isinstance(val, TimestampObject):
self._ctime = val
else:
checked_val = TimestampObject(val, name="ctime")
self._ctime = checked_val
@property
def crtime(self):
return self._crtime
@crtime.setter
def crtime(self, val):
if val is None:
self._crtime = None
elif isinstance(val, TimestampObject):
self._crtime = val
else:
checked_val = TimestampObject(val, name="crtime")
self._crtime = checked_val
@property
def data_brs(self):
"""The byte runs that store the file's content."""
return self._data_brs
@data_brs.setter
def data_brs(self, val):
if not val is None:
_typecheck(val, ByteRuns)
self._data_brs = val
@property
def diffs(self):
"""This property intentionally has no setter. To populate, call compare_to_original() after assigning an original_fileobject."""
return self._diffs
@property
def dtime(self):
return self._dtime
@dtime.setter
def dtime(self, val):
if val is None:
self._dtime = None
elif isinstance(val, TimestampObject):
self._dtime = val
else:
checked_val = TimestampObject(val, name="dtime")
self._dtime = checked_val
@property
def error(self):
return self._error
@error.setter
def error(self, val):
self._error = _strcast(val)
@property
def filename(self):
return self._filename
@filename.setter
def filename(self, val):
self._filename = _strcast(val)
# TL: Added filename_norm property getter
@property
def filename_norm(self):
return self._filename_norm
# TL: Added filename_norm property setter
@filename_norm.setter
def filename_norm(self, val):
self._filename_norm = _strcast(val)
@property
def externals(self):
"""
This property exposes XML elements of other namespaces. Since these elements can be of arbitrary complexity, this list is composed solely of xml.etree.ElementTree.Element objects. Each tag must be fully namespace-qualified (of the pattern {URI}localname). If generating the Elements with a script instead of de-serializing from XML, you should issue an ElementTree register_namespace call with your namespace abbreviation prefix.
NOTE: Diffs are currently NOT computed for external elements.
NOTE: This property should be considered unstable, as the interface is in an early design phase. Please notify the maintainers of this library (see the Git history for the Objects.py file) if you are using this interface and wish to be notified of updates."""
return self._externals
@externals.setter
def externals(self, val):
_typecheck(val, OtherNSElementList)
self._externals = val
@property
def filesize(self):
return self._filesize
@filesize.setter
def filesize(self, val):
self._filesize = _intcast(val)
@property
def gid(self):
return self._gid
@gid.setter
def gid(self, val):
self._gid = _strcast(val)
@property
def id(self):
return self._id
@id.setter
def id(self, val):
self._id = _intcast(val)
@property
def inode(self):
return self._inode
@inode.setter
def inode(self, val):
self._inode = _intcast(val)
@property
def libmagic(self):
return self._libmagic
@libmagic.setter
def libmagic(self, val):
self._libmagic = _strcast(val)
@property
def inode_brs(self):
"""The byte run(s) that represents the file's metadata object (the inode or the MFT entry). In file systems that do not distinguish between inode and directory entry, e.g. FAT, .inode_brs should be equivalent to .name_brs, if both fields are present."""
return self._inode_brs
@inode_brs.setter
def inode_brs(self, val):
if not val is None:
_typecheck(val, ByteRuns)
self._inode_brs = val
@property
def md5(self):
return self._md5
@md5.setter
def md5(self, val):
self._md5 = _strcast(val)
@property
def meta_type(self):
return self._meta_type
@meta_type.setter
def meta_type(self, val):
self._meta_type = _intcast(val)
@property
def mode(self):
"""The security mode is represented in the FileObject as a base-10 integer. It is also serialized as a decimal integer."""
return self._mode
@mode.setter
def mode(self, val):
self._mode = _intcast(val)
@property
def mtime(self):
return self._mtime
@mtime.setter
def mtime(self, val):
if val is None:
self._mtime = None
elif isinstance(val, TimestampObject):
self._mtime = val
else:
checked_val = TimestampObject(val, name="mtime")
self._mtime = checked_val
@property
def name_brs(self):
"""The byte run(s) that represents the file's name object (the directory entry). In file systems that do not distinguish between inode and directory entry, e.g. FAT, .inode_brs should be equivalent to .name_brs, if both fields are present."""
return self._name_brs
@name_brs.setter
def name_brs(self, val):
if not val is None:
_typecheck(val, ByteRuns)
self._name_brs = val
@property
def name_type(self):
return self._name_type
@name_type.setter
def name_type(self, val):
if val is None:
self._name_type = val
else:
cast_val = _strcast(val)
if cast_val not in ["-", "p", "c", "d", "b", "r", "l", "s", "h", "w", "v"]:
raise ValueError("Unexpected name_type received: %r (casted to %r)." % (val, cast_val))
self._name_type = cast_val
@property
def nlink(self):
return self._nlink
@nlink.setter
def nlink(self, val):
self._nlink = _intcast(val)
@property
def orphan(self):
return self._orphan
@orphan.setter
def orphan(self, val):
self._orphan = _boolcast(val)
# TL: Added orphan_name property getter
@property
def orphan_name(self):
return self._orphan_name
# TL: Added orphan_name property setter
@orphan_name.setter
def orphan_name(self, val):
self._orphan_name = _strcast(val)
@property
def original_fileobject(self):
return self._original_fileobject
@original_fileobject.setter
def original_fileobject(self, val):
if not val is None:
_typecheck(val, FileObject)
self._original_fileobject = val
@property
def partition(self):
return self._partition
@partition.setter
def partition(self, val):
self._partition = _intcast(val)
@property
def parent_object(self):
"""This object is an extremely sparse FileObject, containing just identifying information. Alternately, it can be an entire object reference to the parent Object, though uniqueness should be checked."""
return self._parent_object
@parent_object.setter
def parent_object(self, val):
if not val is None:
_typecheck(val, FileObject)
self._parent_object = val
@property
def seq(self):
return self._seq
@seq.setter
def seq(self, val):
self._seq = _intcast(val)
@property
def sha1(self):
return self._sha1
@sha1.setter
def sha1(self, val):
self._sha1 = _strcast(val)
@property
def uid(self):
return self._uid
@uid.setter
def uid(self, val):
self._uid = _strcast(val)
@property
def unalloc(self):
"""Note that setting .unalloc will affect the value of .alloc, and vice versa. The last one to set wins."""
return self._unalloc
@unalloc.setter
def unalloc(self, val):
self._unalloc = _boolcast(val)
if not self._unalloc is None:
self._alloc = not self._unalloc
@property
def unused(self):
return self._unused
@unused.setter
def unused(self, val):
self._unused = _intcast(val)
if not self._unused is None:
self._used = not self._unused
@property
def used(self):
return self._used
@used.setter
def used(self, val):
self._used = _intcast(val)
if not self._used is None:
self._unused = not self._used
@property
def volume_object(self):
"""Reference to the containing volume object. Not meant to be propagated with __repr__ or to_Element()."""
return self._volume_object
@volume_object.setter
def volume_object(self, val):
if not val is None:
_typecheck(val, VolumeObject)
self._volume_object = val
class OtherNSElementList(list):
#Note that super() must be called with arguments to work in Python 2.
@classmethod
def _check_qname(cls, tagname):
(ns, ln) = _qsplit(tagname)
if ns == dfxml.XMLNS_DFXML:
raise ValueError("'External' elements must be a non-DFXML namespace.")
#Register qname for later output
#TODO Devise a module-level interface for namespace abreviations.
def __repr__(self):
#Unwrap the string representation of this class's type name (necessary because we don't necessarily know if it'll be Objects.Other... or just Other...).
_typestr = str(type(self))[ len("<class '") : -len("'>") ]
return _typestr + "(" + super(OtherNSElementList, self).__repr__() + ")"
def __setitem__(self, idx, value):
_typecheck(value, ET.Element)
OtherNSElementList._check_qname(value.tag)
super(OtherNSElementList, self).__setitem__(idx, value)
def append(self, value):
_typecheck(value, ET.Element)
OtherNSElementList._check_qname(value.tag)
super(OtherNSElementList, self).append(value)
class CellObject(object):
_all_properties = set([
"alloc",
"app_name", # TL: Added app_name property
"app_state", # TL: Added app_state property
"annos",
"basename",
"basename_norm", # TL: Added basename_norm property
"byte_runs",
"cellpath",
"cellpath_norm", # TL: Added cellpath_norm property
"data",
"data_conversions",
"data_encoding", # TL: Added data_encoding element
"data_raw", # TL: Added data_raw element
"data_type",
"error",
"mtime",
"name_type",
"original_cellobject",
"parent_object",
"root",
"rootkey" # TL: Added rootkey element
])
_diff_attr_names = {
"new":"delta:new_cell",
"deleted":"delta:deleted_cell",
"changed":"delta:changed_cell",
"modified":"delta:modified_cell",
"matched":"delta:matched",
"matched_soft":"delta:matched_soft" # TL: Added a soft match delta
}
#TODO There may be need in the future to compare the annotations as well.
_incomparable_properties = set([
"annos"
])
def __init__(self, *args, **kwargs):
#These properties must be assigned first for sanity check dependencies
self.name_type = kwargs.get("name_type")
for prop in CellObject._all_properties:
if prop == "annos":
setattr(self, prop, kwargs.get(prop, set()))
else:
setattr(self, prop, kwargs.get(prop))
self._diffs = set()
def __eq__(self, other):
if other is None:
return False
_typecheck(other, CellObject)
for prop in CellObject._all_properties:
if prop in CellObject._incomparable_properties:
continue
if getattr(self, prop) != getattr(other, prop):
return False
return True
def __ne__(self, other):
return not self.__eq__(other)
def __repr__(self):
parts = []
for prop in sorted(list(CellObject._all_properties)):
if not getattr(self, prop) is None:
parts.append("%s=%r" % (prop, getattr(self, prop)))
return "CellObject(" + ", ".join(parts) + ")"
def compare_to_original(self):
self._diffs = self.compare_to_other(self.original_cellobject, True)
def compare_to_other(self, other, ignore_original=False, ignore_properties=set()):
_typecheck(other, CellObject)
diffs = set()
for propname in CellObject._all_properties:
if propname in CellObject._incomparable_properties:
continue
if ignore_original and propname == "original_cellobject":
continue
# TL: Added ignore_properties check
# Can pass a set() of properties to ignore
# e.g., {"cellpath", "basename"}
if propname in ignore_properties:
continue
oval = getattr(other, propname)
sval = getattr(self, propname)
if oval is None and sval is None:
continue
if oval != sval:
#_logger.debug("propname, oval, sval: %r, %r, %r" % (propname, oval, sval))
diffs.add(propname)
return diffs
def populate_from_Element(self, e):
"""Populates this CellObject's properties from an ElementTree Element. The Element need not be retained."""
global _warned_elements
_typecheck(e, (ET.Element, ET.ElementTree))
_read_differential_annotations(CellObject._diff_attr_names, e, self.annos)
#Split into namespace and tagname
(ns, tn) = _qsplit(e.tag)
assert tn in ["cellobject", "original_cellobject", "parent_object"]
if e.attrib.get("root"):
self.root = e.attrib["root"]
#Look through direct-child elements for other properties
for ce in e.findall("./*"):
(cns, ctn) = _qsplit(ce.tag)
if ctn == "alloc":
self.alloc = ce.text
elif ctn == "basename":
self.basename = ce.text
# TL: Added basename_norm to be populated
elif ctn == "basename_norm":
self.basename_norm = ce.text
elif ctn == "byte_runs":
self.byte_runs = ByteRuns()
self.byte_runs.populate_from_Element(ce)
elif ctn == "cellpath":
self.cellpath = ce.text
# TL: Added cellpath_norm to be populated
elif ctn == "cellpath_norm":
self.cellpath_norm = ce.text
elif ctn == "data":
self.data = ce.text
if ce.attrib.get("encoding"):
self.data_encoding = ce.attrib["encoding"]
# TL: Added data encoding set directly from XML tag
# This is different from above, where encoding in an XML attribute in
# this data tag
elif ctn == "data_encoding":
self.data_encoding = ce.text
# TL: Added raw data element to be populated
elif ctn == "data_raw":
self.data_raw = ce.text
elif ctn == "data_conversions":
self.data_conversions = dict()
for cce in ce:
if cce.tag == "int":
self.data_conversions["int"] = int()
elif cce.tag == "string":
self.data_conversions["string"] = cce.text
elif cce.tag == "string_list":
self.data_conversions["string_list"] = []
for ccce in cce:
self.data_conversions["string_list"].append(ccce.text)
elif ctn == "data_type":
self.data_type = ce.text
elif ctn == "error":
self.error = ce.text
elif ctn == "mtime":
self.mtime = TimestampObject()
self.mtime.populate_from_Element(ce)
elif ctn == "name_type":
self.name_type = ce.text
elif ctn == "original_cellobject":
self.original_cellobject = CellObject()
self.original_cellobject.populate_from_Element(ce)
elif ctn == "parent_object":
self.parent_object = CellObject()
self.parent_object.populate_from_Element(ce)
# TL: Added app_state to be populated
elif ctn == "app_state":
self.app_state = ce.text
# TL: Added app_name to be populated
elif ctn == "app_name":
self.app_name = ce.text
# TL: Added rootkey to be populated
elif ctn == "rootkey":
self.rootkey = ce.text
else:
if (cns, ctn) not in _warned_elements:
_warned_elements.add((cns, ctn))
_logger.warning("Uncertain what to do with this element: %r" % ce)
self.sanity_check()
def sanity_check(self):
if self.name_type and self.name_type != "k":
if self.mtime:
_logger.info("Error occurred sanity-checking this CellObject: %r." % self)
raise ValueError("A Registry Key (node) is the only kind of CellObject that can have a timestamp.")
if self.root:
_logger.info("Error occurred sanity-checking this CellObject: %r." % self)
raise ValueError("A Registry Key (node) is the only kind of CellObject that can have the 'root' attribute.")
def to_Element(self):
self.sanity_check()
outel = ET.Element("cellobject")
annos_whittle_set = copy.deepcopy(self.annos)
diffs_whittle_set = copy.deepcopy(self.diffs)
for annodiff in CellObject._diff_attr_names:
if annodiff in annos_whittle_set:
outel.attrib[CellObject._diff_attr_names[annodiff]] = "1"
annos_whittle_set.remove(annodiff)
if len(annos_whittle_set) > 0:
_logger.warning("Failed to export some differential annotations: %r." % annos_whittle_set)
def _anno_change(el):
if el.tag in self.diffs:
el.attrib["delta:changed_property"] = "1"
diffs_whittle_set.remove(el.tag)
#Do an additional check for data_encoding, which is serialized as an attribute.
if el.tag == "data" and "data_encoding" in self.diffs:
el.attrib["delta:changed_property"] = "1"
diffs_whittle_set.remove("data_encoding")
def _append_bool(name, value):
if not value is None or name in diffs_whittle_set:
tmpel = ET.Element(name)
if not value is None:
tmpel.text = "1" if value else "0"
_anno_change(tmpel)
outel.append(tmpel)
#Recall that Element text must be a string
def _append_str(name, value):
if not value is None or name in diffs_whittle_set:
tmpel = ET.Element(name)
if not value is None:
tmpel.text = str(value)
_anno_change(tmpel)
if name == "data" and not self.data_encoding is None:
tmpel.attrib["encoding"] = self.data_encoding
outel.append(tmpel)
def _append_object(name, value, namespace_prefix=None): # TL: Added prefix
if not value is None or name in diffs_whittle_set:
if value is None:
tmpel = ET.Element(name)
else:
tmpel = value.to_Element()
# TL: Added for prefix support
#Set the tag name here for properties like parent_object, which serializes like a CellObject without being wholly a CellObject.
if namespace_prefix:
tmpel.tag = namespace_prefix + name
else:
tmpel.tag = name
_anno_change(tmpel)
outel.append(tmpel)
#TODO root should be an element too. Revise schema.
if self.root:
outel.attrib["root"] = str(self.root)
_append_str("cellpath", self.cellpath)
_append_str("cellpath_norm", self.cellpath_norm) # TL: Added cellpath_norm to XML out
_append_str("basename", self.basename)
_append_str("basename_norm", self.basename_norm) # TL: Added basename_norm to XML out
_append_str("error", self.error)
_append_str("name_type", self.name_type)
_append_bool("alloc", self.alloc)
_append_object("mtime", self.mtime)
_append_str("data_type", self.data_type)
_append_str("data", self.data)
_append_str("data_raw", self.data_raw) # TL: Added data_raw to XML out
_append_str("app_name", self.app_name) # TL: Added app_name to XML out
_append_str("app_state", self.app_state) # TL: Added app_state to XML out
_append_str("rootkey", self.rootkey) # TL: Added rootkey to XML out
#The experimental conversions element needs its own code
if not self.data_conversions is None or "data_conversions" in diffs_whittle_set:
tmpel = ET.Element("data_conversions")
if not self.data_conversions is None:
if "int" in self.data_conversions:
tmpcel = ET.Element("int")
tmpcel.text = str(self.data_conversions["int"])
tmpel.append(tmpcel)
if "string" in self.data_conversions:
tmpcel = ET.Element("string")
tmpcel.text = str(self.data_conversions["string"])
tmpel.append(tmpcel)
if "string_list" in self.data_conversions:
tmpcel = ET.Element("string_list")
for s in self.data_conversions["string"]:
tmpccel = ET.Element("string")
tmpccel.text = s
tmpcel.append(tmpccel)
tmpel.append(tmpcel)
_anno_change(tmpel)
outel.append(tmpel)
_append_object("byte_runs", self.byte_runs)
#_append_object("original_cellobject", self.original_cellobject)
# TL: Added delta to original cellobject for printing
_append_object("original_cellobject", self.original_cellobject, "delta:")
if len(diffs_whittle_set) > 0:
_logger.warning("Did not annotate all of the differing properties of this file. Remaining properties: %r." % diffs_whittle_set)
return outel
def to_regxml(self):
return _ET_tostring(self.to_Element())
@property
def alloc(self):
return self._alloc
@alloc.setter
def alloc(self, val):
self._alloc = _boolcast(val)
@property
def annos(self):
"""Set of differential annotations. Expected members are the keys of this class's _diff_attr_names dictionary."""
return self._annos
@annos.setter
def annos(self, val):
_typecheck(val, set)
self._annos = val
# TL: Added app_name property getter
@property
def app_name(self):
return self._app_name
# TL: Added app_name property setter
@app_name.setter
def app_name(self, val):
self._app_name = _strcast(val)
# TL: Added app_state property getter
@property
def app_state(self):
return self._app_state
# TL: Added app_state property setter
@app_state.setter
def app_state(self, val):
self._app_state = _strcast(val)
@property
def basename(self):
return self._basename
@basename.setter
def basename(self, val):
if not val is None:
_typecheck(val, str)
self._basename = val
# TL: Added basename_norm property getter
@property
def basename_norm(self):
return self._basename_norm
# TL: Added basename_norm property setter
@basename_norm.setter
def basename_norm(self, val):
self._basename_norm = _strcast(val)
@property
def byte_runs(self):
return self._byte_runs
@byte_runs.setter
def byte_runs(self, val):
if not val is None:
_typecheck(val, ByteRuns)
self._byte_runs = val
@property
def cellpath(self):
return self._cellpath
@cellpath.setter
def cellpath(self, val):
if not val is None:
_typecheck(val, str)
self._cellpath = val
# TL: Added cellpath_norm property getter
@property
def cellpath_norm(self):
return self._cellpath_norm
# TL: Added cellpath_norm property setter
@cellpath_norm.setter
def cellpath_norm(self, val):
self._cellpath_norm = _strcast(val)
@property
def data(self):
"""Expecting a base64-encoded string. See conversions (according to the Hive parser's library) in data_conversions property."""
return self._data
@data.setter
def data(self, val):
if not val is None:
_typecheck(val, str)
self._data = val
@property
def data_conversions(self):
return self._data_conversions
@data_conversions.setter
def data_conversions(self, val):
if not val is None:
_typecheck(val, dict)
self._data_conversions = val
@property
def data_encoding(self):
"""Expecting a string, typically 'base64'."""
return self._data_encoding
@data_encoding.setter
def data_encoding(self, val):
if not val is None:
_typecheck(val, str)
self._data_encoding = val
# TL: Added data_raw getter
@property
def data_raw(self):
return self._data_raw
# TL: Added data_raw setter
@data_raw.setter
def data_raw(self, val):
if not val is None:
_typecheck(val, str)
self._data_raw = val
@property
def data_type(self):
"""Expecting a string, e.g. "REG_MULTI_SZ", or an int, because value type is known to be overloaded as an integer storage field in some cells."""
return self._data_type
@data_type.setter
def data_type(self, val):
# TL: Added conversion of Registry (zimmerman) Registry value data
# naming conventions to fit Objects.py naming conventions
if val == "RegNone": val = "REG_NONE"
elif val == "RegSz": val = "REG_SZ"
elif val == "RegExpandSz": val = "REG_EXPAND_SZ"
elif val == "RegBinary": val = "REG_BINARY"
elif val == "RegDword": val = "REG_DWORD"
elif val == "RegDwordBigEndian": val = "REG_DWORD"
elif val == "RegLink": val = "REG_LINK"
elif val == "RegMultiSz": val = "REG_MULTI_SZ"
elif val == "RegResourceList": val = "REG_RESOURCE_LIST"
elif val == "RegFullResourceDescription": val = "REG_FULL_RESOURCE_DESCRIPTOR"
elif val == "RegResourceRequirementsList": val = "REG_RESOURCE_REQUIREMENTS_LIST"
elif val == "RegQword": val = "REG_QWORD"
# TL: Added RegFileTime, represent as BINARY
elif val == "RegFileTime": val = "REG_BINARY"
# TL: Added 14 + 12, represented as BINARY
elif val == "14": val = "REG_BINARY"
elif val == "12": val = "REG_BINARY"
# Not 100% sure about the Registry library type of RegUnknown
# Lets set it to no type, just to be safe
elif val == "RegUnknown": val = "REG_NONE"
# TL: Some recovered cells have incorrect data_type
# If the data_type is an integer, set it to binary
#else:
# val = "REG_BINARY"
if not val in [
None,
"REG_NONE",
"REG_SZ",
"REG_EXPAND_SZ",
"REG_BINARY",
"REG_DWORD",
"REG_DWORD_BIG_ENDIAN",
"REG_DWORD_LITTLE_ENDIAN",
"REG_QWORD_LITTLE_ENDIAN",
"REG_LINK",
"REG_MULTI_SZ",
"REG_RESOURCE_LIST",
"REG_FULL_RESOURCE_DESCRIPTOR",
"REG_RESOURCE_REQUIREMENTS_LIST",
"REG_QWORD"
]:
if not isinstance(val, int) or (isinstance(val, str) and val.isdigit()):
raise ValueError("Unexpected value data type received: %r, type %r." % (val, type(val)))
self._data_type = val
@property
def diffs(self):
return self._diffs
@diffs.setter
def diffs(self, value):
_typecheck(value, set)
self._diffs = value
@property
def error(self):
return self._error
@error.setter
def error(self, value):
if not value is None:
_typecheck(value, str)
self._error = value
@property
def hive_object(self):
"""Reference to the containing hive object. Not meant to be propagated with __repr__ or to_Element()."""
return self._hive_object
@hive_object.setter
def hive_object(self, val):
if not val is None:
_typecheck(val, HiveObject)
self._hive_object = val
@property
def mtime(self):
return self._mtime
@mtime.setter
def mtime(self, val):
if val is None:
self._mtime = None
elif isinstance(val, TimestampObject):
self._mtime = val
else:
self._mtime = TimestampObject(val, name="mtime")
self.sanity_check()
@property
def name_type(self):
return self._name_type
@name_type.setter
def name_type(self, val):
if not val is None:
assert val in ["k", "v"]
self._name_type = val
@property
def original_cellobject(self):
return self._original_cellobject
@original_cellobject.setter
def original_cellobject(self, val):
if not val is None:
_typecheck(val, CellObject)
self._original_cellobject = val
@property
def parent_object(self):
"""This object is an extremely sparse CellObject, containing just identifying information. Alternately, it can be an entire object reference to the parent Object, though uniqueness should be checked."""
return self._parent_object
@parent_object.setter
def parent_object(self, val):
if not val is None:
_typecheck(val, CellObject)
self._parent_object = val
@property
def root(self):
return self._root
@root.setter
def root(self, val):
self._root = _boolcast(val)
# TL: Added rootkey property getter
@property
def rootkey(self):
return self._rootkey
# TL: Added rootkey property setter
@rootkey.setter
def rootkey(self, val):
self._rootkey = _strcast(val)
def iterparse(filename, events=("start","end"), **kwargs):
"""
Generator. Yields a stream of populated DFXMLObjects, VolumeObjects and FileObjects, paired with an event type ("start" or "end"). The DFXMLObject and VolumeObjects do NOT have their child lists populated with this method - that is left to the calling program.
The event type interface is meant to match the interface of ElementTree's iterparse; this is simply for familiarity's sake. DFXMLObjects and VolumeObjects are yielded with "start" when the stream of VolumeObject or FileObjects begins - that is, they are yielded after being fully constructed up to the potentially-lengthy child object stream. FileObjects are yielded only with "end".
@param filename: A string
@param events: Events. Optional. A tuple of strings, containing "start" and/or "end".
@param dfxmlobject: A DFXMLObject document. Optional. A DFXMLObject is created and yielded in the object stream if this argument is not supplied.
@param fiwalk: Optional. Path to a particular fiwalk build you want to run.
"""
#The DFXML stream file handle.
fh = None
subp = None
import platform
if platform.system() == "Windows":
fiwalk_loc = "fiwalk" + os.sep + "fiwalk-4.2.0.exe"
fiwalk_path = kwargs.get(fiwalk_loc, fiwalk_loc)
else:
fiwalk_path = kwargs.get("fiwalk", "fiwalk")
#subp_command = [fiwalk_path, "-x", filename]
#subp_command = [fiwalk_path, "-z", "-M", "-x", filename]
subp_command = [fiwalk_path, "-z", "-g", "-b", "-x", filename]
if filename.endswith("xml"):
fh = open(filename, "rb")
else:
subp = subprocess.Popen(subp_command, stdout=subprocess.PIPE)
fh = subp.stdout
_events = set()
for e in events:
if not e in ("start","end"):
raise ValueError("Unexpected event type: %r. Expecting 'start', 'end'." % e)
_events.add(e)
dobj = kwargs.get("dfxmlobject", DFXMLObject())
#The only way to efficiently populate VolumeObjects is to populate the object when the stream has hit its first FileObject.
vobj = None
#It doesn't seem ElementTree allows fetching parents of Elements that are incomplete (just hit the "start" event). So, build a volume Element when we've hit "<volume ... >", glomming all elements until the first fileobject is hit.
#Likewise with the Element for the DFXMLObject.
dfxml_proxy = None
volume_proxy = None
#State machine, used to track when the first fileobject of a volume is encountered.
READING_START = 0
READING_PRESTREAM = 1 #DFXML metadata, pre-Object stream
READING_VOLUMES = 2
READING_FILES = 3
READING_POSTSTREAM = 4 #DFXML metadata, post-Object stream (typically the <rusage> element)
_state = READING_START
for (ETevent, elem) in ET.iterparse(fh, events=("start-ns", "start", "end")):
#View the object event stream in debug mode
#_logger.debug("(event, elem) = (%r, %r)" % (ETevent, elem))
#if ETevent in ("start", "end"):
# _logger.debug("_ET_tostring(elem) = %r" % _ET_tostring(elem))
#Track namespaces
if ETevent == "start-ns":
dobj.add_namespace(*elem)
ET.register_namespace(*elem)
continue
#Split tag name into namespace and local name
(ns, ln) = _qsplit(elem.tag)
if ETevent == "start":
if ln == "dfxml":
if _state != READING_START:
raise ValueError("Encountered a <dfxml> element, but the parser isn't in its start state. Recursive <dfxml> declarations aren't supported at this time.")
dfxml_proxy = ET.Element(elem.tag)
for k in elem.attrib:
#Note that xmlns declarations don't appear in elem.attrib.
dfxml_proxy.attrib[k] = elem.attrib[k]
_state = READING_PRESTREAM
elif ln == "volume":
if _state == READING_PRESTREAM:
#Cut; yield DFXMLObject now.
dobj.populate_from_Element(dfxml_proxy)
if "start" in _events:
yield ("start", dobj)
#Start populating a new Volume proxy.
volume_proxy = ET.Element(elem.tag)
for k in elem.attrib:
volume_proxy.attrib[k] = elem.attrib[k]
_state = READING_VOLUMES
elif ln == "fileobject":
if _state == READING_PRESTREAM:
#Cut; yield DFXMLObject now.
dobj.populate_from_Element(dfxml_proxy)
if "start" in _events:
yield ("start", dobj)
elif _state == READING_VOLUMES:
#_logger.debug("Encountered a fileobject while reading volume properties. Yielding volume now.")
#Cut; yield VolumeObject now.
if volume_proxy is not None:
vobj = VolumeObject()
vobj.populate_from_Element(volume_proxy)
if "start" in _events:
yield ("start", vobj)
#Reset
volume_proxy.clear()
volume_proxy = None
_state = READING_FILES
elif ETevent == "end":
if ln == "fileobject":
if _state in (READING_PRESTREAM, READING_POSTSTREAM):
#This particular branch can be reached if there are trailing fileobject elements after the volume element. This would happen if a tool needed to represent files (likely reassembled fragments) found outside all the partitions.
#More frequently, we hit this point when there are no volume groupings.
vobj = None
fi = FileObject()
fi.populate_from_Element(elem)
fi.volume_object = vobj
#_logger.debug("fi = %r" % fi)
if "end" in _events:
yield ("end", fi)
#Reset
elem.clear()
elif elem.tag == "dfxml":
if "end" in _events:
yield ("end", dobj)
elif elem.tag == "volume":
if "end" in _events:
yield ("end", vobj)
_state = READING_POSTSTREAM
elif _state == READING_VOLUMES:
#This is a volume property; glom onto the proxy.
if volume_proxy is not None:
volume_proxy.append(elem)
elif _state == READING_PRESTREAM:
if ln in ["metadata", "creator", "source"]:
#This is a direct child of the DFXML document property; glom onto the proxy.
if dfxml_proxy is not None:
dfxml_proxy.append(elem)
#If we called Fiwalk, double-check that it exited successfully.
if not subp is None:
_logger.debug("Calling wait() to let the Fiwalk subprocess terminate...") #Just reading from subp.stdout doesn't let the process terminate; it only finishes working.
subp.wait()
if subp.returncode != 0:
#CalledProcessError requires the return code and command as positional arguments; pass the message as its output.
e = subprocess.CalledProcessError(subp.returncode, subp_command, "There was an error running Fiwalk.")
raise e
_logger.debug("...Done.")
def iterparse_CellObjects(filename, events=("start","end"), **kwargs):
""" Iterparse implementation for RegXML stdout from CellXML. """
#The DFXML stream file handle.
fh = None
subp = None
import platform
import subprocess
if platform.system() == "Windows":
cellxml_loc = "CellXML-Registry-1.3.1" + os.sep + "CellXML-Registry-1.3.1.exe"
else:
print("Error. Cannot parse hives using CellXML on Linux")
return
# Perform a quick test to ensure hive file is parsable
# This uses the -c feature in CellXML-Registry
if not filename.endswith("xml"):
testcmd = [cellxml_loc, '-c', '-f', filename]
p = subprocess.Popen(testcmd,
stdin = subprocess.PIPE,
stdout = subprocess.PIPE,
stderr = subprocess.PIPE,
bufsize = -1)
output, error = p.communicate()
# If exit code of CellXML-Registry is not 0, exit.
# Probably should not silently exit (add error in future)
if p.returncode != 0:
return
#subp_command = [cellxml_loc, "-f", filename]
subp_command = [cellxml_loc, "-r", "-f", filename]
if filename.endswith("xml"):
fh = open(filename, "rb")
else:
subp = subprocess.Popen(subp_command, stdout=subprocess.PIPE)
fh = subp.stdout
#The RegXML stream file handle.
#fh = open(filename, "rb")
_events = set()
for e in events:
if not e in ("start","end"):
raise ValueError("Unexpected event type: %r. Expecting 'start', 'end'." % e)
_events.add(e)
robj = kwargs.get("regxmlobject", RegXMLObject())
hobj = kwargs.get("hiveobject", HiveObject())
cobj = kwargs.get("cellobject", CellObject())
#It doesn't seem ElementTree allows fetching parents of Elements that are incomplete (just hit the "start" event). So, build a volume Element when we've hit "<volume ... >", glomming all elements until the first fileobject is hit.
#Likewise with the Element for the DFXMLObject.
regxml_proxy = None
hive_proxy = None
msregistry_proxy = None
#State machine, used to track when the first fileobject of a volume is encountered.
READING_START = 0
READING_PRESTREAM = 1 #DFXML metadata, pre-Object stream
READING_VOLUMES = 2
READING_FILES = 3
READING_POSTSTREAM = 4 #DFXML metadata, post-Object stream (typically the <rusage> element)
_state = READING_START
for (ETevent, elem) in ET.iterparse(fh, events=("start-ns", "start", "end")):
#View the object event stream in debug mode
#_logger.debug("(event, elem) = (%r, %r)" % (ETevent, elem))
#if ETevent in ("start", "end"):
# _logger.debug("_ET_tostring(elem) = %r" % _ET_tostring(elem))
#Track namespaces
if ETevent == "start-ns":
robj.add_namespace(*elem)
ET.register_namespace(*elem)
continue
#Split tag name into namespace and local name
(ns, ln) = _qsplit(elem.tag)
#print(ns,ln)
if ETevent == "start":
if ln == "msregistry" or ln == "hive":
# if _state != READING_START:
# raise ValueError("Encountered a <msregistry> element, but the parser isn't in its start state. Recursive <msregistry> declarations aren't supported at this time.")
hive_proxy = ET.Element(elem.tag)
for k in elem.attrib:
hive_proxy.attrib[k] = elem.attrib[k]
_state = READING_PRESTREAM
elif ETevent == "end":
if ln == "cellobject":
if _state in (READING_PRESTREAM, READING_POSTSTREAM):
#This particular branch can be reached if there are trailing cellobject elements outside a hive grouping.
#More frequently, we hit this point when there are no hive groupings at all.
vobj = None
co = CellObject()
co.populate_from_Element(elem)
#fi.volume_object = vobj
#_logger.debug("fi = %r" % fi)
if "end" in _events:
yield ("end", co)
#Reset
elem.clear()
            elif ln in ("msregistry", "hive"):
if "end" in _events:
yield ("end", robj)
_state = READING_POSTSTREAM
elif _state == READING_PRESTREAM:
if ln in ["metadata", "creator", "source"]:
#This is a direct child of the DFXML document property; glom onto the proxy.
if regxml_proxy is not None:
regxml_proxy.append(elem)
def parse(filename):
"""Returns a DFXMLObject populated from the contents of the (string) filename argument."""
retval = None
appender = None
for (event, obj) in iterparse(filename):
if event == "start":
if isinstance(obj, DFXMLObject):
retval = obj
appender = obj
elif isinstance(obj, VolumeObject):
retval.append(obj)
appender = obj
elif event == "end":
if isinstance(obj, VolumeObject):
appender = retval
elif isinstance(obj, FileObject):
appender.append(obj)
return retval
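#Illustrative sketch: parse() above also documents the iterparse() event protocol -
#("start"|"end", object) pairs for DFXMLObject, VolumeObject and FileObject instances.
#The helper below is a minimal example of driving that protocol directly; the default
#path "sample.dfxml" is a hypothetical input and the helper is not used elsewhere.
def _example_count_files_per_volume(dfxml_path="sample.dfxml"):
    """Tally how many FileObjects appear under each volume, in document order."""
    per_volume_counts = []   #per_volume_counts[i] is the file count of the i'th volume encountered.
    unpartitioned_count = 0  #Files yielded outside any volume grouping.
    in_volume = False
    for (event, obj) in iterparse(dfxml_path):
        if isinstance(obj, VolumeObject):
            if event == "start":
                in_volume = True
                per_volume_counts.append(0)
            else:
                in_volume = False
        elif event == "end" and isinstance(obj, FileObject):
            if in_volume:
                per_volume_counts[-1] += 1
            else:
                unpartitioned_count += 1
    return (per_volume_counts, unpartitioned_count)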
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser()
logging.basicConfig(level=logging.DEBUG)
#Run unit tests
assert _intcast(-1) == -1
assert _intcast("-1") == -1
assert _qsplit("{http://www.w3.org/2001/XMLSchema}all") == ("http://www.w3.org/2001/XMLSchema","all")
assert _qsplit("http://www.w3.org/2001/XMLSchema}all") == (None, "http://www.w3.org/2001/XMLSchema}all")
fi = FileObject()
#Check property setting
fi.mtime = "1999-12-31T23:59:59Z"
_logger.debug("fi = %r" % fi)
#Check bad property setting
failed = None
try:
fi.mtime = "Not a timestamp"
failed = False
except:
failed = True
_logger.debug("fi = %r" % fi)
_logger.debug("failed = %r" % failed)
assert failed
t0 = TimestampObject(prec="100ns", name="mtime")
_logger.debug("t0 = %r" % t0)
assert t0.prec[0] == 100
assert t0.prec[1] == "ns"
t1 = TimestampObject("2009-01-23T01:23:45Z", prec="2", name="atime")
_logger.debug("t1 = %r" % t1)
assert t1.prec[0] == 2
assert t1.prec[1] == "s"
print("Unit tests passed.")
| thomaslaurenson/Vestigium | dfxml/Objects.py | Python | gpl-2.0 | 127,176 | 0.005198 |
from __future__ import unicode_literals
from django.conf.urls import include, url
from django.test import TestCase
from django.utils import six
from rest_framework import generics, routers, serializers, status, viewsets
from rest_framework.renderers import (
BaseRenderer, BrowsableAPIRenderer, JSONRenderer
)
from rest_framework.response import Response
from rest_framework.settings import api_settings
from rest_framework.views import APIView
from tests.models import BasicModel
# Serializer used to test BasicModel
class BasicModelSerializer(serializers.ModelSerializer):
class Meta:
model = BasicModel
class MockPickleRenderer(BaseRenderer):
media_type = 'application/pickle'
class MockJsonRenderer(BaseRenderer):
media_type = 'application/json'
class MockTextMediaRenderer(BaseRenderer):
media_type = 'text/html'
DUMMYSTATUS = status.HTTP_200_OK
DUMMYCONTENT = 'dummycontent'
def RENDERER_A_SERIALIZER(x):
return ('Renderer A: %s' % x).encode('ascii')
def RENDERER_B_SERIALIZER(x):
return ('Renderer B: %s' % x).encode('ascii')
class RendererA(BaseRenderer):
media_type = 'mock/renderera'
format = "formata"
def render(self, data, media_type=None, renderer_context=None):
return RENDERER_A_SERIALIZER(data)
class RendererB(BaseRenderer):
media_type = 'mock/rendererb'
format = "formatb"
def render(self, data, media_type=None, renderer_context=None):
return RENDERER_B_SERIALIZER(data)
class RendererC(RendererB):
media_type = 'mock/rendererc'
format = 'formatc'
charset = "rendererc"
class MockView(APIView):
renderer_classes = (RendererA, RendererB, RendererC)
def get(self, request, **kwargs):
return Response(DUMMYCONTENT, status=DUMMYSTATUS)
class MockViewSettingContentType(APIView):
renderer_classes = (RendererA, RendererB, RendererC)
def get(self, request, **kwargs):
return Response(DUMMYCONTENT, status=DUMMYSTATUS, content_type='setbyview')
class HTMLView(APIView):
renderer_classes = (BrowsableAPIRenderer, )
def get(self, request, **kwargs):
return Response('text')
class HTMLView1(APIView):
renderer_classes = (BrowsableAPIRenderer, JSONRenderer)
def get(self, request, **kwargs):
return Response('text')
class HTMLNewModelViewSet(viewsets.ModelViewSet):
serializer_class = BasicModelSerializer
queryset = BasicModel.objects.all()
class HTMLNewModelView(generics.ListCreateAPIView):
renderer_classes = (BrowsableAPIRenderer,)
permission_classes = []
serializer_class = BasicModelSerializer
queryset = BasicModel.objects.all()
new_model_viewset_router = routers.DefaultRouter()
new_model_viewset_router.register(r'', HTMLNewModelViewSet)
urlpatterns = [
url(r'^setbyview$', MockViewSettingContentType.as_view(renderer_classes=[RendererA, RendererB, RendererC])),
url(r'^.*\.(?P<format>.+)$', MockView.as_view(renderer_classes=[RendererA, RendererB, RendererC])),
url(r'^$', MockView.as_view(renderer_classes=[RendererA, RendererB, RendererC])),
url(r'^html$', HTMLView.as_view()),
url(r'^html1$', HTMLView1.as_view()),
url(r'^html_new_model$', HTMLNewModelView.as_view()),
url(r'^html_new_model_viewset', include(new_model_viewset_router.urls)),
url(r'^restframework', include('rest_framework.urls', namespace='rest_framework'))
]
# TODO: Clean tests below - remove duplicates with above, better unit testing, ...
class RendererIntegrationTests(TestCase):
"""
    End-to-end testing of renderers using a ResponseMixin on a generic view.
"""
urls = 'tests.test_response'
def test_default_renderer_serializes_content(self):
"""If the Accept header is not set the default renderer should serialize the response."""
resp = self.client.get('/')
self.assertEqual(resp['Content-Type'], RendererA.media_type + '; charset=utf-8')
self.assertEqual(resp.content, RENDERER_A_SERIALIZER(DUMMYCONTENT))
self.assertEqual(resp.status_code, DUMMYSTATUS)
    def test_head_method_serializes_no_content(self):
        """No response body may be included in HEAD requests."""
resp = self.client.head('/')
self.assertEqual(resp.status_code, DUMMYSTATUS)
self.assertEqual(resp['Content-Type'], RendererA.media_type + '; charset=utf-8')
self.assertEqual(resp.content, six.b(''))
def test_default_renderer_serializes_content_on_accept_any(self):
"""If the Accept header is set to */* the default renderer should serialize the response."""
resp = self.client.get('/', HTTP_ACCEPT='*/*')
self.assertEqual(resp['Content-Type'], RendererA.media_type + '; charset=utf-8')
self.assertEqual(resp.content, RENDERER_A_SERIALIZER(DUMMYCONTENT))
self.assertEqual(resp.status_code, DUMMYSTATUS)
def test_specified_renderer_serializes_content_default_case(self):
"""If the Accept header is set the specified renderer should serialize the response.
(In this case we check that works for the default renderer)"""
resp = self.client.get('/', HTTP_ACCEPT=RendererA.media_type)
self.assertEqual(resp['Content-Type'], RendererA.media_type + '; charset=utf-8')
self.assertEqual(resp.content, RENDERER_A_SERIALIZER(DUMMYCONTENT))
self.assertEqual(resp.status_code, DUMMYSTATUS)
def test_specified_renderer_serializes_content_non_default_case(self):
"""If the Accept header is set the specified renderer should serialize the response.
(In this case we check that works for a non-default renderer)"""
resp = self.client.get('/', HTTP_ACCEPT=RendererB.media_type)
self.assertEqual(resp['Content-Type'], RendererB.media_type + '; charset=utf-8')
self.assertEqual(resp.content, RENDERER_B_SERIALIZER(DUMMYCONTENT))
self.assertEqual(resp.status_code, DUMMYSTATUS)
def test_specified_renderer_serializes_content_on_accept_query(self):
"""The '_accept' query string should behave in the same way as the Accept header."""
param = '?%s=%s' % (
api_settings.URL_ACCEPT_OVERRIDE,
RendererB.media_type
)
resp = self.client.get('/' + param)
self.assertEqual(resp['Content-Type'], RendererB.media_type + '; charset=utf-8')
self.assertEqual(resp.content, RENDERER_B_SERIALIZER(DUMMYCONTENT))
self.assertEqual(resp.status_code, DUMMYSTATUS)
def test_specified_renderer_serializes_content_on_format_query(self):
"""If a 'format' query is specified, the renderer with the matching
format attribute should serialize the response."""
resp = self.client.get('/?format=%s' % RendererB.format)
self.assertEqual(resp['Content-Type'], RendererB.media_type + '; charset=utf-8')
self.assertEqual(resp.content, RENDERER_B_SERIALIZER(DUMMYCONTENT))
self.assertEqual(resp.status_code, DUMMYSTATUS)
def test_specified_renderer_serializes_content_on_format_kwargs(self):
"""If a 'format' keyword arg is specified, the renderer with the matching
format attribute should serialize the response."""
resp = self.client.get('/something.formatb')
self.assertEqual(resp['Content-Type'], RendererB.media_type + '; charset=utf-8')
self.assertEqual(resp.content, RENDERER_B_SERIALIZER(DUMMYCONTENT))
self.assertEqual(resp.status_code, DUMMYSTATUS)
def test_specified_renderer_is_used_on_format_query_with_matching_accept(self):
"""If both a 'format' query and a matching Accept header specified,
the renderer with the matching format attribute should serialize the response."""
resp = self.client.get('/?format=%s' % RendererB.format,
HTTP_ACCEPT=RendererB.media_type)
self.assertEqual(resp['Content-Type'], RendererB.media_type + '; charset=utf-8')
self.assertEqual(resp.content, RENDERER_B_SERIALIZER(DUMMYCONTENT))
self.assertEqual(resp.status_code, DUMMYSTATUS)
class Issue122Tests(TestCase):
"""
    Tests that cover #122.
"""
urls = 'tests.test_response'
def test_only_html_renderer(self):
"""
Test if no infinite recursion occurs.
"""
self.client.get('/html')
def test_html_renderer_is_first(self):
"""
Test if no infinite recursion occurs.
"""
self.client.get('/html1')
class Issue467Tests(TestCase):
"""
Tests for #467
"""
urls = 'tests.test_response'
def test_form_has_label_and_help_text(self):
resp = self.client.get('/html_new_model')
self.assertEqual(resp['Content-Type'], 'text/html; charset=utf-8')
# self.assertContains(resp, 'Text comes here')
# self.assertContains(resp, 'Text description.')
class Issue807Tests(TestCase):
"""
Covers #807
"""
urls = 'tests.test_response'
def test_does_not_append_charset_by_default(self):
"""
Renderers don't include a charset unless set explicitly.
"""
headers = {"HTTP_ACCEPT": RendererA.media_type}
resp = self.client.get('/', **headers)
expected = "{0}; charset={1}".format(RendererA.media_type, 'utf-8')
self.assertEqual(expected, resp['Content-Type'])
def test_if_there_is_charset_specified_on_renderer_it_gets_appended(self):
"""
If renderer class has charset attribute declared, it gets appended
to Response's Content-Type
"""
headers = {"HTTP_ACCEPT": RendererC.media_type}
resp = self.client.get('/', **headers)
expected = "{0}; charset={1}".format(RendererC.media_type, RendererC.charset)
self.assertEqual(expected, resp['Content-Type'])
def test_content_type_set_explicitly_on_response(self):
"""
The content type may be set explicitly on the response.
"""
headers = {"HTTP_ACCEPT": RendererC.media_type}
resp = self.client.get('/setbyview', **headers)
self.assertEqual('setbyview', resp['Content-Type'])
def test_viewset_label_help_text(self):
param = '?%s=%s' % (
api_settings.URL_ACCEPT_OVERRIDE,
'text/html'
)
resp = self.client.get('/html_new_model_viewset/' + param)
self.assertEqual(resp['Content-Type'], 'text/html; charset=utf-8')
# self.assertContains(resp, 'Text comes here')
# self.assertContains(resp, 'Text description.')
def test_form_has_label_and_help_text(self):
resp = self.client.get('/html_new_model')
self.assertEqual(resp['Content-Type'], 'text/html; charset=utf-8')
# self.assertContains(resp, 'Text comes here')
# self.assertContains(resp, 'Text description.')
| rubendura/django-rest-framework | tests/test_response.py | Python | bsd-2-clause | 10,811 | 0.002312 |
# Copyright ClusterHQ Inc. See LICENSE file for details.
"""
Tests for the Volumes Plugin API provided by the plugin.
"""
from uuid import uuid4, UUID
from twisted.web.http import OK
from twisted.internet import reactor
from .._api import VolumePlugin, DEFAULT_SIZE
from ...apiclient import FakeFlockerClient, Dataset
from ...control._config import dataset_id_from_name
from ...restapi.testtools import buildIntegrationTests, APIAssertionsMixin
class APITestsMixin(APIAssertionsMixin):
"""
Helpers for writing tests for the Docker Volume Plugin API.
"""
NODE_A = uuid4()
NODE_B = uuid4()
def initialize(self):
"""
Create initial objects for the ``VolumePlugin``.
"""
self.flocker_client = FakeFlockerClient()
def test_pluginactivate(self):
"""
``/Plugins.Activate`` indicates the plugin is a volume driver.
"""
        # Docker 1.8, at least, sends "null" as the body.  Our test
        # infrastructure has the opposite bug, so we just send some other
        # garbage as the body (12345) to demonstrate that it is ignored, as
        # the spec declares there should be no body.
return self.assertResult(b"POST", b"/Plugin.Activate", 12345, OK,
{u"Implements": [u"VolumeDriver"]})
def test_remove(self):
"""
``/VolumeDriver.Remove`` returns a successful result.
"""
return self.assertResult(b"POST", b"/VolumeDriver.Remove",
{u"Name": u"vol"}, OK, {u"Err": None})
def test_unmount(self):
"""
``/VolumeDriver.Unmount`` returns a successful result.
"""
return self.assertResult(b"POST", b"/VolumeDriver.Unmount",
{u"Name": u"vol"}, OK, {u"Err": None})
def create(self, name):
"""
Call the ``/VolumeDriver.Create`` API to create a volume with the
given name.
:param unicode name: The name of the volume to create.
        :return: ``Deferred`` that fires when the volume has been created.
"""
return self.assertResult(b"POST", b"/VolumeDriver.Create",
{u"Name": name}, OK, {u"Err": None})
def test_create_creates(self):
"""
``/VolumeDriver.Create`` creates a new dataset in the configuration.
"""
name = u"myvol"
d = self.create(name)
d.addCallback(
lambda _: self.flocker_client.list_datasets_configuration())
d.addCallback(self.assertItemsEqual, [
Dataset(dataset_id=UUID(dataset_id_from_name(name)),
primary=self.NODE_A,
maximum_size=DEFAULT_SIZE,
metadata={u"name": name})])
return d
def test_create_duplicate_name(self):
"""
        If a dataset with the given name already exists,
        ``/VolumeDriver.Create`` succeeds without creating a new volume.
"""
name = u"thename"
# Create a dataset out-of-band with matching name but non-matching
# dataset ID:
d = self.flocker_client.create_dataset(
self.NODE_A, DEFAULT_SIZE, metadata={u"name": name})
d.addCallback(lambda _: self.create(name))
d.addCallback(
lambda _: self.flocker_client.list_datasets_configuration())
d.addCallback(lambda results: self.assertEqual(len(results), 1))
return d
def test_create_duplicate_name_race_condition(self):
"""
If a dataset with the given name is created while the
``/VolumeDriver.Create`` call is in flight, the call does not
result in an error.
"""
name = u"thename"
# Create a dataset out-of-band with matching dataset ID and name
# which the docker plugin won't be able to see.
def create_after_list():
# Clean up the patched version:
del self.flocker_client.list_datasets_configuration
            # But the first time we're called, we create the dataset and lie
            # about its existence:
d = self.flocker_client.create_dataset(
self.NODE_A, DEFAULT_SIZE,
metadata={u"name": name},
dataset_id=UUID(dataset_id_from_name(name)))
d.addCallback(lambda _: [])
return d
self.flocker_client.list_datasets_configuration = create_after_list
return self.create(name)
def test_mount(self):
"""
``/VolumeDriver.Mount`` sets the primary of the dataset with matching
name to the current node and then waits for the dataset to
actually arrive.
"""
name = u"myvol"
dataset_id = UUID(dataset_id_from_name(name))
# Create dataset on a different node:
d = self.flocker_client.create_dataset(
self.NODE_B, DEFAULT_SIZE, metadata={u"name": name},
dataset_id=dataset_id)
# After two polling intervals the dataset arrives as state:
reactor.callLater(VolumePlugin._POLL_INTERVAL,
self.flocker_client.synchronize_state)
d.addCallback(lambda _:
self.assertResult(
b"POST", b"/VolumeDriver.Mount",
{u"Name": name}, OK,
{u"Err": None,
u"Mountpoint": u"/flocker/{}".format(dataset_id)}))
d.addCallback(lambda _: self.flocker_client.list_datasets_state())
d.addCallback(lambda ds: self.assertEqual(
[self.NODE_A], [d.primary for d in ds
if d.dataset_id == dataset_id]))
return d
def test_path(self):
"""
``/VolumeDriver.Path`` returns the mount path of the given volume.
"""
name = u"myvol"
dataset_id = UUID(dataset_id_from_name(name))
d = self.create(name)
# After a polling interval the dataset arrives as state:
reactor.callLater(VolumePlugin._POLL_INTERVAL,
self.flocker_client.synchronize_state)
d.addCallback(lambda _: self.assertResponseCode(
b"POST", b"/VolumeDriver.Mount", {u"Name": name}, OK))
d.addCallback(lambda _:
self.assertResult(
b"POST", b"/VolumeDriver.Path",
{u"Name": name}, OK,
{u"Err": None,
u"Mountpoint": u"/flocker/{}".format(dataset_id)}))
return d
def _build_app(test):
test.initialize()
return VolumePlugin(reactor, test.flocker_client, test.NODE_A).app
RealTestsAPI, MemoryTestsAPI = buildIntegrationTests(
APITestsMixin, "API", _build_app)
| adamtheturtle/flocker | flocker/dockerplugin/test/test_api.py | Python | apache-2.0 | 6,785 | 0.000147 |
# -*- coding:utf-8 -*-
from attribute import attribute
from ci_type import citype
from ci_type_relation import cityperelation
from ci_relation import cirelation
from ci import ci
from history import history
from account import account
from special import special
from dns_record import dnsrecord
| kdyq007/cmdb-api | core/__init__.py | Python | gpl-2.0 | 299 | 0.003344 |
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module is deprecated. Please use `airflow.gcp.sensors.bigquery`."""
import warnings
# pylint: disable=unused-import
from airflow.gcp.sensors.bigquery import BigQueryTableSensor # noqa
warnings.warn(
"This module is deprecated. Please use `airflow.gcp.sensors.bigquery`.",
DeprecationWarning, stacklevel=2
)
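# Note: importing BigQueryTableSensor from this module still works, but emits the
# DeprecationWarning above.  New code should use the replacement path named in the
# warning (and mirrored by the import at the top of this module), e.g.:
#
#     from airflow.gcp.sensors.bigquery import BigQueryTableSensor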
| Fokko/incubator-airflow | airflow/contrib/sensors/bigquery_sensor.py | Python | apache-2.0 | 1,138 | 0 |
#!/usr/bin/env python
from setuptools import setup, find_packages
import versioneer
setup(name='conwhat', #version=versioneer.get_version(),
description='python library for connectome-based white matter atlas analyses in neuroimaging',
long_description='python library for connectome-based white matter atlas analyses in neuroimaging',
keywords='white matter, tractography, MRI, DTI, diffusion, python',
author='John David Griffiths',
author_email='j.davidgriffiths@gmail.com',
url='https://github.com/JohnGriffiths/conwhat',
packages=find_packages(exclude=["*.tests", "*.tests.*", "tests.*", "tests"]),
install_requires=['numpy', 'setuptools'],
classifiers=[
'Intended Audience :: Science/Research',
'Programming Language :: Python',
'Topic :: Software Development',
'Topic :: Scientific/Engineering',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Operating System :: Unix',
'Operating System :: MacOS',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.6',
],
entry_points={
"console_scripts": [
"conwhat=conwhat.__main__:main",
]
},
#cmdclass=versioneer.get_cmdclass()
)
| JohnGriffiths/ConWhAt | setup.py | Python | bsd-3-clause | 1,353 | 0.005913 |
"""Utility functions for Certbot plugin tests."""
import argparse
import copy
import os
import re
import shutil
import tarfile
import josepy as jose
from certbot._internal import constants
from certbot.tests import util as test_util
from certbot_compatibility_test import errors
_KEY_BASE = "rsa2048_key.pem"
KEY_PATH = test_util.vector_path(_KEY_BASE)
KEY = test_util.load_pyopenssl_private_key(_KEY_BASE)
JWK = jose.JWKRSA(key=test_util.load_rsa_private_key(_KEY_BASE))
IP_REGEX = re.compile(r"^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$")
def create_le_config(parent_dir):
"""Sets up LE dirs in parent_dir and returns the config dict"""
config = copy.deepcopy(constants.CLI_DEFAULTS)
le_dir = os.path.join(parent_dir, "certbot")
os.mkdir(le_dir)
for dir_name in ("config", "logs", "work"):
full_path = os.path.join(le_dir, dir_name)
os.mkdir(full_path)
full_name = dir_name + "_dir"
config[full_name] = full_path
config["domains"] = None
return argparse.Namespace(**config)
def extract_configs(configs, parent_dir):
"""Extracts configs to a new dir under parent_dir and returns it"""
config_dir = os.path.join(parent_dir, "configs")
if os.path.isdir(configs):
shutil.copytree(configs, config_dir, symlinks=True)
elif tarfile.is_tarfile(configs):
with tarfile.open(configs, "r") as tar:
tar.extractall(config_dir)
else:
raise errors.Error("Unknown configurations file type")
return config_dir
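# Illustrative sketch: one way the two helpers above might be combined by a
# compatibility-test harness.  The "parent_dir" argument and the
# "testdata/configs.tar.gz" path are assumptions made purely for illustration.
def _example_setup(parent_dir, configs="testdata/configs.tar.gz"):
    """Build a Certbot config namespace and unpack test configurations under parent_dir."""
    le_config = create_le_config(parent_dir)  # argparse.Namespace with config/logs/work dirs
    config_dir = extract_configs(configs, parent_dir)
    return le_config, config_dir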
| stweil/letsencrypt | certbot-compatibility-test/certbot_compatibility_test/util.py | Python | apache-2.0 | 1,520 | 0 |
# Copyright 2004 by Bob Bussell. All rights reserved.
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
"""NOEtools: For predicting NOE coordinates from assignment data.
The input and output are modelled on nmrview peaklists.
This modules is suitable for directly generating an nmrview
peaklist with predicted crosspeaks directly from the
input assignment peaklist.
"""
from . import xpktools
def predictNOE(peaklist, originNuc, detectedNuc, originResNum, toResNum):
"""Predict the i->j NOE position based on self peak (diagonal) assignments
Parameters
----------
    peaklist : xpktools.Peaklist
List of peaks from which to derive predictions
originNuc : str
Name of originating nucleus.
originResNum : int
Index of originating residue.
detectedNuc : str
Name of detected nucleus.
toResNum : int
Index of detected residue.
Returns
-------
returnLine : str
The .xpk file entry for the predicted crosspeak.
Examples
--------
Using predictNOE(peaklist,"N15","H1",10,12)
    where peaklist is of the type xpktools.Peaklist
would generate a .xpk file entry for a crosspeak
that originated on N15 of residue 10 and ended up
as magnetization detected on the H1 nucleus of
residue 12
Notes
    -----
The initial peaklist is assumed to be diagonal (self peaks only)
    and currently there is no checking done to ensure that this
assumption holds true. Check your peaklist for errors and
off diagonal peaks before attempting to use predictNOE.
"""
returnLine = "" # The modified line to be returned to the caller
datamap = _data_map(peaklist.datalabels)
# Construct labels for keying into dictionary
originAssCol = datamap[originNuc + ".L"] + 1
originPPMCol = datamap[originNuc + ".P"] + 1
detectedPPMCol = datamap[detectedNuc + ".P"] + 1
# Make a list of the data lines involving the detected
if str(toResNum) in peaklist.residue_dict(detectedNuc) \
and str(originResNum) in peaklist.residue_dict(detectedNuc):
detectedList = peaklist.residue_dict(detectedNuc)[str(toResNum)]
originList = peaklist.residue_dict(detectedNuc)[str(originResNum)]
returnLine = detectedList[0]
        #Average the chemical shifts over all assignment lines for each residue.
        aveDetectedPPM = _col_ave(detectedList, detectedPPMCol)
        aveOriginPPM = _col_ave(originList, originPPMCol)
        originAss = originList[0].split()[originAssCol]
returnLine = xpktools.replace_entry(returnLine, originAssCol + 1, originAss)
returnLine = xpktools.replace_entry(returnLine, originPPMCol + 1, aveOriginPPM)
return returnLine
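# Illustrative sketch: the prose example from the predictNOE() docstring, written out
# as code.  It assumes, as that docstring suggests, that xpktools.Peaklist can be
# constructed from an nmrview .xpk file name; "noed.xpk" is a hypothetical input file.
def _example_predict_noe(in_filename="noed.xpk"):
    """Predict the crosspeak from N15 of residue 10 to H1 of residue 12."""
    peaklist = xpktools.Peaklist(in_filename)
    return predictNOE(peaklist, "N15", "H1", 10, 12)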
def _data_map(labelline):
# Generate a map between datalabels and column number
# based on a labelline
i = 0 # A counter
datamap = {} # The data map dictionary
labelList = labelline.split() # Get the label line
# Get the column number for each label
for i in range(len(labelList)):
datamap[labelList[i]] = i
return datamap
def _col_ave(list, col):
# Compute average values from a particular column in a string list
total = 0.0
n = 0
for element in list:
total += float(element.split()[col])
n += 1
return total / n
| zjuchenyuan/BioWeb | Lib/Bio/NMR/NOEtools.py | Python | mit | 3,420 | 0.000877 |
"""
Generic framework path manipulation
"""
import re
__all__ = ["framework_info"]
_STRICT_FRAMEWORK_RE = re.compile(
r"""(?x)
(?P<location>^.*)(?:^|/)
(?P<name>
(?P<shortname>[-_A-Za-z0-9]+).framework/
(?:Versions/(?P<version>[^/]+)/)?
(?P=shortname)
(?:_(?P<suffix>[^_]+))?
)$
"""
)
def framework_info(filename):
"""
A framework name can take one of the following four forms:
Location/Name.framework/Versions/SomeVersion/Name_Suffix
Location/Name.framework/Versions/SomeVersion/Name
Location/Name.framework/Name_Suffix
Location/Name.framework/Name
returns None if not found, or a mapping equivalent to:
dict(
location='Location',
name='Name.framework/Versions/SomeVersion/Name_Suffix',
shortname='Name',
version='SomeVersion',
suffix='Suffix',
)
Note that SomeVersion and Suffix are optional and may be None
if not present
"""
is_framework = _STRICT_FRAMEWORK_RE.match(filename)
if not is_framework:
return None
return is_framework.groupdict()
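# Illustrative sketch: worked examples of the mapping described in the
# framework_info() docstring.  The paths are hypothetical; only the second one
# matches the framework pattern.
def _example_framework_info():
    """Show framework_info() on a non-framework path and on a versioned framework path."""
    assert framework_info("completely/invalid") is None
    info = framework_info("P/F.framework/Versions/A/F_debug")
    assert info == {
        "location": "P",
        "name": "F.framework/Versions/A/F_debug",
        "shortname": "F",
        "version": "A",
        "suffix": "debug",
    }
    return info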
| etherkit/OpenBeacon2 | macos/venv/lib/python3.8/site-packages/macholib/framework.py | Python | gpl-3.0 | 1,125 | 0 |