| text (stringlengths 6-947k) | repo_name (stringlengths 5-100) | path (stringlengths 4-231) | language (stringclasses, 1 value) | license (stringclasses, 15 values) | size (int64, 6-947k) | score (float64, 0-0.34) |
---|---|---|---|---|---|---|
import json
import zipfile
from io import BytesIO
from ..constants import BULK_API
from ..api.base import BestBuyCore
from ..utils.exceptions import BestBuyBulkAPIError
class BestBuyBulkAPI(BestBuyCore):
def _api_name(self):
return BULK_API
def archive(self, name, file_format):
"""BestBuy generates Bulk files (archives) daily at 9:00 AM CST.
:params:
            :name (str): Archive type. The types supported by BestBuy's API are:
- products
- stores
- reviews
- categories
- storeAvailability
:file_format (str): File format in which the archive is to be downloaded.
- {xml or json}: Products, Reviews, Stores, and Categories
- {tsv} for Store Availability
:returns: Unzipped files from Best Buy's API response
:rType: dict
BestBuy bulk docs:
- https://developer.bestbuy.com/documentation/bulkDownload-api
"""
payload = {"query": f"{name}.{file_format}.zip", "params": {}}
response = self._call(payload)
return self._load_zipped_response(response, file_format)
def archive_subset(self, subset, file_format):
"""Bulk files (archives) are generated every day at 9 AM by BestBuy.
:params:
            :subset (str): Archive type. The archive types supported are:
- productsActive
- productsInactive (Currently empty or deprecated)
- productsMusic (Currently empty or deprecated)
- productsMovie (Currently empty or deprecated)
- productsHardgood (Currently empty or deprecated)
- productsBundle (Currently empty or deprecated)
- productsGame (Currently empty or deprecated)
- productsSoftware (Currently empty or deprecated)
- productsBlackTie (Currently empty or deprecated)
- productsMarketplace (Currently empty or deprecated)
- productsDigital (Currently empty or deprecated)
:file_format (str): File format in which the archive is to be downloaded.
- xml
- json
BestBuy product subsets bulk docs:
- https://developer.bestbuy.com/documentation/bulkDownload-api
"""
payload = {"query": f"subsets/{subset}.{file_format}.zip", "params": {}}
response = self._call(payload)
return self._load_zipped_response(response, file_format)
    def _load_zipped_response(self, zipped_response, file_format):
        if zipfile.is_zipfile(BytesIO(zipped_response)):
            with zipfile.ZipFile(BytesIO(zipped_response), "r") as z:
                out = {}
                for filename in z.namelist():
                    with z.open(filename) as f:
                        data = f.read()
                        if file_format == "json":
                            out[filename] = json.loads(data)
                        else:
                            out[filename] = data
                return out
        # The payload was not a valid zip archive (e.g. an API error response).
        raise BestBuyBulkAPIError("Bulk API response is not a valid zip archive")
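A minimal usage sketch for the client above (the constructor argument is an assumption; authentication is handled by BestBuyCore, which is not shown here):

from bestbuyapi.api.bulk import BestBuyBulkAPI

bulk = BestBuyBulkAPI("YOUR_API_KEY")  # hypothetical key handling; see BestBuyCore
products = bulk.archive("products", "json")             # dict of {filename: parsed JSON}
stores_xml = bulk.archive("stores", "xml")              # dict of {filename: raw bytes}
active = bulk.archive_subset("productsActive", "json")  # product subset archive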
| lv10/bestbuyapi | bestbuyapi/api/bulk.py | Python | mit | 3,123 | 0.000961 |
# Copyright (c) 2015 Ultimaker B.V.
# Uranium is released under the terms of the LGPLv3 or higher.
from UM.Math.Vector import Vector
class Ray:
def __init__(self, origin = Vector(), direction = Vector()):
self._origin = origin
self._direction = direction
self._inverse_direction = 1.0 / direction
@property
def origin(self):
return self._origin
@property
def direction(self):
return self._direction
@property
def inverseDirection(self):
return self._inverse_direction
def getPointAlongRay(self, distance):
return self._origin + (self._direction * distance)
def __repr__(self):
return "Ray(origin = {0}, direction = {1})".format(self._origin, self._direction)
| thopiekar/Uranium | UM/Math/Ray.py | Python | lgpl-3.0 | 767 | 0.006519 |
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.core.exceptions import ObjectDoesNotExist
from django.views.decorators.cache import never_cache
from django.http import HttpResponse, HttpResponseRedirect
from session_csrf import anonymous_csrf
from ..models import ZLB, ZLBVirtualServer, ZLBVirtualServerRule, ZLBVirtualServerProtection
from ..models import ZLBRule, ZLBProtection, Offender, ZLBVirtualServerPref
from ..forms import ZLBForm, VirtualServerConfirm
from BanHammer.blacklist.management import zeus
import BanHammer.blacklist.tasks as tasks
from BanHammer import settings
@anonymous_csrf
@never_cache
def index(request, zlb=None, action=None):
request.session['order_by'] = request.GET.get('order_by', 'hostname')
request.session['order'] = request.GET.get('order', 'asc')
order_by = request.session.get('order_by', 'address')
order = request.session.get('order', 'asc')
zlbs = ZLB.objects.all()
if order_by == 'created_date':
zlbs = sorted(list(zlbs), key=lambda zlb: zlb.created_date)
elif order_by == 'updated_date':
zlbs = sorted(list(zlbs), key=lambda zlb: zlb.updated_date)
elif order_by == 'name':
zlbs = sorted(list(zlbs), key=lambda zlb: zlb.name)
elif order_by == 'hostname':
zlbs = sorted(list(zlbs), key=lambda zlb: zlb.hostname)
elif order_by == 'datacenter':
zlbs = sorted(list(zlbs), key=lambda zlb: zlb.datacenter)
if order == 'desc':
zlbs.reverse()
data = {'zlbs': zlbs}
if action == 'update':
data['zlb'] = zlb
data['action'] = 'update'
data['testing_env'] = settings.TESTING_ENV
return render_to_response(
'zlb/index.html',
data,
context_instance = RequestContext(request)
)
@anonymous_csrf
def new(request):
if request.method == 'POST':
form = ZLBForm(request.POST)
if form.is_valid():
name = form.cleaned_data['name']
hostname = form.cleaned_data['hostname']
datacenter = form.cleaned_data['datacenter']
doc_url = form.cleaned_data['doc_url']
login = form.cleaned_data['login']
password = form.cleaned_data['password']
comment = form.cleaned_data['comment']
zlb = ZLB(
name=name,
hostname=hostname,
datacenter=datacenter,
doc_url=doc_url,
login=login,
password=password,
comment=comment,
)
zlb.save()
return HttpResponseRedirect('/zlbs')
else:
form = ZLBForm()
return render_to_response(
'zlb/new.html',
{'form': form},
context_instance = RequestContext(request)
)
@anonymous_csrf
def edit(request, id):
if request.method == 'POST':
form = ZLBForm(request.POST)
if form.is_valid():
zlb = ZLB.objects.get(id=id)
zlb.name = form.cleaned_data['name']
zlb.hostname = form.cleaned_data['hostname']
zlb.datacenter = form.cleaned_data['datacenter']
zlb.doc_url = form.cleaned_data['doc_url']
zlb.comment = form.cleaned_data['comment']
zlb.login = form.cleaned_data['login']
if form.cleaned_data['password']:
zlb.password = form.cleaned_data['password']
zlb.save()
return HttpResponseRedirect('/zlbs')
else:
initial = ZLB.objects.get(id=id)
initial = initial.__dict__
id = initial['id']
initial['password'] = ''
form = ZLBForm(initial)
return render_to_response(
'zlb/edit.html',
{'form': form, 'id': id},
context_instance = RequestContext(request)
)
@anonymous_csrf
def delete(request, id):
zlb = ZLB.objects.get(id=id)
zlb.delete()
return HttpResponseRedirect('/zlbs')
@anonymous_csrf
@never_cache
def show(request, id):
zlb = ZLB.objects.get(id=id)
if zlb.updating:
return render_to_response(
'zlb/updating.html',
{'zlb': zlb,},
context_instance = RequestContext(request)
)
vs = ZLBVirtualServer.objects.filter(zlb_id=zlb.id)
prefs_o = ZLBVirtualServerPref.objects.filter(zlb=zlb)
prefs = {}
for p in prefs_o:
prefs[p.vs_name] = p
pr = {}
rul = {}
return render_to_response(
'zlb/show.html',
{'zlb': zlb,
'prefs': prefs,
'vs': vs,
'testing_env': settings.TESTING_ENV,},
context_instance = RequestContext(request)
)
@anonymous_csrf
@never_cache
def update(request, id):
tasks.update_zlb.delay(id)
zlb = ZLB.objects.get(id=id)
return HttpResponseRedirect('/zlbs')
def _parse_addr(addresses):
addr_list = addresses.split(', ')
addresses = []
for addr in addr_list:
network = addr.split('/')
addr = network[0]
if len(network) == 2:
cidr = network[1]
else:
cidr = None
if cidr:
offender = Offender.objects.filter(address=addr, cidr=cidr)
else:
offender = Offender.objects.filter(address=addr)
if offender.count() != 0:
addresses.append(offender[0])
else:
addresses.append(addr)
return addresses
@anonymous_csrf
def index_protection(request, zlb_id):
zlb = ZLB.objects.get(id=zlb_id)
protections = ZLBProtection.objects.filter(zlb_id=zlb_id)
for p in protections:
p.allowed_addresses = _parse_addr(p.allowed_addresses)
p.banned_addresses = _parse_addr(p.banned_addresses)
p.virtual_servers = ZLBVirtualServerProtection.objects.filter(zlb_id=zlb_id, protection_id=p.id)
return render_to_response(
'zlb/protections.html',
{'zlb': zlb,
'protections': protections,},
context_instance = RequestContext(request)
)
@anonymous_csrf
def index_rules(request, zlb_id):
zlb = ZLB.objects.get(id=zlb_id)
rules = ZLBRule.objects.filter(zlb_id=zlb_id)
for rule in rules:
rule.virtual_servers = ZLBVirtualServerRule.objects.filter(zlb_id=zlb_id, rule_id=rule.id)
return render_to_response(
'zlb/rules.html',
{'zlb': zlb,
'rules': rules,},
context_instance = RequestContext(request)
)
@never_cache
@anonymous_csrf
def virtual_server(request, zlb_id, vs_id):
if request.method == 'POST':
form = VirtualServerConfirm(request.POST)
if form.is_valid():
confirm = form.cleaned_data['confirm']
vs = ZLBVirtualServer.objects.get(id=vs_id)
pref = ZLBVirtualServerPref.objects.filter(zlb_id=zlb_id,vs_name=vs.name)
if pref.count() == 0:
p = ZLBVirtualServerPref(
zlb_id=zlb_id,
vs_name=vs.name,
favorite=False,
confirm=confirm,
)
p.save()
else:
pref = pref[0]
pref.confirm = confirm
pref.save()
return HttpResponseRedirect('/zlb/%s/virtual_server/%s' % (zlb_id, vs_id))
else:
form = VirtualServerConfirm()
zlb = ZLB.objects.get(id=zlb_id)
virtual_server = ZLBVirtualServer.objects.get(id=vs_id)
prefs = ZLBVirtualServerPref.objects.filter(zlb=zlb,vs_name=virtual_server.name)
rules = ZLBVirtualServerRule.objects.filter(virtualserver=virtual_server)
protections = ZLBVirtualServerProtection.objects.filter(virtualserver=virtual_server)
for p in protections:
p.protection.allowed_addresses = _parse_addr(p.protection.allowed_addresses)
p.protection.banned_addresses = _parse_addr(p.protection.banned_addresses)
return render_to_response(
'zlb/virtual_server.html',
{'zlb': zlb,
'virtual_server': virtual_server,
'prefs': prefs,
'rules': rules,
'protections': protections,
'form': form,},
context_instance = RequestContext(request)
)
@never_cache
@anonymous_csrf
def virtual_server_name(request, zlb_id, vs_name):
virtual_server_o = ZLBVirtualServer.objects.get(zlb_id=zlb_id, name=vs_name)
return virtual_server(request, zlb_id, virtual_server_o.id)
@anonymous_csrf
def virtual_server_favorite(request, zlb_id, vs_id):
vs = ZLBVirtualServer.objects.get(id=vs_id)
pref = ZLBVirtualServerPref.objects.filter(zlb_id=zlb_id,vs_name=vs.name)
if pref.count() == 0:
p = ZLBVirtualServerPref(
zlb_id=zlb_id,
vs_name=vs.name,
favorite=True,
)
p.save()
else:
pref = pref[0]
pref.favorite = True
pref.save()
return HttpResponseRedirect('/zlb/%s/virtual_server/%s' % (zlb_id, vs_id))
@anonymous_csrf
def virtual_server_unfavorite(request, zlb_id, vs_id):
vs = ZLBVirtualServer.objects.get(id=vs_id)
pref = ZLBVirtualServerPref.objects.get(zlb_id=zlb_id,vs_name=vs.name)
pref.favorite = False
pref.save()
return HttpResponseRedirect('/zlb/%s/virtual_server/%s' % (zlb_id, vs_id))
@anonymous_csrf
def virtual_server_unconfirm(request, zlb_id, vs_id):
vs = ZLBVirtualServer.objects.get(id=vs_id)
pref = ZLBVirtualServerPref.objects.get(zlb_id=zlb_id,vs_name=vs.name)
pref.confirm = ''
pref.save()
return HttpResponseRedirect('/zlb/%s/virtual_server/%s' % (zlb_id, vs_id))
| mozilla/BanHammer | BanHammer/blacklist/views/zlb.py | Python | bsd-3-clause | 9,627 | 0.006232 |
#!/usr/bin/env python
# Copyright 2012 Cisco Systems, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
#
# Performs per host Linux Bridge configuration for Neutron.
# Based on the structure of the OpenVSwitch agent in the
# Neutron OpenVSwitch Plugin.
import sys
import netaddr
from neutron_lib import constants
from oslo_config import cfg
from oslo_log import log as logging
import oslo_messaging
from oslo_service import service
from oslo_utils import excutils
from six import moves
from neutron._i18n import _LE, _LI, _LW
from neutron.agent.linux import bridge_lib
from neutron.agent.linux import ip_lib
from neutron.agent.linux import utils
from neutron.agent import securitygroups_rpc as sg_rpc
from neutron.common import config as common_config
from neutron.common import exceptions
from neutron.common import profiler as setup_profiler
from neutron.common import topics
from neutron.common import utils as n_utils
from neutron.plugins.common import constants as p_const
from neutron.plugins.common import utils as p_utils
from neutron.plugins.ml2.drivers.agent import _agent_manager_base as amb
from neutron.plugins.ml2.drivers.agent import _common_agent as ca
from neutron.plugins.ml2.drivers.agent import config as cagt_config # noqa
from neutron.plugins.ml2.drivers.l2pop.rpc_manager \
import l2population_rpc as l2pop_rpc
from neutron.plugins.ml2.drivers.linuxbridge.agent import arp_protect
from neutron.plugins.ml2.drivers.linuxbridge.agent.common import config # noqa
from neutron.plugins.ml2.drivers.linuxbridge.agent.common \
import constants as lconst
from neutron.plugins.ml2.drivers.linuxbridge.agent.common \
import utils as lb_utils
LOG = logging.getLogger(__name__)
LB_AGENT_BINARY = 'neutron-linuxbridge-agent'
BRIDGE_NAME_PREFIX = "brq"
MAX_VLAN_POSTFIX_LEN = 5
VXLAN_INTERFACE_PREFIX = "vxlan-"
class LinuxBridgeManager(amb.CommonAgentManagerBase):
def __init__(self, bridge_mappings, interface_mappings):
super(LinuxBridgeManager, self).__init__()
self.bridge_mappings = bridge_mappings
self.interface_mappings = interface_mappings
self.validate_interface_mappings()
self.validate_bridge_mappings()
self.ip = ip_lib.IPWrapper()
# VXLAN related parameters:
self.local_ip = cfg.CONF.VXLAN.local_ip
self.vxlan_mode = lconst.VXLAN_NONE
if cfg.CONF.VXLAN.enable_vxlan:
device = self.get_local_ip_device()
self.validate_vxlan_group_with_local_ip()
self.local_int = device.name
self.check_vxlan_support()
def validate_interface_mappings(self):
for physnet, interface in self.interface_mappings.items():
if not ip_lib.device_exists(interface):
LOG.error(_LE("Interface %(intf)s for physical network %(net)s"
" does not exist. Agent terminated!"),
{'intf': interface, 'net': physnet})
sys.exit(1)
def validate_bridge_mappings(self):
for physnet, bridge in self.bridge_mappings.items():
if not ip_lib.device_exists(bridge):
LOG.error(_LE("Bridge %(brq)s for physical network %(net)s"
" does not exist. Agent terminated!"),
{'brq': bridge, 'net': physnet})
sys.exit(1)
def validate_vxlan_group_with_local_ip(self):
if not cfg.CONF.VXLAN.vxlan_group:
return
try:
ip_addr = netaddr.IPAddress(self.local_ip)
# Ensure the configured group address/range is valid and multicast
group_net = netaddr.IPNetwork(cfg.CONF.VXLAN.vxlan_group)
if not group_net.is_multicast():
raise ValueError()
if not ip_addr.version == group_net.version:
raise ValueError()
except (netaddr.core.AddrFormatError, ValueError):
LOG.error(_LE("Invalid VXLAN Group: %(group)s, must be an address "
"or network (in CIDR notation) in a multicast "
"range of the same address family as local_ip: "
"%(ip)s"),
{'group': cfg.CONF.VXLAN.vxlan_group,
'ip': self.local_ip})
sys.exit(1)
def get_local_ip_device(self):
"""Return the device with local_ip on the host."""
device = self.ip.get_device_by_ip(self.local_ip)
if not device:
LOG.error(_LE("Tunneling cannot be enabled without the local_ip "
"bound to an interface on the host. Please "
"configure local_ip %s on the host interface to "
"be used for tunneling and restart the agent."),
self.local_ip)
sys.exit(1)
return device
def get_existing_bridge_name(self, physical_network):
if not physical_network:
return None
return self.bridge_mappings.get(physical_network)
@staticmethod
def get_bridge_name(network_id):
if not network_id:
LOG.warning(_LW("Invalid Network ID, will lead to incorrect "
"bridge name"))
bridge_name = BRIDGE_NAME_PREFIX + \
network_id[:lconst.RESOURCE_ID_LENGTH]
return bridge_name
def get_subinterface_name(self, physical_interface, vlan_id):
if not vlan_id:
LOG.warning(_LW("Invalid VLAN ID, will lead to incorrect "
"subinterface name"))
vlan_postfix = '.%s' % vlan_id
# For the vlan subinterface name prefix we use:
# * the physical_interface, if len(physical_interface) +
# len(vlan_postifx) <= 15 for backward compatibility reasons
# Example: physical_interface = eth0
# prefix = eth0.1
# prefix = eth0.1111
#
# * otherwise a unique hash per physical_interface to help debugging
# Example: physical_interface = long_interface
# prefix = longHASHED.1
# prefix = longHASHED.1111
#
# Remark: For some physical_interface values, the used prefix can be
# both, the physical_interface itself or a hash, depending
# on the vlan_postfix length.
# Example: physical_interface = mix_interface
# prefix = mix_interface.1 (backward compatible)
# prefix = mix_iHASHED.1111
if (len(physical_interface) + len(vlan_postfix) >
constants.DEVICE_NAME_MAX_LEN):
physical_interface = p_utils.get_interface_name(
physical_interface, max_len=(constants.DEVICE_NAME_MAX_LEN -
MAX_VLAN_POSTFIX_LEN))
return "%s%s" % (physical_interface, vlan_postfix)
@staticmethod
def get_tap_device_name(interface_id):
return lb_utils.get_tap_device_name(interface_id)
def get_vxlan_device_name(self, segmentation_id):
if 0 <= int(segmentation_id) <= p_const.MAX_VXLAN_VNI:
return VXLAN_INTERFACE_PREFIX + str(segmentation_id)
else:
LOG.warning(_LW("Invalid Segmentation ID: %s, will lead to "
"incorrect vxlan device name"), segmentation_id)
def get_vxlan_group(self, segmentation_id):
net = netaddr.IPNetwork(cfg.CONF.VXLAN.vxlan_group)
# Map the segmentation ID to (one of) the group address(es)
return str(net.network +
(int(segmentation_id) & int(net.hostmask)))
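        # Worked example (illustrative config): with vxlan_group = 239.1.1.0/24 the
        # hostmask is 0.0.0.255, so segmentation_id 4000 maps to
        # 239.1.1.0 + (4000 & 255) = 239.1.1.160.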
def get_deletable_bridges(self):
bridge_list = bridge_lib.get_bridge_names()
bridges = {b for b in bridge_list if b.startswith(BRIDGE_NAME_PREFIX)}
bridges.difference_update(self.bridge_mappings.values())
return bridges
def get_tap_devices_count(self, bridge_name):
if_list = bridge_lib.BridgeDevice(bridge_name).get_interfaces()
return len([interface for interface in if_list if
interface.startswith(constants.TAP_DEVICE_PREFIX)])
def ensure_vlan_bridge(self, network_id, phy_bridge_name,
physical_interface, vlan_id):
"""Create a vlan and bridge unless they already exist."""
interface = self.ensure_vlan(physical_interface, vlan_id)
if phy_bridge_name:
return self.ensure_bridge(phy_bridge_name)
else:
bridge_name = self.get_bridge_name(network_id)
ips, gateway = self.get_interface_details(interface)
if self.ensure_bridge(bridge_name, interface, ips, gateway):
return interface
def ensure_vxlan_bridge(self, network_id, segmentation_id):
"""Create a vxlan and bridge unless they already exist."""
interface = self.ensure_vxlan(segmentation_id)
if not interface:
LOG.error(_LE("Failed creating vxlan interface for "
"%(segmentation_id)s"),
                      {'segmentation_id': segmentation_id})
return
bridge_name = self.get_bridge_name(network_id)
self.ensure_bridge(bridge_name, interface)
return interface
def get_interface_details(self, interface):
device = self.ip.device(interface)
ips = device.addr.list(scope='global')
# Update default gateway if necessary
gateway = device.route.get_gateway(scope='global')
return ips, gateway
def ensure_flat_bridge(self, network_id, phy_bridge_name,
physical_interface):
"""Create a non-vlan bridge unless it already exists."""
if phy_bridge_name:
return self.ensure_bridge(phy_bridge_name)
else:
bridge_name = self.get_bridge_name(network_id)
ips, gateway = self.get_interface_details(physical_interface)
if self.ensure_bridge(bridge_name, physical_interface, ips,
gateway):
return physical_interface
def ensure_local_bridge(self, network_id, phy_bridge_name):
"""Create a local bridge unless it already exists."""
if phy_bridge_name:
bridge_name = phy_bridge_name
else:
bridge_name = self.get_bridge_name(network_id)
return self.ensure_bridge(bridge_name)
def ensure_vlan(self, physical_interface, vlan_id):
"""Create a vlan unless it already exists."""
interface = self.get_subinterface_name(physical_interface, vlan_id)
if not ip_lib.device_exists(interface):
LOG.debug("Creating subinterface %(interface)s for "
"VLAN %(vlan_id)s on interface "
"%(physical_interface)s",
{'interface': interface, 'vlan_id': vlan_id,
'physical_interface': physical_interface})
try:
int_vlan = self.ip.add_vlan(interface, physical_interface,
vlan_id)
except RuntimeError:
with excutils.save_and_reraise_exception() as ctxt:
if ip_lib.vlan_in_use(vlan_id):
ctxt.reraise = False
LOG.error(_LE("Unable to create VLAN interface for "
"VLAN ID %s because it is in use by "
"another interface."), vlan_id)
return
int_vlan.disable_ipv6()
int_vlan.link.set_up()
LOG.debug("Done creating subinterface %s", interface)
return interface
def ensure_vxlan(self, segmentation_id):
"""Create a vxlan unless it already exists."""
interface = self.get_vxlan_device_name(segmentation_id)
if not ip_lib.device_exists(interface):
LOG.debug("Creating vxlan interface %(interface)s for "
"VNI %(segmentation_id)s",
{'interface': interface,
'segmentation_id': segmentation_id})
args = {'dev': self.local_int}
if self.vxlan_mode == lconst.VXLAN_MCAST:
args['group'] = self.get_vxlan_group(segmentation_id)
if cfg.CONF.VXLAN.ttl:
args['ttl'] = cfg.CONF.VXLAN.ttl
if cfg.CONF.VXLAN.tos:
args['tos'] = cfg.CONF.VXLAN.tos
if cfg.CONF.VXLAN.l2_population:
args['proxy'] = cfg.CONF.VXLAN.arp_responder
try:
int_vxlan = self.ip.add_vxlan(interface, segmentation_id,
**args)
except RuntimeError:
with excutils.save_and_reraise_exception() as ctxt:
# perform this check after an attempt rather than before
# to avoid excessive lookups and a possible race condition.
if ip_lib.vxlan_in_use(segmentation_id):
ctxt.reraise = False
LOG.error(_LE("Unable to create VXLAN interface for "
"VNI %s because it is in use by another "
"interface."), segmentation_id)
return None
int_vxlan.disable_ipv6()
int_vxlan.link.set_up()
LOG.debug("Done creating vxlan interface %s", interface)
return interface
def update_interface_ip_details(self, destination, source, ips,
gateway):
if ips or gateway:
dst_device = self.ip.device(destination)
src_device = self.ip.device(source)
# Append IP's to bridge if necessary
if ips:
for ip in ips:
dst_device.addr.add(cidr=ip['cidr'])
if gateway:
# Ensure that the gateway can be updated by changing the metric
metric = 100
if 'metric' in gateway:
metric = gateway['metric'] - 1
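                # Illustrative example: if the interface's current default route has
                # metric 100, the bridge's route is added below with metric 99 so it
                # takes precedence before the old route is deleted from the source.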
dst_device.route.add_gateway(gateway=gateway['gateway'],
metric=metric)
src_device.route.delete_gateway(gateway=gateway['gateway'])
# Remove IP's from interface
if ips:
for ip in ips:
src_device.addr.delete(cidr=ip['cidr'])
def _bridge_exists_and_ensure_up(self, bridge_name):
"""Check if the bridge exists and make sure it is up."""
br = ip_lib.IPDevice(bridge_name)
br.set_log_fail_as_error(False)
try:
# If the device doesn't exist this will throw a RuntimeError
br.link.set_up()
except RuntimeError:
return False
return True
def ensure_bridge(self, bridge_name, interface=None, ips=None,
gateway=None):
"""Create a bridge unless it already exists."""
# _bridge_exists_and_ensure_up instead of device_exists is used here
# because there are cases where the bridge exists but it's not UP,
# for example:
# 1) A greenthread was executing this function and had not yet executed
# "ip link set bridge_name up" before eventlet switched to this
# thread running the same function
# 2) The Nova VIF driver was running concurrently and had just created
# the bridge, but had not yet put it UP
if not self._bridge_exists_and_ensure_up(bridge_name):
LOG.debug("Starting bridge %(bridge_name)s for subinterface "
"%(interface)s",
{'bridge_name': bridge_name, 'interface': interface})
bridge_device = bridge_lib.BridgeDevice.addbr(bridge_name)
if bridge_device.setfd(0):
return
if bridge_device.disable_stp():
return
if bridge_device.disable_ipv6():
return
if bridge_device.link.set_up():
return
LOG.debug("Done starting bridge %(bridge_name)s for "
"subinterface %(interface)s",
{'bridge_name': bridge_name, 'interface': interface})
else:
bridge_device = bridge_lib.BridgeDevice(bridge_name)
if not interface:
return bridge_name
# Update IP info if necessary
self.update_interface_ip_details(bridge_name, interface, ips, gateway)
# Check if the interface is part of the bridge
if not bridge_device.owns_interface(interface):
try:
# Check if the interface is not enslaved in another bridge
bridge = bridge_lib.BridgeDevice.get_interface_bridge(
interface)
if bridge:
bridge.delif(interface)
bridge_device.addif(interface)
except Exception as e:
LOG.error(_LE("Unable to add %(interface)s to %(bridge_name)s"
"! Exception: %(e)s"),
{'interface': interface, 'bridge_name': bridge_name,
'e': e})
return
return bridge_name
def ensure_physical_in_bridge(self, network_id,
network_type,
physical_network,
segmentation_id):
if network_type == p_const.TYPE_VXLAN:
if self.vxlan_mode == lconst.VXLAN_NONE:
LOG.error(_LE("Unable to add vxlan interface for network %s"),
network_id)
return
return self.ensure_vxlan_bridge(network_id, segmentation_id)
# NOTE(nick-ma-z): Obtain mappings of physical bridge and interfaces
physical_bridge = self.get_existing_bridge_name(physical_network)
physical_interface = self.interface_mappings.get(physical_network)
if not physical_bridge and not physical_interface:
LOG.error(_LE("No bridge or interface mappings"
" for physical network %s"),
physical_network)
return
if network_type == p_const.TYPE_FLAT:
return self.ensure_flat_bridge(network_id, physical_bridge,
physical_interface)
elif network_type == p_const.TYPE_VLAN:
return self.ensure_vlan_bridge(network_id, physical_bridge,
physical_interface,
segmentation_id)
else:
LOG.error(_LE("Unknown network_type %(network_type)s for network "
"%(network_id)s."), {network_type: network_type,
network_id: network_id})
def add_tap_interface(self, network_id, network_type, physical_network,
segmentation_id, tap_device_name, device_owner):
"""Add tap interface and handle interface missing exceptions."""
try:
return self._add_tap_interface(network_id, network_type,
physical_network, segmentation_id,
tap_device_name, device_owner)
except Exception:
with excutils.save_and_reraise_exception() as ctx:
if not ip_lib.device_exists(tap_device_name):
# the exception was likely a side effect of the tap device
# being removed during handling so we just return false
# like we would if it didn't exist to begin with.
ctx.reraise = False
return False
def _add_tap_interface(self, network_id, network_type, physical_network,
segmentation_id, tap_device_name, device_owner):
"""Add tap interface.
If a VIF has been plugged into a network, this function will
add the corresponding tap device to the relevant bridge.
"""
if not ip_lib.device_exists(tap_device_name):
LOG.debug("Tap device: %s does not exist on "
"this host, skipped", tap_device_name)
return False
bridge_name = self.get_existing_bridge_name(physical_network)
if not bridge_name:
bridge_name = self.get_bridge_name(network_id)
if network_type == p_const.TYPE_LOCAL:
self.ensure_local_bridge(network_id, bridge_name)
else:
phy_dev_name = self.ensure_physical_in_bridge(network_id,
network_type,
physical_network,
segmentation_id)
if not phy_dev_name:
return False
self.ensure_tap_mtu(tap_device_name, phy_dev_name)
# Avoid messing with plugging devices into a bridge that the agent
# does not own
if device_owner.startswith(constants.DEVICE_OWNER_PREFIXES):
# Check if device needs to be added to bridge
if not bridge_lib.BridgeDevice.get_interface_bridge(
tap_device_name):
data = {'tap_device_name': tap_device_name,
'bridge_name': bridge_name}
LOG.debug("Adding device %(tap_device_name)s to bridge "
"%(bridge_name)s", data)
if bridge_lib.BridgeDevice(bridge_name).addif(tap_device_name):
return False
else:
data = {'tap_device_name': tap_device_name,
'device_owner': device_owner,
'bridge_name': bridge_name}
LOG.debug("Skip adding device %(tap_device_name)s to "
"%(bridge_name)s. It is owned by %(device_owner)s and "
"thus added elsewhere.", data)
return True
def ensure_tap_mtu(self, tap_dev_name, phy_dev_name):
"""Ensure the MTU on the tap is the same as the physical device."""
phy_dev_mtu = ip_lib.IPDevice(phy_dev_name).link.mtu
ip_lib.IPDevice(tap_dev_name).link.set_mtu(phy_dev_mtu)
def plug_interface(self, network_id, network_segment, tap_name,
device_owner):
return self.add_tap_interface(network_id, network_segment.network_type,
network_segment.physical_network,
network_segment.segmentation_id,
tap_name, device_owner)
def delete_bridge(self, bridge_name):
bridge_device = bridge_lib.BridgeDevice(bridge_name)
if bridge_device.exists():
physical_interfaces = set(self.interface_mappings.values())
interfaces_on_bridge = bridge_device.get_interfaces()
for interface in interfaces_on_bridge:
self.remove_interface(bridge_name, interface)
if interface.startswith(VXLAN_INTERFACE_PREFIX):
self.delete_interface(interface)
else:
# Match the vlan/flat interface in the bridge.
# If the bridge has an IP, it mean that this IP was moved
# from the current interface, which also mean that this
# interface was not created by the agent.
ips, gateway = self.get_interface_details(bridge_name)
if ips:
self.update_interface_ip_details(interface,
bridge_name,
ips, gateway)
elif interface not in physical_interfaces:
self.delete_interface(interface)
try:
LOG.debug("Deleting bridge %s", bridge_name)
if bridge_device.link.set_down():
return
if bridge_device.delbr():
return
LOG.debug("Done deleting bridge %s", bridge_name)
except RuntimeError:
with excutils.save_and_reraise_exception() as ctxt:
if not bridge_device.exists():
# the exception was likely a side effect of the bridge
# being removed by nova during handling,
# so we just return
ctxt.reraise = False
LOG.debug("Cannot delete bridge %s; it does not exist",
bridge_name)
return
else:
LOG.debug("Cannot delete bridge %s; it does not exist",
bridge_name)
def remove_interface(self, bridge_name, interface_name):
bridge_device = bridge_lib.BridgeDevice(bridge_name)
if bridge_device.exists():
if not bridge_device.owns_interface(interface_name):
return True
LOG.debug("Removing device %(interface_name)s from bridge "
"%(bridge_name)s",
{'interface_name': interface_name,
'bridge_name': bridge_name})
try:
bridge_device.delif(interface_name)
LOG.debug("Done removing device %(interface_name)s from "
"bridge %(bridge_name)s",
{'interface_name': interface_name,
'bridge_name': bridge_name})
return True
except RuntimeError:
with excutils.save_and_reraise_exception() as ctxt:
if not bridge_device.owns_interface(interface_name):
# the exception was likely a side effect of the tap
# being deleted by some other agent during handling
ctxt.reraise = False
LOG.debug("Cannot remove %(interface_name)s from "
"%(bridge_name)s. It is not on the bridge.",
{'interface_name': interface_name,
'bridge_name': bridge_name})
return False
else:
LOG.debug("Cannot remove device %(interface_name)s bridge "
"%(bridge_name)s does not exist",
{'interface_name': interface_name,
'bridge_name': bridge_name})
return False
def delete_interface(self, interface):
device = self.ip.device(interface)
if device.exists():
LOG.debug("Deleting interface %s",
interface)
device.link.set_down()
device.link.delete()
LOG.debug("Done deleting interface %s", interface)
def get_devices_modified_timestamps(self, devices):
return {d: bridge_lib.get_interface_bridged_time(d) for d in devices}
def get_all_devices(self):
devices = set()
for device in bridge_lib.get_bridge_names():
if device.startswith(constants.TAP_DEVICE_PREFIX):
devices.add(device)
return devices
def vxlan_ucast_supported(self):
if not cfg.CONF.VXLAN.l2_population:
return False
if not ip_lib.iproute_arg_supported(
['bridge', 'fdb'], 'append'):
LOG.warning(_LW('Option "%(option)s" must be supported by command '
'"%(command)s" to enable %(mode)s mode'),
{'option': 'append',
'command': 'bridge fdb',
'mode': 'VXLAN UCAST'})
return False
test_iface = None
for seg_id in moves.range(1, p_const.MAX_VXLAN_VNI + 1):
if (ip_lib.device_exists(self.get_vxlan_device_name(seg_id))
or ip_lib.vxlan_in_use(seg_id)):
continue
test_iface = self.ensure_vxlan(seg_id)
break
else:
LOG.error(_LE('No valid Segmentation ID to perform UCAST test.'))
return False
try:
bridge_lib.FdbInterface.append(constants.FLOODING_ENTRY[0],
test_iface, '1.1.1.1',
log_fail_as_error=False)
return True
except RuntimeError:
return False
finally:
self.delete_interface(test_iface)
def vxlan_mcast_supported(self):
if not cfg.CONF.VXLAN.vxlan_group:
        LOG.warning(_LW('VXLAN multicast group(s) must be provided in '
'vxlan_group option to enable VXLAN MCAST mode'))
return False
if not ip_lib.iproute_arg_supported(
['ip', 'link', 'add', 'type', 'vxlan'],
'proxy'):
LOG.warning(_LW('Option "%(option)s" must be supported by command '
'"%(command)s" to enable %(mode)s mode'),
{'option': 'proxy',
'command': 'ip link add type vxlan',
'mode': 'VXLAN MCAST'})
return False
return True
def check_vxlan_support(self):
self.vxlan_mode = lconst.VXLAN_NONE
if self.vxlan_ucast_supported():
self.vxlan_mode = lconst.VXLAN_UCAST
elif self.vxlan_mcast_supported():
self.vxlan_mode = lconst.VXLAN_MCAST
else:
raise exceptions.VxlanNetworkUnsupported()
LOG.debug('Using %s VXLAN mode', self.vxlan_mode)
def fdb_ip_entry_exists(self, mac, ip, interface):
entries = utils.execute(['ip', 'neigh', 'show', 'to', ip,
'dev', interface],
run_as_root=True)
return mac in entries
def fdb_bridge_entry_exists(self, mac, interface, agent_ip=None):
entries = bridge_lib.FdbInterface.show(interface)
if not agent_ip:
return mac in entries
return (agent_ip in entries and mac in entries)
def add_fdb_ip_entry(self, mac, ip, interface):
if cfg.CONF.VXLAN.arp_responder:
ip_lib.IPDevice(interface).neigh.add(ip, mac)
def remove_fdb_ip_entry(self, mac, ip, interface):
if cfg.CONF.VXLAN.arp_responder:
ip_lib.IPDevice(interface).neigh.delete(ip, mac)
def add_fdb_entries(self, agent_ip, ports, interface):
for mac, ip in ports:
if mac != constants.FLOODING_ENTRY[0]:
self.add_fdb_ip_entry(mac, ip, interface)
bridge_lib.FdbInterface.replace(mac, interface, agent_ip,
check_exit_code=False)
elif self.vxlan_mode == lconst.VXLAN_UCAST:
if self.fdb_bridge_entry_exists(mac, interface):
bridge_lib.FdbInterface.append(mac, interface, agent_ip,
check_exit_code=False)
else:
bridge_lib.FdbInterface.add(mac, interface, agent_ip,
check_exit_code=False)
def remove_fdb_entries(self, agent_ip, ports, interface):
for mac, ip in ports:
if mac != constants.FLOODING_ENTRY[0]:
self.remove_fdb_ip_entry(mac, ip, interface)
bridge_lib.FdbInterface.delete(mac, interface, agent_ip,
check_exit_code=False)
elif self.vxlan_mode == lconst.VXLAN_UCAST:
bridge_lib.FdbInterface.delete(mac, interface, agent_ip,
check_exit_code=False)
def get_agent_id(self):
if self.bridge_mappings:
mac = utils.get_interface_mac(
list(self.bridge_mappings.values())[0])
else:
devices = ip_lib.IPWrapper().get_devices(True)
if devices:
mac = utils.get_interface_mac(devices[0].name)
else:
LOG.error(_LE("Unable to obtain MAC address for unique ID. "
"Agent terminated!"))
sys.exit(1)
return 'lb%s' % mac.replace(":", "")
def get_agent_configurations(self):
configurations = {'bridge_mappings': self.bridge_mappings,
'interface_mappings': self.interface_mappings
}
if self.vxlan_mode != lconst.VXLAN_NONE:
configurations['tunneling_ip'] = self.local_ip
configurations['tunnel_types'] = [p_const.TYPE_VXLAN]
configurations['l2_population'] = cfg.CONF.VXLAN.l2_population
return configurations
def get_rpc_callbacks(self, context, agent, sg_agent):
return LinuxBridgeRpcCallbacks(context, agent, sg_agent)
def get_rpc_consumers(self):
consumers = [[topics.PORT, topics.UPDATE],
[topics.NETWORK, topics.DELETE],
[topics.NETWORK, topics.UPDATE],
[topics.SECURITY_GROUP, topics.UPDATE]]
if cfg.CONF.VXLAN.l2_population:
consumers.append([topics.L2POPULATION, topics.UPDATE])
return consumers
def ensure_port_admin_state(self, tap_name, admin_state_up):
LOG.debug("Setting admin_state_up to %s for device %s",
admin_state_up, tap_name)
if admin_state_up:
ip_lib.IPDevice(tap_name).link.set_up()
else:
ip_lib.IPDevice(tap_name).link.set_down()
def setup_arp_spoofing_protection(self, device, device_details):
arp_protect.setup_arp_spoofing_protection(device, device_details)
def delete_arp_spoofing_protection(self, devices):
arp_protect.delete_arp_spoofing_protection(devices)
def delete_unreferenced_arp_protection(self, current_devices):
arp_protect.delete_unreferenced_arp_protection(current_devices)
def get_extension_driver_type(self):
return lconst.EXTENSION_DRIVER_TYPE
class LinuxBridgeRpcCallbacks(
sg_rpc.SecurityGroupAgentRpcCallbackMixin,
l2pop_rpc.L2populationRpcCallBackMixin,
amb.CommonAgentManagerRpcCallBackBase):
# Set RPC API version to 1.0 by default.
# history
# 1.1 Support Security Group RPC
# 1.3 Added param devices_to_update to security_groups_provider_updated
# 1.4 Added support for network_update
target = oslo_messaging.Target(version='1.4')
def network_delete(self, context, **kwargs):
LOG.debug("network_delete received")
network_id = kwargs.get('network_id')
# NOTE(nick-ma-z): Don't remove pre-existing user-defined bridges
if network_id in self.network_map:
phynet = self.network_map[network_id].physical_network
if phynet and phynet in self.agent.mgr.bridge_mappings:
LOG.info(_LI("Physical network %s is defined in "
"bridge_mappings and cannot be deleted."),
network_id)
return
else:
LOG.debug("Network %s is not on this agent.", network_id)
return
bridge_name = self.agent.mgr.get_bridge_name(network_id)
LOG.debug("Delete %s", bridge_name)
self.agent.mgr.delete_bridge(bridge_name)
def port_update(self, context, **kwargs):
port_id = kwargs['port']['id']
device_name = self.agent.mgr.get_tap_device_name(port_id)
# Put the device name in the updated_devices set.
# Do not store port details, as if they're used for processing
# notifications there is no guarantee the notifications are
# processed in the same order as the relevant API requests.
self.updated_devices.add(device_name)
LOG.debug("port_update RPC received for port: %s", port_id)
def network_update(self, context, **kwargs):
network_id = kwargs['network']['id']
LOG.debug("network_update message processed for network "
"%(network_id)s, with ports: %(ports)s",
{'network_id': network_id,
'ports': self.agent.network_ports[network_id]})
for port_data in self.agent.network_ports[network_id]:
self.updated_devices.add(port_data['device'])
def fdb_add(self, context, fdb_entries):
LOG.debug("fdb_add received")
for network_id, values in fdb_entries.items():
segment = self.network_map.get(network_id)
if not segment:
return
if segment.network_type != p_const.TYPE_VXLAN:
return
interface = self.agent.mgr.get_vxlan_device_name(
segment.segmentation_id)
agent_ports = values.get('ports')
for agent_ip, ports in agent_ports.items():
if agent_ip == self.agent.mgr.local_ip:
continue
self.agent.mgr.add_fdb_entries(agent_ip,
ports,
interface)
def fdb_remove(self, context, fdb_entries):
LOG.debug("fdb_remove received")
for network_id, values in fdb_entries.items():
segment = self.network_map.get(network_id)
if not segment:
return
if segment.network_type != p_const.TYPE_VXLAN:
return
interface = self.agent.mgr.get_vxlan_device_name(
segment.segmentation_id)
agent_ports = values.get('ports')
for agent_ip, ports in agent_ports.items():
if agent_ip == self.agent.mgr.local_ip:
continue
self.agent.mgr.remove_fdb_entries(agent_ip,
ports,
interface)
def _fdb_chg_ip(self, context, fdb_entries):
LOG.debug("update chg_ip received")
for network_id, agent_ports in fdb_entries.items():
segment = self.network_map.get(network_id)
if not segment:
return
if segment.network_type != p_const.TYPE_VXLAN:
return
interface = self.agent.mgr.get_vxlan_device_name(
segment.segmentation_id)
for agent_ip, state in agent_ports.items():
if agent_ip == self.agent.mgr.local_ip:
continue
after = state.get('after', [])
for mac, ip in after:
self.agent.mgr.add_fdb_ip_entry(mac, ip, interface)
before = state.get('before', [])
for mac, ip in before:
self.agent.mgr.remove_fdb_ip_entry(mac, ip, interface)
def fdb_update(self, context, fdb_entries):
LOG.debug("fdb_update received")
for action, values in fdb_entries.items():
method = '_fdb_' + action
if not hasattr(self, method):
raise NotImplementedError()
getattr(self, method)(context, values)
def main():
common_config.init(sys.argv[1:])
common_config.setup_logging()
try:
interface_mappings = n_utils.parse_mappings(
cfg.CONF.LINUX_BRIDGE.physical_interface_mappings)
except ValueError as e:
LOG.error(_LE("Parsing physical_interface_mappings failed: %s. "
"Agent terminated!"), e)
sys.exit(1)
LOG.info(_LI("Interface mappings: %s"), interface_mappings)
try:
bridge_mappings = n_utils.parse_mappings(
cfg.CONF.LINUX_BRIDGE.bridge_mappings)
except ValueError as e:
LOG.error(_LE("Parsing bridge_mappings failed: %s. "
"Agent terminated!"), e)
sys.exit(1)
LOG.info(_LI("Bridge mappings: %s"), bridge_mappings)
manager = LinuxBridgeManager(bridge_mappings, interface_mappings)
polling_interval = cfg.CONF.AGENT.polling_interval
quitting_rpc_timeout = cfg.CONF.AGENT.quitting_rpc_timeout
agent = ca.CommonAgentLoop(manager, polling_interval, quitting_rpc_timeout,
constants.AGENT_TYPE_LINUXBRIDGE,
LB_AGENT_BINARY)
setup_profiler.setup("neutron-linuxbridge-agent", cfg.CONF.host)
LOG.info(_LI("Agent initialized successfully, now running... "))
launcher = service.launch(cfg.CONF, agent)
launcher.wait()
| igor-toga/local-snat | neutron/plugins/ml2/drivers/linuxbridge/agent/linuxbridge_neutron_agent.py | Python | apache-2.0 | 41,399 | 0.000121 |
from unittest import TestCase
from plivo import plivoxml
from tests import PlivoXmlTestCase
class RecordElementTest(TestCase, PlivoXmlTestCase):
def test_set_methods(self):
expected_response = '<Response><Record action="https://foo.example.com" callbackMethod="GET" ' \
'callbackUrl="https://foo.example.com" fileFormat="wav" finishOnKey="#" ' \
'maxLength="10" method="GET" playBeep="false" recordSession="false" ' \
'redirect="false" startOnDialAnswer="false" timeout="100" transcriptionMethod="GET" ' \
'transcriptionType="hybrid" transcriptionUrl="https://foo.example.com"/>' \
'</Response>'
action = 'https://foo.example.com'
method = 'GET'
fileFormat = 'wav'
redirect = False
timeout = 100
maxLength = 10
recordSession = False
startOnDialAnswer = False
playBeep = False
finishOnKey = '#'
transcriptionType = 'hybrid'
transcriptionUrl = 'https://foo.example.com'
transcriptionMethod = 'GET'
callbackUrl = 'https://foo.example.com'
callbackMethod = 'GET'
element = plivoxml.ResponseElement()
response = element.add(
plivoxml.RecordElement().set_action(action).set_method(method)
.set_file_format(fileFormat).set_redirect(redirect).set_timeout(
timeout).set_max_length(maxLength).set_play_beep(playBeep)
.set_finish_on_key(finishOnKey).set_record_session(recordSession).
set_start_on_dial_answer(startOnDialAnswer).set_transcription_type(
transcriptionType).set_transcription_url(transcriptionUrl)
.set_transcription_method(transcriptionMethod).set_callback_url(
callbackUrl).set_callback_method(callbackMethod)).to_string(False)
self.assertXmlEqual(response, expected_response)
| plivo/plivo-python | tests/xml/test_recordElement.py | Python | mit | 1,989 | 0.003017 |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from migrate import ForeignKeyConstraint
from oslo_log import log as logging
from sqlalchemy import Integer, MetaData, String, Table
from cinder.i18n import _LI
LOG = logging.getLogger(__name__)
def upgrade(migrate_engine):
"""Convert volume_type_id to UUID."""
meta = MetaData()
meta.bind = migrate_engine
volumes = Table('volumes', meta, autoload=True)
volume_types = Table('volume_types', meta, autoload=True)
extra_specs = Table('volume_type_extra_specs', meta, autoload=True)
fkey_remove_list = [volumes.c.volume_type_id,
volume_types.c.id,
extra_specs.c.volume_type_id]
for column in fkey_remove_list:
fkeys = list(column.foreign_keys)
if fkeys:
fkey_name = fkeys[0].constraint.name
fkey = ForeignKeyConstraint(columns=[column],
refcolumns=[volume_types.c.id],
name=fkey_name)
try:
fkey.drop()
except Exception:
if migrate_engine.url.get_dialect().name.startswith('sqlite'):
pass
else:
raise
volumes.c.volume_type_id.alter(String(36))
volume_types.c.id.alter(String(36))
extra_specs.c.volume_type_id.alter(String(36))
vtype_list = list(volume_types.select().execute())
for t in vtype_list:
new_id = str(uuid.uuid4())
volumes.update().\
where(volumes.c.volume_type_id == t['id']).\
values(volume_type_id=new_id).execute()
extra_specs.update().\
where(extra_specs.c.volume_type_id == t['id']).\
values(volume_type_id=new_id).execute()
volume_types.update().\
where(volume_types.c.id == t['id']).\
values(id=new_id).execute()
for column in fkey_remove_list:
fkeys = list(column.foreign_keys)
if fkeys:
fkey_name = fkeys[0].constraint.name
fkey = ForeignKeyConstraint(columns=[column],
refcolumns=[volume_types.c.id],
name=fkey_name)
try:
fkey.create()
LOG.info(_LI('Created foreign key %s'), fkey_name)
except Exception:
if migrate_engine.url.get_dialect().name.startswith('sqlite'):
pass
else:
raise
def downgrade(migrate_engine):
"""Convert volume_type from UUID back to int."""
meta = MetaData()
meta.bind = migrate_engine
volumes = Table('volumes', meta, autoload=True)
volume_types = Table('volume_types', meta, autoload=True)
extra_specs = Table('volume_type_extra_specs', meta, autoload=True)
fkey_remove_list = [volumes.c.volume_type_id,
volume_types.c.id,
extra_specs.c.volume_type_id]
for column in fkey_remove_list:
fkeys = list(column.foreign_keys)
if fkeys:
fkey_name = fkeys[0].constraint.name
fkey = ForeignKeyConstraint(columns=[column],
refcolumns=[volume_types.c.id],
name=fkey_name)
try:
fkey.drop()
except Exception:
if migrate_engine.url.get_dialect().name.startswith('sqlite'):
pass
else:
raise
vtype_list = list(volume_types.select().execute())
new_id = 1
for t in vtype_list:
volumes.update().\
where(volumes.c.volume_type_id == t['id']).\
values(volume_type_id=new_id).execute()
extra_specs.update().\
where(extra_specs.c.volume_type_id == t['id']).\
values(volume_type_id=new_id).execute()
volume_types.update().\
where(volume_types.c.id == t['id']).\
values(id=new_id).execute()
new_id += 1
if migrate_engine.name == 'postgresql':
# NOTE(e0ne): PostgreSQL can't cast string to int automatically
table_column_pairs = [('volumes', 'volume_type_id'),
('volume_types', 'id'),
('volume_type_extra_specs', 'volume_type_id')]
sql = 'ALTER TABLE {0} ALTER COLUMN {1} ' + \
'TYPE INTEGER USING {1}::numeric'
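        # For example, sql.format('volumes', 'volume_type_id') yields:
        #   ALTER TABLE volumes ALTER COLUMN volume_type_id TYPE INTEGER USING volume_type_id::numeric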
for table, column in table_column_pairs:
migrate_engine.execute(sql.format(table, column))
else:
volumes.c.volume_type_id.alter(Integer)
volume_types.c.id.alter(Integer)
extra_specs.c.volume_type_id.alter(Integer)
for column in fkey_remove_list:
fkeys = list(column.foreign_keys)
if fkeys:
fkey_name = fkeys[0].constraint.name
fkey = ForeignKeyConstraint(columns=[column],
refcolumns=[volume_types.c.id],
name=fkey_name)
try:
fkey.create()
LOG.info(_LI('Created foreign key %s'), fkey_name)
except Exception:
if migrate_engine.url.get_dialect().name.startswith('sqlite'):
pass
else:
raise
| julianwang/cinder | cinder/db/sqlalchemy/migrate_repo/versions/004_volume_type_to_uuid.py | Python | apache-2.0 | 5,948 | 0 |
from twisted.trial import unittest
from tipsip.header import Headers
from tipsip.header import Header, AddressHeader, ViaHeader
class HeadersTest(unittest.TestCase):
def test_construct(self):
aq = self.assertEqual
at = self.assertTrue
h = Headers({'Subject': 'lunch'}, f='John', to='abacaba')
h['TO'] = 'Carol'
aq(h['Subject'], 'lunch')
aq(h['from'], 'John')
aq(h['t'], 'Carol')
r = str(h)
for line in r.split('\r\n'):
at(line in ['Subject: lunch', 'From: John', 'To: Carol'])
def test_manipulation(self):
aq = self.assertEqual
at = self.assertTrue
h = Headers()
h['f'] = "from header"
h['to'] = "to header"
at('FROM' in h)
at('To' in h)
to = h.pop('t')
aq(to, "to header")
at(h.has_key('From'))
class HeaderTest(unittest.TestCase):
def test_construct(self):
aq = self.assertEqual
at = self.assertTrue
h = Header('active', params={'expires': '3600'})
aq(str(h), 'active ;expires=3600')
class AddressHeaderTest(unittest.TestCase):
def test_parsing(self):
aq = self.assertEqual
v = AddressHeader.parse('<sips:bob@192.0.2.4>;expires=60')
aq(str(v.uri), 'sips:bob@192.0.2.4')
aq(v.params['expires'], '60')
aq(v.display_name, '')
v = AddressHeader.parse('<sip:server10.biloxi.com;lr>')
aq(str(v.uri), 'sip:server10.biloxi.com;lr')
aq(v.params, {})
aq(v.display_name, '')
v = AddressHeader.parse('The Operator <sip:operator@cs.columbia.edu>;tag=287447')
aq(str(v.uri), 'sip:operator@cs.columbia.edu')
aq(v.display_name, 'The Operator')
aq(v.params, {'tag': '287447'})
v = AddressHeader.parse('sip:echo@example.com')
aq(str(v.uri), 'sip:echo@example.com')
class ViaHeaderTest(unittest.TestCase):
def test_construct(self):
aq = self.assertEqual
v = ViaHeader(transport='UDP', host='192.168.0.1', port='5060', params={'received': '8.8.8.8'})
aq(str(v), 'SIP/2.0/UDP 192.168.0.1:5060 ;received=8.8.8.8')
def test_parsing(self):
aq = self.assertEqual
at = self.assertTrue
v = ViaHeader.parse('SIP/2.0/UDP 127.0.0.1:21375;branch=z9hG4bK-d8754z-2f9c4f090fc81b1f-1---d8754z-;rport')
aq(v.version, 'SIP/2.0')
aq(v.transport, 'UDP')
aq(v.host, '127.0.0.1')
aq(v.port, '21375')
aq(v.params['branch'], 'z9hG4bK-d8754z-2f9c4f090fc81b1f-1---d8754z-')
at('rport' in v.params)
v = ViaHeader.parse('SIP/2.0/UDP pc33.atlanta.com:5066;branch=z9hG4bK776asdhds')
aq(v.port, '5066')
def test_serialize(self):
aq = self.assertEqual
v = ViaHeader.parse('SIP/2.0/UDP 127.0.0.1:21375;rport')
aq(str(v), 'SIP/2.0/UDP 127.0.0.1:21375 ;rport')
| ivaxer/tipsip | tipsip/tests/test_header.py | Python | isc | 2,901 | 0.002413 |
# Reference: http://hetland.org/coding/python/levenshtein.py
def levenshtein(a,b):
"Calculates the Levenshtein distance between a and b."
n, m = len(a), len(b)
if n > m:
# Make sure n <= m, to use O(min(n,m)) space
a,b = b,a
n,m = m,n
current = range(n+1)
for i in range(1,m+1):
previous, current = current, [i]+[0]*n
for j in range(1,n+1):
add, delete = previous[j]+1, current[j-1]+1
change = previous[j-1]
if a[j-1] != b[i-1]:
change = change + 1
current[j] = min(add, delete, change)
return current[n]
if __name__=="__main__":
from sys import argv
print levenshtein(argv[1],argv[2])
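    # Quick sanity check (classic example): levenshtein("kitten", "sitting") == 3
    # (substitute k->s, substitute e->i, insert the trailing g).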
| singhj/locality-sensitive-hashing | utils/levenshtein.py | Python | mit | 745 | 0.016107 |
# -*-mode: python; py-indent-offset: 4; tab-width: 8; coding: iso-8859-1 -*-
# DLLM (non-linear Differentiated Lifting Line Model, open source software)
#
# Copyright (C) 2013-2015 Airbus Group SAS
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# https://github.com/matthieu-meaux/DLLM.git
#
# @author : Matthieu MEAUX
#
# Imports
from MDOTools.OC.operating_condition import OperatingCondition
from DLLM.DLLMGeom.wing_broken import Wing_Broken
from DLLM.DLLMKernel.DLLMSolver import DLLMSolver
OC=OperatingCondition('cond1', atmospheric_model='ISA')
OC.set_Mach(0.8)
OC.set_AoA(3.0)
OC.set_altitude(10000.)
OC.set_T0_deg(15.)
OC.set_P0(101325.)
OC.set_humidity(0.)
OC.compute_atmosphere()
wing_param=Wing_Broken('broken_wing',n_sect=20)
wing_param.import_BC_from_file('input_parameters.par')
wing_param.build_linear_airfoil(OC, AoA0=0.0, set_as_ref=True)
wing_param.build_airfoils_from_ref()
wing_param.update()
wing_param.plot()
DLLM = DLLMSolver('test', wing_param, OC, verbose=1)
DLLM.run_direct()
DLLM.run_post()
DLLM.run_adjoint()
| matthieu-meaux/DLLM | examples/broken_wing/test_broken_wing.py | Python | gpl-2.0 | 1,720 | 0.005814 |
"""Store various constants here"""
from enum import Enum
# Maximum file upload size (in bytes).
MAX_CONTENT_LENGTH = 1 * 1024 * 1024 * 1024
# Authentication/account creation constants
PWD_HASH_ALGORITHM = 'pbkdf2_sha256'
SALT_SIZE = 24
MIN_USERNAME_LENGTH = 2
MAX_USERNAME_LENGTH = 32
MIN_PASSWORD_LENGTH = 8
MAX_PASSWORD_LENGTH = 1024
HASH_ROUNDS = 100000
PWD_RESET_KEY_LENGTH = 32
# Length of time before recovery key expires, in minutes.
PWD_RESET_KEY_EXPIRATION = 1 * 24 * 60
CREATE_ACCOUNT_KEY_LENGTH = 32
class Gender(Enum):
"""Value of members.gender if member's gender is unknown"""
NO_GENDER = None
"""Value of members.gender if member is female"""
FEMALE = 0
"""Value of members.gender if member is male"""
MALE = 1
CONTACTS = {
'Administration': [{
'name': 'Kevin Gilmartin',
'role': 'Dean of Undergraduate Students',
'email': 'kmg@hss.caltech.edu'
}, {
'name': 'Lesley Nye',
'role': 'Dean of Undergraduate Students',
'email': 'lnye@caltech.edu'
}, {
'name': 'Kristin Weyman',
'role': 'Associate Dean of Undergraduate Students',
'email': 'kweyman@caltech.edu'
}, {
'name': 'Beth Larranaga',
'role': 'Office Manager',
'email': 'rosel@caltech.edu'
}, {
'name': 'Sara Loredo',
'role': 'Office Assistant',
'email': 'sara@caltech.edu'
}],
'Student Life': [{
'name':
'Tom Mannion',
'role':
'Senior Director, Student Activities and Programs',
'email':
'mannion@caltech.edu'
}, {
'name': 'Joe Shepherd',
'role': 'Vice President for Student Affairs',
'email': 'joseph.e.shepherd@caltech.edu'
}, {
'name':
'Felicia Hunt',
'role':
'Assistant Vice President for Student Affairs and Residential Experience',
'email':
'fhunt@caltech.edu'
}, {
'name': 'Maria Katsas',
'role': 'Director of Housing',
'email': 'maria@caltech.edu'
}, {
'name':
'Allie McIntosh',
'role':
'Community Educator and Deputy Title IX Coordinator',
'email':
'allie@caltech.edu'
}, {
'name': 'Jaime Reyes',
'role': 'Acting Director of Dining Services',
'email': 'reyes@caltech.edu'
}]
}
| ASCIT/donut-python | donut/constants.py | Python | mit | 2,372 | 0.000422 |
import os
import re
import subprocess
from utils import whereis_exe
class osx_voice():
def __init__(self, voice_line):
mess = voice_line.split(' ')
cleaned = [ part for part in mess if len(part)>0 ]
self.name = cleaned[0]
self.locality = cleaned[1]
self.desc = cleaned[2].replace('# ', '')
def __str__(self):
return self.name + ' ' + self.locality + ' ' + self.desc
def fetch_voices():
osx_voices = []
if whereis_exe("say"):
voices_raw = os.popen("say -v ?").read()
voice_lines = voices_raw.split('\n')
for line in voice_lines:
try:
osx_voices.append(osx_voice(line))
except IndexError:
pass
return osx_voices
def speak(text, voice, rate):
if whereis_exe("say"):
subprocess.call(["say", text, "-v", voice, "-r", rate])
| brousch/saythis2 | tts_engines/osx_say.py | Python | mit | 888 | 0.003378 |
# -*- coding: utf-8 -*-
# <standard imports>
from __future__ import division
import random
import otree.models
import otree.constants
from otree.db import models
from otree import widgets
from otree.common import Currency as c, currency_range, safe_json
from otree.constants import BaseConstants
from otree.models import BaseSubsession, BaseGroup, BasePlayer
# </standard imports>
from numpy import *
author = 'NIGG'
doc = """
Your app description
"""
class Constants():
name_in_url = 'network'
players_per_group = 5
num_rounds = 40
places = ['A', 'B', 'C', 'D', 'E']
# define more constants here
class Subsession(BaseSubsession):
def before_session_starts(self):
Group.network_histry = []
for i in range(Constants.num_rounds):
for group in self.get_groups():
Group.network_tp = group.network_type()
Group.network_group = group.network()
players = group.get_players()
random.shuffle(players)
group.set_players(players)
Group.network_histry.append([ Group.network_group[0], Group.network_group[1]])
class Group(BaseGroup):
# <built-in>
subsession = models.ForeignKey(Subsession)
# </built-in>
networktype = models.CharField()
def network_type(self):
network_type_group = ['Blue network', 'Red network', 'Brown network']
network_type = random.choice(network_type_group)
return network_type
def network(self):
network_type = str(self.network_tp)
if network_type == 'Blue network':
network = {'A':['B'], 'B':['A', 'C', 'E'], 'C':['B', 'D', 'E'], 'D':['C', 'E'], 'E':['B', 'C', 'D']}
elif network_type == 'Red network':
network = {'A':['B'], 'B':['A', 'C'], 'C':['B', 'D', 'E'], 'D':['C', 'E'], 'E':['C', 'D']}
else:
network = {'A':['B'], 'B':['A', 'C', 'E'], 'C':['B', 'D'], 'D':['C', 'E'], 'E':['B', 'D']}
network_group = [network_type, network]
return network_group
def set_payoffs(self):
network_group = self.network_histry[self.subsession.round_number-1]
self.network_type = network_group[0]
self.network = network_group[1]
player = [0 for i in range(Constants.players_per_group)]
active = [0 for i in range(Constants.players_per_group)]
i = 0
for role in Constants.places:
player[i] = self.get_player_by_role(role)
assign_nghb = self.network[role]
for other_role in assign_nghb:
if self.get_player_by_role(other_role).decision == 'ACTIVE':
active[i] += 1
player[i].payoff = float(100*active[i]/3)
player[i].num_active = active[i]
i += 1
class Player(otree.models.BasePlayer):
# <built-in>
group = models.ForeignKey(Group, null=True)
subsession = models.ForeignKey(Subsession)
# </built-in>
decision = models.CharField(
choices=['ACTIVE', 'INACTIVE'],
        doc="""The decision made by this player""",
widget=widgets.RadioSelect()
)
nghb = models.PositiveIntegerField()
num_active = models.PositiveIntegerField()
def other_player(self):
return self.get_others_in_group()[0]
def role(self):
return Constants.places[self.id_in_group - 1]
def num_nghb(self):
return len(Group.network_histry[self.subsession.round_number-1][1][self.role()])
|
NlGG/experiments
|
Experimental Games on Networks/otree_code/network/models.py
|
Python
|
mit
| 3,635 | 0.009134 |
# coding=utf-8
# Copyright 2017 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import sys
from interpreter_selection.python_3_selection_testing.main_py2 import main
def test_main():
print(sys.executable)
# Note that ascii exists as a built-in in Python 3 and
# does not exist in Python 2
ret = main()
  assert ret is None
|
UnrememberMe/pants
|
testprojects/src/python/interpreter_selection/python_3_selection_testing/test_py2.py
|
Python
|
apache-2.0
| 554 | 0.012635 |
# -*- coding: utf-8 -*-
import json
from vilya.libs import api_errors
from vilya.models.project import CodeDoubanProject
from vilya.views.api.utils import RestAPIUI, api_require_login, jsonize
from vilya.views.api.repos.product import ProductUI
from vilya.views.api.repos.summary import SummaryUI
from vilya.views.api.repos.intern import InternUI
from vilya.views.api.repos.default_branch import DefaultBranchUI
from vilya.views.api.repos.commits import CommitsUI
from vilya.views.api.repos.post_receive import PostReceiveUI
from vilya.views.api.repos.git2svn import GIT2SVNUI
from vilya.views.api.repos.svn2git import SVN2GITUI
from vilya.views.api.repos.pulls import PullsUI
from vilya.views.api.repos.issues import IssuesUI
from vilya.views.api.repos.contents import ContentsUI
from vilya.views.api.repos.push import PushUI
from vilya.views.api.repos.watchers import WatchersUI
_q_exports = []
def _q_lookup(request, name):
return RepositoryUI(name)
def _q_access(request):
request.response.set_content_type('application/json; charset=utf-8')
class RepositoryUI(object):
_q_exports = [
'lang_stats', 'forks', 'pulls', 'summary',
'committers', 'name', 'owner', 'product',
'intern_banned', 'default_branch', 'commits',
'post_receive', 'svn2git', 'git2svn', 'issues',
'contents', 'can_push', 'watchers'
]
def __init__(self, name):
self.name = name
self.repo = CodeDoubanProject.get_by_name(self.name)
def __call__(self, request):
return self._q_index(request)
@jsonize
def _q_index(self, request):
if not self.repo:
raise api_errors.NotFoundError("repo")
return {}
def _q_access(self, request):
self.method = request.method
def _q_lookup(self, request, part):
name = "%s/%s" % (self.name, part)
if not CodeDoubanProject.exists(name):
raise api_errors.NotFoundError("repo")
return RepositoryUI(name)
@jsonize
def lang_stats(self, request):
if not self.repo:
            raise api_errors.NotFoundError("repo")
if self.method == 'POST':
language = request.get_form_var('language', '')
languages = request.get_form_var('languages', '[]')
try:
languages = json.loads(languages)
except ValueError:
raise api_errors.NotJSONError
self.repo.language = language
self.repo.languages = languages
return {}
else:
return dict(language=self.repo.language,
languages=self.repo.languages)
@property
def forks(self):
return ForksUI(self.repo)
@property
def pulls(self):
return PullsUI(self.repo)
@property
def product(self):
return ProductUI(self.repo)
@property
def summary(self):
return SummaryUI(self.repo)
@property
def intern_banned(self):
return InternUI(self.repo)
@property
def can_push(self):
return PushUI(self.repo)
@property
def default_branch(self):
return DefaultBranchUI(self.repo)
@property
def commits(self):
return CommitsUI(self.repo)
@property
def post_receive(self):
return PostReceiveUI(self.repo)
@property
def svn2git(self):
return SVN2GITUI(self.repo)
@property
def git2svn(self):
return GIT2SVNUI(self.repo)
@property
def issues(self):
return IssuesUI(self.repo)
@property
def contents(self):
return ContentsUI(self.repo)
@property
def watchers(self):
return WatchersUI(self.repo)
class ForksUI(RestAPIUI):
_q_exports = []
_q_methods = ['get', 'post']
def __init__(self, repo):
self.repo = repo
@api_require_login
def post(self, request):
repo = self.repo
fork_repo = repo.new_fork(self.user.name)
if not fork_repo:
# FIXME: repository exists
return []
return fork_repo.as_dict()
def get(self, request):
fork_repos = self.repo.get_forked_projects()
return [project.get_info(without_commits=True)
for project in fork_repos]
|
xtao/code
|
vilya/views/api/repos/__init__.py
|
Python
|
bsd-3-clause
| 4,261 | 0 |
import datetime
import pytz
from django.conf import settings
from django.utils.cache import patch_vary_headers
from django.utils.translation import trans_real
from . import global_tz
from .forms import TimeZoneForm
from .utils import guess_tz_from_lang
def get_tz_from_request(request):
if hasattr(request, 'session'):
session_name = getattr(settings, 'TIMEZONE_SESSION_NAME', 'django_timezone')
tz = request.session.get(session_name, None)
if tz and isinstance(tz, datetime.tzinfo):
return tz
cookie_name = getattr(settings, 'TIMEZONE_COOKIE_NAME', 'TIMEZONE')
form = TimeZoneForm({'timezone': request.COOKIES.get(cookie_name, None)})
if form.is_valid():
return form.cleaned_data['timezone']
return None
class GlobalTimezoneMiddleware(object):
"""
This middleware guesses timezone from language and sets it in current
thread global cache.
"""
def get_tz(self, request):
raise NotImplementedError()
def process_request(self, request):
tz = self.get_tz(request)
if tz:
global_tz.activate(tz)
def process_response(self, request, response):
global_tz.deactivate()
return response
class TimezoneFromLangMiddleware(GlobalTimezoneMiddleware):
"""
    Not very smart middleware which guesses the timezone from the request's language setting.
"""
def get_tz(self, request):
tz = get_tz_from_request(request)
if tz:
return tz
accept_lang = request.META.get('HTTP_ACCEPT_LANGUAGE', '')
langs = trans_real.parse_accept_lang_header(accept_lang)
for lang, unused in langs:
tz = guess_tz_from_lang(lang)
if tz:
break
return tz
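# Wiring sketch (an assumption about the host project's settings; this module
# only defines the middleware classes). With the old-style hooks used above the
# class is listed in MIDDLEWARE_CLASSES, after the session middleware so that
# get_tz_from_request() can read request.session:
#   MIDDLEWARE_CLASSES = (
#       'django.contrib.sessions.middleware.SessionMiddleware',
#       'django_tz.middleware.TimezoneFromLangMiddleware',
#   )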
|
paluh/django-tz
|
django_tz/middleware.py
|
Python
|
bsd-2-clause
| 1,766 | 0.002831 |
""" Miscellaneous routines and constants.
"""
import logging, sys, traceback
import os.path
import astviewer.qtpy
import astviewer.qtpy._version as qtpy_version
from astviewer.version import DEBUGGING, PROGRAM_NAME, PROGRAM_VERSION, PYTHON_VERSION
from astviewer.qtpy import QtCore, QtWidgets
logger = logging.getLogger(__name__)
QT_API = astviewer.qtpy.API
QT_API_NAME = astviewer.qtpy.API_NAME
QTPY_VERSION = '.'.join(map(str, qtpy_version.version_info))
ABOUT_MESSAGE = ("{}: {}\n\nPython: {}\n{} (api={})"
.format(PROGRAM_NAME, PROGRAM_VERSION, PYTHON_VERSION, QT_API_NAME, QT_API))
def program_directory():
""" Returns the program directory where this program is installed
"""
return os.path.abspath(os.path.dirname(__file__))
def icons_directory():
""" Returns the program directory where this program is installed
"""
return os.path.join(program_directory(), 'icons')
###########
# Logging #
###########
def logging_basic_config(level):
""" Setup basic config logging. Useful for debugging to quickly setup a useful logger"""
fmt = '%(filename)25s:%(lineno)-4d : %(levelname)-7s: %(message)s'
logging.basicConfig(level=level, format=fmt)
# pylint: disable=redefined-outer-name
def log_dictionary(dictionary, msg='', logger=None, level='debug', item_prefix=' '):
""" Writes a log message with key and value for each item in the dictionary.
:param dictionary: the dictionary to be logged
:type dictionary: dict
    :param msg: An optional message that is logged before the contents
    :type msg: string
:param logger: A logging.Logger object to log to. If not set, the 'main' logger is used.
:type logger: logging.Logger or a string
:param level: log level. String or int as described in the logging module documentation.
Default: 'debug'.
:type level: string or int
:param item_prefix: String that will be prefixed to each line. Default: two spaces.
:type item_prefix: string
"""
level_nr = logging.getLevelName(level.upper())
if logger is None:
logger = logging.getLogger('main')
if msg :
logger.log(level_nr, "Logging dictionary: {}".format(msg))
if not dictionary:
logger.log(level_nr,"{}<empty dictionary>".format(item_prefix))
return
max_key_len = max([len(k) for k in dictionary.keys()])
for key, value in sorted(dictionary.items()):
logger.log(level_nr, "{0}{1:<{2}s} = {3}".format(item_prefix, key, max_key_len, value))
# pylint: enable=redefined-outer-name
#################
# Type checking #
#################
def class_name(obj):
""" Returns the class name of an object"""
return obj.__class__.__name__
def check_class(obj, target_class, allow_none = False):
""" Checks that the obj is a (sub)type of target_class.
Raises a TypeError if this is not the case.
"""
if not isinstance(obj, target_class):
if not (allow_none and obj is None):
raise TypeError("obj must be a of type {}, got: {}"
.format(target_class, type(obj)))
############
# Qt stuff #
############
class ResizeDetailsMessageBox(QtWidgets.QMessageBox):
""" Message box that enlarges when the 'Show Details' button is clicked.
    Can be used to better view stack traces. I couldn't find how to make a resizeable message
    box but this is the next best thing.
Taken from:
http://stackoverflow.com/questions/2655354/how-to-allow-resizing-of-qmessagebox-in-pyqt4
"""
def __init__(self, detailsBoxWidth=700, detailBoxHeight=300, *args, **kwargs):
""" Constructor
        :param detailsBoxWidth: The width of the details text box (default=700)
        :param detailBoxHeight: The height of the details text box (default=300)
"""
super(ResizeDetailsMessageBox, self).__init__(*args, **kwargs)
self.detailsBoxWidth = detailsBoxWidth
self.detailBoxHeight = detailBoxHeight
def resizeEvent(self, event):
""" Resizes the details box if present (i.e. when 'Show Details' button was clicked)
"""
result = super(ResizeDetailsMessageBox, self).resizeEvent(event)
details_box = self.findChild(QtWidgets.QTextEdit)
if details_box is not None:
#details_box.setFixedSize(details_box.sizeHint())
details_box.setFixedSize(QtCore.QSize(self.detailsBoxWidth, self.detailBoxHeight))
return result
def handleException(exc_type, exc_value, exc_traceback):
""" Causes the application to quit in case of an unhandled exception (as God intended)
Shows an error dialog before quitting when not in debugging mode.
"""
logger.critical("Bug: uncaught {}".format(exc_type.__name__),
exc_info=(exc_type, exc_value, exc_traceback))
if DEBUGGING:
sys.exit(1)
else:
# Constructing a QApplication in case this hasn't been done yet.
if not QtWidgets.qApp:
            _app = QtWidgets.QApplication([])
#msgBox = QtWidgets.QMessageBox()
msgBox = ResizeDetailsMessageBox()
msgBox.setText("Bug: uncaught {}".format(exc_type.__name__))
msgBox.setInformativeText(str(exc_value))
lst = traceback.format_exception(exc_type, exc_value, exc_traceback)
msgBox.setDetailedText("".join(lst))
msgBox.setIcon(QtWidgets.QMessageBox.Warning)
msgBox.exec_()
sys.exit(1)
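# Wiring sketch (an assumption about the caller, not part of this module's
# public API): install the handler as the global excepthook so uncaught
# exceptions show the resizable dialog defined above.
#   import sys
#   sys.excepthook = handleException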
def get_qsettings():
""" Creates a QSettings object for this application.
We do not set the application and organization in the QApplication object to
prevent side-effects in case the AstViewer is imported.
"""
return QtCore.QSettings("titusjan.nl", PROGRAM_NAME)
def get_qapplication_instance():
""" Returns the QApplication instance. Creates one if it doesn't exist.
"""
app = QtWidgets.QApplication.instance()
if app is None:
app = QtWidgets.QApplication(sys.argv)
check_class(app, QtWidgets.QApplication)
return app
|
titusjan/astviewer
|
astviewer/misc.py
|
Python
|
mit
| 6,180 | 0.004854 |
from base_uri import URI
class HomeURI(URI):
path = '/'
|
LINKIWI/modern-paste
|
app/uri/main.py
|
Python
|
mit
| 62 | 0 |
# -*- coding: utf-8 -*-
from .hooks import post_init_hook
from . import models
from . import tests
|
acsone/server-tools
|
base_name_search_improved/__init__.py
|
Python
|
agpl-3.0
| 99 | 0 |
r"""
Solve Poisson equation in 1D with homogeneous Dirichlet bcs on the domain [0, inf)
\nabla^2 u = f,
The equation to solve for a Laguerre basis is
(\nabla u, \nabla v) = -(f, v)
"""
import os
import sys
from sympy import symbols, sin, exp, lambdify
import numpy as np
from shenfun import inner, grad, TestFunction, TrialFunction, \
Array, Function, FunctionSpace, dx
assert len(sys.argv) == 2, 'Call with one command-line argument'
assert sys.argv[-1].isdigit(), 'The argument must be a positive integer'
# Use sympy to compute a rhs, given an analytical solution
x = symbols("x", real=True)
ue = sin(2*x)*exp(-x)
fe = ue.diff(x, 2)
# Size of discretization
N = int(sys.argv[-1])
SD = FunctionSpace(N, 'Laguerre', bc=(0, 0))
u = TrialFunction(SD)
v = TestFunction(SD)
# Get f on quad points
fj = Array(SD, buffer=fe)
# Compute right hand side of Poisson equation
f_hat = Function(SD)
f_hat = inner(v, -fj, output_array=f_hat)
# Get left hand side of Poisson equation
#A = inner(v, -div(grad(u)))
A = inner(grad(v), grad(u))
f_hat = A.solve(f_hat)
uj = f_hat.backward()
uh = uj.forward()
# Compare with analytical solution
ua = Array(SD, buffer=ue)
print("Error=%2.16e" %(np.linalg.norm(uj-ua)))
print("Error=%2.16e" %(np.sqrt(dx(uj-ua)**2)))
assert np.allclose(uj, ua, atol=1e-5)
point = np.array([0.1, 0.2])
p = SD.eval(point, f_hat)
assert np.allclose(p, lambdify(x, ue)(point), atol=1e-5)
if 'pytest' not in os.environ:
import matplotlib.pyplot as plt
xx = np.linspace(0, 16, 100)
plt.plot(xx, lambdify(x, ue)(xx), 'r', xx, uh.eval(xx), 'bo', markersize=2)
plt.show()
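# Invocation sketch (N=100 is an arbitrary choice; the script requires exactly
# one command-line argument, the number of Laguerre modes):
#   python laguerre_dirichlet_poisson1D.py 100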
|
spectralDNS/shenfun
|
demo/laguerre_dirichlet_poisson1D.py
|
Python
|
bsd-2-clause
| 1,587 | 0.00252 |
from __future__ import absolute_import, division, print_function
from itertools import chain
from dynd import nd
import datashape
from datashape.internal_utils import IndexCallable
from datashape import discover
from functools import partial
from ..dispatch import dispatch
from blaze.expr import Projection, Field
from blaze.expr import Expr, UnaryOp
from .utils import validate, coerce, coerce_to_ordered, ordered_index
from ..utils import partition_all
__all__ = ['DataDescriptor', 'discover', 'compute_up']
def isdimension(ds):
return isinstance(ds, (datashape.Var, datashape.Fixed))
class DataDescriptor(object):
"""
Standard interface to data storage
Data descriptors provide read and write access to common data storage
systems like csv, json, HDF5, and SQL.
They provide Pythonic iteration over these resources as well as efficient
chunked access with DyND arrays.
Data Descriptors implement the following methods:
__iter__ - iterate over storage, getting results as Python objects
chunks - iterate over storage, getting results as DyND arrays
extend - insert new data into storage (if possible.)
Consumes a sequence of core Python objects
extend_chunks - insert new data into storage (if possible.)
Consumes a sequence of DyND arrays
as_dynd - load entire dataset into memory as a DyND array
"""
def extend(self, rows):
""" Extend data with many rows
"""
rows = iter(rows)
row = next(rows)
rows = chain([row], rows)
if not validate(self.schema, row):
raise ValueError('Invalid data:\n\t %s \nfor dshape \n\t%s' %
(str(row), self.schema))
if isinstance(row, dict):
rows = map(partial(coerce_to_ordered, self.schema), rows)
self._extend(rows)
def extend_chunks(self, chunks):
def dtype_of(chunk):
return str(len(chunk) * self.schema)
self._extend_chunks((nd.array(chunk, type=dtype_of(chunk))
for chunk in chunks))
def _extend_chunks(self, chunks):
self.extend((row for chunk in chunks
for row in nd.as_py(chunk, tuple=True)))
def chunks(self, **kwargs):
def dshape(chunk):
return str(len(chunk) * self.dshape.subshape[0])
for chunk in self._chunks(**kwargs):
yield nd.array(chunk, type=dshape(chunk))
def _chunks(self, blen=100):
return partition_all(blen, iter(self))
def as_dynd(self):
return self.dynd[:]
def as_py(self):
if isdimension(self.dshape[0]):
return tuple(self)
else:
return tuple(nd.as_py(self.as_dynd(), tuple=True))
def __array__(self):
return nd.as_numpy(self.as_dynd())
def __getitem__(self, key):
return self.get_py(key)
@property
def dynd(self):
return IndexCallable(self.get_dynd)
def get_py(self, key):
key = ordered_index(key, self.dshape)
subshape = self.dshape._subshape(key)
if hasattr(self, '_get_py'):
result = self._get_py(key)
elif hasattr(self, '_get_dynd'):
result = self._get_dynd(key)
else:
raise AttributeError("Data Descriptor defines neither "
"_get_py nor _get_dynd. Can not index")
return coerce(subshape, result)
def get_dynd(self, key):
key = ordered_index(key, self.dshape)
subshape = self.dshape._subshape(key)
if hasattr(self, '_get_dynd'):
result = self._get_dynd(key)
elif hasattr(self, '_get_py'):
result = nd.array(self._get_py(key), type=str(subshape))
else:
raise AttributeError("Data Descriptor defines neither "
"_get_py nor _get_dynd. Can not index")
# Currently nd.array(result, type=discover(result)) is oddly slower
# than just nd.array(result) , even though no type coercion should be
# necessary. As a short-term solution we check if this is the case and
# short-circuit the `type=` call
# This check can be deleted once these two run at similar speeds
ds_result = discover(result)
        if (subshape == ds_result or
                (isdimension(subshape[0]) and isdimension(ds_result[0]) and
                 subshape.subshape[0] == ds_result.subshape[0])):
return nd.array(result)
else:
return nd.array(result, type=str(subshape))
def __iter__(self):
if not isdimension(self.dshape[0]):
raise TypeError("Data Descriptor not iterable, has dshape %s" %
self.dshape)
schema = self.dshape.subshape[0]
try:
seq = self._iter()
except NotImplementedError:
seq = iter(nd.as_py(self.as_dynd(), tuple=True))
if not isdimension(self.dshape[0]):
yield coerce(self.dshape, nd.as_py(self.as_dynd(), tuple=True))
else:
for block in partition_all(100, seq):
x = coerce(len(block) * schema, block)
for row in x:
yield row
def _iter(self):
raise NotImplementedError()
_dshape = None
@property
def dshape(self):
return datashape.dshape(self._dshape or datashape.Var() * self.schema)
_schema = None
@property
def schema(self):
if self._schema:
return datashape.dshape(self._schema)
if isdimension(self.dshape[0]):
return self.dshape.subarray(1)
raise TypeError('Datashape is not indexable to schema\n%s' %
self.dshape)
@property
def columns(self):
rec = self.schema[0]
if isinstance(rec, datashape.Record):
return rec.names
else:
raise TypeError('Columns attribute only valid on tabular '
'datashapes of records, got %s' % self.dshape)
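# Minimal subclass sketch (illustrative assumption, not a class shipped by
# blaze): a descriptor only has to supply a schema plus _iter/_extend for the
# streaming interface above to work; chunked and indexed access fall back to
# the defaults defined on DataDescriptor.
#   class ListDescriptor(DataDescriptor):
#       def __init__(self, data, schema):
#           self.data = list(data)
#           self._schema = schema
#       def _iter(self):
#           return iter(self.data)
#       def _extend(self, rows):
#           self.data.extend(rows)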
@dispatch((Expr, UnaryOp), DataDescriptor)
def compute_up(t, ddesc, **kwargs):
return compute_up(t, iter(ddesc)) # use Python streaming by default
@dispatch(Projection, DataDescriptor)
def compute_up(t, ddesc, **kwargs):
return ddesc[:, t.fields]
@dispatch(Field, DataDescriptor)
def compute_up(t, ddesc, **kwargs):
return ddesc[:, t.fields[0]]
@dispatch(DataDescriptor)
def discover(dd):
return dd.dshape
|
vitan/blaze
|
blaze/data/core.py
|
Python
|
bsd-3-clause
| 6,508 | 0.000154 |
"""An AdaNet evaluator implementation in Tensorflow using a single graph.
Copyright 2018 The AdaNet Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
https://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import math
from absl import logging
from adanet import tf_compat
import numpy as np
import tensorflow.compat.v2 as tf
# TODO: Remove uses of Evaluator once AdaNet Ranker is implemented.
class Evaluator(object):
"""Evaluates candidate ensemble performance."""
class Objective(object):
"""The Evaluator objective for the metric being optimized.
Two objectives are currently supported:
- MINIMIZE: Lower is better for the metric being optimized.
- MAXIMIZE: Higher is better for the metric being optimized.
"""
MINIMIZE = "minimize"
MAXIMIZE = "maximize"
def __init__(self,
input_fn,
metric_name="adanet_loss",
objective=Objective.MINIMIZE,
steps=None):
"""Initializes a new Evaluator instance.
Args:
input_fn: Input function returning a tuple of: features - Dictionary of
string feature name to `Tensor`. labels - `Tensor` of labels.
metric_name: The name of the evaluation metrics to use when choosing the
best ensemble. Must refer to a valid evaluation metric.
objective: Either `Objective.MINIMIZE` or `Objective.MAXIMIZE`.
steps: Number of steps for which to evaluate the ensembles. If an
`OutOfRangeError` occurs, evaluation stops. If set to None, will iterate
the dataset until all inputs are exhausted.
Returns:
An :class:`adanet.Evaluator` instance.
"""
self._input_fn = input_fn
self._steps = steps
self._metric_name = metric_name
self._objective = objective
if objective == self.Objective.MINIMIZE:
self._objective_fn = np.nanargmin
elif objective == self.Objective.MAXIMIZE:
self._objective_fn = np.nanargmax
else:
raise ValueError(
"Evaluator objective must be one of MINIMIZE or MAXIMIZE.")
@property
def input_fn(self):
"""Return the input_fn."""
return self._input_fn
@property
def steps(self):
"""Return the number of evaluation steps."""
return self._steps
@property
def metric_name(self):
"""Returns the name of the metric being optimized."""
return self._metric_name
@property
def objective_fn(self):
"""Returns a fn which selects the best metric based on the objective."""
return self._objective_fn
def evaluate(self, sess, ensemble_metrics):
"""Evaluates the given AdaNet objectives on the data from `input_fn`.
The candidates are fed the same batches of features and labels as
provided by `input_fn`, and their losses are computed and summed over
`steps` batches.
Args:
sess: `Session` instance with most recent variable values loaded.
ensemble_metrics: A list dictionaries of `tf.metrics` for each candidate
ensemble.
Returns:
List of evaluated metrics.
"""
evals_completed = 0
if self.steps is None:
logging_frequency = 1000
elif self.steps < 10:
logging_frequency = 1
else:
logging_frequency = math.floor(self.steps / 10.)
objective_metrics = [em[self._metric_name] for em in ensemble_metrics]
sess.run(tf_compat.v1.local_variables_initializer())
while True:
if self.steps is not None and evals_completed == self.steps:
break
try:
evals_completed += 1
if (evals_completed % logging_frequency == 0 or
self.steps == evals_completed):
logging.info("Ensemble evaluation [%d/%s]", evals_completed,
self.steps or "??")
sess.run(objective_metrics)
except tf.errors.OutOfRangeError:
logging.info("Encountered end of input after %d evaluations",
evals_completed)
break
# Evaluating the first element is idempotent for metric tuples.
return sess.run([metric[0] for metric in objective_metrics])
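# Construction sketch (the input_fn shape follows the __init__ docstring above;
# passing the instance to adanet.Estimator's `evaluator` argument is an
# assumption about the usual caller, not something this module enforces):
#   evaluator = Evaluator(input_fn=my_eval_input_fn, steps=100,
#                         metric_name="adanet_loss",
#                         objective=Evaluator.Objective.MINIMIZE)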
|
tensorflow/adanet
|
adanet/core/evaluator.py
|
Python
|
apache-2.0
| 4,624 | 0.00519 |
from django.db import models
from django.contrib.auth.models import User
from datetime import datetime
from django.utils.timezone import now
from shelf.models import BookItem
# Create your models here.
class Rental(models.Model):
who = models.ForeignKey(User)
what = models.ForeignKey(BookItem)
    when = models.DateTimeField(default=datetime.now)
    returned = models.DateTimeField(null=True, blank=True)
    def __str__(self):
        return "{User},{Book},{rent},{ret}".format(
            User=self.who, Book=self.what, rent=self.when, ret=self.returned)
|
KredekPth/Kurs_django
|
rental/models.py
|
Python
|
mit
| 564 | 0.031915 |
#Defaults - overridable via. pypayd.conf or command-line arguments
DEFAULT_KEYPATH = '0/0/1'
DEFAULT_TICKER = 'dummy'
DEFAULT_CURRENCY = 'USD'
DEFAULT_WALLET_FILE = 'wallet.txt'
DEFAULT_WALLET_PASSWORD = "foobar"
DEFAULT_MNEMONIC_TYPE = "electrumseed"
DB = None
DEFAULT_DB = "pypayd.db"
DEFAULT_TESTNET_DB = "pypayd_testnet.db"
#Pypay server settings
RPC_HOST ='127.0.0.1'
RPC_PORT = 3080
VERSION = 0.1
AUTH_REQUIRED = True
#Blockchain
TESTNET = False
BLOCKCHAIN_CONNECT = 'http://localhost:3001' #'https://test-insight.bitpay.com' #None
LOCAL_BLOCKCHAIN = False
BLOCKCHAIN_SERVICE = 'insight'
#generate a new address for every order if gen_new == True
GEN_NEW = False
#delay between requests to the blockchain service for new transactions
POLLING_DELAY = 30
#maximum time a leaf (address) is used to process orders before a new one is generated
MAX_LEAF_LIFE = 604800
#maximum number of transactions per address before a new one is generated
MAX_LEAF_TX = 9999
#maximum amount of time an order received for generated amount will be considered valid
ORDER_LIFE = 86400
#time from last order creation, after which an adress is considered stale and no longer polled
LEAF_POLL_LIFE = ORDER_LIFE*2
#log file settings
LOG = None
MAX_LOG_SIZE = 16*1024*1024
UPDATE_ON_CONFIRM = 6 #can also take a list, such as [6, 20, 100]
DATA_DIR = ""
DB = None
KEYPATH = None
LAST_USED_KEYPATH = None
RPC_USER = 'user'
RPC_PASSWORD= 'password'
# INTERNAL
STATE = {"last_order_updates": {"order_id":None, "timestamp": None}}
# UNUSED
ZMQ_BIND = None
ZMQ_FEED = False
SOCKETIO_BIND = None
SOCKETIO_FEED = False
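# Override sketch at the Python level (illustrative; as noted at the top of the
# module the normal path is pypayd.conf or command-line arguments, whose parser
# lives elsewhere). The module path `pypayd.config` is inferred from the file
# location and is an assumption:
#   from pypayd import config
#   config.TESTNET = True
#   config.DB = config.DEFAULT_TESTNET_DB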
|
pik/pypayd
|
pypayd/config.py
|
Python
|
mit
| 1,595 | 0.013166 |
#
# Unit Tests for the colors.py functions
#
# Rajul Srivastava (rajul09@gmail.com)
#
import unittest
import logging
import numpy as np
import ginga.colors
class TestError(Exception):
pass
class TestColors(unittest.TestCase):
def setUp(self):
self.logger = logging.getLogger("TestColors")
self.color_list_length = len(ginga.colors.color_dict)
# Tests for the lookup_color() funtion
def test_lookup_color_white_tuple(self):
expected = (1.0, 1.0, 1.0)
actual = ginga.colors.lookup_color("white", "tuple")
assert np.allclose(expected, actual)
def test_lookup_color_black_tuple(self):
expected = (0.0, 0.0, 0.0)
actual = ginga.colors.lookup_color("black", "tuple")
assert np.allclose(expected, actual)
def test_lookup_color_white_hash(self):
expected = "#ffffff"
actual = ginga.colors.lookup_color("white", "hash")
assert expected == actual
def test_lookup_color_black_black(self):
expected = "#000000"
actual = ginga.colors.lookup_color("black", "hash")
assert expected == actual
def test_lookup_color_yellow_tuple(self):
expected = (1.0, 1.0, 0.0)
actual = ginga.colors.lookup_color("yellow")
assert np.allclose(expected, actual)
def test_lookup_color_unknown(self):
self.assertRaises(KeyError, ginga.colors.lookup_color, "unknown_color")
def test_lookup_color_raise_exception_unknown_key(self):
self.assertRaises(KeyError, ginga.colors.lookup_color, "unknown_key")
def test_lookup_color_raise_exception_unknown_format(self):
self.assertRaises(ValueError, ginga.colors.lookup_color, "white", "unknown_format")
# Tests for the get_colors() function
def test_get_colors_len(self):
expected = self.color_list_length
actual = len(ginga.colors.get_colors())
assert expected == actual
def test_add_and_get_colors_len(self):
ginga.colors.add_color("test_color_white", (0.0, 0.0, 0.0))
expected = self.color_list_length + 1
actual = len(ginga.colors.get_colors())
assert expected == actual
ginga.colors.remove_color("test_color_white")
# Tests for the add_color() and remove_color() function
def test_add_and_remove_color_len(self):
ginga.colors.add_color("test_color_white", (0.0, 0.0, 0.0))
expected = self.color_list_length + 1
actual = len(ginga.colors.color_dict)
assert expected == actual
expected = len(ginga.colors.color_dict)
actual = len(ginga.colors.color_list)
assert expected == actual
ginga.colors.remove_color("test_color_white")
expected = self.color_list_length
actual = len(ginga.colors.color_dict)
assert expected == actual
expected = len(ginga.colors.color_dict)
actual = len(ginga.colors.color_list)
assert expected == actual
def test_add_and_remove_color_rbg(self):
ginga.colors.add_color("test_color_white", (0.0, 0.0, 0.0))
expected = (0.0, 0.0, 0.0)
actual = ginga.colors.lookup_color("test_color_white")
assert np.allclose(expected, actual)
ginga.colors.remove_color("test_color_white")
self.assertRaises(KeyError, ginga.colors.remove_color, "test_color_white")
def test_add_color_wrong_rbg_type(self):
self.assertRaises(TypeError, ginga.colors.add_color, "white", "string_wrong_format")
def test_add_color_wrong_rbg_values(self):
self.assertRaises(ValueError, ginga.colors.add_color, "test_color", (-1.0, 0.0, 0.0))
def test_add_color_wrong_tuple_length(self):
self.assertRaises(ValueError, ginga.colors.add_color, "test_color", (0.0, 0.0))
def test_remove_color_unknown(self):
self.assertRaises(KeyError, ginga.colors.remove_color, "unknown_color")
# Tests for recalc_color_list() function
def test_recalc_color_list(self):
ginga.colors.color_dict["test_color_white"] = (0.0, 0.0, 0.0)
expected = len(ginga.colors.color_dict) - 1
actual = len(ginga.colors.color_list)
assert expected == actual
ginga.colors.recalc_color_list()
expected = len(ginga.colors.color_dict)
actual = len(ginga.colors.color_list)
assert expected == actual
del ginga.colors.color_dict["test_color_white"]
expected = len(ginga.colors.color_dict) + 1
actual = len(ginga.colors.color_list)
assert expected == actual
ginga.colors.recalc_color_list()
expected = len(ginga.colors.color_dict)
actual = len(ginga.colors.color_list)
assert expected == actual
# Tests for scan_rgbtxt_buf() function
def test_scan_rgbtxt_buf(self):
test_rgb_lines = '''
255 255 255 white
0 0 0 black
255 0 0 red
0 255 0 green
0 0 255 blue
'''
result = ginga.colors.scan_rgbtxt_buf(test_rgb_lines)
assert isinstance(result, dict)
expected = 5
actual = len(result)
assert expected == actual
expected = (1.0, 1.0, 1.0)
actual = result["white"]
assert np.allclose(expected, actual)
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main()
#END
|
rupak0577/ginga
|
ginga/tests/test_colors.py
|
Python
|
bsd-3-clause
| 4,797 | 0.0271 |
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Runs a ResNet model on the ImageNet dataset."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from absl import app
from absl import flags
from absl import logging
import tensorflow as tf
from tf2_common.utils.flags import core as flags_core
from tf2_common.utils.logs import logger
from tf2_common.utils.misc import distribution_utils
from tf2_common.utils.misc import keras_utils
from tf2_common.utils.misc import model_helpers
from tf2_common.utils.mlp_log import mlp_log
from tf2_resnet import common
from tf2_resnet import imagenet_preprocessing
from tf2_resnet import resnet_model
import tensorflow_model_optimization as tfmot
def run(flags_obj):
"""Run ResNet ImageNet training and eval loop using native Keras APIs.
Args:
flags_obj: An object containing parsed flag values.
Raises:
ValueError: If fp16 is passed as it is not currently supported.
NotImplementedError: If some features are not currently supported.
Returns:
Dictionary of training and eval stats.
"""
mlp_log.mlperf_print('init_start', None)
common.print_flags(flags_obj)
keras_utils.set_session_config(
enable_eager=flags_obj.enable_eager,
enable_xla=flags_obj.enable_xla)
# Execute flag override logic for better model performance
if flags_obj.tf_gpu_thread_mode:
keras_utils.set_gpu_thread_mode_and_count(
per_gpu_thread_count=flags_obj.per_gpu_thread_count,
gpu_thread_mode=flags_obj.tf_gpu_thread_mode,
num_gpus=flags_obj.num_gpus,
datasets_num_private_threads=flags_obj.datasets_num_private_threads)
common.set_cudnn_batchnorm_mode()
dtype = flags_core.get_tf_dtype(flags_obj)
if dtype == tf.float16:
loss_scale = flags_core.get_loss_scale(flags_obj, default_for_fp16=128)
policy = tf.compat.v2.keras.mixed_precision.experimental.Policy(
'mixed_float16', loss_scale=loss_scale)
tf.compat.v2.keras.mixed_precision.experimental.set_policy(policy)
if not keras_utils.is_v2_0():
raise ValueError('--dtype=fp16 is not supported in TensorFlow 1.')
elif dtype == tf.bfloat16:
policy = tf.compat.v2.keras.mixed_precision.experimental.Policy(
'mixed_bfloat16')
tf.compat.v2.keras.mixed_precision.experimental.set_policy(policy)
data_format = flags_obj.data_format
if data_format is None:
data_format = ('channels_first'
if tf.test.is_built_with_cuda() else 'channels_last')
tf.keras.backend.set_image_data_format(data_format)
# Configures cluster spec for distribution strategy.
_ = distribution_utils.configure_cluster(flags_obj.worker_hosts,
flags_obj.task_index)
strategy = distribution_utils.get_distribution_strategy(
distribution_strategy=flags_obj.distribution_strategy,
num_gpus=flags_obj.num_gpus,
all_reduce_alg=flags_obj.all_reduce_alg,
num_packs=flags_obj.num_packs,
tpu_address=flags_obj.tpu,
tpu_zone=flags_obj.tpu_zone if flags_obj.tpu else None)
if strategy:
# flags_obj.enable_get_next_as_optional controls whether enabling
# get_next_as_optional behavior in DistributedIterator. If true, last
# partial batch can be supported.
strategy.extended.experimental_enable_get_next_as_optional = (
flags_obj.enable_get_next_as_optional
)
strategy_scope = distribution_utils.get_strategy_scope(strategy)
# pylint: disable=protected-access
if flags_obj.use_synthetic_data:
distribution_utils.set_up_synthetic_data()
input_fn = common.get_synth_input_fn(
height=imagenet_preprocessing.DEFAULT_IMAGE_SIZE,
width=imagenet_preprocessing.DEFAULT_IMAGE_SIZE,
num_channels=imagenet_preprocessing.NUM_CHANNELS,
num_classes=flags_obj.num_classes,
dtype=dtype,
drop_remainder=True)
else:
distribution_utils.undo_set_up_synthetic_data()
input_fn = imagenet_preprocessing.input_fn
# When `enable_xla` is True, we always drop the remainder of the batches
# in the dataset, as XLA-GPU doesn't support dynamic shapes.
# drop_remainder = flags_obj.enable_xla
# Current resnet_model.resnet50 input format is always channel-last.
  # We use the keras_applications MobileNet model, whose input format depends
  # on the Keras backend image data format.
  # The use_keras_image_data_format flag indicates whether the image
  # preprocessor output format should match the Keras backend image data
  # format or simply be channel-last.
use_keras_image_data_format = (flags_obj.model == 'mobilenet')
train_input_dataset = input_fn(
is_training=True,
data_dir=flags_obj.data_dir,
batch_size=flags_obj.batch_size,
parse_record_fn=imagenet_preprocessing.get_parse_record_fn(
use_keras_image_data_format=use_keras_image_data_format),
datasets_num_private_threads=flags_obj.datasets_num_private_threads,
dtype=dtype,
drop_remainder=flags_obj.drop_train_remainder,
tf_data_experimental_slack=flags_obj.tf_data_experimental_slack,
training_dataset_cache=flags_obj.training_dataset_cache,
)
eval_input_dataset = None
if not flags_obj.skip_eval:
eval_input_dataset = input_fn(
is_training=False,
data_dir=flags_obj.data_dir,
batch_size=flags_obj.batch_size,
parse_record_fn=imagenet_preprocessing.get_parse_record_fn(
use_keras_image_data_format=use_keras_image_data_format),
dtype=dtype,
drop_remainder=flags_obj.drop_eval_remainder)
steps_per_epoch, train_epochs = common.get_num_train_iterations(flags_obj)
mlp_log.mlperf_print('global_batch_size', flags_obj.batch_size)
mlp_log.mlperf_print('num_train_examples',
imagenet_preprocessing.NUM_IMAGES['train'])
mlp_log.mlperf_print('num_eval_examples',
imagenet_preprocessing.NUM_IMAGES['validation'])
learning_rate_schedule_fn = None
with strategy_scope:
optimizer, learning_rate_schedule_fn = common.get_optimizer(
flags_obj=flags_obj,
steps_per_epoch=steps_per_epoch,
train_steps=train_epochs * steps_per_epoch)
if flags_obj.fp16_implementation == 'graph_rewrite':
# Note: when flags_obj.fp16_implementation == "graph_rewrite", dtype as
# determined by flags_core.get_tf_dtype(flags_obj) would be 'float32'
# which will ensure tf.compat.v2.keras.mixed_precision and
# tf.train.experimental.enable_mixed_precision_graph_rewrite do not double
# up.
optimizer = tf.train.experimental.enable_mixed_precision_graph_rewrite(
optimizer)
if flags_obj.model == 'resnet50_v1.5':
resnet_model.change_keras_layer(flags_obj.use_tf_keras_layers)
model = resnet_model.resnet50(num_classes=flags_obj.num_classes)
elif flags_obj.model == 'mobilenet':
# TODO(kimjaehong): Remove layers attribute when minimum TF version
# support 2.0 layers by default.
model = tf.keras.applications.mobilenet.MobileNet(
weights=None, classes=flags_obj.num_classes, layers=tf.keras.layers)
if flags_obj.pretrained_filepath:
model.load_weights(flags_obj.pretrained_filepath)
if flags_obj.pruning_method == 'polynomial_decay':
if dtype != tf.float32:
raise NotImplementedError(
'Pruning is currently only supported on dtype=tf.float32.')
pruning_params = {
'pruning_schedule':
tfmot.sparsity.keras.PolynomialDecay(
initial_sparsity=flags_obj.pruning_initial_sparsity,
final_sparsity=flags_obj.pruning_final_sparsity,
begin_step=flags_obj.pruning_begin_step,
end_step=flags_obj.pruning_end_step,
frequency=flags_obj.pruning_frequency),
}
model = tfmot.sparsity.keras.prune_low_magnitude(model, **pruning_params)
elif flags_obj.pruning_method:
raise NotImplementedError(
'Only polynomial_decay is currently supported.')
# TODO(b/138957587): Remove when force_v2_in_keras_compile is on longer
# a valid arg for this model. Also remove as a valid flag.
if flags_obj.force_v2_in_keras_compile is not None:
model.compile(
loss='sparse_categorical_crossentropy',
optimizer=optimizer,
metrics=(['sparse_categorical_accuracy']
if flags_obj.report_accuracy_metrics else None),
run_eagerly=flags_obj.run_eagerly,
experimental_run_tf_function=flags_obj.force_v2_in_keras_compile)
else:
model.compile(
loss='sparse_categorical_crossentropy',
optimizer=optimizer,
metrics=(['sparse_categorical_accuracy']
if flags_obj.report_accuracy_metrics else None),
run_eagerly=flags_obj.run_eagerly)
callbacks = common.get_callbacks(
steps_per_epoch=steps_per_epoch,
learning_rate_schedule_fn=learning_rate_schedule_fn,
pruning_method=flags_obj.pruning_method,
enable_checkpoint_and_export=flags_obj.enable_checkpoint_and_export,
model_dir=flags_obj.model_dir)
num_eval_steps = common.get_num_eval_steps(flags_obj)
if flags_obj.skip_eval:
# Only build the training graph. This reduces memory usage introduced by
# control flow ops in layers that have different implementations for
# training and inference (e.g., batch norm).
if flags_obj.set_learning_phase_to_train:
# TODO(haoyuzhang): Understand slowdown of setting learning phase when
# not using distribution strategy.
tf.keras.backend.set_learning_phase(1)
num_eval_steps = None
if not strategy and flags_obj.explicit_gpu_placement:
# TODO(b/135607227): Add device scope automatically in Keras training loop
    # when not using distribution strategy.
no_dist_strat_device = tf.device('/device:GPU:0')
no_dist_strat_device.__enter__()
mlp_log.mlperf_print('init_stop', None)
mlp_log.mlperf_print('run_start', None)
for epoch in range(train_epochs):
mlp_log.mlperf_print('epoch_start', None,
metadata={'first_epoch_num': epoch,
'epoch_count': 1})
mlp_log.mlperf_print('block_start', None)
history = model.fit(train_input_dataset,
epochs=1,
steps_per_epoch=steps_per_epoch,
callbacks=callbacks,
verbose=2)
mlp_log.mlperf_print('block_stop', None)
eval_output = None
if not flags_obj.skip_eval:
mlp_log.mlperf_print('eval_start', None)
eval_output = model.evaluate(eval_input_dataset,
steps=num_eval_steps,
verbose=2)
mlp_log.mlperf_print('eval_stop', None)
eval_accuracy = eval_output[1]
mlp_log.mlperf_print(
'eval_accuracy', eval_accuracy, metadata={'epoch_num': epoch})
if eval_accuracy >= flags_obj.target_accuracy:
break
mlp_log.mlperf_print('epoch_stop', None)
mlp_log.mlperf_print('run_stop', None)
if flags_obj.pruning_method:
model = tfmot.sparsity.keras.strip_pruning(model)
if flags_obj.enable_checkpoint_and_export:
if dtype == tf.bfloat16:
logging.warning('Keras model.save does not support bfloat16 dtype.')
else:
      # Keras model.save assumes a float32 input signature.
export_path = os.path.join(flags_obj.model_dir, 'saved_model')
model.save(export_path, include_optimizer=False)
if not strategy and flags_obj.explicit_gpu_placement:
no_dist_strat_device.__exit__()
stats = common.build_stats(history, eval_output, callbacks)
return stats
def define_imagenet_keras_flags():
common.define_keras_flags(
model=True,
optimizer=True,
pretrained_filepath=True)
common.define_pruning_flags()
flags_core.set_defaults()
flags.adopt_module_key_flags(common)
def main(_):
model_helpers.apply_clean(flags.FLAGS)
with logger.benchmark_context(flags.FLAGS):
stats = run(flags.FLAGS)
logging.info('Run stats:\n%s', stats)
if __name__ == '__main__':
logging.set_verbosity(logging.INFO)
define_imagenet_keras_flags()
app.run(main)
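# Invocation sketch (paths are placeholders; only flags referenced via
# flags_obj in this file are used, and their exact spellings come from
# common.define_keras_flags / flags_core, so treat them as assumptions):
#   python resnet_imagenet_main.py --data_dir=/data/imagenet \
#       --model_dir=/tmp/resnet --batch_size=1024 --train_epochs=90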
|
mlperf/training_results_v0.7
|
Google/benchmarks/resnet/implementations/resnet-cloud-TF2.0-tpu-v3-32/resnet_imagenet_main.py
|
Python
|
apache-2.0
| 12,959 | 0.007408 |
import numpy
scale = 1000
def unit(v):
return (v / numpy.linalg.norm(v))
def angle(v1, v2):
v1_u = unit(v1)
v2_u = unit(v2)
angle = numpy.arccos(numpy.dot(v1_u, v2_u))
if numpy.isnan(angle):
if (v1_u == v2_u).all():
return 0.0
else:
return numpy.pi
return angle
def generate(models, solidName, fileName):
# Find the minimum positional vector for all models
positionMin = numpy.array([0, 0, 0, 0], dtype='float')
for model in models:
for geometry in model.geometry.geometry:
for mesh in geometry.meshes.meshes:
positions = mesh.positions
for v in positions:
if v[0] < positionMin[0]:
positionMin[0] = v[0]
if v[1] < positionMin[1]:
positionMin[1] = v[1]
if v[2] < positionMin[2]:
positionMin[2] = v[2]
if v[3] < positionMin[3]:
positionMin[3] = v[3]
# Translate position coordinates to always be positive
positionMin *= -1
#Open file
with open(fileName, 'w') as f:
print("Writing "+fileName+"...")
# Write name header
f.write("solid "+solidName+"\n")
# Iterate through all models
for model in models:
# Write positional vectors (once to start)
for geometry in model.geometry.geometry:
for mesh in geometry.meshes.meshes:
indices = mesh.indices.data
positions = mesh.positions
normals = mesh.normals
parts = mesh.parts.data
# Loop through all the parts in the mesh
for i, part in enumerate(parts):
# Check if this part has been duplicated
ignore = False
for j in range(i):
if (parts[i].indexStart == parts[j].indexStart) or (parts[i].indexCount == parts[j].indexCount):
ignore = True
# Skip anything meeting one of the following::
# duplicate part
# levelOfDetail greater than one
# diffuseTexture.contains("target_reticles")
if ignore or part.levelOfDetail > 1 or ("target_reticles" in part.diffuseTexture):
continue
start = part.indexStart
count = part.indexCount
# Process indices in sets of 3
if part.primitive == 3:
increment = 3
# Process indices as triangle strip
elif part.primitive == 5:
increment = 1
count -= 2
j = 0
while j < count:
# Skip if any two of the indices match (ignoring lines)
if (indices[start+j+0] == indices[start+j+1]) or (indices[start+j+0] == indices[start+j+2]) or (indices[start+j+1] == indices[start+j+2]):
j += 1
continue
# Calculate the average normal
n = (normals[indices[start+j+0]] + normals[indices[start+j+1]] + normals[indices[start+j+2]])[0:3]
# Calculate normal of vertices
v1 = positions[indices[start+j+1]][0:3] - positions[indices[start+j+0]][0:3]
v2 = positions[indices[start+j+2]][0:3] - positions[indices[start+j+1]][0:3]
n2 = numpy.cross(v1, v2)
# Calculate the angle between the two normals
# Reverse the vertices orientation if the angle is > pi/2 (90*)
a = angle(unit(n), unit(n2))
if a > numpy.pi/2:
flip = True
else:
flip = False
# Write the normal and loop start to file
# the normal doesn't matter for this, the order of vertices does
f.write("facet normal 0.0 0.0 0.0\n")
f.write(" outer loop\n")
if flip:
# write the three vertices to the file in reverse order
k = 3
while k > 0:
v = positions[indices[start+j+(k-1)]]
v = (v + positionMin) * scale
f.write(" vertex "+str(v[0])+" "+str(v[1])+" "+str(v[2])+"\n")
k -= 1
else:
# write the three vertices to the file in forward order
for k in range(3):
v = positions[indices[start+j+k]]
v = (v + positionMin) * scale
f.write(" vertex "+str(v[0])+" "+str(v[1])+" "+str(v[2])+"\n")
# Write the loop and normal end to file
f.write(" endloop\n")
f.write("endfacet\n")
j += increment
else:
# Skip this if it ever happens
continue
f.close()
return
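# Usage sketch (the `models` argument is produced by the model loader elsewhere
# in this repository; all that is assumed here is the
# .geometry.geometry[...].meshes.meshes layout iterated above):
#   generate(models, "MySolid", "output.stl")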
|
bepo13/destinydb-stl-generator-v0
|
src/DestinyModelGenStl.py
|
Python
|
mit
| 6,240 | 0.005769 |
from ryu.base import app_manager
from ryu.controller import ofp_event
from ryu.controller.handler import CONFIG_DISPATCHER, MAIN_DISPATCHER
from ryu.controller.handler import set_ev_cls
from ryu.ofproto import ofproto_v1_3
from ryu.ofproto import ether
from ryu.lib.packet import packet
from ryu.lib.packet import ethernet
from ryu.lib.packet import ether_types
from ryu.lib.packet import arp, ipv4
from ryu.topology.api import get_switch, get_link, get_host
from ryu.topology import event, switches
import networkx as nx
from ryu.lib import hub
class actualSDN_switch(app_manager.RyuApp):
OFP_VERSIONS = [ofproto_v1_3.OFP_VERSION]
def __init__(self, *args, **kwargs):
super(actualSDN_switch, self).__init__(*args, **kwargs)
self.vtable = {}
# default vlan table
self.vtable = {'00:00:00:00:00:01':'1',
'00:00:00:00:00:02':'1',
'00:00:00:00:00:03':'1'}
self.mac_to_ip = {} # mac <-> ip
self.ip_to_mac = {} # ip <-> mac
self.mac_to_port = {} # host in which port
self.stable = {} #dpid<->datapath
self.default_datapath = None
self.default_ev = None
self.host_enter = 0 # host enter number
self.switch_enter = 0 # switch enter number
self.mac_to_dp = {} # mac <-> datapath
self.switches = [] #all switches' dpid
self.switches_dp = [] #all switches' datapath
# self.path_db = [] # store shortest path
# monitor init
self.datapaths={} # all datapaths
self.monitor_thread = hub.spawn(self._monitor)
self.bandwidth = {}
#networkx init
self.topology_api_app = self
self.directed_Topo = nx.DiGraph()
@set_ev_cls(ofp_event.EventOFPSwitchFeatures, CONFIG_DISPATCHER)
def switch_features_handler(self, ev):
datapath = ev.msg.datapath
ofproto = datapath.ofproto
parser = datapath.ofproto_parser
match = parser.OFPMatch()
self.datapaths[datapath.id] = datapath
self.default_datapath = datapath
actions = [parser.OFPActionOutput(ofproto.OFPP_CONTROLLER,
ofproto.OFPCML_NO_BUFFER)]
self.add_flow(datapath, 0, match, actions)
# read the mac_table(valid user) and put the information into the mac_to_ip and ip_to_mac
with open('./mac_table.txt') as f:
line = f.readlines()
line = [x.strip('\n') for x in line]
for content in line:
tmp = content.split(',')
mac = tmp[0]
ip = tmp[1]
self.mac_to_ip[mac] = ip
self.ip_to_mac[ip] = mac
#self.host_num = len(self.ip_to_mac)
self.host_num = 3
# _monitor, _request_stats adn _port_stats_reply_handler, the three functions are used when monitor the traffic
def _monitor(self):
while True:
for dp in self.datapaths.values():
self._request_stats(dp)
hub.sleep(3)
def _request_stats(self, datapath):
ofproto = datapath.ofproto
parser = datapath.ofproto_parser
req = parser.OFPPortStatsRequest(datapath, 0 , ofproto.OFPP_ANY)
datapath.send_msg(req)
@set_ev_cls(ofp_event.EventOFPPortStatsReply, MAIN_DISPATCHER)
def _port_stats_reply_handler(self, ev):
body = ev.msg.body
parser = ev.msg.datapath.ofproto_parser
self.logger.info('datapath port '
'rx-pkts rx-bytes '
'tx-pkts tx-bytes bandwidth')
self.logger.info('---------------- -------- '
'-------- -------- '
'-------- -------- --------')
for stat in sorted(body):
if stat.port_no < 7:
index = str(ev.msg.datapath.id) + '-' + str(stat.port_no)
if index not in self.bandwidth:
self.bandwidth[index] = 0
transfer_bytes = stat.rx_bytes + stat.tx_bytes
speed = (transfer_bytes - self.bandwidth[index]) / 3
self.logger.info('%016x %8x %8d %8d %8d %8d %8d\n',
ev.msg.datapath.id, stat.port_no,
stat.rx_packets, stat.rx_bytes,
stat.tx_packets, stat.tx_bytes, speed)
self.bandwidth[index] = transfer_bytes
def add_flow(self, datapath, priority, match, actions, buffer_id=None):
ofproto = datapath.ofproto
parser = datapath.ofproto_parser
inst = [parser.OFPInstructionActions(ofproto.OFPIT_APPLY_ACTIONS,actions)]
if buffer_id:
mod = parser.OFPFlowMod(datapath=datapath, buffer_id=buffer_id,
priority=priority, match=match,
instructions=inst)
else:
mod = parser.OFPFlowMod(datapath=datapath, priority=priority,
match=match, instructions=inst)
datapath.send_msg(mod)
print('add flow!!')
# delete flow
def del_flow(self, datapath, match):
ofproto = datapath.ofproto
ofproto_parser = datapath.ofproto_parser
mod = ofproto_parser.OFPFlowMod(datapath=datapath,
command= ofproto.OFPFC_DELETE,out_port=ofproto.OFPP_ANY, out_group=ofproto.OFPG_ANY,match=match)
datapath.send_msg(mod)
print('del flow')
    # When the src is already in the topo but its in_port changed, this function deletes the flows related to that src.
def ShortestPathDeleteFlow(self, datapath, *args):
if datapath==None:
return
ofproto = datapath.ofproto
ofproto_parser = datapath.ofproto_parser
#print('stable',self.stable)
for key, value in self.stable.items():
for arg in args:
match = ofproto_parser.OFPMatch(eth_dst=arg)
self.del_flow(value, match)
match = ofproto_parser.OFPMatch(eth_src=arg)
self.del_flow(value, match)
print('SP del flow end')
# handle arp package
def _handle_arp(self, datapath, in_port, pkt_ethernet, arp_pkt):
if arp_pkt.opcode != arp.ARP_REQUEST:
return
if self.ip_to_mac.get(arp_pkt.dst_ip) == None:
return
        #Browse target hardware address from the ip_to_mac table.
get_mac = self.ip_to_mac[arp_pkt.dst_ip]
#target_ip_addr = arp_pkt.dst_ip
pkt = packet.Packet()
#Create ethernet packet
pkt.add_protocol(ethernet.ethernet(ethertype=ether.ETH_TYPE_ARP,dst=pkt_ethernet.src,src=get_mac))
#Create ARP Reply packet
pkt.add_protocol(arp.arp(opcode=arp.ARP_REPLY,
src_mac=get_mac,
src_ip=arp_pkt.dst_ip,
dst_mac=arp_pkt.src_mac,
dst_ip=arp_pkt.src_ip))
self._send_packet(datapath, in_port, pkt)
print('arp', get_mac, pkt_ethernet.src,)
# add host in the direct topo
def AddHost(self, dpid, host, in_port):
#Add host into directed_topo
self.directed_Topo.add_node(host)
#Add edge switch's port to src host
self.directed_Topo.add_edge(dpid, host, {'port':in_port})
#Add edge host to switch
self.directed_Topo.add_edge(host, dpid)
return
@set_ev_cls(event.EventSwitchEnter)
def get_topology_data(self, ev):
#Topo information of switch
self.switch_enter += 1
#Get Switch List
switch_list = get_switch(self.topology_api_app, None)
self.switches = [switch.dp.id for switch in switch_list]
self.switches_dp = [switch.dp for switch in switch_list]
#Add switch dpid into Directed Topology
self.directed_Topo.add_nodes_from(self.switches)
#Get Link List
links_list = get_link(self.topology_api_app, None)
#When all Link enter
if self.switch_enter == len(self.switches):
links = [(link.src.dpid, link.dst.dpid, {'port':link.src.port_no}) for link in links_list ]
links.sort()
self.directed_Topo.add_edges_from(links)
print('****List Of Links****')
print(self.directed_Topo.edges(data = True))
# install direct topo.
# if the hosts in the same vlan, the function will install paths between them.
def default_path_install(self, ev):
for src in self.vtable:
for dst in self.vtable:
if src != dst:
if self.vtable[src] == self.vtable[dst]:
print('****Shortest path****')
print('vtable', self.vtable)
print(self.directed_Topo.edges(data = True))
self.ShortestPathInstall(ev, src, dst)
# Using networkx, the paths between the hosts in the same vlan are the shortest.
def ShortestPathInstall(self, ev, src, dst):
#Compute shortest path
path = nx.shortest_path(self.directed_Topo, src, dst)
#Add Flow along with the path
for k, sw in enumerate(self.switches):
if sw in path:
                # Forward toward the next hop on the shortest path.
                next_hop = path[path.index(sw)+1]
                out_port = self.directed_Topo[sw][next_hop]['port']
                actions = [self.switches_dp[k].ofproto_parser.OFPActionOutput(out_port)]
                match = self.switches_dp[k].ofproto_parser.OFPMatch(eth_src=src, eth_dst=dst)
                self.add_flow(self.switches_dp[k], 1, match, actions)
return
def _send_packet(self, datapath, in_port, pkt):
ofproto =datapath.ofproto
parser = datapath.ofproto_parser
pkt.serialize()
data = pkt.data
actions = [parser.OFPActionOutput(port=in_port)]
out = parser.OFPPacketOut(datapath=datapath,
buffer_id=ofproto.OFP_NO_BUFFER,
in_port=ofproto.OFPP_CONTROLLER,
actions=actions,
data=data)
datapath.send_msg(out)
# the main function
@set_ev_cls(ofp_event.EventOFPPacketIn, MAIN_DISPATCHER)
def _packet_in_handler(self, ev):
# If you hit this you might want to increase
# the "miss_send_length" of your switch
if ev.msg.msg_len < ev.msg.total_len:
self.logger.debug("packet truncated: only %s of %s bytes",
ev.msg.msg_len, ev.msg.total_len)
msg = ev.msg
datapath = msg.datapath
ofproto = datapath.ofproto
parser = datapath.ofproto_parser
in_port = msg.match['in_port']
pkt = packet.Packet(msg.data)
pkt_ethernet = pkt.get_protocols(ethernet.ethernet)[0]
if not pkt_ethernet:
return
        if pkt_ethernet.ethertype == ether_types.ETH_TYPE_LLDP:
# ignore lldp packet
return
arp_pkt = pkt.get_protocol(arp.arp)
        if pkt_ethernet.ethertype == ether_types.ETH_TYPE_ARP:
self._handle_arp(datapath, in_port, pkt_ethernet, arp_pkt)
return
dst = pkt_ethernet.dst
src = pkt_ethernet.src
out_port = None
dpid = datapath.id
self.mac_to_port.setdefault(dpid, {})
self.mac_to_dp.setdefault(src, datapath)
self.stable.setdefault(dpid, datapath)
self.logger.info("packet in %s %s %s %s", dpid, src, dst, in_port)
# when the src is valid
if src in self.vtable:
# if the valid src not in the direct topo
if not self.directed_Topo.has_node(src):
print('add', src)
self.AddHost(dpid,src,in_port)
#Add information to mac_to_port
self.mac_to_port[dpid][src] = in_port
self.host_enter += 1
# if entered host > 3, it will install shortest path
if self.host_enter == self.host_num:
self.default_path_install(ev)
#change port function
else:
#change port: del relative flow and reinstall
if in_port != self.mac_to_port.get(dpid).get(src):
#Delete the wrong flow
self.ShortestPathDeleteFlow(datapath, src)
#Update mac_to_port table
for key, value in self.mac_to_port.items():
                    if src in value:
for mac, port in value.items():
if mac == src:
del self.mac_to_port[key][mac]
break
self.mac_to_port[dpid][src] = in_port
#Change Graph
#Remove wrong
self.directed_Topo.remove_node(src)
#Add Correct host
self.AddHost(dpid, src, in_port)
#Add new flows and path
self.default_path_install(ev)
# when the dst is in the direct topo
if dst in self.mac_to_port[dpid]:
if self.vtable[src] != None and self.vtable[src] == self.vtable[dst]:
out_port = self.mac_to_port[dpid][dst]
actions = [parser.OFPActionOutput(out_port)]
print('out_port',out_port)
else:
out_port = ofproto.OFPP_FLOOD
actions=[parser.OFPActionOutput(out_port)]
else:
out_port = ofproto.OFPP_FLOOD
actions = [parser.OFPActionOutput(out_port)]
# install a flow to avoid packet_in next time
if out_port != ofproto.OFPP_FLOOD:
match = parser.OFPMatch(in_port=in_port, eth_dst=dst, eth_src=src)
# verify if we have a valid buffer_id, if yes avoid to send both
# flow_mod & packet_out
if msg.buffer_id != ofproto.OFP_NO_BUFFER:
self.add_flow(datapath, 1, match, actions, msg.buffer_id)
return
else:
self.add_flow(datapath, 1, match, actions)
data = None
if msg.buffer_id == ofproto.OFP_NO_BUFFER:
data = msg.data
out = parser.OFPPacketOut(datapath=datapath, buffer_id=msg.buffer_id,
in_port=in_port, actions=actions, data=data)
datapath.send_msg(out)
|
ray6/sdn
|
actualSDN.py
|
Python
|
mit
| 14,660 | 0.007572 |
#!/usr/bin/python3
# -*- coding: utf-8 -*-
import nerve
import os
import cgi
import traceback
import json
import urllib.parse
class WSGIHandler (nerve.Server):
def __init__(self, **config):
super().__init__(**config)
def __call__(self, environ, start_response):
#nerve.logs.redirect(environ['wsgi.errors'])
#for (key, value) in sorted(environ.items()):
# print(key, value, file=environ['wsgi.errors'])
reqtype = environ['REQUEST_METHOD']
scheme = environ['REQUEST_SCHEME'] if 'REQUEST_SCHEME' in environ else ''
servername = environ['SERVER_NAME']
path = environ['PATH_INFO']
querystring = environ['QUERY_STRING']
uri = urllib.parse.urlunsplit( (scheme, servername, path, querystring, '') )
getvars = nerve.core.delistify(cgi.parse_qs(querystring))
try:
contentlen = int(environ.get('CONTENT_LENGTH', 0))
except (ValueError):
contentlen = 0
contents = environ['wsgi.input'].read(contentlen).decode('utf-8')
if 'CONTENT_TYPE' in environ:
(mimetype, pdict) = cgi.parse_header(environ['CONTENT_TYPE'])
else:
mimetype = None # empty post doesn't provide a content-type.
        if mimetype is None:
postvars = { }
elif mimetype == 'multipart/form-data':
postvars = nerve.core.delistify(cgi.parse_multipart(self.rfile, pdict))
elif mimetype == 'application/x-www-form-urlencoded':
postvars = nerve.core.delistify(urllib.parse.parse_qs(contents, keep_blank_values=True))
elif mimetype == 'application/json':
postvars = json.loads(contents)
else:
raise Exception("unrecognized content-type in POST " + self.path + " (" + mimetype + ")")
postvars.update(getvars)
headers = { }
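        # WSGI exposes request headers as HTTP_<NAME> keys in environ; convert
        # them back to their lower-case, dash-separated wire form for nerve.Request.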
for (key, value) in environ.items():
if key.startswith('HTTP_'):
name = key[5:].lower().replace('_', '-')
headers[name] = value
request = nerve.Request(self, None, reqtype, uri, postvars, headers=headers)
controller = self.make_controller(request)
controller.handle_request(request)
redirect = controller.get_redirect()
error = controller.get_error()
headers = controller.get_headers()
mimetype = controller.get_mimetype()
output = controller.get_output()
if redirect:
status = '302 Found'
headers += [ ('Location', redirect) ]
elif error:
if type(error) is nerve.NotFoundError:
status = '404 Not Found'
else:
status = '500 Internal Server Error'
else:
status = '200 OK'
if isinstance(output, str):
output = bytes(output, 'utf-8')
if mimetype:
headers += [ ('Content-Type', mimetype) ]
if output:
headers += [ ('Content-Length', str(len(output))) ]
else:
headers += [ ('Content-Length', '0') ]
#print(path, file=environ['wsgi.errors'])
#for (key, value) in sorted(headers):
# print(key, value, file=environ['wsgi.errors'])
start_response(status, headers)
#nerve.logs.redirect(None)
yield output if output is not None else b''
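# A minimal serving sketch (hypothetical; it assumes the nerve.Server base class
# accepts an empty config and that make_controller() is wired up elsewhere):
#   from wsgiref.simple_server import make_server
#   application = WSGIHandler()
#   make_server('localhost', 8080, application).serve_forever()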
|
transistorfet/nerve
|
nerve/http/servers/wsgi.py
|
Python
|
gpl-3.0
| 3,372 | 0.007711 |
import logging
from ._base import Service
from ..domain import Template
log = logging.getLogger(__name__)
class TemplateService(Service):
def __init__(self, template_store, **kwargs):
super().__init__(**kwargs)
self.template_store = template_store
def all(self):
"""Get all templates."""
templates = self.template_store.filter()
return templates
def find(self, key):
"""Find a template with a matching key."""
key = Template.strip(key)
# Find an exact match
template = self.template_store.read(key)
if template:
return template
# Else, find an alias match
for template in self.all():
if key in template.aliases_stripped:
return template
# Else, no match
raise self.exceptions.not_found
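    # Hypothetical usage sketch (constructor arguments are assumed, not taken
    # from the original project):
    #   service = TemplateService(template_store=store, exceptions=exceptions)
    #   template = service.find("fry")   # exact key first, then alias match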
def validate(self):
"""Ensure all template are valid and conflict-free."""
templates = self.all()
keys = {template.key: template for template in templates}
for template in templates:
log.info("checking template '%s' ...", template)
if not template.validate():
return False
for alias in template.aliases:
log.info("checking alias '%s' -> '%s' ...", alias, template.key)
if alias not in template.aliases_lowercase:
msg = "alias '%s' should be lowercase characters or dashes"
log.error(msg, alias)
return False
try:
existing = keys[alias]
except KeyError:
keys[alias] = template
else:
msg = "alias '%s' already used in template: %s"
log.error(msg, alias, existing)
return False
return True
|
CptSpaceToaster/memegen
|
memegen/services/template.py
|
Python
|
mit
| 1,885 | 0.000531 |
"""
GUI progressbar decorator for iterators.
Includes a default (x)range iterator printing to stderr.
Usage:
>>> from tqdm_gui import tgrange[, tqdm_gui]
>>> for i in tgrange(10): #same as: for i in tqdm_gui(xrange(10))
... ...
"""
# future division is important to divide integers and get as
# a result precise floating numbers (instead of truncated int)
from __future__ import division, absolute_import
# import compatibility functions and utilities
import sys
from time import time
from ._utils import _range
# to inherit from the tqdm class
from ._tqdm import tqdm
__author__ = {"github.com/": ["casperdcl", "lrq3000"]}
__all__ = ['tqdm_gui', 'tgrange']
class tqdm_gui(tqdm): # pragma: no cover
"""
Experimental GUI version of tqdm!
"""
@classmethod
def write(cls, s, file=None, end="\n"):
"""
Print a message via tqdm_gui (just an alias for print)
"""
if file is None:
file = sys.stdout
# TODO: print text on GUI?
file.write(s)
file.write(end)
def __init__(self, *args, **kwargs):
import matplotlib as mpl
import matplotlib.pyplot as plt
from collections import deque
kwargs['gui'] = True
super(tqdm_gui, self).__init__(*args, **kwargs)
# Initialize the GUI display
if self.disable or not kwargs['gui']:
return
self.fp.write('Warning: GUI is experimental/alpha\n')
self.mpl = mpl
self.plt = plt
self.sp = None
# Remember if external environment uses toolbars
self.toolbar = self.mpl.rcParams['toolbar']
self.mpl.rcParams['toolbar'] = 'None'
self.mininterval = max(self.mininterval, 0.5)
self.fig, ax = plt.subplots(figsize=(9, 2.2))
# self.fig.subplots_adjust(bottom=0.2)
if self.total:
self.xdata = []
self.ydata = []
self.zdata = []
else:
self.xdata = deque([])
self.ydata = deque([])
self.zdata = deque([])
self.line1, = ax.plot(self.xdata, self.ydata, color='b')
self.line2, = ax.plot(self.xdata, self.zdata, color='k')
ax.set_ylim(0, 0.001)
if self.total:
ax.set_xlim(0, 100)
ax.set_xlabel('percent')
self.fig.legend((self.line1, self.line2), ('cur', 'est'),
loc='center right')
# progressbar
self.hspan = plt.axhspan(0, 0.001,
xmin=0, xmax=0, color='g')
else:
# ax.set_xlim(-60, 0)
ax.set_xlim(0, 60)
ax.invert_xaxis()
ax.set_xlabel('seconds')
ax.legend(('cur', 'est'), loc='lower left')
ax.grid()
# ax.set_xlabel('seconds')
ax.set_ylabel((self.unit if self.unit else 'it') + '/s')
if self.unit_scale:
plt.ticklabel_format(style='sci', axis='y',
scilimits=(0, 0))
ax.yaxis.get_offset_text().set_x(-0.15)
# Remember if external environment is interactive
self.wasion = plt.isinteractive()
plt.ion()
self.ax = ax
def __iter__(self):
# TODO: somehow allow the following:
# if not self.gui:
# return super(tqdm_gui, self).__iter__()
iterable = self.iterable
if self.disable:
for obj in iterable:
yield obj
return
# ncols = self.ncols
mininterval = self.mininterval
maxinterval = self.maxinterval
miniters = self.miniters
dynamic_miniters = self.dynamic_miniters
unit = self.unit
unit_scale = self.unit_scale
ascii = self.ascii
start_t = self.start_t
last_print_t = self.last_print_t
last_print_n = self.last_print_n
n = self.n
# dynamic_ncols = self.dynamic_ncols
smoothing = self.smoothing
avg_time = self.avg_time
bar_format = self.bar_format
plt = self.plt
ax = self.ax
xdata = self.xdata
ydata = self.ydata
zdata = self.zdata
line1 = self.line1
line2 = self.line2
for obj in iterable:
yield obj
# Update and print the progressbar.
# Note: does not call self.update(1) for speed optimisation.
n += 1
delta_it = n - last_print_n
# check the counter first (avoid calls to time())
if delta_it >= miniters:
cur_t = time()
delta_t = cur_t - last_print_t
if delta_t >= mininterval:
elapsed = cur_t - start_t
# EMA (not just overall average)
if smoothing and delta_t:
avg_time = delta_t / delta_it \
if avg_time is None \
else smoothing * delta_t / delta_it + \
(1 - smoothing) * avg_time
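                    # i.e. avg_time <- rate_now on the first sample, then
                    # avg_time <- smoothing*rate_now + (1-smoothing)*avg_time,
                    # where rate_now = delta_t / delta_it (seconds per iteration):
                    # an exponential moving average of the per-iteration time.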
# Inline due to multiple calls
total = self.total
# instantaneous rate
y = delta_it / delta_t
# overall rate
z = n / elapsed
# update line data
xdata.append(n * 100.0 / total if total else cur_t)
ydata.append(y)
zdata.append(z)
# Discard old values
# xmin, xmax = ax.get_xlim()
# if (not total) and elapsed > xmin * 1.1:
if (not total) and elapsed > 66:
xdata.popleft()
ydata.popleft()
zdata.popleft()
ymin, ymax = ax.get_ylim()
if y > ymax or z > ymax:
ymax = 1.1 * y
ax.set_ylim(ymin, ymax)
ax.figure.canvas.draw()
if total:
line1.set_data(xdata, ydata)
line2.set_data(xdata, zdata)
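                        # The green axhspan doubles as the progress bar: its polygon
                        # is stretched vertically over the current y-limits and
                        # horizontally from 0 to n/total.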
try:
poly_lims = self.hspan.get_xy()
except AttributeError:
self.hspan = plt.axhspan(0, 0.001, xmin=0,
xmax=0, color='g')
poly_lims = self.hspan.get_xy()
poly_lims[0, 1] = ymin
poly_lims[1, 1] = ymax
poly_lims[2] = [n / total, ymax]
poly_lims[3] = [poly_lims[2, 0], ymin]
if len(poly_lims) > 4:
poly_lims[4, 1] = ymin
self.hspan.set_xy(poly_lims)
else:
t_ago = [cur_t - i for i in xdata]
line1.set_data(t_ago, ydata)
line2.set_data(t_ago, zdata)
ax.set_title(self.format_meter(
n, total, elapsed, 0,
self.desc, ascii, unit, unit_scale,
1 / avg_time if avg_time else None, bar_format),
fontname="DejaVu Sans Mono", fontsize=11)
plt.pause(1e-9)
# If no `miniters` was specified, adjust automatically
# to the maximum iteration rate seen so far.
if dynamic_miniters:
if maxinterval and delta_t > maxinterval:
# Set miniters to correspond to maxinterval
miniters = delta_it * maxinterval / delta_t
elif mininterval and delta_t:
# EMA-weight miniters to converge
# towards the timeframe of mininterval
miniters = smoothing * delta_it * mininterval \
/ delta_t + (1 - smoothing) * miniters
else:
miniters = smoothing * delta_it + \
(1 - smoothing) * miniters
# Store old values for next call
last_print_n = n
last_print_t = cur_t
# Closing the progress bar.
# Update some internal variables for close().
self.last_print_n = last_print_n
self.n = n
self.close()
def update(self, n=1):
# if not self.gui:
# return super(tqdm_gui, self).close()
if self.disable:
return
if n < 0:
n = 1
self.n += n
delta_it = self.n - self.last_print_n # should be n?
if delta_it >= self.miniters:
# We check the counter first, to reduce the overhead of time()
cur_t = time()
delta_t = cur_t - self.last_print_t
if delta_t >= self.mininterval:
elapsed = cur_t - self.start_t
# EMA (not just overall average)
if self.smoothing and delta_t:
self.avg_time = delta_t / delta_it \
if self.avg_time is None \
else self.smoothing * delta_t / delta_it + \
(1 - self.smoothing) * self.avg_time
# Inline due to multiple calls
total = self.total
ax = self.ax
# instantaneous rate
y = delta_it / delta_t
                # overall rate
z = self.n / elapsed
# update line data
self.xdata.append(self.n * 100.0 / total
if total else cur_t)
self.ydata.append(y)
self.zdata.append(z)
# Discard old values
if (not total) and elapsed > 66:
self.xdata.popleft()
self.ydata.popleft()
self.zdata.popleft()
ymin, ymax = ax.get_ylim()
if y > ymax or z > ymax:
ymax = 1.1 * y
ax.set_ylim(ymin, ymax)
ax.figure.canvas.draw()
if total:
self.line1.set_data(self.xdata, self.ydata)
self.line2.set_data(self.xdata, self.zdata)
try:
poly_lims = self.hspan.get_xy()
except AttributeError:
self.hspan = self.plt.axhspan(0, 0.001, xmin=0,
xmax=0, color='g')
poly_lims = self.hspan.get_xy()
poly_lims[0, 1] = ymin
poly_lims[1, 1] = ymax
poly_lims[2] = [self.n / total, ymax]
poly_lims[3] = [poly_lims[2, 0], ymin]
if len(poly_lims) > 4:
poly_lims[4, 1] = ymin
self.hspan.set_xy(poly_lims)
else:
t_ago = [cur_t - i for i in self.xdata]
self.line1.set_data(t_ago, self.ydata)
self.line2.set_data(t_ago, self.zdata)
ax.set_title(self.format_meter(
self.n, total, elapsed, 0,
self.desc, self.ascii, self.unit, self.unit_scale,
1 / self.avg_time if self.avg_time else None,
self.bar_format),
fontname="DejaVu Sans Mono", fontsize=11)
self.plt.pause(1e-9)
# If no `miniters` was specified, adjust automatically to the
# maximum iteration rate seen so far.
# e.g.: After running `tqdm.update(5)`, subsequent
# calls to `tqdm.update()` will only cause an update after
# at least 5 more iterations.
if self.dynamic_miniters:
if self.maxinterval and delta_t > self.maxinterval:
self.miniters = self.miniters * self.maxinterval \
/ delta_t
elif self.mininterval and delta_t:
self.miniters = self.smoothing * delta_it \
* self.mininterval / delta_t + \
(1 - self.smoothing) * self.miniters
else:
self.miniters = self.smoothing * delta_it + \
(1 - self.smoothing) * self.miniters
# Store old values for next call
self.last_print_n = self.n
self.last_print_t = cur_t
def close(self):
# if not self.gui:
# return super(tqdm_gui, self).close()
if self.disable:
return
self.disable = True
self._instances.remove(self)
# Restore toolbars
self.mpl.rcParams['toolbar'] = self.toolbar
# Return to non-interactive mode
if not self.wasion:
self.plt.ioff()
if not self.leave:
self.plt.close(self.fig)
def tgrange(*args, **kwargs):
"""
A shortcut for tqdm_gui(xrange(*args), **kwargs).
On Python3+ range is used instead of xrange.
"""
return tqdm_gui(_range(*args), **kwargs)
|
dhaase-de/dh-python-dh
|
dh/thirdparty/tqdm/_tqdm_gui.py
|
Python
|
mit
| 13,510 | 0 |
"""Test code for pooling"""
import numpy as np
import tvm
import topi
import math
from topi.util import get_const_tuple
pool_code = {
"avg": 0,
"max": 1
}
def verify_pool(n, ic, ih, kh, sh, padding, pool_type, ceil_mode, count_include_pad=True):
iw = ih
kw = kh
sw = sh
pt, pl, pb, pr = padding
A = tvm.placeholder((n, ic, ih, iw), name='A')
B = topi.cpp.nn.pool(A, [kh, kw], [sh, sw], padding,
pool_code[pool_type], ceil_mode, "NCHW", count_include_pad)
B = topi.cpp.nn.relu(B)
dtype = A.dtype
bshape = get_const_tuple(B.shape)
ashape = get_const_tuple(A.shape)
if ceil_mode:
assert bshape[2] == int(math.ceil(float(ashape[2] - kh + pt + pb) / sh) + 1)
assert bshape[3] == int(math.ceil(float(ashape[3] - kw + pl + pr) / sw) + 1)
else:
assert bshape[2] == int(math.floor(float(ashape[2] - kh + pt + pb) / sh) + 1)
assert bshape[3] == int(math.floor(float(ashape[3] - kw + pl + pr) / sw) + 1)
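    # Standard pooling output size: out = floor((in + pad_before + pad_after - kernel)
    # / stride) + 1, with floor replaced by ceil when ceil_mode is set.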
a_np = np.random.uniform(size=(n, ic, ih, iw)).astype(dtype)
pad_np = np.zeros(shape=(n, ic, ih+pt+pb, iw+pl+pr)).astype(dtype)
no_zero = (range(n), range(ic), (range(pt, ih+pt)), (range(pl, iw+pl)))
pad_np[np.ix_(*no_zero)] = a_np
_, oc, oh, ow = get_const_tuple(B.shape)
b_np = np.zeros(shape=(n, oc, oh, ow)).astype(dtype)
if pool_type == 'avg':
for i in range(oh):
for j in range(ow):
if count_include_pad:
b_np[:,:,i,j] = np.mean(pad_np[:, :, i*sh:i*sh+kh, j*sw:j*sw+kw], axis=(2,3))
else:
pad_count = np.sum(pad_np[:, :, i*sh:i*sh+kh, j*sw:j*sw+kw] > 0, axis=(2,3))
b_np[:,:,i,j] = np.sum(pad_np[:, :, i*sh:i*sh+kh, j*sw:j*sw+kw], axis=(2,3)) / np.maximum(pad_count, 1)
elif pool_type =='max':
for i in range(oh):
for j in range(ow):
b_np[:,:,i,j] = np.max(pad_np[:, :, i*sh:i*sh+kh, j*sw:j*sw+kw], axis=(2,3))
b_np = np.maximum(b_np, 0.0)
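    # ReLU is folded into the reference output because the TOPI graph above ends
    # with topi.cpp.nn.relu(B).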
def check_device(device):
ctx = tvm.context(device, 0)
if not ctx.exist:
print("Skip because %s is not enabled" % device)
return
print("Running on target: %s" % device)
target = topi.cpp.TEST_create_target(device)
if device == "llvm":
s = topi.cpp.generic.default_schedule(target, [B], False)
else:
s = topi.cpp.cuda.schedule_pool(target, [B])
a = tvm.nd.array(a_np, ctx)
b = tvm.nd.array(np.zeros(get_const_tuple(B.shape), dtype=dtype), ctx)
f = tvm.build(s, [A, B], device)
f(a, b)
np.testing.assert_allclose(b.asnumpy(), b_np, rtol=1e-5)
for device in ['cuda', 'opencl', 'metal', 'rocm']:
check_device(device)
def test_pool():
verify_pool(1, 256, 32, 2, 2, [0, 0, 0, 0], 'avg', False, True)
verify_pool(1, 256, 31, 3, 3, [1, 2, 1, 2], 'avg', False, True)
verify_pool(1, 256, 32, 2, 2, [1, 2, 1, 2], 'avg', False, False)
verify_pool(1, 256, 31, 4, 4, [3, 3, 3, 3], 'avg', False, False)
verify_pool(1, 256, 31, 4, 4, [0, 0, 0, 0], 'avg', False, False)
verify_pool(1, 256, 32, 2, 2, [0, 0, 0, 0], 'max', False)
verify_pool(1, 256, 31, 3, 3, [2, 1, 2, 1], 'max', False)
verify_pool(1, 256, 31, 3, 3, [2, 1, 2, 1], 'max', True)
verify_pool(1, 256, 31, 3, 3, [2, 1, 0, 3], 'avg', False, True)
verify_pool(1, 256, 32, 2, 2, [0, 3, 2, 1], 'avg', False, False)
verify_pool(1, 256, 31, 3, 3, [1, 0, 3, 2], 'max', False)
verify_pool(1, 256, 31, 3, 3, [3, 2, 1, 0], 'max', True)
def verify_global_pool(n, c, h, w, pool_type):
A = tvm.placeholder((n, c, h, w), name='A')
B = topi.cpp.nn.global_pool(A, pool_code[pool_type])
B = topi.cpp.nn.relu(B)
a_np = np.random.uniform(size=get_const_tuple(A.shape)).astype(A.dtype)
if pool_type == 'avg':
b_np = np.mean(a_np, axis=(2,3), keepdims=True)
elif pool_type =='max':
b_np = np.max(a_np, axis=(2,3), keepdims=True)
b_np = np.maximum(b_np, 0.0)
def check_device(device):
if not tvm.module.enabled(device):
print("Skip because %s is not enabled" % device)
return
print("Running on target: %s" % device)
target = topi.cpp.TEST_create_target(device)
if device == "llvm":
s = topi.cpp.generic.default_schedule(target, [B], False)
else:
s = topi.cpp.cuda.schedule_global_pool(target, [B])
ctx = tvm.context(device, 0)
a = tvm.nd.array(a_np, ctx)
b = tvm.nd.array(np.zeros(get_const_tuple(B.shape), dtype=B.dtype), ctx)
f = tvm.build(s, [A, B], device)
f(a, b)
np.testing.assert_allclose(b.asnumpy(), b_np, rtol=1e-5)
for device in ['cuda', 'opencl', 'metal', 'rocm']:
check_device(device)
def test_global_pool():
verify_global_pool(1, 1024, 7, 7, 'avg')
verify_global_pool(4, 1024, 7, 7, 'avg')
verify_global_pool(1, 1024, 7, 7, 'max')
verify_global_pool(4, 1024, 7, 7, 'max')
if __name__ == "__main__":
test_pool()
test_global_pool()
|
mlperf/training_results_v0.6
|
Fujitsu/benchmarks/resnet/implementations/mxnet/3rdparty/tvm/topi/tests/python_cpp/test_topi_pooling.py
|
Python
|
apache-2.0
| 5,140 | 0.006226 |
""" Cisco_IOS_XR_infra_objmgr_cfg
This module contains a collection of YANG definitions
for Cisco IOS\-XR infra\-objmgr package configuration.
This module contains definitions
for the following management objects\:
object\-group\: Object\-group configuration
Copyright (c) 2013\-2016 by Cisco Systems, Inc.
All rights reserved.
"""
import re
import collections
from enum import Enum
from ydk.types import Empty, YList, YLeafList, DELETE, Decimal64, FixedBitsDict
from ydk.errors import YPYError, YPYModelError
class EndPortEnum(Enum):
"""
EndPortEnum
End port
.. data:: echo = 7
Echo (7)
.. data:: discard = 9
Discard (9)
.. data:: daytime = 13
Daytime (13)
.. data:: chargen = 19
Character generator (19)
.. data:: ftp_data = 20
FTP data connections (used infrequently, 20)
.. data:: ftp = 21
File Transfer Protocol (21)
.. data:: ssh = 22
Secure Shell (22)
.. data:: telnet = 23
Telnet (23)
.. data:: smtp = 25
Simple Mail Transport Protocol (25)
.. data:: time = 37
Time (37)
.. data:: nicname = 43
Nicname (43)
.. data:: tacacs = 49
TAC Access Control System (49)
.. data:: domain = 53
Domain Name Service (53)
.. data:: gopher = 70
Gopher (70)
.. data:: finger = 79
Finger (79)
.. data:: www = 80
World Wide Web (HTTP, 80)
.. data:: host_name = 101
NIC hostname server (101)
.. data:: pop2 = 109
Post Office Protocol v2 (109)
.. data:: pop3 = 110
Post Office Protocol v3 (110)
.. data:: sun_rpc = 111
Sun Remote Procedure Call (111)
.. data:: ident = 113
Ident Protocol (113)
.. data:: nntp = 119
Network News Transport Protocol (119)
.. data:: bgp = 179
Border Gateway Protocol (179)
.. data:: irc = 194
Internet Relay Chat (194)
.. data:: pim_auto_rp = 496
PIM Auto-RP (496)
.. data:: exec_ = 512
Exec (rsh, 512)
.. data:: login = 513
Login (rlogin, 513)
.. data:: cmd = 514
Remote commands (rcmd, 514)
.. data:: lpd = 515
Printer service (515)
.. data:: uucp = 540
Unix-to-Unix Copy Program (540)
.. data:: klogin = 543
Kerberos login (543)
.. data:: kshell = 544
Kerberos shell (544)
.. data:: talk = 517
Talk (517)
.. data:: ldp = 646
LDP session connection attempts (MPLS, 646)
"""
echo = 7
discard = 9
daytime = 13
chargen = 19
ftp_data = 20
ftp = 21
ssh = 22
telnet = 23
smtp = 25
time = 37
nicname = 43
tacacs = 49
domain = 53
gopher = 70
finger = 79
www = 80
host_name = 101
pop2 = 109
pop3 = 110
sun_rpc = 111
ident = 113
nntp = 119
bgp = 179
irc = 194
pim_auto_rp = 496
exec_ = 512
login = 513
cmd = 514
lpd = 515
uucp = 540
klogin = 543
kshell = 544
talk = 517
ldp = 646
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_cfg as meta
return meta._meta_table['EndPortEnum']
class PortEnum(Enum):
"""
PortEnum
Port
.. data:: echo = 7
Echo (7)
.. data:: discard = 9
Discard (9)
.. data:: daytime = 13
Daytime (13)
.. data:: chargen = 19
Character generator (19)
.. data:: ftp_data = 20
FTP data connections (used infrequently, 20)
.. data:: ftp = 21
File Transfer Protocol (21)
.. data:: ssh = 22
Secure Shell (22)
.. data:: telnet = 23
Telnet (23)
.. data:: smtp = 25
Simple Mail Transport Protocol (25)
.. data:: time = 37
Time (37)
.. data:: nicname = 43
Nicname (43)
.. data:: tacacs = 49
TAC Access Control System (49)
.. data:: domain = 53
Domain Name Service (53)
.. data:: gopher = 70
Gopher (70)
.. data:: finger = 79
Finger (79)
.. data:: www = 80
World Wide Web (HTTP, 80)
.. data:: host_name = 101
NIC hostname server (101)
.. data:: pop2 = 109
Post Office Protocol v2 (109)
.. data:: pop3 = 110
Post Office Protocol v3 (110)
.. data:: sun_rpc = 111
Sun Remote Procedure Call (111)
.. data:: ident = 113
Ident Protocol (113)
.. data:: nntp = 119
Network News Transport Protocol (119)
.. data:: bgp = 179
Border Gateway Protocol (179)
.. data:: irc = 194
Internet Relay Chat (194)
.. data:: pim_auto_rp = 496
PIM Auto-RP (496)
.. data:: exec_ = 512
Exec (rsh, 512)
.. data:: login = 513
Login (rlogin, 513)
.. data:: cmd = 514
Remote commands (rcmd, 514)
.. data:: lpd = 515
Printer service (515)
.. data:: uucp = 540
Unix-to-Unix Copy Program (540)
.. data:: klogin = 543
Kerberos login (543)
.. data:: kshell = 544
Kerberos shell (544)
.. data:: talk = 517
Talk (517)
.. data:: ldp = 646
LDP session connection attempts (MPLS, 646)
"""
echo = 7
discard = 9
daytime = 13
chargen = 19
ftp_data = 20
ftp = 21
ssh = 22
telnet = 23
smtp = 25
time = 37
nicname = 43
tacacs = 49
domain = 53
gopher = 70
finger = 79
www = 80
host_name = 101
pop2 = 109
pop3 = 110
sun_rpc = 111
ident = 113
nntp = 119
bgp = 179
irc = 194
pim_auto_rp = 496
exec_ = 512
login = 513
cmd = 514
lpd = 515
uucp = 540
klogin = 543
kshell = 544
talk = 517
ldp = 646
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_cfg as meta
return meta._meta_table['PortEnum']
class PortOperatorEnum(Enum):
"""
PortOperatorEnum
Port operator
.. data:: equal = 0
Match packets on ports equal to entered port
number
.. data:: not_equal = 1
Match packets on ports not equal to entered
port number
.. data:: greater_than = 2
Match packets on ports greater than entered
port number
.. data:: less_than = 3
Match packets on ports less than entered port
number
"""
equal = 0
not_equal = 1
greater_than = 2
less_than = 3
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_cfg as meta
return meta._meta_table['PortOperatorEnum']
class StartPortEnum(Enum):
"""
StartPortEnum
Start port
.. data:: echo = 7
Echo (7)
.. data:: discard = 9
Discard (9)
.. data:: daytime = 13
Daytime (13)
.. data:: chargen = 19
Character generator (19)
.. data:: ftp_data = 20
FTP data connections (used infrequently, 20)
.. data:: ftp = 21
File Transfer Protocol (21)
.. data:: ssh = 22
Secure Shell (22)
.. data:: telnet = 23
Telnet (23)
.. data:: smtp = 25
Simple Mail Transport Protocol (25)
.. data:: time = 37
Time (37)
.. data:: nicname = 43
Nicname (43)
.. data:: tacacs = 49
TAC Access Control System (49)
.. data:: domain = 53
Domain Name Service (53)
.. data:: gopher = 70
Gopher (70)
.. data:: finger = 79
Finger (79)
.. data:: www = 80
World Wide Web (HTTP, 80)
.. data:: host_name = 101
NIC hostname server (101)
.. data:: pop2 = 109
Post Office Protocol v2 (109)
.. data:: pop3 = 110
Post Office Protocol v3 (110)
.. data:: sun_rpc = 111
Sun Remote Procedure Call (111)
.. data:: ident = 113
Ident Protocol (113)
.. data:: nntp = 119
Network News Transport Protocol (119)
.. data:: bgp = 179
Border Gateway Protocol (179)
.. data:: irc = 194
Internet Relay Chat (194)
.. data:: pim_auto_rp = 496
PIM Auto-RP (496)
.. data:: exec_ = 512
Exec (rsh, 512)
.. data:: login = 513
Login (rlogin, 513)
.. data:: cmd = 514
Remote commands (rcmd, 514)
.. data:: lpd = 515
Printer service (515)
.. data:: uucp = 540
Unix-to-Unix Copy Program (540)
.. data:: klogin = 543
Kerberos login (543)
.. data:: kshell = 544
Kerberos shell (544)
.. data:: talk = 517
Talk (517)
.. data:: ldp = 646
LDP session connection attempts (MPLS, 646)
"""
echo = 7
discard = 9
daytime = 13
chargen = 19
ftp_data = 20
ftp = 21
ssh = 22
telnet = 23
smtp = 25
time = 37
nicname = 43
tacacs = 49
domain = 53
gopher = 70
finger = 79
www = 80
host_name = 101
pop2 = 109
pop3 = 110
sun_rpc = 111
ident = 113
nntp = 119
bgp = 179
irc = 194
pim_auto_rp = 496
exec_ = 512
login = 513
cmd = 514
lpd = 515
uucp = 540
klogin = 543
kshell = 544
talk = 517
ldp = 646
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_cfg as meta
return meta._meta_table['StartPortEnum']
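# Hypothetical usage sketch (not part of the generated bindings; the nested class
# names follow the definitions below and the append-to-YList pattern is assumed
# from other YDK models):
#   og = ObjectGroup()
#   udf = ObjectGroup.Port.UdfObjects.UdfObject()
#   udf.object_name = 'WEB-PORTS'
#   oper = ObjectGroup.Port.UdfObjects.UdfObject.Operators.Operator()
#   oper.operator_type = PortOperatorEnum.equal
#   oper.port = PortEnum.www
#   udf.operators.operator.append(oper)
#   og.port.udf_objects.udf_object.append(udf)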
class ObjectGroup(object):
"""
Object\-group configuration
.. attribute:: network
Network object group
**type**\: :py:class:`Network <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_cfg.ObjectGroup.Network>`
.. attribute:: port
Port object group
**type**\: :py:class:`Port <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_cfg.ObjectGroup.Port>`
"""
_prefix = 'infra-objmgr-cfg'
_revision = '2015-11-09'
def __init__(self):
self.network = ObjectGroup.Network()
self.network.parent = self
self.port = ObjectGroup.Port()
self.port.parent = self
class Port(object):
"""
Port object group
.. attribute:: udf_objects
Table of port objects groups
**type**\: :py:class:`UdfObjects <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_cfg.ObjectGroup.Port.UdfObjects>`
"""
_prefix = 'infra-objmgr-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.udf_objects = ObjectGroup.Port.UdfObjects()
self.udf_objects.parent = self
class UdfObjects(object):
"""
Table of port objects groups
.. attribute:: udf_object
Port object group
**type**\: list of :py:class:`UdfObject <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_cfg.ObjectGroup.Port.UdfObjects.UdfObject>`
"""
_prefix = 'infra-objmgr-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.udf_object = YList()
self.udf_object.parent = self
self.udf_object.name = 'udf_object'
class UdfObject(object):
"""
Port object group
.. attribute:: object_name <key>
Port object group name \- maximum 64 characters
**type**\: str
**length:** 1..64
.. attribute:: description
Up to 100 characters describing this object
**type**\: str
**length:** 1..100
.. attribute:: nested_groups
Table of nested port object groups
**type**\: :py:class:`NestedGroups <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_cfg.ObjectGroup.Port.UdfObjects.UdfObject.NestedGroups>`
.. attribute:: operators
Table of port operators
**type**\: :py:class:`Operators <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_cfg.ObjectGroup.Port.UdfObjects.UdfObject.Operators>`
.. attribute:: port_ranges
Table of port range addresses
**type**\: :py:class:`PortRanges <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_cfg.ObjectGroup.Port.UdfObjects.UdfObject.PortRanges>`
"""
_prefix = 'infra-objmgr-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.object_name = None
self.description = None
self.nested_groups = ObjectGroup.Port.UdfObjects.UdfObject.NestedGroups()
self.nested_groups.parent = self
self.operators = ObjectGroup.Port.UdfObjects.UdfObject.Operators()
self.operators.parent = self
self.port_ranges = ObjectGroup.Port.UdfObjects.UdfObject.PortRanges()
self.port_ranges.parent = self
class Operators(object):
"""
Table of port operators
.. attribute:: operator
op class
**type**\: list of :py:class:`Operator <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_cfg.ObjectGroup.Port.UdfObjects.UdfObject.Operators.Operator>`
"""
_prefix = 'infra-objmgr-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.operator = YList()
self.operator.parent = self
self.operator.name = 'operator'
class Operator(object):
"""
op class
.. attribute:: operator_type <key>
operation for ports
**type**\: :py:class:`PortOperatorEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_cfg.PortOperatorEnum>`
.. attribute:: port <key>
Port number
**type**\: one of the below types:
**type**\: :py:class:`PortEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_cfg.PortEnum>`
----
**type**\: int
**range:** 0..65535
----
"""
_prefix = 'infra-objmgr-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.operator_type = None
self.port = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
if self.operator_type is None:
raise YPYModelError('Key property operator_type is None')
if self.port is None:
raise YPYModelError('Key property port is None')
return self.parent._common_path +'/Cisco-IOS-XR-infra-objmgr-cfg:operator[Cisco-IOS-XR-infra-objmgr-cfg:operator-type = ' + str(self.operator_type) + '][Cisco-IOS-XR-infra-objmgr-cfg:port = ' + str(self.port) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.operator_type is not None:
return True
if self.port is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_cfg as meta
return meta._meta_table['ObjectGroup.Port.UdfObjects.UdfObject.Operators.Operator']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-infra-objmgr-cfg:operators'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.operator is not None:
for child_ref in self.operator:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_cfg as meta
return meta._meta_table['ObjectGroup.Port.UdfObjects.UdfObject.Operators']['meta_info']
class NestedGroups(object):
"""
Table of nested port object groups
.. attribute:: nested_group
nested object group
**type**\: list of :py:class:`NestedGroup <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_cfg.ObjectGroup.Port.UdfObjects.UdfObject.NestedGroups.NestedGroup>`
"""
_prefix = 'infra-objmgr-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.nested_group = YList()
self.nested_group.parent = self
self.nested_group.name = 'nested_group'
class NestedGroup(object):
"""
nested object group
.. attribute:: nested_group_name <key>
Name of a nested object group
**type**\: str
**length:** 1..64
"""
_prefix = 'infra-objmgr-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.nested_group_name = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
if self.nested_group_name is None:
raise YPYModelError('Key property nested_group_name is None')
return self.parent._common_path +'/Cisco-IOS-XR-infra-objmgr-cfg:nested-group[Cisco-IOS-XR-infra-objmgr-cfg:nested-group-name = ' + str(self.nested_group_name) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.nested_group_name is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_cfg as meta
return meta._meta_table['ObjectGroup.Port.UdfObjects.UdfObject.NestedGroups.NestedGroup']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-infra-objmgr-cfg:nested-groups'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.nested_group is not None:
for child_ref in self.nested_group:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_cfg as meta
return meta._meta_table['ObjectGroup.Port.UdfObjects.UdfObject.NestedGroups']['meta_info']
class PortRanges(object):
"""
Table of port range addresses
.. attribute:: port_range
Match only packets on a given port range
**type**\: list of :py:class:`PortRange <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_cfg.ObjectGroup.Port.UdfObjects.UdfObject.PortRanges.PortRange>`
"""
_prefix = 'infra-objmgr-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.port_range = YList()
self.port_range.parent = self
self.port_range.name = 'port_range'
class PortRange(object):
"""
Match only packets on a given port range
.. attribute:: start_port <key>
Port number
**type**\: one of the below types:
**type**\: :py:class:`StartPortEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_cfg.StartPortEnum>`
----
**type**\: int
**range:** 0..65535
----
.. attribute:: end_port <key>
Port number
**type**\: one of the below types:
**type**\: :py:class:`EndPortEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_cfg.EndPortEnum>`
----
**type**\: int
**range:** 0..65535
----
"""
_prefix = 'infra-objmgr-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.start_port = None
self.end_port = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
if self.start_port is None:
raise YPYModelError('Key property start_port is None')
if self.end_port is None:
raise YPYModelError('Key property end_port is None')
return self.parent._common_path +'/Cisco-IOS-XR-infra-objmgr-cfg:port-range[Cisco-IOS-XR-infra-objmgr-cfg:start-port = ' + str(self.start_port) + '][Cisco-IOS-XR-infra-objmgr-cfg:end-port = ' + str(self.end_port) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.start_port is not None:
return True
if self.end_port is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_cfg as meta
return meta._meta_table['ObjectGroup.Port.UdfObjects.UdfObject.PortRanges.PortRange']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-infra-objmgr-cfg:port-ranges'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.port_range is not None:
for child_ref in self.port_range:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_cfg as meta
return meta._meta_table['ObjectGroup.Port.UdfObjects.UdfObject.PortRanges']['meta_info']
@property
def _common_path(self):
if self.object_name is None:
raise YPYModelError('Key property object_name is None')
return '/Cisco-IOS-XR-infra-objmgr-cfg:object-group/Cisco-IOS-XR-infra-objmgr-cfg:port/Cisco-IOS-XR-infra-objmgr-cfg:udf-objects/Cisco-IOS-XR-infra-objmgr-cfg:udf-object[Cisco-IOS-XR-infra-objmgr-cfg:object-name = ' + str(self.object_name) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.object_name is not None:
return True
if self.description is not None:
return True
if self.nested_groups is not None and self.nested_groups._has_data():
return True
if self.operators is not None and self.operators._has_data():
return True
if self.port_ranges is not None and self.port_ranges._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_cfg as meta
return meta._meta_table['ObjectGroup.Port.UdfObjects.UdfObject']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-infra-objmgr-cfg:object-group/Cisco-IOS-XR-infra-objmgr-cfg:port/Cisco-IOS-XR-infra-objmgr-cfg:udf-objects'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.udf_object is not None:
for child_ref in self.udf_object:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_cfg as meta
return meta._meta_table['ObjectGroup.Port.UdfObjects']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-infra-objmgr-cfg:object-group/Cisco-IOS-XR-infra-objmgr-cfg:port'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.udf_objects is not None and self.udf_objects._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_cfg as meta
return meta._meta_table['ObjectGroup.Port']['meta_info']
class Network(object):
"""
Network object group
.. attribute:: ipv4
IPv4 object group
**type**\: :py:class:`Ipv4 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_cfg.ObjectGroup.Network.Ipv4>`
.. attribute:: ipv6
IPv6 object group
**type**\: :py:class:`Ipv6 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_cfg.ObjectGroup.Network.Ipv6>`
"""
_prefix = 'infra-objmgr-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.ipv4 = ObjectGroup.Network.Ipv4()
self.ipv4.parent = self
self.ipv6 = ObjectGroup.Network.Ipv6()
self.ipv6.parent = self
class Ipv6(object):
"""
IPv6 object group
.. attribute:: udf_objects
Table of ipv6 object groups
**type**\: :py:class:`UdfObjects <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_cfg.ObjectGroup.Network.Ipv6.UdfObjects>`
"""
_prefix = 'infra-objmgr-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.udf_objects = ObjectGroup.Network.Ipv6.UdfObjects()
self.udf_objects.parent = self
class UdfObjects(object):
"""
Table of ipv6 object groups
.. attribute:: udf_object
IPv6 object group
**type**\: list of :py:class:`UdfObject <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_cfg.ObjectGroup.Network.Ipv6.UdfObjects.UdfObject>`
"""
_prefix = 'infra-objmgr-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.udf_object = YList()
self.udf_object.parent = self
self.udf_object.name = 'udf_object'
class UdfObject(object):
"""
IPv6 object group
.. attribute:: object_name <key>
IPv6 object group name \- maximum 64 characters
**type**\: str
**length:** 1..64
.. attribute:: address_ranges
Table of ipv6 address ranges
**type**\: :py:class:`AddressRanges <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_cfg.ObjectGroup.Network.Ipv6.UdfObjects.UdfObject.AddressRanges>`
.. attribute:: addresses
Table of ipv6 addresses
**type**\: :py:class:`Addresses <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_cfg.ObjectGroup.Network.Ipv6.UdfObjects.UdfObject.Addresses>`
.. attribute:: description
Up to 100 characters describing this object
**type**\: str
**length:** 1..100
.. attribute:: hosts
Table of ipv6 host addresses
**type**\: :py:class:`Hosts <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_cfg.ObjectGroup.Network.Ipv6.UdfObjects.UdfObject.Hosts>`
.. attribute:: nested_groups
Table of nested ipv6 object groups
**type**\: :py:class:`NestedGroups <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_cfg.ObjectGroup.Network.Ipv6.UdfObjects.UdfObject.NestedGroups>`
"""
_prefix = 'infra-objmgr-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.object_name = None
self.address_ranges = ObjectGroup.Network.Ipv6.UdfObjects.UdfObject.AddressRanges()
self.address_ranges.parent = self
self.addresses = ObjectGroup.Network.Ipv6.UdfObjects.UdfObject.Addresses()
self.addresses.parent = self
self.description = None
self.hosts = ObjectGroup.Network.Ipv6.UdfObjects.UdfObject.Hosts()
self.hosts.parent = self
self.nested_groups = ObjectGroup.Network.Ipv6.UdfObjects.UdfObject.NestedGroups()
self.nested_groups.parent = self
class NestedGroups(object):
"""
Table of nested ipv6 object groups
.. attribute:: nested_group
nested object group
**type**\: list of :py:class:`NestedGroup <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_cfg.ObjectGroup.Network.Ipv6.UdfObjects.UdfObject.NestedGroups.NestedGroup>`
"""
_prefix = 'infra-objmgr-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.nested_group = YList()
self.nested_group.parent = self
self.nested_group.name = 'nested_group'
class NestedGroup(object):
"""
nested object group
.. attribute:: nested_group_name <key>
Enter the name of a nested object group
**type**\: str
**length:** 1..64
"""
_prefix = 'infra-objmgr-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.nested_group_name = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
if self.nested_group_name is None:
raise YPYModelError('Key property nested_group_name is None')
return self.parent._common_path +'/Cisco-IOS-XR-infra-objmgr-cfg:nested-group[Cisco-IOS-XR-infra-objmgr-cfg:nested-group-name = ' + str(self.nested_group_name) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.nested_group_name is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_cfg as meta
return meta._meta_table['ObjectGroup.Network.Ipv6.UdfObjects.UdfObject.NestedGroups.NestedGroup']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-infra-objmgr-cfg:nested-groups'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.nested_group is not None:
for child_ref in self.nested_group:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_cfg as meta
return meta._meta_table['ObjectGroup.Network.Ipv6.UdfObjects.UdfObject.NestedGroups']['meta_info']
class AddressRanges(object):
"""
Table of ipv6 address ranges
.. attribute:: address_range
Range of host addresses
**type**\: list of :py:class:`AddressRange <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_cfg.ObjectGroup.Network.Ipv6.UdfObjects.UdfObject.AddressRanges.AddressRange>`
"""
_prefix = 'infra-objmgr-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.address_range = YList()
self.address_range.parent = self
self.address_range.name = 'address_range'
class AddressRange(object):
"""
Range of host addresses
.. attribute:: start_address <key>
IPv6 address
**type**\: one of the below types:
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
----
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
----
.. attribute:: end_address <key>
IPv6 address
**type**\: one of the below types:
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
----
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
----
"""
_prefix = 'infra-objmgr-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.start_address = None
self.end_address = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
if self.start_address is None:
raise YPYModelError('Key property start_address is None')
if self.end_address is None:
raise YPYModelError('Key property end_address is None')
return self.parent._common_path +'/Cisco-IOS-XR-infra-objmgr-cfg:address-range[Cisco-IOS-XR-infra-objmgr-cfg:start-address = ' + str(self.start_address) + '][Cisco-IOS-XR-infra-objmgr-cfg:end-address = ' + str(self.end_address) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.start_address is not None:
return True
if self.end_address is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_cfg as meta
return meta._meta_table['ObjectGroup.Network.Ipv6.UdfObjects.UdfObject.AddressRanges.AddressRange']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-infra-objmgr-cfg:address-ranges'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.address_range is not None:
for child_ref in self.address_range:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_cfg as meta
return meta._meta_table['ObjectGroup.Network.Ipv6.UdfObjects.UdfObject.AddressRanges']['meta_info']
class Addresses(object):
"""
Table of ipv6 addresses
.. attribute:: address
IPv6 address
**type**\: list of :py:class:`Address <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_cfg.ObjectGroup.Network.Ipv6.UdfObjects.UdfObject.Addresses.Address>`
"""
_prefix = 'infra-objmgr-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.address = YList()
self.address.parent = self
self.address.name = 'address'
class Address(object):
"""
IPv6 address
.. attribute:: prefix <key>
IPv6 prefix x\:x\:\:x/y
**type**\: one of the below types:
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
----
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
----
.. attribute:: prefix_length <key>
Prefix of the IP Address
**type**\: int
**range:** 0..128
"""
_prefix = 'infra-objmgr-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.prefix = None
self.prefix_length = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
if self.prefix is None:
raise YPYModelError('Key property prefix is None')
if self.prefix_length is None:
raise YPYModelError('Key property prefix_length is None')
return self.parent._common_path +'/Cisco-IOS-XR-infra-objmgr-cfg:address[Cisco-IOS-XR-infra-objmgr-cfg:prefix = ' + str(self.prefix) + '][Cisco-IOS-XR-infra-objmgr-cfg:prefix-length = ' + str(self.prefix_length) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.prefix is not None:
return True
if self.prefix_length is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_cfg as meta
return meta._meta_table['ObjectGroup.Network.Ipv6.UdfObjects.UdfObject.Addresses.Address']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-infra-objmgr-cfg:addresses'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.address is not None:
for child_ref in self.address:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_cfg as meta
return meta._meta_table['ObjectGroup.Network.Ipv6.UdfObjects.UdfObject.Addresses']['meta_info']
class Hosts(object):
"""
Table of ipv6 host addresses
.. attribute:: host
A single host address
**type**\: list of :py:class:`Host <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_cfg.ObjectGroup.Network.Ipv6.UdfObjects.UdfObject.Hosts.Host>`
"""
_prefix = 'infra-objmgr-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.host = YList()
self.host.parent = self
self.host.name = 'host'
class Host(object):
"""
A single host address
.. attribute:: host_address <key>
host ipv6 address
**type**\: one of the below types:
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
----
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
----
"""
_prefix = 'infra-objmgr-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.host_address = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
if self.host_address is None:
raise YPYModelError('Key property host_address is None')
return self.parent._common_path +'/Cisco-IOS-XR-infra-objmgr-cfg:host[Cisco-IOS-XR-infra-objmgr-cfg:host-address = ' + str(self.host_address) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.host_address is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_cfg as meta
return meta._meta_table['ObjectGroup.Network.Ipv6.UdfObjects.UdfObject.Hosts.Host']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-infra-objmgr-cfg:hosts'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.host is not None:
for child_ref in self.host:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_cfg as meta
return meta._meta_table['ObjectGroup.Network.Ipv6.UdfObjects.UdfObject.Hosts']['meta_info']
@property
def _common_path(self):
if self.object_name is None:
raise YPYModelError('Key property object_name is None')
return '/Cisco-IOS-XR-infra-objmgr-cfg:object-group/Cisco-IOS-XR-infra-objmgr-cfg:network/Cisco-IOS-XR-infra-objmgr-cfg:ipv6/Cisco-IOS-XR-infra-objmgr-cfg:udf-objects/Cisco-IOS-XR-infra-objmgr-cfg:udf-object[Cisco-IOS-XR-infra-objmgr-cfg:object-name = ' + str(self.object_name) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.object_name is not None:
return True
if self.address_ranges is not None and self.address_ranges._has_data():
return True
if self.addresses is not None and self.addresses._has_data():
return True
if self.description is not None:
return True
if self.hosts is not None and self.hosts._has_data():
return True
if self.nested_groups is not None and self.nested_groups._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_cfg as meta
return meta._meta_table['ObjectGroup.Network.Ipv6.UdfObjects.UdfObject']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-infra-objmgr-cfg:object-group/Cisco-IOS-XR-infra-objmgr-cfg:network/Cisco-IOS-XR-infra-objmgr-cfg:ipv6/Cisco-IOS-XR-infra-objmgr-cfg:udf-objects'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.udf_object is not None:
for child_ref in self.udf_object:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_cfg as meta
return meta._meta_table['ObjectGroup.Network.Ipv6.UdfObjects']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-infra-objmgr-cfg:object-group/Cisco-IOS-XR-infra-objmgr-cfg:network/Cisco-IOS-XR-infra-objmgr-cfg:ipv6'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.udf_objects is not None and self.udf_objects._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_cfg as meta
return meta._meta_table['ObjectGroup.Network.Ipv6']['meta_info']
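        # Hypothetical sketch of populating the IPv6 group above (attribute names
        # follow the generated classes; the append-to-YList pattern is assumed):
        #   obj = ObjectGroup.Network.Ipv6.UdfObjects.UdfObject()
        #   obj.object_name = 'DNS-SERVERS'
        #   host = ObjectGroup.Network.Ipv6.UdfObjects.UdfObject.Hosts.Host()
        #   host.host_address = '2001:db8::53'
        #   obj.hosts.host.append(host)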
class Ipv4(object):
"""
IPv4 object group
.. attribute:: udf_objects
Table of ipv4 object groups
**type**\: :py:class:`UdfObjects <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_cfg.ObjectGroup.Network.Ipv4.UdfObjects>`
"""
_prefix = 'infra-objmgr-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.udf_objects = ObjectGroup.Network.Ipv4.UdfObjects()
self.udf_objects.parent = self
class UdfObjects(object):
"""
Table of ipv4 object groups
.. attribute:: udf_object
IPv4 object group
**type**\: list of :py:class:`UdfObject <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_cfg.ObjectGroup.Network.Ipv4.UdfObjects.UdfObject>`
"""
_prefix = 'infra-objmgr-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.udf_object = YList()
self.udf_object.parent = self
self.udf_object.name = 'udf_object'
class UdfObject(object):
"""
IPv4 object group
.. attribute:: object_name <key>
IPv4 object group name \- maximum 64 characters
**type**\: str
**length:** 1..64
.. attribute:: address_ranges
Table of ipv4 host address ranges
**type**\: :py:class:`AddressRanges <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_cfg.ObjectGroup.Network.Ipv4.UdfObjects.UdfObject.AddressRanges>`
.. attribute:: addresses
Table of addresses
**type**\: :py:class:`Addresses <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_cfg.ObjectGroup.Network.Ipv4.UdfObjects.UdfObject.Addresses>`
.. attribute:: description
Up to 100 characters describing this object
**type**\: str
**length:** 1..100
.. attribute:: hosts
Table of host addresses
**type**\: :py:class:`Hosts <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_cfg.ObjectGroup.Network.Ipv4.UdfObjects.UdfObject.Hosts>`
.. attribute:: nested_groups
Table of nested ipv4 object groups
**type**\: :py:class:`NestedGroups <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_cfg.ObjectGroup.Network.Ipv4.UdfObjects.UdfObject.NestedGroups>`
"""
_prefix = 'infra-objmgr-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.object_name = None
self.address_ranges = ObjectGroup.Network.Ipv4.UdfObjects.UdfObject.AddressRanges()
self.address_ranges.parent = self
self.addresses = ObjectGroup.Network.Ipv4.UdfObjects.UdfObject.Addresses()
self.addresses.parent = self
self.description = None
self.hosts = ObjectGroup.Network.Ipv4.UdfObjects.UdfObject.Hosts()
self.hosts.parent = self
self.nested_groups = ObjectGroup.Network.Ipv4.UdfObjects.UdfObject.NestedGroups()
self.nested_groups.parent = self
class NestedGroups(object):
"""
Table of nested ipv4 object groups
.. attribute:: nested_group
Nested object group
**type**\: list of :py:class:`NestedGroup <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_cfg.ObjectGroup.Network.Ipv4.UdfObjects.UdfObject.NestedGroups.NestedGroup>`
"""
_prefix = 'infra-objmgr-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.nested_group = YList()
self.nested_group.parent = self
self.nested_group.name = 'nested_group'
class NestedGroup(object):
"""
Nested object group
.. attribute:: nested_group_name <key>
Nested object group
**type**\: str
**length:** 1..64
"""
_prefix = 'infra-objmgr-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.nested_group_name = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
if self.nested_group_name is None:
raise YPYModelError('Key property nested_group_name is None')
return self.parent._common_path +'/Cisco-IOS-XR-infra-objmgr-cfg:nested-group[Cisco-IOS-XR-infra-objmgr-cfg:nested-group-name = ' + str(self.nested_group_name) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.nested_group_name is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_cfg as meta
return meta._meta_table['ObjectGroup.Network.Ipv4.UdfObjects.UdfObject.NestedGroups.NestedGroup']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-infra-objmgr-cfg:nested-groups'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.nested_group is not None:
for child_ref in self.nested_group:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_cfg as meta
return meta._meta_table['ObjectGroup.Network.Ipv4.UdfObjects.UdfObject.NestedGroups']['meta_info']
class AddressRanges(object):
"""
Table of ipv4 host address ranges
.. attribute:: address_range
Range of host addresses
**type**\: list of :py:class:`AddressRange <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_cfg.ObjectGroup.Network.Ipv4.UdfObjects.UdfObject.AddressRanges.AddressRange>`
"""
_prefix = 'infra-objmgr-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.address_range = YList()
self.address_range.parent = self
self.address_range.name = 'address_range'
class AddressRange(object):
"""
Range of host addresses
.. attribute:: start_address <key>
IPv4 address
**type**\: one of the below types:
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
----
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
----
.. attribute:: end_address <key>
IPv4 address
**type**\: one of the below types:
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
----
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
----
"""
_prefix = 'infra-objmgr-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.start_address = None
self.end_address = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
if self.start_address is None:
raise YPYModelError('Key property start_address is None')
if self.end_address is None:
raise YPYModelError('Key property end_address is None')
return self.parent._common_path +'/Cisco-IOS-XR-infra-objmgr-cfg:address-range[Cisco-IOS-XR-infra-objmgr-cfg:start-address = ' + str(self.start_address) + '][Cisco-IOS-XR-infra-objmgr-cfg:end-address = ' + str(self.end_address) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.start_address is not None:
return True
if self.end_address is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_cfg as meta
return meta._meta_table['ObjectGroup.Network.Ipv4.UdfObjects.UdfObject.AddressRanges.AddressRange']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-infra-objmgr-cfg:address-ranges'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.address_range is not None:
for child_ref in self.address_range:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_cfg as meta
return meta._meta_table['ObjectGroup.Network.Ipv4.UdfObjects.UdfObject.AddressRanges']['meta_info']
class Addresses(object):
"""
Table of addresses
.. attribute:: address
IPv4 address
**type**\: list of :py:class:`Address <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_cfg.ObjectGroup.Network.Ipv4.UdfObjects.UdfObject.Addresses.Address>`
"""
_prefix = 'infra-objmgr-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.address = YList()
self.address.parent = self
self.address.name = 'address'
class Address(object):
"""
IPv4 address
.. attribute:: prefix <key>
IPv4 address/prefix
**type**\: one of the below types:
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
----
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
----
.. attribute:: prefix_length <key>
Prefix of the IP Address
**type**\: int
**range:** 0..32
"""
_prefix = 'infra-objmgr-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.prefix = None
self.prefix_length = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
if self.prefix is None:
raise YPYModelError('Key property prefix is None')
if self.prefix_length is None:
raise YPYModelError('Key property prefix_length is None')
return self.parent._common_path +'/Cisco-IOS-XR-infra-objmgr-cfg:address[Cisco-IOS-XR-infra-objmgr-cfg:prefix = ' + str(self.prefix) + '][Cisco-IOS-XR-infra-objmgr-cfg:prefix-length = ' + str(self.prefix_length) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.prefix is not None:
return True
if self.prefix_length is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_cfg as meta
return meta._meta_table['ObjectGroup.Network.Ipv4.UdfObjects.UdfObject.Addresses.Address']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-infra-objmgr-cfg:addresses'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.address is not None:
for child_ref in self.address:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_cfg as meta
return meta._meta_table['ObjectGroup.Network.Ipv4.UdfObjects.UdfObject.Addresses']['meta_info']
class Hosts(object):
"""
Table of host addresses
.. attribute:: host
A single host address
**type**\: list of :py:class:`Host <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_cfg.ObjectGroup.Network.Ipv4.UdfObjects.UdfObject.Hosts.Host>`
"""
_prefix = 'infra-objmgr-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.host = YList()
self.host.parent = self
self.host.name = 'host'
class Host(object):
"""
A single host address
.. attribute:: host_address <key>
Host ipv4 address
**type**\: one of the below types:
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
----
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
----
"""
_prefix = 'infra-objmgr-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.host_address = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
if self.host_address is None:
raise YPYModelError('Key property host_address is None')
return self.parent._common_path +'/Cisco-IOS-XR-infra-objmgr-cfg:host[Cisco-IOS-XR-infra-objmgr-cfg:host-address = ' + str(self.host_address) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.host_address is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_cfg as meta
return meta._meta_table['ObjectGroup.Network.Ipv4.UdfObjects.UdfObject.Hosts.Host']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-infra-objmgr-cfg:hosts'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.host is not None:
for child_ref in self.host:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_cfg as meta
return meta._meta_table['ObjectGroup.Network.Ipv4.UdfObjects.UdfObject.Hosts']['meta_info']
@property
def _common_path(self):
if self.object_name is None:
raise YPYModelError('Key property object_name is None')
return '/Cisco-IOS-XR-infra-objmgr-cfg:object-group/Cisco-IOS-XR-infra-objmgr-cfg:network/Cisco-IOS-XR-infra-objmgr-cfg:ipv4/Cisco-IOS-XR-infra-objmgr-cfg:udf-objects/Cisco-IOS-XR-infra-objmgr-cfg:udf-object[Cisco-IOS-XR-infra-objmgr-cfg:object-name = ' + str(self.object_name) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.object_name is not None:
return True
if self.address_ranges is not None and self.address_ranges._has_data():
return True
if self.addresses is not None and self.addresses._has_data():
return True
if self.description is not None:
return True
if self.hosts is not None and self.hosts._has_data():
return True
if self.nested_groups is not None and self.nested_groups._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_cfg as meta
return meta._meta_table['ObjectGroup.Network.Ipv4.UdfObjects.UdfObject']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-infra-objmgr-cfg:object-group/Cisco-IOS-XR-infra-objmgr-cfg:network/Cisco-IOS-XR-infra-objmgr-cfg:ipv4/Cisco-IOS-XR-infra-objmgr-cfg:udf-objects'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.udf_object is not None:
for child_ref in self.udf_object:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_cfg as meta
return meta._meta_table['ObjectGroup.Network.Ipv4.UdfObjects']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-infra-objmgr-cfg:object-group/Cisco-IOS-XR-infra-objmgr-cfg:network/Cisco-IOS-XR-infra-objmgr-cfg:ipv4'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.udf_objects is not None and self.udf_objects._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_cfg as meta
return meta._meta_table['ObjectGroup.Network.Ipv4']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-infra-objmgr-cfg:object-group/Cisco-IOS-XR-infra-objmgr-cfg:network'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.ipv4 is not None and self.ipv4._has_data():
return True
if self.ipv6 is not None and self.ipv6._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_cfg as meta
return meta._meta_table['ObjectGroup.Network']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-infra-objmgr-cfg:object-group'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.network is not None and self.network._has_data():
return True
if self.port is not None and self.port._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_objmgr_cfg as meta
return meta._meta_table['ObjectGroup']['meta_info']
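# Illustrative usage sketch: shows how the nested classes above are populated and
# how _has_data() reacts. The object-group name and address are made-up values;
# pushing the config to a device would additionally need a ydk CRUDService and
# provider, which this module does not import.
if __name__ == '__main__':
    obj_group = ObjectGroup()
    udf_object = ObjectGroup.Network.Ipv4.UdfObjects.UdfObject()
    udf_object.object_name = 'WEB-SERVERS'
    udf_object.description = 'front-end web servers'
    host = ObjectGroup.Network.Ipv4.UdfObjects.UdfObject.Hosts.Host()
    host.host_address = '192.0.2.10'
    udf_object.hosts.host.append(host)
    obj_group.network.ipv4.udf_objects.udf_object.append(udf_object)
    print(obj_group._has_data())  # True once any child carries data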
|
111pontes/ydk-py
|
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_infra_objmgr_cfg.py
|
Python
|
apache-2.0
| 90,489 | 0.018323 |
import json
from treeherder.log_parser.parsers import (EmptyPerformanceData,
PerformanceParser)
def test_performance_log_parsing_malformed_perfherder_data():
"""
If we have malformed perfherder data lines, we should just ignore
them and still be able to parse the valid ones
"""
parser = PerformanceParser()
# invalid json
parser.parse_line("PERFHERDER_DATA: {oh noes i am not valid json}", 1)
try:
# Empty performance data
parser.parse_line("PERFHERDER_DATA: {}", 2)
except EmptyPerformanceData:
pass
valid_perfherder_data = {
"framework": {"name": "talos"}, "suites": [{
"name": "basic_compositor_video",
"subtests": [{
"name": "240p.120fps.mp4_scale_fullscreen_startup",
"value": 1234
}]
}]
}
parser.parse_line('PERFHERDER_DATA: {}'.format(
json.dumps(valid_perfherder_data)), 3)
assert parser.get_artifact() == [valid_perfherder_data]
|
KWierso/treeherder
|
tests/log_parser/test_performance_parser.py
|
Python
|
mpl-2.0
| 1,056 | 0 |
#!/usr/bin/env python
# vim:fileencoding=utf-8:noet
from __future__ import (unicode_literals, division, absolute_import, print_function)
import gc
import sys
from types import FrameType
from itertools import chain
# From http://code.activestate.com/recipes/523004-find-cyclical-references/
def print_cycles(objects, outstream=sys.stdout, show_progress=False):
'''Find reference cycles
:param list objects:
A list of objects to find cycles in. It is often useful to pass in
gc.garbage to find the cycles that are preventing some objects from
being garbage collected.
:param file outstream:
The stream for output.
:param bool show_progress:
If True, print the number of objects reached as they are found.
'''
def print_path(path):
for i, step in enumerate(path):
# next "wraps around"
next = path[(i + 1) % len(path)]
outstream.write(" %s -- " % str(type(step)))
written = False
if isinstance(step, dict):
for key, val in step.items():
if val is next:
outstream.write("[%s]" % repr(key))
written = True
break
if key is next:
outstream.write("[key] = %s" % repr(val))
written = True
break
elif isinstance(step, (list, tuple)):
for i, item in enumerate(step):
if item is next:
outstream.write("[%d]" % i)
written = True
elif getattr(type(step), '__getattribute__', None) in (object.__getattribute__, type.__getattribute__):
for attr in chain(dir(step), getattr(step, '__dict__', ())):
if getattr(step, attr, None) is next:
try:
outstream.write('%r.%s' % (step, attr))
except TypeError:
                            outstream.write('.%s' % (attr,))  # fall back to the attribute name when repr(step) fails
written = True
break
if not written:
outstream.write(repr(step))
outstream.write(" ->\n")
outstream.write("\n")
def recurse(obj, start, all, current_path):
if show_progress:
outstream.write("%d\r" % len(all))
all[id(obj)] = None
referents = gc.get_referents(obj)
for referent in referents:
# If we've found our way back to the start, this is
# a cycle, so print it out
if referent is start:
try:
outstream.write('Cyclic reference: %r\n' % referent)
except TypeError:
try:
outstream.write('Cyclic reference: %i (%r)\n' % (id(referent), type(referent)))
except TypeError:
outstream.write('Cyclic reference: %i\n' % id(referent))
print_path(current_path)
# Don't go back through the original list of objects, or
# through temporary references to the object, since those
# are just an artifact of the cycle detector itself.
elif referent is objects or isinstance(referent, FrameType):
continue
# We haven't seen this object before, so recurse
elif id(referent) not in all:
recurse(referent, start, all, current_path + (obj,))
for obj in objects:
# We are not interested in non-powerline cyclic references
try:
if not type(obj).__module__.startswith('powerline'):
continue
except AttributeError:
continue
recurse(obj, obj, {}, ())
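# Illustrative usage sketch (only runs when the module is executed directly):
# keep otherwise-unreachable objects around with DEBUG_SAVEALL, collect, then
# report any reference cycles involving powerline objects.
if __name__ == '__main__':
    gc.set_debug(gc.DEBUG_SAVEALL)
    gc.collect()
    print_cycles(gc.garbage, outstream=sys.stderr, show_progress=True)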
|
magus424/powerline
|
powerline/lib/debug.py
|
Python
|
mit
| 3,036 | 0.027339 |
from datetime import datetime
from collections import defaultdict
DEFAULT_RELEASE = datetime(1970, 1, 1)
_SORT_KEY = lambda eps: eps[0].released or DEFAULT_RELEASE
class PodcastGrouper(object):
"""Groups episodes of two podcasts based on certain features
The results are sorted by release timestamp"""
DEFAULT_RELEASE = datetime(1970, 1, 1)
def __init__(self, podcasts):
if not podcasts or (None in podcasts):
raise ValueError("podcasts must not be None")
self.podcasts = podcasts
def __get_episodes(self):
episodes = {}
for podcast in self.podcasts:
episodes.update(dict((e.id, e.id) for e in podcast.episode_set.all()))
return episodes
def group(self, get_features):
episodes = self.__get_episodes()
episode_groups = defaultdict(list)
episode_features = map(get_features, episodes.items())
for features, episode_id in episode_features:
episode = episodes[episode_id]
episode_groups[features].append(episode)
# groups = sorted(episode_groups.values(), key=_SORT_KEY)
groups = episode_groups.values()
return enumerate(groups)
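# Illustrative sketch of a get_features callable: it receives the
# (episode_id, episode_id) pairs produced above and must return a
# (features, episode_id) tuple. Grouping by the id itself is the trivial case;
# real callers would derive features such as a normalised title or duration.
def _group_by_id(item):
    episode_id, _ = item
    return episode_id, episode_id
# Typical use, assuming two Podcast model instances from the ORM:
# for index, group in PodcastGrouper([podcast_a, podcast_b]).group(_group_by_id):
#     print(index, group)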
|
gpodder/mygpo
|
mygpo/administration/group.py
|
Python
|
agpl-3.0
| 1,212 | 0.00165 |
from __future__ import print_function, unicode_literals
from future.builtins import open
import os
import re
import sys
from contextlib import contextmanager
from functools import wraps
from getpass import getpass, getuser
from glob import glob
from importlib import import_module
from posixpath import join
from mezzanine.utils.conf import real_project_name
from fabric.api import abort, env, cd, prefix, sudo as _sudo, run as _run, \
hide, task, local
from fabric.context_managers import settings as fab_settings
from fabric.contrib.console import confirm
from fabric.contrib.files import exists, upload_template
from fabric.contrib.project import rsync_project
from fabric.colors import yellow, green, blue, red
from fabric.decorators import hosts
################
# Config setup #
################
env.proj_app = real_project_name("electionNepal")
conf = {}
if sys.argv[0].split(os.sep)[-1] in ("fab", "fab-script.py"):
# Ensure we import settings from the current dir
try:
conf = import_module("%s.settings" % env.proj_app).FABRIC
try:
conf["HOSTS"][0]
except (KeyError, ValueError):
raise ImportError
except (ImportError, AttributeError):
print("Aborting, no hosts defined.")
exit()
env.db_pass = conf.get("DB_PASS", None)
env.admin_pass = conf.get("ADMIN_PASS", None)
env.user = conf.get("SSH_USER", getuser())
env.password = conf.get("SSH_PASS", None)
env.key_filename = conf.get("SSH_KEY_PATH", None)
env.hosts = conf.get("HOSTS", [""])
env.proj_name = conf.get("PROJECT_NAME", env.proj_app)
env.venv_home = conf.get("VIRTUALENV_HOME", "/home/%s/.virtualenvs" % env.user)
env.venv_path = join(env.venv_home, env.proj_name)
env.proj_path = "/home/%s/mezzanine/%s" % (env.user, env.proj_name)
env.manage = "%s/bin/python %s/manage.py" % (env.venv_path, env.proj_path)
env.domains = conf.get("DOMAINS", [conf.get("LIVE_HOSTNAME", env.hosts[0])])
env.domains_nginx = " ".join(env.domains)
env.domains_regex = "|".join(env.domains)
env.domains_python = ", ".join(["'%s'" % s for s in env.domains])
env.ssl_disabled = "#" if len(env.domains) > 1 else ""
env.vcs_tools = ["git", "hg"]
env.deploy_tool = conf.get("DEPLOY_TOOL", "rsync")
env.reqs_path = conf.get("REQUIREMENTS_PATH", None)
env.locale = conf.get("LOCALE", "en_US.UTF-8")
env.num_workers = conf.get("NUM_WORKERS",
"multiprocessing.cpu_count() * 2 + 1")
env.secret_key = conf.get("SECRET_KEY", "")
env.nevercache_key = conf.get("NEVERCACHE_KEY", "")
# Remote git repos need to be "bare" and reside separated from the project
if env.deploy_tool == "git":
env.repo_path = "/home/%s/git/%s.git" % (env.user, env.proj_name)
else:
env.repo_path = env.proj_path
##################
# Template setup #
##################
# Each template gets uploaded at deploy time, only if their
# contents has changed, in which case, the reload command is
# also run.
templates = {
"nginx": {
"local_path": "deploy/nginx.conf.template",
"remote_path": "/etc/nginx/sites-enabled/%(proj_name)s.conf",
"reload_command": "service nginx restart",
},
"supervisor": {
"local_path": "deploy/supervisor.conf.template",
"remote_path": "/etc/supervisor/conf.d/%(proj_name)s.conf",
"reload_command": "supervisorctl update gunicorn_%(proj_name)s",
},
"cron": {
"local_path": "deploy/crontab.template",
"remote_path": "/etc/cron.d/%(proj_name)s",
"owner": "root",
"mode": "600",
},
"gunicorn": {
"local_path": "deploy/gunicorn.conf.py.template",
"remote_path": "%(proj_path)s/gunicorn.conf.py",
},
"settings": {
"local_path": "deploy/local_settings.py.template",
"remote_path": "%(proj_path)s/%(proj_app)s/local_settings.py",
},
}
######################################
# Context for virtualenv and project #
######################################
@contextmanager
def virtualenv():
"""
Runs commands within the project's virtualenv.
"""
with cd(env.venv_path):
with prefix("source %s/bin/activate" % env.venv_path):
yield
@contextmanager
def project():
"""
Runs commands within the project's directory.
"""
with virtualenv():
with cd(env.proj_path):
yield
@contextmanager
def update_changed_requirements():
"""
Checks for changes in the requirements file across an update,
and gets new requirements if changes have occurred.
"""
reqs_path = join(env.proj_path, env.reqs_path)
get_reqs = lambda: run("cat %s" % reqs_path, show=False)
old_reqs = get_reqs() if env.reqs_path else ""
yield
if old_reqs:
new_reqs = get_reqs()
if old_reqs == new_reqs:
# Unpinned requirements should always be checked.
for req in new_reqs.split("\n"):
if req.startswith("-e"):
if "@" not in req:
# Editable requirement without pinned commit.
break
elif req.strip() and not req.startswith("#"):
if not set(">=<") & set(req):
# PyPI requirement without version.
break
else:
# All requirements are pinned.
return
pip("-r %s/%s" % (env.proj_path, env.reqs_path))
###########################################
# Utils and wrappers for various commands #
###########################################
def _print(output):
print()
print(output)
print()
def print_command(command):
_print(blue("$ ", bold=True) +
yellow(command, bold=True) +
red(" ->", bold=True))
@task
def run(command, show=True, *args, **kwargs):
"""
Runs a shell comand on the remote server.
"""
if show:
print_command(command)
with hide("running"):
return _run(command, *args, **kwargs)
@task
def sudo(command, show=True, *args, **kwargs):
"""
Runs a command as sudo on the remote server.
"""
if show:
print_command(command)
with hide("running"):
return _sudo(command, *args, **kwargs)
def log_call(func):
@wraps(func)
def logged(*args, **kawrgs):
header = "-" * len(func.__name__)
_print(green("\n".join([header, func.__name__, header]), bold=True))
return func(*args, **kawrgs)
return logged
def get_templates():
"""
Returns each of the templates with env vars injected.
"""
injected = {}
for name, data in templates.items():
injected[name] = dict([(k, v % env) for k, v in data.items()])
return injected
def upload_template_and_reload(name):
"""
Uploads a template only if it has changed, and if so, reload the
related service.
"""
template = get_templates()[name]
local_path = template["local_path"]
if not os.path.exists(local_path):
project_root = os.path.dirname(os.path.abspath(__file__))
local_path = os.path.join(project_root, local_path)
remote_path = template["remote_path"]
reload_command = template.get("reload_command")
owner = template.get("owner")
mode = template.get("mode")
remote_data = ""
if exists(remote_path):
with hide("stdout"):
remote_data = sudo("cat %s" % remote_path, show=False)
with open(local_path, "r") as f:
local_data = f.read()
# Escape all non-string-formatting-placeholder occurrences of '%':
local_data = re.sub(r"%(?!\(\w+\)s)", "%%", local_data)
if "%(db_pass)s" in local_data:
env.db_pass = db_pass()
local_data %= env
clean = lambda s: s.replace("\n", "").replace("\r", "").strip()
if clean(remote_data) == clean(local_data):
return
upload_template(local_path, remote_path, env, use_sudo=True, backup=False)
if owner:
sudo("chown %s %s" % (owner, remote_path))
if mode:
sudo("chmod %s %s" % (mode, remote_path))
if reload_command:
sudo(reload_command)
def rsync_upload():
"""
Uploads the project with rsync excluding some files and folders.
"""
excludes = ["*.pyc", "*.pyo", "*.db", ".DS_Store", ".coverage",
"local_settings.py", "/static", "/.git", "/.hg"]
local_dir = os.getcwd() + os.sep
return rsync_project(remote_dir=env.proj_path, local_dir=local_dir,
exclude=excludes)
def vcs_upload():
"""
Uploads the project with the selected VCS tool.
"""
if env.deploy_tool == "git":
remote_path = "ssh://%s@%s%s" % (env.user, env.host_string,
env.repo_path)
if not exists(env.repo_path):
run("mkdir -p %s" % env.repo_path)
with cd(env.repo_path):
run("git init --bare")
local("git push -f %s master" % remote_path)
with cd(env.repo_path):
run("GIT_WORK_TREE=%s git checkout -f master" % env.proj_path)
run("GIT_WORK_TREE=%s git reset --hard" % env.proj_path)
elif env.deploy_tool == "hg":
remote_path = "ssh://%s@%s/%s" % (env.user, env.host_string,
env.repo_path)
with cd(env.repo_path):
if not exists("%s/.hg" % env.repo_path):
run("hg init")
print(env.repo_path)
with fab_settings(warn_only=True):
push = local("hg push -f %s" % remote_path)
if push.return_code == 255:
abort()
run("hg update")
def db_pass():
"""
Prompts for the database password if unknown.
"""
if not env.db_pass:
env.db_pass = getpass("Enter the database password: ")
return env.db_pass
@task
def apt(packages):
"""
Installs one or more system packages via apt.
"""
return sudo("apt-get install -y -q " + packages)
@task
def pip(packages):
"""
Installs one or more Python packages within the virtual environment.
"""
with virtualenv():
return run("pip install %s" % packages)
def postgres(command):
"""
Runs the given command as the postgres user.
"""
show = not command.startswith("psql")
return sudo(command, show=show, user="postgres")
@task
def psql(sql, show=True):
"""
Runs SQL against the project's database.
"""
out = postgres('psql -c "%s"' % sql)
if show:
print_command(sql)
return out
@task
def backup(filename):
"""
Backs up the project database.
"""
tmp_file = "/tmp/%s" % filename
# We dump to /tmp because user "postgres" can't write to other user folders
# We cd to / because user "postgres" might not have read permissions
# elsewhere.
with cd("/"):
postgres("pg_dump -Fc %s > %s" % (env.proj_name, tmp_file))
run("cp %s ." % tmp_file)
sudo("rm -f %s" % tmp_file)
@task
def restore(filename):
"""
Restores the project database from a previous backup.
"""
return postgres("pg_restore -c -d %s %s" % (env.proj_name, filename))
@task
def python(code, show=True):
"""
Runs Python code in the project's virtual environment, with Django loaded.
"""
setup = "import os;" \
"os.environ[\'DJANGO_SETTINGS_MODULE\']=\'%s.settings\';" \
"import django;" \
"django.setup();" % env.proj_app
full_code = 'python -c "%s%s"' % (setup, code.replace("`", "\\\`"))
with project():
if show:
print_command(code)
result = run(full_code, show=False)
return result
def static():
"""
Returns the live STATIC_ROOT directory.
"""
return python("from django.conf import settings;"
"print(settings.STATIC_ROOT)", show=False).split("\n")[-1]
@task
def manage(command):
"""
Runs a Django management command.
"""
return run("%s %s" % (env.manage, command))
###########################
# Security best practices #
###########################
@task
@log_call
@hosts(["root@%s" % host for host in env.hosts])
def secure(new_user=env.user):
"""
Minimal security steps for brand new servers.
Installs system updates, creates new user (with sudo privileges) for future
usage, and disables root login via SSH.
"""
run("apt-get update -q")
run("apt-get upgrade -y -q")
run("adduser --gecos '' %s" % new_user)
run("usermod -G sudo %s" % new_user)
run("sed -i 's:RootLogin yes:RootLogin no:' /etc/ssh/sshd_config")
run("service ssh restart")
print(green("Security steps completed. Log in to the server as '%s' from "
"now on." % new_user, bold=True))
#########################
# Install and configure #
#########################
@task
@log_call
def install():
"""
Installs the base system and Python requirements for the entire server.
"""
# Install system requirements
sudo("apt-get update -y -q")
apt("nginx libjpeg-dev python-dev python-setuptools git-core "
"postgresql libpq-dev memcached supervisor python-pip")
run("mkdir -p /home/%s/logs" % env.user)
# Install Python requirements
sudo("pip install -U pip virtualenv virtualenvwrapper mercurial")
# Set up virtualenv
run("mkdir -p %s" % env.venv_home)
run("echo 'export WORKON_HOME=%s' >> /home/%s/.bashrc" % (env.venv_home,
env.user))
run("echo 'source /usr/local/bin/virtualenvwrapper.sh' >> "
"/home/%s/.bashrc" % env.user)
print(green("Successfully set up git, mercurial, pip, virtualenv, "
"supervisor, memcached.", bold=True))
@task
@log_call
def create():
"""
Creates the environment needed to host the project.
The environment consists of: system locales, virtualenv, database, project
files, SSL certificate, and project-specific Python requirements.
"""
# Generate project locale
locale = env.locale.replace("UTF-8", "utf8")
with hide("stdout"):
if locale not in run("locale -a"):
sudo("locale-gen %s" % env.locale)
sudo("update-locale %s" % env.locale)
sudo("service postgresql restart")
run("exit")
# Create project path
run("mkdir -p %s" % env.proj_path)
# Set up virtual env
run("mkdir -p %s" % env.venv_home)
with cd(env.venv_home):
if exists(env.proj_name):
if confirm("Virtualenv already exists in host server: %s"
"\nWould you like to replace it?" % env.proj_name):
run("rm -rf %s" % env.proj_name)
else:
abort()
run("virtualenv %s" % env.proj_name)
# Upload project files
if env.deploy_tool in env.vcs_tools:
vcs_upload()
else:
rsync_upload()
# Create DB and DB user
pw = db_pass()
user_sql_args = (env.proj_name, pw.replace("'", "\'"))
user_sql = "CREATE USER %s WITH ENCRYPTED PASSWORD '%s';" % user_sql_args
psql(user_sql, show=False)
shadowed = "*" * len(pw)
print_command(user_sql.replace("'%s'" % pw, "'%s'" % shadowed))
psql("CREATE DATABASE %s WITH OWNER %s ENCODING = 'UTF8' "
"LC_CTYPE = '%s' LC_COLLATE = '%s' TEMPLATE template0;" %
(env.proj_name, env.proj_name, env.locale, env.locale))
# Set up SSL certificate
if not env.ssl_disabled:
conf_path = "/etc/nginx/conf"
if not exists(conf_path):
sudo("mkdir %s" % conf_path)
with cd(conf_path):
crt_file = env.proj_name + ".crt"
key_file = env.proj_name + ".key"
if not exists(crt_file) and not exists(key_file):
try:
crt_local, = glob(join("deploy", "*.crt"))
key_local, = glob(join("deploy", "*.key"))
except ValueError:
parts = (crt_file, key_file, env.domains[0])
sudo("openssl req -new -x509 -nodes -out %s -keyout %s "
"-subj '/CN=%s' -days 3650" % parts)
else:
upload_template(crt_local, crt_file, use_sudo=True)
upload_template(key_local, key_file, use_sudo=True)
# Install project-specific requirements
upload_template_and_reload("settings")
with project():
if env.reqs_path:
pip("-r %s/%s" % (env.proj_path, env.reqs_path))
pip("gunicorn setproctitle psycopg2 "
"django-compressor python-memcached")
# Bootstrap the DB
manage("createdb --noinput --nodata")
python("from django.conf import settings;"
"from django.contrib.sites.models import Site;"
"Site.objects.filter(id=settings.SITE_ID).update(domain='%s');"
% env.domains[0])
for domain in env.domains:
python("from django.contrib.sites.models import Site;"
"Site.objects.get_or_create(domain='%s');" % domain)
if env.admin_pass:
pw = env.admin_pass
user_py = ("from django.contrib.auth import get_user_model;"
"User = get_user_model();"
"u, _ = User.objects.get_or_create(username='admin');"
"u.is_staff = u.is_superuser = True;"
"u.set_password('%s');"
"u.save();" % pw)
python(user_py, show=False)
shadowed = "*" * len(pw)
print_command(user_py.replace("'%s'" % pw, "'%s'" % shadowed))
return True
@task
@log_call
def remove():
"""
Blow away the current project.
"""
if exists(env.venv_path):
run("rm -rf %s" % env.venv_path)
if exists(env.proj_path):
run("rm -rf %s" % env.proj_path)
for template in get_templates().values():
remote_path = template["remote_path"]
if exists(remote_path):
sudo("rm %s" % remote_path)
if exists(env.repo_path):
run("rm -rf %s" % env.repo_path)
sudo("supervisorctl update")
psql("DROP DATABASE IF EXISTS %s;" % env.proj_name)
psql("DROP USER IF EXISTS %s;" % env.proj_name)
##############
# Deployment #
##############
@task
@log_call
def restart():
"""
Restart gunicorn worker processes for the project.
If the processes are not running, they will be started.
"""
pid_path = "%s/gunicorn.pid" % env.proj_path
if exists(pid_path):
run("kill -HUP `cat %s`" % pid_path)
else:
sudo("supervisorctl update")
@task
@log_call
def deploy():
"""
Deploy latest version of the project.
Backup current version of the project, push latest version of the project
via version control or rsync, install new requirements, sync and migrate
the database, collect any new static assets, and restart gunicorn's worker
processes for the project.
"""
if not exists(env.proj_path):
if confirm("Project does not exist in host server: %s"
"\nWould you like to create it?" % env.proj_name):
create()
else:
abort()
# Backup current version of the project
with cd(env.proj_path):
backup("last.db")
if env.deploy_tool in env.vcs_tools:
with cd(env.repo_path):
if env.deploy_tool == "git":
run("git rev-parse HEAD > %s/last.commit" % env.proj_path)
elif env.deploy_tool == "hg":
run("hg id -i > last.commit")
with project():
static_dir = static()
if exists(static_dir):
run("tar -cf static.tar --exclude='*.thumbnails' %s" %
static_dir)
else:
with cd(join(env.proj_path, "..")):
excludes = ["*.pyc", "*.pio", "*.thumbnails"]
exclude_arg = " ".join("--exclude='%s'" % e for e in excludes)
run("tar -cf {0}.tar {1} {0}".format(env.proj_name, exclude_arg))
# Deploy latest version of the project
with update_changed_requirements():
if env.deploy_tool in env.vcs_tools:
vcs_upload()
else:
rsync_upload()
with project():
manage("collectstatic -v 0 --noinput")
manage("migrate --noinput")
for name in get_templates():
upload_template_and_reload(name)
restart()
return True
@task
@log_call
def rollback():
"""
Reverts project state to the last deploy.
When a deploy is performed, the current state of the project is
backed up. This includes the project files, the database, and all static
files. Calling rollback will revert all of these to their state prior to
the last deploy.
"""
with update_changed_requirements():
if env.deploy_tool in env.vcs_tools:
with cd(env.repo_path):
if env.deploy_tool == "git":
run("GIT_WORK_TREE={0} git checkout -f "
"`cat {0}/last.commit`".format(env.proj_path))
elif env.deploy_tool == "hg":
run("hg update -C `cat last.commit`")
with project():
with cd(join(static(), "..")):
run("tar -xf %s/static.tar" % env.proj_path)
else:
with cd(env.proj_path.rsplit("/", 1)[0]):
run("rm -rf %s" % env.proj_name)
run("tar -xf %s.tar" % env.proj_name)
with cd(env.proj_path):
restore("last.db")
restart()
@task
@log_call
def all():
"""
Installs everything required on a new system and deploy.
From the base software, up to the deployed project.
"""
install()
if create():
deploy()
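# Illustrative sketch: the tasks above are normally driven from the command line
# ("fab secure", "fab all", "fab deploy", ...), but Fabric 1.x can also run them
# programmatically. Host and credential values still come from the FABRIC
# settings read at import time; the guard keeps this from running on import.
if __name__ == '__main__':
    from fabric.api import execute
    execute(install)   # system packages, pip, virtualenvwrapper
    execute(create)    # virtualenv, database, SSL certificate, project files
    execute(deploy)    # upload code, migrate, collectstatic, restart gunicorn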
|
okfnepal/election-nepal
|
fabfile.py
|
Python
|
mit
| 21,828 | 0.000321 |
"""cascade folder deletes to imapuid
Otherwise, since this fk is NOT NULL, deleting a folder which has associated
imapuids still existing will cause a database IntegrityError. Only the mail
sync engine does such a thing. Nothing else should be deleting folders,
hard or soft.
This also fixes a problem where if e.g. someone disables their Spam folder
from showing up in Gmail IMAP, the server will crash trying to delete that
folder because the account.spam_folder_id constraint fails.
Revision ID: 350a08df27ee
Revises: 1eab2619cc4f
Create Date: 2014-05-25 01:40:21.762119
"""
# revision identifiers, used by Alembic.
revision = '350a08df27ee'
down_revision = '1eab2619cc4f'
from alembic import op
def upgrade():
op.drop_constraint('imapuid_ibfk_3', 'imapuid', type_='foreignkey')
op.create_foreign_key('imapuid_ibfk_3', 'imapuid', 'folder',
['folder_id'], ['id'], ondelete='CASCADE')
op.drop_constraint('account_ibfk_2', 'account', type_='foreignkey')
op.create_foreign_key('account_ibfk_2', 'account', 'folder',
['inbox_folder_id'], ['id'], ondelete='SET NULL')
op.drop_constraint('account_ibfk_3', 'account', type_='foreignkey')
op.create_foreign_key('account_ibfk_3', 'account', 'folder',
['sent_folder_id'], ['id'], ondelete='SET NULL')
op.drop_constraint('account_ibfk_4', 'account', type_='foreignkey')
op.create_foreign_key('account_ibfk_4', 'account', 'folder',
['drafts_folder_id'], ['id'], ondelete='SET NULL')
op.drop_constraint('account_ibfk_5', 'account', type_='foreignkey')
op.create_foreign_key('account_ibfk_5', 'account', 'folder',
['spam_folder_id'], ['id'], ondelete='SET NULL')
op.drop_constraint('account_ibfk_6', 'account', type_='foreignkey')
op.create_foreign_key('account_ibfk_6', 'account', 'folder',
['trash_folder_id'], ['id'], ondelete='SET NULL')
op.drop_constraint('account_ibfk_7', 'account', type_='foreignkey')
op.create_foreign_key('account_ibfk_7', 'account', 'folder',
['archive_folder_id'], ['id'], ondelete='SET NULL')
op.drop_constraint('account_ibfk_8', 'account', type_='foreignkey')
op.create_foreign_key('account_ibfk_8', 'account', 'folder',
['all_folder_id'], ['id'], ondelete='SET NULL')
op.drop_constraint('account_ibfk_9', 'account', type_='foreignkey')
op.create_foreign_key('account_ibfk_9', 'account', 'folder',
['starred_folder_id'], ['id'], ondelete='SET NULL')
# for some reason this was left out of migration 024, so might not exist
try:
op.drop_constraint('account_ibfk_10', 'account', type_='foreignkey')
except:
pass
op.create_foreign_key('account_ibfk_10', 'account', 'folder',
['important_folder_id'], ['id'], ondelete='SET NULL')
def downgrade():
op.drop_constraint('imapuid_ibfk_3', 'imapuid', type_='foreignkey')
op.create_foreign_key('imapuid_ibfk_3', 'imapuid', 'folder',
['folder_id'], ['id'])
op.drop_constraint('account_ibfk_2', 'account', type_='foreignkey')
op.create_foreign_key('account_ibfk_2', 'account', 'folder',
['inbox_folder_id'], ['id'])
op.drop_constraint('account_ibfk_3', 'account', type_='foreignkey')
op.create_foreign_key('account_ibfk_3', 'account', 'folder',
['sent_folder_id'], ['id'])
op.drop_constraint('account_ibfk_4', 'account', type_='foreignkey')
op.create_foreign_key('account_ibfk_4', 'account', 'folder',
['drafts_folder_id'], ['id'])
op.drop_constraint('account_ibfk_5', 'account', type_='foreignkey')
op.create_foreign_key('account_ibfk_5', 'account', 'folder',
['spam_folder_id'], ['id'])
op.drop_constraint('account_ibfk_6', 'account', type_='foreignkey')
op.create_foreign_key('account_ibfk_6', 'account', 'folder',
['trash_folder_id'], ['id'])
op.drop_constraint('account_ibfk_7', 'account', type_='foreignkey')
op.create_foreign_key('account_ibfk_7', 'account', 'folder',
['archive_folder_id'], ['id'])
op.drop_constraint('account_ibfk_8', 'account', type_='foreignkey')
op.create_foreign_key('account_ibfk_8', 'account', 'folder',
['all_folder_id'], ['id'])
op.drop_constraint('account_ibfk_9', 'account', type_='foreignkey')
op.create_foreign_key('account_ibfk_9', 'account', 'folder',
['starred_folder_id'], ['id'])
op.drop_constraint('account_ibfk_10', 'account', type_='foreignkey')
op.create_foreign_key('account_ibfk_10', 'account', 'folder',
['important_folder_id'], ['id'])
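# Illustrative sketch: migrations in this tree are normally applied with the
# alembic CLI ("alembic upgrade 350a08df27ee"); the programmatic equivalent,
# assuming an alembic.ini in the working directory, looks like this.
if __name__ == '__main__':
    from alembic import command
    from alembic.config import Config
    command.upgrade(Config('alembic.ini'), revision)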
|
nylas/sync-engine
|
migrations/versions/034_cascade_folder_deletes_to_imapuid.py
|
Python
|
agpl-3.0
| 4,899 | 0.000408 |
import string
import socket
import base64
import sys
class message:
def __init__(self, name="generate" ):
if name == "generate":
self.name=socket.gethostname()
else:
self.name=name
self.type="gc"
self.decoded=""
def set ( self, content=" " ):
base64content = base64.b64encode ( content )
self.decoded="piratebox;"+ self.type + ";01;" + self.name + ";" + base64content
def get ( self ):
# TODO Split decoded part
message_parts = string.split ( self.decoded , ";" )
if message_parts[0] != "piratebox":
return None
b64_content_part = message_parts[4]
content = base64.b64decode ( b64_content_part )
return content
def get_sendername (self):
return self.name
def get_message ( self ):
return self.decoded
def set_message ( self , decoded):
self.decoded = decoded
class shoutbox_message(message):
def __init__(self, name="generate" ):
message.__init__( self , name)
self.type="sb"
|
LibraryBox-Dev/LibraryBox-core
|
piratebox_origin/piratebox/piratebox/python_lib/messages.py
|
Python
|
gpl-3.0
| 1,109 | 0.038774 |
import giwyn.lib.settings.settings
from git import *
def list_git_projects():
print("List of git projects:")
#end="" -> avoid last '\n' character
for git_object in giwyn.lib.settings.settings.GIT_OBJECTS:
print(git_object)
def push_ready_projects():
print("Repository to push...")
any_repo_to_push = False
for git_project in giwyn.lib.settings.settings.GIT_OBJECTS:
if git_project.current_status == "TO PUSH":
print("Pushing {0} in the current branch...".format(git_project.entry))
git_project.git_object.remote().push()
any_repo_to_push = True
if not any_repo_to_push:
print("There is no repository to push yet!")
def pull_ready_projects():
print("Repository to pull...")
any_repo_to_pull = False
for git_project in giwyn.lib.settings.settings.GIT_OBJECTS:
if git_project.current_status == "CLEAN":
print("Try to pull {0}, from the current branch...".format(git_project.entry))
#Pull from origin
if git_project.git_object.remotes != []:
git_project.git_object.remotes.origin.pull()
any_repo_to_pull = True
if not any_repo_to_pull:
print("There is no repository to pull yet!")
|
k0pernicus/giwyn
|
giwyn/lib/gitconf/commands.py
|
Python
|
gpl-3.0
| 1,268 | 0.005521 |
# -*- coding: utf-8 -*-
from ..common import get_module_class
class Parser(object):
@staticmethod
def get(parser_name):
clazz = get_module_class(parser_name, __name__)
return clazz()
def loads(self, content):
return content
def dumps(self, content):
return content
def load(self, f):
return NotImplemented
def dump(self, content, f):
return NotImplemented
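# Illustrative sketch of a concrete subclass. Parser.get() resolves a parser
# class by name via get_module_class; the JSON variant below only shows the four
# hooks a subclass is expected to override.
import json
class JsonParser(Parser):
    def loads(self, content):
        return json.loads(content)
    def dumps(self, content):
        return json.dumps(content)
    def load(self, f):
        return json.load(f)
    def dump(self, content, f):
        json.dump(content, f)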
|
DataCanvasIO/pyDataCanvas
|
datacanvas/dataset/parser/parser.py
|
Python
|
apache-2.0
| 435 | 0 |
from django.db import models
from django.utils import timezone
import pytz
import datetime
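# Swaps the high and low 16-bit halves of a 32-bit integer; applying it twice
# returns the original value, so it presumably serves as a cheap, reversible
# obfuscation of primary keys (see Signup.hash below).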
def hash(n):
n = int(n)
return ((0x0000FFFF & n)<<16) + ((0xFFFF0000 & n)>>16)
class EventInstance(object):
def __init__(self, event, event_time, date):
self.date = date.date()
self.time = date.time()
self.event= event
self.event_time = event_time
self.attending = Signup.objects.filter(event=event, date=self.date, status=Signup.ATTENDING)
self.not_attending = Signup.objects.filter(event=event, date=self.date, status=Signup.NOT_ATTENDING)
def get_date_id(self):
return "%4d_%02d_%02d" % (self.date.year, self.date.month, self.date.day)
class Event(models.Model):
name = models.CharField(max_length=100)
timezone = models.CharField(max_length=50, choices=[(x,x) for x in pytz.all_timezones ], default="US/Mountain")
description = models.TextField()
location_lat = models.FloatField()
location_lon = models.FloatField()
addr = models.CharField(max_length=200)
city = models.CharField(max_length=100)
state = models.CharField(max_length=5)
zip = models.CharField(max_length=20)
contact_emails = models.CharField(max_length=500, help_text='Comma separated list of email addresses')
def __unicode__(self):
return self.name
def get_next(self):
timezone.activate(pytz.timezone(self.timezone))
now = timezone.now().date()
events = [ EventInstance(self, d, d.get_next(now)) for d in self.times.all() ]
events.sort(key=lambda x:x.date)
return events
class EventTime(models.Model):
DAY_CHOICES = (
(0, "Monday", ),
(1, "Tuesday", ),
(2, "Wednesday",),
(3, "Thursday", ),
(4, "Friday", ),
(5, "Saturday", ),
(6, "Sunday", ),
)
event= models.ForeignKey(Event, related_name="times")
day = models.IntegerField(choices=DAY_CHOICES)
time = models.TimeField()
def get_next(self, now):
dow = now.weekday()
td = datetime.timedelta(days=(self.day - dow) % 7)
next_date = now + td
return datetime.datetime.combine(next_date, self.time)
class Signup(models.Model):
ATTENDING = 0
NOT_ATTENDING = 1
status_choices = (
( ATTENDING , "I'm In", ),
( NOT_ATTENDING, "I'm Out", ),
)
event = models.ForeignKey(Event, related_name="signups")
date = models.DateField()
name = models.CharField(max_length=100)
status= models.IntegerField(choices=status_choices, blank=False, default=ATTENDING)
def hash(self):
return hash(self.pk)
class Comment(models.Model):
class Meta:
ordering = ["-timestamp"]
event = models.ForeignKey(Event, related_name="comments")
name = models.CharField(max_length=100)
comment = models.TextField()
timestamp = models.DateTimeField(auto_now_add=True)
|
dirjud/pickup
|
event/models.py
|
Python
|
gpl-2.0
| 3,032 | 0.01715 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('www', '0010_expo_info_url'),
]
operations = [
migrations.RenameField(
model_name='expo',
old_name='info_url',
new_name='url',
),
]
|
themaxx75/lapare-bijoux
|
lapare.ca/lapare/apps/www/migrations/0011_auto_20151022_2037.py
|
Python
|
bsd-3-clause
| 375 | 0 |
from __future__ import unicode_literals
from django.db import models
# Create your models here.
class Urls(models.Model):
longurl = models.CharField(max_length=256)
shorturl = models.CharField(max_length=128)
|
rodrigobersan/X-Serv-18.1-Practica1
|
project/acorta/models.py
|
Python
|
gpl-2.0
| 219 | 0.004566 |
import RoleManagement
import bot_logger
import purger
async def run_op(client, message, bot_log):
levels = {
'admin': ['admin'],
'high': ['admin', 'moderator', 'panda bat'],
'medium': ['trial moderator', 'moderator', 'admin', 'panda bat'],
'low': ['@everyone']
}
ops = {'+': [RoleManagement.assign_role, 'low'],
'-': [RoleManagement.remove_role, 'low'],
'reduce': [RoleManagement.reduce_roles, 'high'],
'timein': [RoleManagement.timein_user, 'medium'],
'timeout': [RoleManagement.timeout_user, 'medium'],
'verify': [RoleManagement.verify_rank, 'low'],
'purge': [purger.purge_channel, 'high'],
'count': [RoleManagement.count_users, 'medium'],
}
# unwrap message into operation and arguments
operation = message.content[1:]
try:
operation, _ = operation.split(maxsplit=1)
except ValueError:
if operation == 'purge':
pass
else:
return None
# check if operation exists
if operation in ops.keys():
op = ops[operation]
else:
return None
success = False
required_roles = levels[op[1]]
for r in message.author.roles:
if r.name.lower() in required_roles:
await op[0](client, message, bot_log)
success = True
break
if success is not True:
        await client.send_message(message.channel,
                                  "Failed running `{}`".format(operation))
|
alexandergraul/pvs-bot
|
launcher.py
|
Python
|
gpl-3.0
| 1,561 | 0.000641 |
# -*- coding: utf-8 -*-
"""
14. Using a custom primary key
By default, Django adds an ``"id"`` field to each model. But you can override
this behavior by explicitly adding ``primary_key=True`` to a field.
"""
from django.conf import settings
from django.db import models, transaction, IntegrityError
from fields import MyAutoField
class Employee(models.Model):
employee_code = models.IntegerField(primary_key=True, db_column = 'code')
first_name = models.CharField(max_length=20)
last_name = models.CharField(max_length=20)
class Meta:
ordering = ('last_name', 'first_name')
def __unicode__(self):
return u"%s %s" % (self.first_name, self.last_name)
class Business(models.Model):
name = models.CharField(max_length=20, primary_key=True)
employees = models.ManyToManyField(Employee)
class Meta:
verbose_name_plural = 'businesses'
def __unicode__(self):
return self.name
class Bar(models.Model):
id = MyAutoField(primary_key=True, db_index=True)
def __unicode__(self):
return repr(self.pk)
class Foo(models.Model):
bar = models.ForeignKey(Bar)
__test__ = {'API_TESTS':"""
>>> dan = Employee(employee_code=123, first_name='Dan', last_name='Jones')
>>> dan.save()
>>> Employee.objects.all()
[<Employee: Dan Jones>]
>>> fran = Employee(employee_code=456, first_name='Fran', last_name='Bones')
>>> fran.save()
>>> Employee.objects.all()
[<Employee: Fran Bones>, <Employee: Dan Jones>]
>>> Employee.objects.get(pk=123)
<Employee: Dan Jones>
>>> Employee.objects.get(pk=456)
<Employee: Fran Bones>
>>> Employee.objects.get(pk=42)
Traceback (most recent call last):
...
DoesNotExist: Employee matching query does not exist.
# Use the name of the primary key, rather than pk.
>>> Employee.objects.get(employee_code__exact=123)
<Employee: Dan Jones>
# pk can be used as a substitute for the primary key.
>>> Employee.objects.filter(pk__in=[123, 456])
[<Employee: Fran Bones>, <Employee: Dan Jones>]
# The primary key can be accessed via the pk property on the model.
>>> e = Employee.objects.get(pk=123)
>>> e.pk
123
# Or we can use the real attribute name for the primary key:
>>> e.employee_code
123
# Fran got married and changed her last name.
>>> fran = Employee.objects.get(pk=456)
>>> fran.last_name = 'Jones'
>>> fran.save()
>>> Employee.objects.filter(last_name__exact='Jones')
[<Employee: Dan Jones>, <Employee: Fran Jones>]
>>> emps = Employee.objects.in_bulk([123, 456])
>>> emps[123]
<Employee: Dan Jones>
>>> b = Business(name='Sears')
>>> b.save()
>>> b.employees.add(dan, fran)
>>> b.employees.all()
[<Employee: Dan Jones>, <Employee: Fran Jones>]
>>> fran.business_set.all()
[<Business: Sears>]
>>> Business.objects.in_bulk(['Sears'])
{u'Sears': <Business: Sears>}
>>> Business.objects.filter(name__exact='Sears')
[<Business: Sears>]
>>> Business.objects.filter(pk='Sears')
[<Business: Sears>]
# Queries across tables, involving primary key
>>> Employee.objects.filter(business__name__exact='Sears')
[<Employee: Dan Jones>, <Employee: Fran Jones>]
>>> Employee.objects.filter(business__pk='Sears')
[<Employee: Dan Jones>, <Employee: Fran Jones>]
>>> Business.objects.filter(employees__employee_code__exact=123)
[<Business: Sears>]
>>> Business.objects.filter(employees__pk=123)
[<Business: Sears>]
>>> Business.objects.filter(employees__first_name__startswith='Fran')
[<Business: Sears>]
# Primary key may be unicode string
>>> bus = Business(name=u'jaźń')
>>> bus.save()
# The primary key must also obviously be unique, so trying to create a new
# object with the same primary key will fail.
>>> try:
... sid = transaction.savepoint()
... Employee.objects.create(employee_code=123, first_name='Fred', last_name='Jones')
... transaction.savepoint_commit(sid)
... except Exception, e:
... if isinstance(e, IntegrityError):
... transaction.savepoint_rollback(sid)
... print "Pass"
... else:
... print "Fail with %s" % type(e)
Pass
# Regression for #10785 -- Custom fields can be used for primary keys.
>>> new_bar = Bar.objects.create()
>>> new_foo = Foo.objects.create(bar=new_bar)
# FIXME: This still doesn't work, but will require some changes in
# get_db_prep_lookup to fix it.
# >>> f = Foo.objects.get(bar=new_bar.pk)
# >>> f == new_foo
# True
# >>> f.bar == new_bar
# True
>>> f = Foo.objects.get(bar=new_bar)
>>> f == new_foo
True
>>> f.bar == new_bar
True
"""}
# SQLite lets objects be saved with an empty primary key, even though an
# integer is expected. So we can't check for an error being raised in that case
# for SQLite. Remove it from the suite for this next bit.
if settings.DATABASE_ENGINE != 'sqlite3':
__test__["API_TESTS"] += """
# The primary key must be specified, so an error is raised if you try to create
# an object without it.
>>> try:
... sid = transaction.savepoint()
... Employee.objects.create(first_name='Tom', last_name='Smith')
... print 'hello'
... transaction.savepoint_commit(sid)
... print 'hello2'
... except Exception, e:
... if isinstance(e, IntegrityError):
... transaction.savepoint_rollback(sid)
... print "Pass"
... else:
... print "Fail with %s" % type(e)
Pass
"""
|
grangier/django-11599
|
tests/modeltests/custom_pk/models.py
|
Python
|
bsd-3-clause
| 5,234 | 0.001911 |
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
# Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils.translation import ugettext_lazy as _
from horizon import exceptions
from horizon import tabs
from neutronclient.common import exceptions as neutron_exc
from openstack_dashboard.api import keystone
from openstack_dashboard.api import network
from openstack_dashboard.api import nova
from openstack_dashboard.dashboards.project.access_and_security.\
api_access.tables import EndpointsTable
from openstack_dashboard.dashboards.project.access_and_security.\
floating_ips.tables import FloatingIPsTable
from openstack_dashboard.dashboards.project.access_and_security.\
keypairs.tables import KeypairsTable
from openstack_dashboard.dashboards.project.access_and_security.\
security_groups.tables import SecurityGroupsTable
class SecurityGroupsTab(tabs.TableTab):
table_classes = (SecurityGroupsTable,)
name = _("Security Groups")
slug = "security_groups_tab"
template_name = "horizon/common/_detail_table.html"
permissions = ('openstack.services.compute',)
def get_security_groups_data(self):
try:
security_groups = network.security_group_list(self.request)
except neutron_exc.ConnectionFailed:
security_groups = []
exceptions.handle(self.request)
except Exception:
security_groups = []
exceptions.handle(self.request,
_('Unable to retrieve security groups.'))
return sorted(security_groups, key=lambda group: group.name)
class KeypairsTab(tabs.TableTab):
table_classes = (KeypairsTable,)
name = _("Key Pairs")
slug = "keypairs_tab"
template_name = "horizon/common/_detail_table.html"
permissions = ('openstack.services.compute',)
def get_keypairs_data(self):
try:
keypairs = nova.keypair_list(self.request)
except Exception:
keypairs = []
exceptions.handle(self.request,
_('Unable to retrieve key pair list.'))
return keypairs
class FloatingIPsTab(tabs.TableTab):
table_classes = (FloatingIPsTable,)
name = _("Floating IPs")
slug = "floating_ips_tab"
template_name = "horizon/common/_detail_table.html"
permissions = ('openstack.services.compute',)
def get_floating_ips_data(self):
try:
floating_ips = network.tenant_floating_ip_list(self.request)
except neutron_exc.ConnectionFailed:
floating_ips = []
exceptions.handle(self.request)
except Exception:
floating_ips = []
exceptions.handle(self.request,
_('Unable to retrieve floating IP addresses.'))
try:
floating_ip_pools = network.floating_ip_pools_list(self.request)
except neutron_exc.ConnectionFailed:
floating_ip_pools = []
exceptions.handle(self.request)
except Exception:
floating_ip_pools = []
exceptions.handle(self.request,
_('Unable to retrieve floating IP pools.'))
pool_dict = dict([(obj.id, obj.name) for obj in floating_ip_pools])
instances = []
try:
instances, has_more = nova.server_list(self.request)
except Exception:
exceptions.handle(self.request,
_('Unable to retrieve instance list.'))
instances_dict = dict([(obj.id, obj.name) for obj in instances])
for ip in floating_ips:
ip.instance_name = instances_dict.get(ip.instance_id)
ip.pool_name = pool_dict.get(ip.pool, ip.pool)
return floating_ips
def allowed(self, request):
return network.floating_ip_supported(request)
class APIAccessTab(tabs.TableTab):
table_classes = (EndpointsTable,)
name = _("API Access")
slug = "api_access_tab"
template_name = "horizon/common/_detail_table.html"
def get_endpoints_data(self):
services = []
for i, service in enumerate(self.request.user.service_catalog):
service['id'] = i
services.append(
keystone.Service(service, self.request.user.services_region))
return services
class AccessAndSecurityTabs(tabs.TabGroup):
slug = "access_security_tabs"
tabs = (SecurityGroupsTab, KeypairsTab, FloatingIPsTab, APIAccessTab)
sticky = True
|
takeshineshiro/horizon
|
openstack_dashboard/dashboards/project/access_and_security/tabs.py
|
Python
|
apache-2.0
| 5,203 | 0 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class PoolEvaluateAutoScaleParameter(Model):
"""Options for evaluating an automatic scaling formula on a pool.
:param auto_scale_formula: The formula for the desired number of compute
nodes in the pool. The formula is validated and its results calculated,
but it is not applied to the pool. To apply the formula to the pool,
'Enable automatic scaling on a pool'. For more information about
specifying this formula, see Automatically scale compute nodes in an Azure
Batch pool
(https://azure.microsoft.com/en-us/documentation/articles/batch-automatic-scaling).
:type auto_scale_formula: str
"""
_validation = {
'auto_scale_formula': {'required': True},
}
_attribute_map = {
'auto_scale_formula': {'key': 'autoScaleFormula', 'type': 'str'},
}
def __init__(self, auto_scale_formula):
super(PoolEvaluateAutoScaleParameter, self).__init__()
self.auto_scale_formula = auto_scale_formula
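# --- Illustrative usage sketch (editor's addition, not part of the generated SDK
# file). It only constructs the model defined above with a sample formula written
# in the Batch autoscale language; the formula text is a placeholder, and the
# service operation that would consume it is described in a comment rather than
# invoked, since no client object is available here.
if __name__ == '__main__':
    sample = PoolEvaluateAutoScaleParameter(
        auto_scale_formula='$TargetDedicated = 2;')
    # A Batch client would pass this formula to the pool evaluate-auto-scale
    # operation to test it against an existing pool without applying it.
    print(sample.auto_scale_formula)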
|
lmazuel/azure-sdk-for-python
|
azure-batch/azure/batch/models/pool_evaluate_auto_scale_parameter.py
|
Python
|
mit
| 1,498 | 0 |
def deposit(materials, life, sea, climate):
    """Blend eroded materials into a single sedimentary deposit.

    Each material's rock attributes are averaged over the strata it was eroded
    from, weighted by thickness; the contributions are then blended again,
    weighted by the amount each material deposits. The result is named from
    grain size (via clasticity) unless life in a warm sea raises the calcity
    enough to make it limestone or chalk. Returns a dict with the blended
    'rock' and the total 'thickness'.
    """
contributions = []
depositkeys = set()
for m in materials:
t = 0
i = len(m.substance) - 1
sources = []
keys = set()
        # walk back from the last stratum until enough thickness is gathered
        # to cover the eroded total for this material
        while t < m.total:
dt = m.total - t
layer = m.substance[i]
if layer['thickness'] >= dt:
sources.append({ 'rock': layer['rock'], 'thickness': dt })
else:
sources.append(layer)
keys = keys.union(sources[-1]['rock'].keys())
t += sources[-1]['thickness']
i -= 1
rock = { 'type': 'S', 'name': 'S' }
for k in keys:
if k not in rock:
# weight attributes by thickness
rock[k] = sum([float(s['thickness']) * s['rock'][k]
if k in s['rock'] else 0
for s in sources])/m.total
depositkeys = depositkeys.union(rock.keys())
contributions.append({ 'rock': rock, 'thickness': m.amount })
    # blend every material's contribution into one deposited rock, again
    # weighting each attribute by contribution thickness
    rock = { 'type': 'S', 'toughness': 0, 'name': None }
thickness = sum([c['thickness'] for c in contributions])
for k in depositkeys:
if k not in rock:
# weight attributes by thickness
rock[k] = sum([float(c['thickness']) * c['rock'][k]
if k in c['rock'] else 0
for c in contributions])/thickness
rock['clasticity'] = rock['clasticity'] * 2 if 'clasticity' in rock else 1
if life:
if sea:
rock['calcity'] = max(0, min(1, float(climate.temperature - 18)/25))
if rock['calcity'] > 0.99:
rock['name'] = 'chalk'
elif rock['calcity'] > 0.75:
rock['name'] = 'limestone'
elif climate.koeppen[0] == u'C' and climate.temperature < 18:
rock['bogginess'] = max(0, (climate.precipitation - 0.75) * 4)
if rock['name'] is None:
grain = 1e-3/float(rock['clasticity'])
if grain < 4e-6:
name = 'claystone'
elif grain < 60e-6:
name = 'siltstone'
elif grain < 2e-3:
name = 'sandstone'
else:
name = 'conglomerate'
rock['name'] = name
return { 'rock': rock, 'thickness': thickness }
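# --- Illustrative usage sketch (editor's addition, not part of the module).
# The material and climate objects below are minimal hypothetical stand-ins for
# whatever the rest of the simulation passes in; only the attributes read above
# (substance, total, amount, temperature, precipitation, koeppen) are provided.
if __name__ == '__main__':
    class _Material(object):
        def __init__(self, substance, total, amount):
            self.substance, self.total, self.amount = substance, total, amount

    class _Climate(object):
        def __init__(self, temperature, precipitation, koeppen):
            self.temperature = temperature
            self.precipitation = precipitation
            self.koeppen = koeppen

    # one eroded material drawn from a single 5-unit igneous stratum
    eroded = _Material(
        substance=[{'thickness': 5.0,
                    'rock': {'type': 'I', 'toughness': 0.8, 'clasticity': 2}}],
        total=2.0, amount=1.0)
    layer = deposit([eroded], life=True, sea=False,
                    climate=_Climate(temperature=12, precipitation=0.5,
                                     koeppen=u'Cfb'))
    # grain size from the doubled clasticity lands in the sandstone range;
    # thickness equals the material's deposited amount
    print('%s %.1f' % (layer['rock']['name'], layer['thickness']))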
|
tps12/Tec-Nine
|
rock/sedimentary.py
|
Python
|
gpl-3.0
| 2,309 | 0.004764 |
"""Unit tests for the io module."""
# Tests of io are scattered over the test suite:
# * test_bufio - tests file buffering
# * test_memoryio - tests BytesIO and StringIO
# * test_fileio - tests FileIO
# * test_file - tests the file interface
# * test_io - tests everything else in the io module
# * test_univnewlines - tests universal newline support
# * test_largefile - tests operations on a file greater than 2**32 bytes
# (only enabled with -ulargefile)
################################################################################
# ATTENTION TEST WRITERS!!!
################################################################################
# When writing tests for io, it's important to test both the C and Python
# implementations. This is usually done by writing a base test that refers to
# the type it is testing as an attribute. Then it provides custom subclasses to
# test both implementations. This file has lots of examples.
################################################################################
from __future__ import print_function
from __future__ import unicode_literals
import os
import sys
import time
import array
import random
import unittest
import weakref
import warnings
import abc
import signal
import errno
from itertools import cycle, count
from collections import deque
from UserList import UserList
from test import test_support as support
import contextlib
import codecs
import io # C implementation of io
import _pyio as pyio # Python implementation of io
try:
import threading
except ImportError:
threading = None
try:
import fcntl
except ImportError:
fcntl = None
__metaclass__ = type
bytes = support.py3k_bytes
def _default_chunk_size():
"""Get the default TextIOWrapper chunk size"""
with io.open(__file__, "r", encoding="latin1") as f:
return f._CHUNK_SIZE
class MockRawIOWithoutRead:
"""A RawIO implementation without read(), so as to exercise the default
RawIO.read() which calls readinto()."""
def __init__(self, read_stack=()):
self._read_stack = list(read_stack)
self._write_stack = []
self._reads = 0
self._extraneous_reads = 0
def write(self, b):
self._write_stack.append(bytes(b))
return len(b)
def writable(self):
return True
def fileno(self):
return 42
def readable(self):
return True
def seekable(self):
return True
def seek(self, pos, whence):
return 0 # wrong but we gotta return something
def tell(self):
return 0 # same comment as above
def readinto(self, buf):
self._reads += 1
max_len = len(buf)
try:
data = self._read_stack[0]
except IndexError:
self._extraneous_reads += 1
return 0
if data is None:
del self._read_stack[0]
return None
n = len(data)
if len(data) <= max_len:
del self._read_stack[0]
buf[:n] = data
return n
else:
buf[:] = data[:max_len]
self._read_stack[0] = data[max_len:]
return max_len
def truncate(self, pos=None):
return pos
class CMockRawIOWithoutRead(MockRawIOWithoutRead, io.RawIOBase):
pass
class PyMockRawIOWithoutRead(MockRawIOWithoutRead, pyio.RawIOBase):
pass
class MockRawIO(MockRawIOWithoutRead):
def read(self, n=None):
self._reads += 1
try:
return self._read_stack.pop(0)
except:
self._extraneous_reads += 1
return b""
class CMockRawIO(MockRawIO, io.RawIOBase):
pass
class PyMockRawIO(MockRawIO, pyio.RawIOBase):
pass
class MisbehavedRawIO(MockRawIO):
def write(self, b):
return MockRawIO.write(self, b) * 2
def read(self, n=None):
return MockRawIO.read(self, n) * 2
def seek(self, pos, whence):
return -123
def tell(self):
return -456
def readinto(self, buf):
MockRawIO.readinto(self, buf)
return len(buf) * 5
class CMisbehavedRawIO(MisbehavedRawIO, io.RawIOBase):
pass
class PyMisbehavedRawIO(MisbehavedRawIO, pyio.RawIOBase):
pass
class CloseFailureIO(MockRawIO):
closed = 0
def close(self):
if not self.closed:
self.closed = 1
raise IOError
class CCloseFailureIO(CloseFailureIO, io.RawIOBase):
pass
class PyCloseFailureIO(CloseFailureIO, pyio.RawIOBase):
pass
class MockFileIO:
def __init__(self, data):
self.read_history = []
super(MockFileIO, self).__init__(data)
def read(self, n=None):
res = super(MockFileIO, self).read(n)
self.read_history.append(None if res is None else len(res))
return res
def readinto(self, b):
res = super(MockFileIO, self).readinto(b)
self.read_history.append(res)
return res
class CMockFileIO(MockFileIO, io.BytesIO):
pass
class PyMockFileIO(MockFileIO, pyio.BytesIO):
pass
class MockNonBlockWriterIO:
def __init__(self):
self._write_stack = []
self._blocker_char = None
def pop_written(self):
s = b"".join(self._write_stack)
self._write_stack[:] = []
return s
def block_on(self, char):
"""Block when a given char is encountered."""
self._blocker_char = char
def readable(self):
return True
def seekable(self):
return True
def writable(self):
return True
def write(self, b):
b = bytes(b)
n = -1
if self._blocker_char:
try:
n = b.index(self._blocker_char)
except ValueError:
pass
else:
if n > 0:
# write data up to the first blocker
self._write_stack.append(b[:n])
return n
else:
# cancel blocker and indicate would block
self._blocker_char = None
return None
self._write_stack.append(b)
return len(b)
class CMockNonBlockWriterIO(MockNonBlockWriterIO, io.RawIOBase):
BlockingIOError = io.BlockingIOError
class PyMockNonBlockWriterIO(MockNonBlockWriterIO, pyio.RawIOBase):
BlockingIOError = pyio.BlockingIOError
class IOTest(unittest.TestCase):
def setUp(self):
support.unlink(support.TESTFN)
def tearDown(self):
support.unlink(support.TESTFN)
def write_ops(self, f):
self.assertEqual(f.write(b"blah."), 5)
f.truncate(0)
self.assertEqual(f.tell(), 5)
f.seek(0)
self.assertEqual(f.write(b"blah."), 5)
self.assertEqual(f.seek(0), 0)
self.assertEqual(f.write(b"Hello."), 6)
self.assertEqual(f.tell(), 6)
self.assertEqual(f.seek(-1, 1), 5)
self.assertEqual(f.tell(), 5)
self.assertEqual(f.write(bytearray(b" world\n\n\n")), 9)
self.assertEqual(f.seek(0), 0)
self.assertEqual(f.write(b"h"), 1)
self.assertEqual(f.seek(-1, 2), 13)
self.assertEqual(f.tell(), 13)
self.assertEqual(f.truncate(12), 12)
self.assertEqual(f.tell(), 13)
self.assertRaises(TypeError, f.seek, 0.0)
def read_ops(self, f, buffered=False):
data = f.read(5)
self.assertEqual(data, b"hello")
data = bytearray(data)
self.assertEqual(f.readinto(data), 5)
self.assertEqual(data, b" worl")
self.assertEqual(f.readinto(data), 2)
self.assertEqual(len(data), 5)
self.assertEqual(data[:2], b"d\n")
self.assertEqual(f.seek(0), 0)
self.assertEqual(f.read(20), b"hello world\n")
self.assertEqual(f.read(1), b"")
self.assertEqual(f.readinto(bytearray(b"x")), 0)
self.assertEqual(f.seek(-6, 2), 6)
self.assertEqual(f.read(5), b"world")
self.assertEqual(f.read(0), b"")
self.assertEqual(f.readinto(bytearray()), 0)
self.assertEqual(f.seek(-6, 1), 5)
self.assertEqual(f.read(5), b" worl")
self.assertEqual(f.tell(), 10)
self.assertRaises(TypeError, f.seek, 0.0)
if buffered:
f.seek(0)
self.assertEqual(f.read(), b"hello world\n")
f.seek(6)
self.assertEqual(f.read(), b"world\n")
self.assertEqual(f.read(), b"")
LARGE = 2**31
def large_file_ops(self, f):
assert f.readable()
assert f.writable()
self.assertEqual(f.seek(self.LARGE), self.LARGE)
self.assertEqual(f.tell(), self.LARGE)
self.assertEqual(f.write(b"xxx"), 3)
self.assertEqual(f.tell(), self.LARGE + 3)
self.assertEqual(f.seek(-1, 1), self.LARGE + 2)
self.assertEqual(f.truncate(), self.LARGE + 2)
self.assertEqual(f.tell(), self.LARGE + 2)
self.assertEqual(f.seek(0, 2), self.LARGE + 2)
self.assertEqual(f.truncate(self.LARGE + 1), self.LARGE + 1)
self.assertEqual(f.tell(), self.LARGE + 2)
self.assertEqual(f.seek(0, 2), self.LARGE + 1)
self.assertEqual(f.seek(-1, 2), self.LARGE)
self.assertEqual(f.read(2), b"x")
def test_invalid_operations(self):
# Try writing on a file opened in read mode and vice-versa.
for mode in ("w", "wb"):
with self.open(support.TESTFN, mode) as fp:
self.assertRaises(IOError, fp.read)
self.assertRaises(IOError, fp.readline)
with self.open(support.TESTFN, "rb") as fp:
self.assertRaises(IOError, fp.write, b"blah")
self.assertRaises(IOError, fp.writelines, [b"blah\n"])
with self.open(support.TESTFN, "r") as fp:
self.assertRaises(IOError, fp.write, "blah")
self.assertRaises(IOError, fp.writelines, ["blah\n"])
def test_raw_file_io(self):
with self.open(support.TESTFN, "wb", buffering=0) as f:
self.assertEqual(f.readable(), False)
self.assertEqual(f.writable(), True)
self.assertEqual(f.seekable(), True)
self.write_ops(f)
with self.open(support.TESTFN, "rb", buffering=0) as f:
self.assertEqual(f.readable(), True)
self.assertEqual(f.writable(), False)
self.assertEqual(f.seekable(), True)
self.read_ops(f)
def test_buffered_file_io(self):
with self.open(support.TESTFN, "wb") as f:
self.assertEqual(f.readable(), False)
self.assertEqual(f.writable(), True)
self.assertEqual(f.seekable(), True)
self.write_ops(f)
with self.open(support.TESTFN, "rb") as f:
self.assertEqual(f.readable(), True)
self.assertEqual(f.writable(), False)
self.assertEqual(f.seekable(), True)
self.read_ops(f, True)
def test_readline(self):
with self.open(support.TESTFN, "wb") as f:
f.write(b"abc\ndef\nxyzzy\nfoo\x00bar\nanother line")
with self.open(support.TESTFN, "rb") as f:
self.assertEqual(f.readline(), b"abc\n")
self.assertEqual(f.readline(10), b"def\n")
self.assertEqual(f.readline(2), b"xy")
self.assertEqual(f.readline(4), b"zzy\n")
self.assertEqual(f.readline(), b"foo\x00bar\n")
self.assertEqual(f.readline(None), b"another line")
self.assertRaises(TypeError, f.readline, 5.3)
with self.open(support.TESTFN, "r") as f:
self.assertRaises(TypeError, f.readline, 5.3)
def test_raw_bytes_io(self):
f = self.BytesIO()
self.write_ops(f)
data = f.getvalue()
self.assertEqual(data, b"hello world\n")
f = self.BytesIO(data)
self.read_ops(f, True)
def test_large_file_ops(self):
        # On Windows and Mac OS X this test consumes large resources; it takes
# a long time to build the >2GB file and takes >2GB of disk space
# therefore the resource must be enabled to run this test.
if sys.platform[:3] == 'win' or sys.platform == 'darwin':
support.requires(
'largefile',
'test requires %s bytes and a long time to run' % self.LARGE)
with self.open(support.TESTFN, "w+b", 0) as f:
self.large_file_ops(f)
with self.open(support.TESTFN, "w+b") as f:
self.large_file_ops(f)
def test_with_open(self):
for bufsize in (0, 1, 100):
f = None
with self.open(support.TESTFN, "wb", bufsize) as f:
f.write(b"xxx")
self.assertEqual(f.closed, True)
f = None
try:
with self.open(support.TESTFN, "wb", bufsize) as f:
1 // 0
except ZeroDivisionError:
self.assertEqual(f.closed, True)
else:
self.fail("1 // 0 didn't raise an exception")
# issue 5008
def test_append_mode_tell(self):
with self.open(support.TESTFN, "wb") as f:
f.write(b"xxx")
with self.open(support.TESTFN, "ab", buffering=0) as f:
self.assertEqual(f.tell(), 3)
with self.open(support.TESTFN, "ab") as f:
self.assertEqual(f.tell(), 3)
with self.open(support.TESTFN, "a") as f:
self.assertTrue(f.tell() > 0)
def test_destructor(self):
record = []
class MyFileIO(self.FileIO):
def __del__(self):
record.append(1)
try:
f = super(MyFileIO, self).__del__
except AttributeError:
pass
else:
f()
def close(self):
record.append(2)
super(MyFileIO, self).close()
def flush(self):
record.append(3)
super(MyFileIO, self).flush()
f = MyFileIO(support.TESTFN, "wb")
f.write(b"xxx")
del f
support.gc_collect()
self.assertEqual(record, [1, 2, 3])
with self.open(support.TESTFN, "rb") as f:
self.assertEqual(f.read(), b"xxx")
def _check_base_destructor(self, base):
record = []
class MyIO(base):
def __init__(self):
# This exercises the availability of attributes on object
# destruction.
# (in the C version, close() is called by the tp_dealloc
# function, not by __del__)
self.on_del = 1
self.on_close = 2
self.on_flush = 3
def __del__(self):
record.append(self.on_del)
try:
f = super(MyIO, self).__del__
except AttributeError:
pass
else:
f()
def close(self):
record.append(self.on_close)
super(MyIO, self).close()
def flush(self):
record.append(self.on_flush)
super(MyIO, self).flush()
f = MyIO()
del f
support.gc_collect()
self.assertEqual(record, [1, 2, 3])
def test_IOBase_destructor(self):
self._check_base_destructor(self.IOBase)
def test_RawIOBase_destructor(self):
self._check_base_destructor(self.RawIOBase)
def test_BufferedIOBase_destructor(self):
self._check_base_destructor(self.BufferedIOBase)
def test_TextIOBase_destructor(self):
self._check_base_destructor(self.TextIOBase)
def test_close_flushes(self):
with self.open(support.TESTFN, "wb") as f:
f.write(b"xxx")
with self.open(support.TESTFN, "rb") as f:
self.assertEqual(f.read(), b"xxx")
def test_array_writes(self):
a = array.array(b'i', range(10))
n = len(a.tostring())
with self.open(support.TESTFN, "wb", 0) as f:
self.assertEqual(f.write(a), n)
with self.open(support.TESTFN, "wb") as f:
self.assertEqual(f.write(a), n)
def test_closefd(self):
self.assertRaises(ValueError, self.open, support.TESTFN, 'w',
closefd=False)
def test_read_closed(self):
with self.open(support.TESTFN, "w") as f:
f.write("egg\n")
with self.open(support.TESTFN, "r") as f:
file = self.open(f.fileno(), "r", closefd=False)
self.assertEqual(file.read(), "egg\n")
file.seek(0)
file.close()
self.assertRaises(ValueError, file.read)
def test_no_closefd_with_filename(self):
# can't use closefd in combination with a file name
self.assertRaises(ValueError, self.open, support.TESTFN, "r", closefd=False)
def test_closefd_attr(self):
with self.open(support.TESTFN, "wb") as f:
f.write(b"egg\n")
with self.open(support.TESTFN, "r") as f:
self.assertEqual(f.buffer.raw.closefd, True)
file = self.open(f.fileno(), "r", closefd=False)
self.assertEqual(file.buffer.raw.closefd, False)
def test_garbage_collection(self):
# FileIO objects are collected, and collecting them flushes
# all data to disk.
f = self.FileIO(support.TESTFN, "wb")
f.write(b"abcxxx")
f.f = f
wr = weakref.ref(f)
del f
support.gc_collect()
self.assertTrue(wr() is None, wr)
with self.open(support.TESTFN, "rb") as f:
self.assertEqual(f.read(), b"abcxxx")
def test_unbounded_file(self):
# Issue #1174606: reading from an unbounded stream such as /dev/zero.
zero = "/dev/zero"
if not os.path.exists(zero):
self.skipTest("{0} does not exist".format(zero))
if sys.maxsize > 0x7FFFFFFF:
self.skipTest("test can only run in a 32-bit address space")
if support.real_max_memuse < support._2G:
self.skipTest("test requires at least 2GB of memory")
with self.open(zero, "rb", buffering=0) as f:
self.assertRaises(OverflowError, f.read)
with self.open(zero, "rb") as f:
self.assertRaises(OverflowError, f.read)
with self.open(zero, "r") as f:
self.assertRaises(OverflowError, f.read)
def check_flush_error_on_close(self, *args, **kwargs):
# Test that the file is closed despite failed flush
# and that flush() is called before file closed.
f = self.open(*args, **kwargs)
closed = []
def bad_flush():
closed[:] = [f.closed]
raise IOError()
f.flush = bad_flush
self.assertRaises(IOError, f.close) # exception not swallowed
self.assertTrue(f.closed)
self.assertTrue(closed) # flush() called
self.assertFalse(closed[0]) # flush() called before file closed
f.flush = lambda: None # break reference loop
def test_flush_error_on_close(self):
# raw file
# Issue #5700: io.FileIO calls flush() after file closed
self.check_flush_error_on_close(support.TESTFN, 'wb', buffering=0)
fd = os.open(support.TESTFN, os.O_WRONLY|os.O_CREAT)
self.check_flush_error_on_close(fd, 'wb', buffering=0)
fd = os.open(support.TESTFN, os.O_WRONLY|os.O_CREAT)
self.check_flush_error_on_close(fd, 'wb', buffering=0, closefd=False)
os.close(fd)
# buffered io
self.check_flush_error_on_close(support.TESTFN, 'wb')
fd = os.open(support.TESTFN, os.O_WRONLY|os.O_CREAT)
self.check_flush_error_on_close(fd, 'wb')
fd = os.open(support.TESTFN, os.O_WRONLY|os.O_CREAT)
self.check_flush_error_on_close(fd, 'wb', closefd=False)
os.close(fd)
# text io
self.check_flush_error_on_close(support.TESTFN, 'w')
fd = os.open(support.TESTFN, os.O_WRONLY|os.O_CREAT)
self.check_flush_error_on_close(fd, 'w')
fd = os.open(support.TESTFN, os.O_WRONLY|os.O_CREAT)
self.check_flush_error_on_close(fd, 'w', closefd=False)
os.close(fd)
def test_multi_close(self):
f = self.open(support.TESTFN, "wb", buffering=0)
f.close()
f.close()
f.close()
self.assertRaises(ValueError, f.flush)
def test_RawIOBase_read(self):
# Exercise the default RawIOBase.read() implementation (which calls
# readinto() internally).
rawio = self.MockRawIOWithoutRead((b"abc", b"d", None, b"efg", None))
self.assertEqual(rawio.read(2), b"ab")
self.assertEqual(rawio.read(2), b"c")
self.assertEqual(rawio.read(2), b"d")
self.assertEqual(rawio.read(2), None)
self.assertEqual(rawio.read(2), b"ef")
self.assertEqual(rawio.read(2), b"g")
self.assertEqual(rawio.read(2), None)
self.assertEqual(rawio.read(2), b"")
def test_fileio_closefd(self):
# Issue #4841
with self.open(__file__, 'rb') as f1, \
self.open(__file__, 'rb') as f2:
fileio = self.FileIO(f1.fileno(), closefd=False)
# .__init__() must not close f1
fileio.__init__(f2.fileno(), closefd=False)
f1.readline()
# .close() must not close f2
fileio.close()
f2.readline()
def test_nonbuffered_textio(self):
with warnings.catch_warnings(record=True) as recorded:
with self.assertRaises(ValueError):
self.open(support.TESTFN, 'w', buffering=0)
support.gc_collect()
self.assertEqual(recorded, [])
def test_invalid_newline(self):
with warnings.catch_warnings(record=True) as recorded:
with self.assertRaises(ValueError):
self.open(support.TESTFN, 'w', newline='invalid')
support.gc_collect()
self.assertEqual(recorded, [])
class CIOTest(IOTest):
def test_IOBase_finalize(self):
# Issue #12149: segmentation fault on _PyIOBase_finalize when both a
# class which inherits IOBase and an object of this class are caught
# in a reference cycle and close() is already in the method cache.
class MyIO(self.IOBase):
def close(self):
pass
# create an instance to populate the method cache
MyIO()
obj = MyIO()
obj.obj = obj
wr = weakref.ref(obj)
del MyIO
del obj
support.gc_collect()
self.assertTrue(wr() is None, wr)
class PyIOTest(IOTest):
test_array_writes = unittest.skip(
"len(array.array) returns number of elements rather than bytelength"
)(IOTest.test_array_writes)
class CommonBufferedTests:
# Tests common to BufferedReader, BufferedWriter and BufferedRandom
def test_detach(self):
raw = self.MockRawIO()
buf = self.tp(raw)
self.assertIs(buf.detach(), raw)
self.assertRaises(ValueError, buf.detach)
repr(buf) # Should still work
def test_fileno(self):
rawio = self.MockRawIO()
bufio = self.tp(rawio)
self.assertEqual(42, bufio.fileno())
@unittest.skip('test having existential crisis')
def test_no_fileno(self):
# XXX will we always have fileno() function? If so, kill
# this test. Else, write it.
pass
def test_invalid_args(self):
rawio = self.MockRawIO()
bufio = self.tp(rawio)
# Invalid whence
self.assertRaises(ValueError, bufio.seek, 0, -1)
self.assertRaises(ValueError, bufio.seek, 0, 3)
def test_override_destructor(self):
tp = self.tp
record = []
class MyBufferedIO(tp):
def __del__(self):
record.append(1)
try:
f = super(MyBufferedIO, self).__del__
except AttributeError:
pass
else:
f()
def close(self):
record.append(2)
super(MyBufferedIO, self).close()
def flush(self):
record.append(3)
super(MyBufferedIO, self).flush()
rawio = self.MockRawIO()
bufio = MyBufferedIO(rawio)
writable = bufio.writable()
del bufio
support.gc_collect()
if writable:
self.assertEqual(record, [1, 2, 3])
else:
self.assertEqual(record, [1, 2])
def test_context_manager(self):
# Test usability as a context manager
rawio = self.MockRawIO()
bufio = self.tp(rawio)
def _with():
with bufio:
pass
_with()
# bufio should now be closed, and using it a second time should raise
# a ValueError.
self.assertRaises(ValueError, _with)
def test_error_through_destructor(self):
# Test that the exception state is not modified by a destructor,
# even if close() fails.
rawio = self.CloseFailureIO()
def f():
self.tp(rawio).xyzzy
with support.captured_output("stderr") as s:
self.assertRaises(AttributeError, f)
s = s.getvalue().strip()
if s:
# The destructor *may* have printed an unraisable error, check it
self.assertEqual(len(s.splitlines()), 1)
self.assertTrue(s.startswith("Exception IOError: "), s)
self.assertTrue(s.endswith(" ignored"), s)
def test_repr(self):
raw = self.MockRawIO()
b = self.tp(raw)
clsname = "%s.%s" % (self.tp.__module__, self.tp.__name__)
self.assertEqual(repr(b), "<%s>" % clsname)
raw.name = "dummy"
self.assertEqual(repr(b), "<%s name=u'dummy'>" % clsname)
raw.name = b"dummy"
self.assertEqual(repr(b), "<%s name='dummy'>" % clsname)
def test_flush_error_on_close(self):
# Test that buffered file is closed despite failed flush
# and that flush() is called before file closed.
raw = self.MockRawIO()
closed = []
def bad_flush():
closed[:] = [b.closed, raw.closed]
raise IOError()
raw.flush = bad_flush
b = self.tp(raw)
self.assertRaises(IOError, b.close) # exception not swallowed
self.assertTrue(b.closed)
self.assertTrue(raw.closed)
self.assertTrue(closed) # flush() called
self.assertFalse(closed[0]) # flush() called before file closed
self.assertFalse(closed[1])
raw.flush = lambda: None # break reference loop
def test_close_error_on_close(self):
raw = self.MockRawIO()
def bad_flush():
raise IOError('flush')
def bad_close():
raise IOError('close')
raw.close = bad_close
b = self.tp(raw)
b.flush = bad_flush
with self.assertRaises(IOError) as err: # exception not swallowed
b.close()
self.assertEqual(err.exception.args, ('close',))
self.assertFalse(b.closed)
def test_multi_close(self):
raw = self.MockRawIO()
b = self.tp(raw)
b.close()
b.close()
b.close()
self.assertRaises(ValueError, b.flush)
def test_readonly_attributes(self):
raw = self.MockRawIO()
buf = self.tp(raw)
x = self.MockRawIO()
with self.assertRaises((AttributeError, TypeError)):
buf.raw = x
class SizeofTest:
@support.cpython_only
def test_sizeof(self):
bufsize1 = 4096
bufsize2 = 8192
rawio = self.MockRawIO()
bufio = self.tp(rawio, buffer_size=bufsize1)
size = sys.getsizeof(bufio) - bufsize1
rawio = self.MockRawIO()
bufio = self.tp(rawio, buffer_size=bufsize2)
self.assertEqual(sys.getsizeof(bufio), size + bufsize2)
class BufferedReaderTest(unittest.TestCase, CommonBufferedTests):
read_mode = "rb"
def test_constructor(self):
rawio = self.MockRawIO([b"abc"])
bufio = self.tp(rawio)
bufio.__init__(rawio)
bufio.__init__(rawio, buffer_size=1024)
bufio.__init__(rawio, buffer_size=16)
self.assertEqual(b"abc", bufio.read())
self.assertRaises(ValueError, bufio.__init__, rawio, buffer_size=0)
self.assertRaises(ValueError, bufio.__init__, rawio, buffer_size=-16)
self.assertRaises(ValueError, bufio.__init__, rawio, buffer_size=-1)
rawio = self.MockRawIO([b"abc"])
bufio.__init__(rawio)
self.assertEqual(b"abc", bufio.read())
def test_uninitialized(self):
bufio = self.tp.__new__(self.tp)
del bufio
bufio = self.tp.__new__(self.tp)
self.assertRaisesRegexp((ValueError, AttributeError),
'uninitialized|has no attribute',
bufio.read, 0)
bufio.__init__(self.MockRawIO())
self.assertEqual(bufio.read(0), b'')
def test_read(self):
for arg in (None, 7):
rawio = self.MockRawIO((b"abc", b"d", b"efg"))
bufio = self.tp(rawio)
self.assertEqual(b"abcdefg", bufio.read(arg))
# Invalid args
self.assertRaises(ValueError, bufio.read, -2)
def test_read1(self):
rawio = self.MockRawIO((b"abc", b"d", b"efg"))
bufio = self.tp(rawio)
self.assertEqual(b"a", bufio.read(1))
self.assertEqual(b"b", bufio.read1(1))
self.assertEqual(rawio._reads, 1)
self.assertEqual(b"c", bufio.read1(100))
self.assertEqual(rawio._reads, 1)
self.assertEqual(b"d", bufio.read1(100))
self.assertEqual(rawio._reads, 2)
self.assertEqual(b"efg", bufio.read1(100))
self.assertEqual(rawio._reads, 3)
self.assertEqual(b"", bufio.read1(100))
self.assertEqual(rawio._reads, 4)
# Invalid args
self.assertRaises(ValueError, bufio.read1, -1)
def test_readinto(self):
rawio = self.MockRawIO((b"abc", b"d", b"efg"))
bufio = self.tp(rawio)
b = bytearray(2)
self.assertEqual(bufio.readinto(b), 2)
self.assertEqual(b, b"ab")
self.assertEqual(bufio.readinto(b), 2)
self.assertEqual(b, b"cd")
self.assertEqual(bufio.readinto(b), 2)
self.assertEqual(b, b"ef")
self.assertEqual(bufio.readinto(b), 1)
self.assertEqual(b, b"gf")
self.assertEqual(bufio.readinto(b), 0)
self.assertEqual(b, b"gf")
def test_readlines(self):
def bufio():
rawio = self.MockRawIO((b"abc\n", b"d\n", b"ef"))
return self.tp(rawio)
self.assertEqual(bufio().readlines(), [b"abc\n", b"d\n", b"ef"])
self.assertEqual(bufio().readlines(5), [b"abc\n", b"d\n"])
self.assertEqual(bufio().readlines(None), [b"abc\n", b"d\n", b"ef"])
def test_buffering(self):
data = b"abcdefghi"
dlen = len(data)
tests = [
[ 100, [ 3, 1, 4, 8 ], [ dlen, 0 ] ],
[ 100, [ 3, 3, 3], [ dlen ] ],
[ 4, [ 1, 2, 4, 2 ], [ 4, 4, 1 ] ],
]
for bufsize, buf_read_sizes, raw_read_sizes in tests:
rawio = self.MockFileIO(data)
bufio = self.tp(rawio, buffer_size=bufsize)
pos = 0
for nbytes in buf_read_sizes:
self.assertEqual(bufio.read(nbytes), data[pos:pos+nbytes])
pos += nbytes
# this is mildly implementation-dependent
self.assertEqual(rawio.read_history, raw_read_sizes)
def test_read_non_blocking(self):
# Inject some None's in there to simulate EWOULDBLOCK
rawio = self.MockRawIO((b"abc", b"d", None, b"efg", None, None, None))
bufio = self.tp(rawio)
self.assertEqual(b"abcd", bufio.read(6))
self.assertEqual(b"e", bufio.read(1))
self.assertEqual(b"fg", bufio.read())
self.assertEqual(b"", bufio.peek(1))
self.assertIsNone(bufio.read())
self.assertEqual(b"", bufio.read())
rawio = self.MockRawIO((b"a", None, None))
self.assertEqual(b"a", rawio.readall())
self.assertIsNone(rawio.readall())
def test_read_past_eof(self):
rawio = self.MockRawIO((b"abc", b"d", b"efg"))
bufio = self.tp(rawio)
self.assertEqual(b"abcdefg", bufio.read(9000))
def test_read_all(self):
rawio = self.MockRawIO((b"abc", b"d", b"efg"))
bufio = self.tp(rawio)
self.assertEqual(b"abcdefg", bufio.read())
@unittest.skipUnless(threading, 'Threading required for this test.')
@support.requires_resource('cpu')
def test_threads(self):
try:
# Write out many bytes with exactly the same number of 0's,
# 1's... 255's. This will help us check that concurrent reading
# doesn't duplicate or forget contents.
N = 1000
l = list(range(256)) * N
random.shuffle(l)
s = bytes(bytearray(l))
with self.open(support.TESTFN, "wb") as f:
f.write(s)
with self.open(support.TESTFN, self.read_mode, buffering=0) as raw:
bufio = self.tp(raw, 8)
errors = []
results = []
def f():
try:
# Intra-buffer read then buffer-flushing read
for n in cycle([1, 19]):
s = bufio.read(n)
if not s:
break
# list.append() is atomic
results.append(s)
except Exception as e:
errors.append(e)
raise
threads = [threading.Thread(target=f) for x in range(20)]
with support.start_threads(threads):
time.sleep(0.02) # yield
self.assertFalse(errors,
"the following exceptions were caught: %r" % errors)
s = b''.join(results)
for i in range(256):
c = bytes(bytearray([i]))
self.assertEqual(s.count(c), N)
finally:
support.unlink(support.TESTFN)
def test_misbehaved_io(self):
rawio = self.MisbehavedRawIO((b"abc", b"d", b"efg"))
bufio = self.tp(rawio)
self.assertRaises(IOError, bufio.seek, 0)
self.assertRaises(IOError, bufio.tell)
def test_no_extraneous_read(self):
# Issue #9550; when the raw IO object has satisfied the read request,
# we should not issue any additional reads, otherwise it may block
# (e.g. socket).
bufsize = 16
for n in (2, bufsize - 1, bufsize, bufsize + 1, bufsize * 2):
rawio = self.MockRawIO([b"x" * n])
bufio = self.tp(rawio, bufsize)
self.assertEqual(bufio.read(n), b"x" * n)
# Simple case: one raw read is enough to satisfy the request.
self.assertEqual(rawio._extraneous_reads, 0,
"failed for {}: {} != 0".format(n, rawio._extraneous_reads))
# A more complex case where two raw reads are needed to satisfy
# the request.
rawio = self.MockRawIO([b"x" * (n - 1), b"x"])
bufio = self.tp(rawio, bufsize)
self.assertEqual(bufio.read(n), b"x" * n)
self.assertEqual(rawio._extraneous_reads, 0,
"failed for {}: {} != 0".format(n, rawio._extraneous_reads))
class CBufferedReaderTest(BufferedReaderTest, SizeofTest):
tp = io.BufferedReader
def test_constructor(self):
BufferedReaderTest.test_constructor(self)
# The allocation can succeed on 32-bit builds, e.g. with more
# than 2GB RAM and a 64-bit kernel.
if sys.maxsize > 0x7FFFFFFF:
rawio = self.MockRawIO()
bufio = self.tp(rawio)
self.assertRaises((OverflowError, MemoryError, ValueError),
bufio.__init__, rawio, sys.maxsize)
def test_initialization(self):
rawio = self.MockRawIO([b"abc"])
bufio = self.tp(rawio)
self.assertRaises(ValueError, bufio.__init__, rawio, buffer_size=0)
self.assertRaises(ValueError, bufio.read)
self.assertRaises(ValueError, bufio.__init__, rawio, buffer_size=-16)
self.assertRaises(ValueError, bufio.read)
self.assertRaises(ValueError, bufio.__init__, rawio, buffer_size=-1)
self.assertRaises(ValueError, bufio.read)
def test_misbehaved_io_read(self):
rawio = self.MisbehavedRawIO((b"abc", b"d", b"efg"))
bufio = self.tp(rawio)
        # _pyio.BufferedReader seems to implement reading differently, so that
# checking this is not so easy.
self.assertRaises(IOError, bufio.read, 10)
def test_garbage_collection(self):
# C BufferedReader objects are collected.
        # The Python version has __del__, so it ends up in gc.garbage instead
rawio = self.FileIO(support.TESTFN, "w+b")
f = self.tp(rawio)
f.f = f
wr = weakref.ref(f)
del f
support.gc_collect()
self.assertTrue(wr() is None, wr)
def test_args_error(self):
# Issue #17275
with self.assertRaisesRegexp(TypeError, "BufferedReader"):
self.tp(io.BytesIO(), 1024, 1024, 1024)
class PyBufferedReaderTest(BufferedReaderTest):
tp = pyio.BufferedReader
class BufferedWriterTest(unittest.TestCase, CommonBufferedTests):
write_mode = "wb"
def test_constructor(self):
rawio = self.MockRawIO()
bufio = self.tp(rawio)
bufio.__init__(rawio)
bufio.__init__(rawio, buffer_size=1024)
bufio.__init__(rawio, buffer_size=16)
self.assertEqual(3, bufio.write(b"abc"))
bufio.flush()
self.assertRaises(ValueError, bufio.__init__, rawio, buffer_size=0)
self.assertRaises(ValueError, bufio.__init__, rawio, buffer_size=-16)
self.assertRaises(ValueError, bufio.__init__, rawio, buffer_size=-1)
bufio.__init__(rawio)
self.assertEqual(3, bufio.write(b"ghi"))
bufio.flush()
self.assertEqual(b"".join(rawio._write_stack), b"abcghi")
def test_uninitialized(self):
bufio = self.tp.__new__(self.tp)
del bufio
bufio = self.tp.__new__(self.tp)
self.assertRaisesRegexp((ValueError, AttributeError),
'uninitialized|has no attribute',
bufio.write, b'')
bufio.__init__(self.MockRawIO())
self.assertEqual(bufio.write(b''), 0)
def test_detach_flush(self):
raw = self.MockRawIO()
buf = self.tp(raw)
buf.write(b"howdy!")
self.assertFalse(raw._write_stack)
buf.detach()
self.assertEqual(raw._write_stack, [b"howdy!"])
def test_write(self):
# Write to the buffered IO but don't overflow the buffer.
writer = self.MockRawIO()
bufio = self.tp(writer, 8)
bufio.write(b"abc")
self.assertFalse(writer._write_stack)
def test_write_overflow(self):
writer = self.MockRawIO()
bufio = self.tp(writer, 8)
contents = b"abcdefghijklmnop"
for n in range(0, len(contents), 3):
bufio.write(contents[n:n+3])
flushed = b"".join(writer._write_stack)
# At least (total - 8) bytes were implicitly flushed, perhaps more
# depending on the implementation.
self.assertTrue(flushed.startswith(contents[:-8]), flushed)
def check_writes(self, intermediate_func):
# Lots of writes, test the flushed output is as expected.
contents = bytes(range(256)) * 1000
n = 0
writer = self.MockRawIO()
bufio = self.tp(writer, 13)
# Generator of write sizes: repeat each N 15 times then proceed to N+1
def gen_sizes():
for size in count(1):
for i in range(15):
yield size
sizes = gen_sizes()
while n < len(contents):
size = min(next(sizes), len(contents) - n)
self.assertEqual(bufio.write(contents[n:n+size]), size)
intermediate_func(bufio)
n += size
bufio.flush()
self.assertEqual(contents,
b"".join(writer._write_stack))
def test_writes(self):
self.check_writes(lambda bufio: None)
def test_writes_and_flushes(self):
self.check_writes(lambda bufio: bufio.flush())
def test_writes_and_seeks(self):
def _seekabs(bufio):
pos = bufio.tell()
bufio.seek(pos + 1, 0)
bufio.seek(pos - 1, 0)
bufio.seek(pos, 0)
self.check_writes(_seekabs)
def _seekrel(bufio):
pos = bufio.seek(0, 1)
bufio.seek(+1, 1)
bufio.seek(-1, 1)
bufio.seek(pos, 0)
self.check_writes(_seekrel)
def test_writes_and_truncates(self):
self.check_writes(lambda bufio: bufio.truncate(bufio.tell()))
def test_write_non_blocking(self):
raw = self.MockNonBlockWriterIO()
bufio = self.tp(raw, 8)
self.assertEqual(bufio.write(b"abcd"), 4)
self.assertEqual(bufio.write(b"efghi"), 5)
# 1 byte will be written, the rest will be buffered
raw.block_on(b"k")
self.assertEqual(bufio.write(b"jklmn"), 5)
# 8 bytes will be written, 8 will be buffered and the rest will be lost
raw.block_on(b"0")
try:
bufio.write(b"opqrwxyz0123456789")
except self.BlockingIOError as e:
written = e.characters_written
else:
self.fail("BlockingIOError should have been raised")
self.assertEqual(written, 16)
self.assertEqual(raw.pop_written(),
b"abcdefghijklmnopqrwxyz")
self.assertEqual(bufio.write(b"ABCDEFGHI"), 9)
s = raw.pop_written()
# Previously buffered bytes were flushed
self.assertTrue(s.startswith(b"01234567A"), s)
def test_write_and_rewind(self):
raw = io.BytesIO()
bufio = self.tp(raw, 4)
self.assertEqual(bufio.write(b"abcdef"), 6)
self.assertEqual(bufio.tell(), 6)
bufio.seek(0, 0)
self.assertEqual(bufio.write(b"XY"), 2)
bufio.seek(6, 0)
self.assertEqual(raw.getvalue(), b"XYcdef")
self.assertEqual(bufio.write(b"123456"), 6)
bufio.flush()
self.assertEqual(raw.getvalue(), b"XYcdef123456")
def test_flush(self):
writer = self.MockRawIO()
bufio = self.tp(writer, 8)
bufio.write(b"abc")
bufio.flush()
self.assertEqual(b"abc", writer._write_stack[0])
def test_writelines(self):
l = [b'ab', b'cd', b'ef']
writer = self.MockRawIO()
bufio = self.tp(writer, 8)
bufio.writelines(l)
bufio.flush()
self.assertEqual(b''.join(writer._write_stack), b'abcdef')
def test_writelines_userlist(self):
l = UserList([b'ab', b'cd', b'ef'])
writer = self.MockRawIO()
bufio = self.tp(writer, 8)
bufio.writelines(l)
bufio.flush()
self.assertEqual(b''.join(writer._write_stack), b'abcdef')
def test_writelines_error(self):
writer = self.MockRawIO()
bufio = self.tp(writer, 8)
self.assertRaises(TypeError, bufio.writelines, [1, 2, 3])
self.assertRaises(TypeError, bufio.writelines, None)
def test_destructor(self):
writer = self.MockRawIO()
bufio = self.tp(writer, 8)
bufio.write(b"abc")
del bufio
support.gc_collect()
self.assertEqual(b"abc", writer._write_stack[0])
def test_truncate(self):
# Truncate implicitly flushes the buffer.
with self.open(support.TESTFN, self.write_mode, buffering=0) as raw:
bufio = self.tp(raw, 8)
bufio.write(b"abcdef")
self.assertEqual(bufio.truncate(3), 3)
self.assertEqual(bufio.tell(), 6)
with self.open(support.TESTFN, "rb", buffering=0) as f:
self.assertEqual(f.read(), b"abc")
@unittest.skipUnless(threading, 'Threading required for this test.')
@support.requires_resource('cpu')
def test_threads(self):
try:
# Write out many bytes from many threads and test they were
# all flushed.
N = 1000
contents = bytes(range(256)) * N
sizes = cycle([1, 19])
n = 0
queue = deque()
while n < len(contents):
size = next(sizes)
queue.append(contents[n:n+size])
n += size
del contents
# We use a real file object because it allows us to
# exercise situations where the GIL is released before
# writing the buffer to the raw streams. This is in addition
# to concurrency issues due to switching threads in the middle
# of Python code.
with self.open(support.TESTFN, self.write_mode, buffering=0) as raw:
bufio = self.tp(raw, 8)
errors = []
def f():
try:
while True:
try:
s = queue.popleft()
except IndexError:
return
bufio.write(s)
except Exception as e:
errors.append(e)
raise
threads = [threading.Thread(target=f) for x in range(20)]
with support.start_threads(threads):
time.sleep(0.02) # yield
self.assertFalse(errors,
"the following exceptions were caught: %r" % errors)
bufio.close()
with self.open(support.TESTFN, "rb") as f:
s = f.read()
for i in range(256):
self.assertEqual(s.count(bytes([i])), N)
finally:
support.unlink(support.TESTFN)
def test_misbehaved_io(self):
rawio = self.MisbehavedRawIO()
bufio = self.tp(rawio, 5)
self.assertRaises(IOError, bufio.seek, 0)
self.assertRaises(IOError, bufio.tell)
self.assertRaises(IOError, bufio.write, b"abcdef")
def test_max_buffer_size_deprecation(self):
with support.check_warnings(("max_buffer_size is deprecated",
DeprecationWarning)):
self.tp(self.MockRawIO(), 8, 12)
def test_write_error_on_close(self):
raw = self.MockRawIO()
def bad_write(b):
raise IOError()
raw.write = bad_write
b = self.tp(raw)
b.write(b'spam')
self.assertRaises(IOError, b.close) # exception not swallowed
self.assertTrue(b.closed)
class CBufferedWriterTest(BufferedWriterTest, SizeofTest):
tp = io.BufferedWriter
def test_constructor(self):
BufferedWriterTest.test_constructor(self)
# The allocation can succeed on 32-bit builds, e.g. with more
# than 2GB RAM and a 64-bit kernel.
if sys.maxsize > 0x7FFFFFFF:
rawio = self.MockRawIO()
bufio = self.tp(rawio)
self.assertRaises((OverflowError, MemoryError, ValueError),
bufio.__init__, rawio, sys.maxsize)
def test_initialization(self):
rawio = self.MockRawIO()
bufio = self.tp(rawio)
self.assertRaises(ValueError, bufio.__init__, rawio, buffer_size=0)
self.assertRaises(ValueError, bufio.write, b"def")
self.assertRaises(ValueError, bufio.__init__, rawio, buffer_size=-16)
self.assertRaises(ValueError, bufio.write, b"def")
self.assertRaises(ValueError, bufio.__init__, rawio, buffer_size=-1)
self.assertRaises(ValueError, bufio.write, b"def")
def test_garbage_collection(self):
# C BufferedWriter objects are collected, and collecting them flushes
# all data to disk.
        # The Python version has __del__, so it ends up in gc.garbage instead
rawio = self.FileIO(support.TESTFN, "w+b")
f = self.tp(rawio)
f.write(b"123xxx")
f.x = f
wr = weakref.ref(f)
del f
support.gc_collect()
self.assertTrue(wr() is None, wr)
with self.open(support.TESTFN, "rb") as f:
self.assertEqual(f.read(), b"123xxx")
def test_args_error(self):
# Issue #17275
with self.assertRaisesRegexp(TypeError, "BufferedWriter"):
self.tp(io.BytesIO(), 1024, 1024, 1024)
class PyBufferedWriterTest(BufferedWriterTest):
tp = pyio.BufferedWriter
class BufferedRWPairTest(unittest.TestCase):
def test_constructor(self):
pair = self.tp(self.MockRawIO(), self.MockRawIO())
self.assertFalse(pair.closed)
def test_uninitialized(self):
pair = self.tp.__new__(self.tp)
del pair
pair = self.tp.__new__(self.tp)
self.assertRaisesRegexp((ValueError, AttributeError),
'uninitialized|has no attribute',
pair.read, 0)
self.assertRaisesRegexp((ValueError, AttributeError),
'uninitialized|has no attribute',
pair.write, b'')
pair.__init__(self.MockRawIO(), self.MockRawIO())
self.assertEqual(pair.read(0), b'')
self.assertEqual(pair.write(b''), 0)
def test_detach(self):
pair = self.tp(self.MockRawIO(), self.MockRawIO())
self.assertRaises(self.UnsupportedOperation, pair.detach)
def test_constructor_max_buffer_size_deprecation(self):
with support.check_warnings(("max_buffer_size is deprecated",
DeprecationWarning)):
self.tp(self.MockRawIO(), self.MockRawIO(), 8, 12)
def test_constructor_with_not_readable(self):
class NotReadable(MockRawIO):
def readable(self):
return False
self.assertRaises(IOError, self.tp, NotReadable(), self.MockRawIO())
def test_constructor_with_not_writeable(self):
class NotWriteable(MockRawIO):
def writable(self):
return False
self.assertRaises(IOError, self.tp, self.MockRawIO(), NotWriteable())
def test_read(self):
pair = self.tp(self.BytesIO(b"abcdef"), self.MockRawIO())
self.assertEqual(pair.read(3), b"abc")
self.assertEqual(pair.read(1), b"d")
self.assertEqual(pair.read(), b"ef")
pair = self.tp(self.BytesIO(b"abc"), self.MockRawIO())
self.assertEqual(pair.read(None), b"abc")
def test_readlines(self):
pair = lambda: self.tp(self.BytesIO(b"abc\ndef\nh"), self.MockRawIO())
self.assertEqual(pair().readlines(), [b"abc\n", b"def\n", b"h"])
self.assertEqual(pair().readlines(), [b"abc\n", b"def\n", b"h"])
self.assertEqual(pair().readlines(5), [b"abc\n", b"def\n"])
def test_read1(self):
# .read1() is delegated to the underlying reader object, so this test
# can be shallow.
pair = self.tp(self.BytesIO(b"abcdef"), self.MockRawIO())
self.assertEqual(pair.read1(3), b"abc")
def test_readinto(self):
pair = self.tp(self.BytesIO(b"abcdef"), self.MockRawIO())
data = bytearray(5)
self.assertEqual(pair.readinto(data), 5)
self.assertEqual(data, b"abcde")
def test_write(self):
w = self.MockRawIO()
pair = self.tp(self.MockRawIO(), w)
pair.write(b"abc")
pair.flush()
pair.write(b"def")
pair.flush()
self.assertEqual(w._write_stack, [b"abc", b"def"])
def test_peek(self):
pair = self.tp(self.BytesIO(b"abcdef"), self.MockRawIO())
self.assertTrue(pair.peek(3).startswith(b"abc"))
self.assertEqual(pair.read(3), b"abc")
def test_readable(self):
pair = self.tp(self.MockRawIO(), self.MockRawIO())
self.assertTrue(pair.readable())
def test_writeable(self):
pair = self.tp(self.MockRawIO(), self.MockRawIO())
self.assertTrue(pair.writable())
def test_seekable(self):
# BufferedRWPairs are never seekable, even if their readers and writers
# are.
pair = self.tp(self.MockRawIO(), self.MockRawIO())
self.assertFalse(pair.seekable())
# .flush() is delegated to the underlying writer object and has been
# tested in the test_write method.
def test_close_and_closed(self):
pair = self.tp(self.MockRawIO(), self.MockRawIO())
self.assertFalse(pair.closed)
pair.close()
self.assertTrue(pair.closed)
def test_reader_close_error_on_close(self):
def reader_close():
reader_non_existing
reader = self.MockRawIO()
reader.close = reader_close
writer = self.MockRawIO()
pair = self.tp(reader, writer)
with self.assertRaises(NameError) as err:
pair.close()
self.assertIn('reader_non_existing', str(err.exception))
self.assertTrue(pair.closed)
self.assertFalse(reader.closed)
self.assertTrue(writer.closed)
def test_writer_close_error_on_close(self):
def writer_close():
writer_non_existing
reader = self.MockRawIO()
writer = self.MockRawIO()
writer.close = writer_close
pair = self.tp(reader, writer)
with self.assertRaises(NameError) as err:
pair.close()
self.assertIn('writer_non_existing', str(err.exception))
self.assertFalse(pair.closed)
self.assertTrue(reader.closed)
self.assertFalse(writer.closed)
def test_reader_writer_close_error_on_close(self):
def reader_close():
reader_non_existing
def writer_close():
writer_non_existing
reader = self.MockRawIO()
reader.close = reader_close
writer = self.MockRawIO()
writer.close = writer_close
pair = self.tp(reader, writer)
with self.assertRaises(NameError) as err:
pair.close()
self.assertIn('reader_non_existing', str(err.exception))
self.assertFalse(pair.closed)
self.assertFalse(reader.closed)
self.assertFalse(writer.closed)
def test_isatty(self):
class SelectableIsAtty(MockRawIO):
def __init__(self, isatty):
MockRawIO.__init__(self)
self._isatty = isatty
def isatty(self):
return self._isatty
pair = self.tp(SelectableIsAtty(False), SelectableIsAtty(False))
self.assertFalse(pair.isatty())
pair = self.tp(SelectableIsAtty(True), SelectableIsAtty(False))
self.assertTrue(pair.isatty())
pair = self.tp(SelectableIsAtty(False), SelectableIsAtty(True))
self.assertTrue(pair.isatty())
pair = self.tp(SelectableIsAtty(True), SelectableIsAtty(True))
self.assertTrue(pair.isatty())
def test_weakref_clearing(self):
brw = self.tp(self.MockRawIO(), self.MockRawIO())
ref = weakref.ref(brw)
brw = None
ref = None # Shouldn't segfault.
class CBufferedRWPairTest(BufferedRWPairTest):
tp = io.BufferedRWPair
class PyBufferedRWPairTest(BufferedRWPairTest):
tp = pyio.BufferedRWPair
class BufferedRandomTest(BufferedReaderTest, BufferedWriterTest):
read_mode = "rb+"
write_mode = "wb+"
def test_constructor(self):
BufferedReaderTest.test_constructor(self)
BufferedWriterTest.test_constructor(self)
def test_uninitialized(self):
BufferedReaderTest.test_uninitialized(self)
BufferedWriterTest.test_uninitialized(self)
def test_read_and_write(self):
raw = self.MockRawIO((b"asdf", b"ghjk"))
rw = self.tp(raw, 8)
self.assertEqual(b"as", rw.read(2))
rw.write(b"ddd")
rw.write(b"eee")
self.assertFalse(raw._write_stack) # Buffer writes
self.assertEqual(b"ghjk", rw.read())
self.assertEqual(b"dddeee", raw._write_stack[0])
def test_seek_and_tell(self):
raw = self.BytesIO(b"asdfghjkl")
rw = self.tp(raw)
self.assertEqual(b"as", rw.read(2))
self.assertEqual(2, rw.tell())
rw.seek(0, 0)
self.assertEqual(b"asdf", rw.read(4))
rw.write(b"123f")
rw.seek(0, 0)
self.assertEqual(b"asdf123fl", rw.read())
self.assertEqual(9, rw.tell())
rw.seek(-4, 2)
self.assertEqual(5, rw.tell())
rw.seek(2, 1)
self.assertEqual(7, rw.tell())
self.assertEqual(b"fl", rw.read(11))
rw.flush()
self.assertEqual(b"asdf123fl", raw.getvalue())
self.assertRaises(TypeError, rw.seek, 0.0)
def check_flush_and_read(self, read_func):
raw = self.BytesIO(b"abcdefghi")
bufio = self.tp(raw)
self.assertEqual(b"ab", read_func(bufio, 2))
bufio.write(b"12")
self.assertEqual(b"ef", read_func(bufio, 2))
self.assertEqual(6, bufio.tell())
bufio.flush()
self.assertEqual(6, bufio.tell())
self.assertEqual(b"ghi", read_func(bufio))
raw.seek(0, 0)
raw.write(b"XYZ")
# flush() resets the read buffer
bufio.flush()
bufio.seek(0, 0)
self.assertEqual(b"XYZ", read_func(bufio, 3))
def test_flush_and_read(self):
self.check_flush_and_read(lambda bufio, *args: bufio.read(*args))
def test_flush_and_readinto(self):
def _readinto(bufio, n=-1):
b = bytearray(n if n >= 0 else 9999)
n = bufio.readinto(b)
return bytes(b[:n])
self.check_flush_and_read(_readinto)
def test_flush_and_peek(self):
def _peek(bufio, n=-1):
# This relies on the fact that the buffer can contain the whole
# raw stream, otherwise peek() can return less.
b = bufio.peek(n)
if n != -1:
b = b[:n]
bufio.seek(len(b), 1)
return b
self.check_flush_and_read(_peek)
def test_flush_and_write(self):
raw = self.BytesIO(b"abcdefghi")
bufio = self.tp(raw)
bufio.write(b"123")
bufio.flush()
bufio.write(b"45")
bufio.flush()
bufio.seek(0, 0)
self.assertEqual(b"12345fghi", raw.getvalue())
self.assertEqual(b"12345fghi", bufio.read())
def test_threads(self):
BufferedReaderTest.test_threads(self)
BufferedWriterTest.test_threads(self)
def test_writes_and_peek(self):
def _peek(bufio):
bufio.peek(1)
self.check_writes(_peek)
def _peek(bufio):
pos = bufio.tell()
bufio.seek(-1, 1)
bufio.peek(1)
bufio.seek(pos, 0)
self.check_writes(_peek)
def test_writes_and_reads(self):
def _read(bufio):
bufio.seek(-1, 1)
bufio.read(1)
self.check_writes(_read)
def test_writes_and_read1s(self):
def _read1(bufio):
bufio.seek(-1, 1)
bufio.read1(1)
self.check_writes(_read1)
def test_writes_and_readintos(self):
def _read(bufio):
bufio.seek(-1, 1)
bufio.readinto(bytearray(1))
self.check_writes(_read)
def test_write_after_readahead(self):
# Issue #6629: writing after the buffer was filled by readahead should
# first rewind the raw stream.
for overwrite_size in [1, 5]:
raw = self.BytesIO(b"A" * 10)
bufio = self.tp(raw, 4)
# Trigger readahead
self.assertEqual(bufio.read(1), b"A")
self.assertEqual(bufio.tell(), 1)
            # Overwriting should rewind the raw stream if it needs to
bufio.write(b"B" * overwrite_size)
self.assertEqual(bufio.tell(), overwrite_size + 1)
# If the write size was smaller than the buffer size, flush() and
# check that rewind happens.
bufio.flush()
self.assertEqual(bufio.tell(), overwrite_size + 1)
s = raw.getvalue()
self.assertEqual(s,
b"A" + b"B" * overwrite_size + b"A" * (9 - overwrite_size))
def test_write_rewind_write(self):
# Various combinations of reading / writing / seeking backwards / writing again
def mutate(bufio, pos1, pos2):
assert pos2 >= pos1
# Fill the buffer
bufio.seek(pos1)
bufio.read(pos2 - pos1)
bufio.write(b'\x02')
# This writes earlier than the previous write, but still inside
# the buffer.
bufio.seek(pos1)
bufio.write(b'\x01')
b = b"\x80\x81\x82\x83\x84"
for i in range(0, len(b)):
for j in range(i, len(b)):
raw = self.BytesIO(b)
bufio = self.tp(raw, 100)
mutate(bufio, i, j)
bufio.flush()
expected = bytearray(b)
expected[j] = 2
expected[i] = 1
self.assertEqual(raw.getvalue(), expected,
"failed result for i=%d, j=%d" % (i, j))
def test_truncate_after_read_or_write(self):
raw = self.BytesIO(b"A" * 10)
bufio = self.tp(raw, 100)
self.assertEqual(bufio.read(2), b"AA") # the read buffer gets filled
self.assertEqual(bufio.truncate(), 2)
self.assertEqual(bufio.write(b"BB"), 2) # the write buffer increases
self.assertEqual(bufio.truncate(), 4)
def test_misbehaved_io(self):
BufferedReaderTest.test_misbehaved_io(self)
BufferedWriterTest.test_misbehaved_io(self)
def test_interleaved_read_write(self):
# Test for issue #12213
with self.BytesIO(b'abcdefgh') as raw:
with self.tp(raw, 100) as f:
f.write(b"1")
self.assertEqual(f.read(1), b'b')
f.write(b'2')
self.assertEqual(f.read1(1), b'd')
f.write(b'3')
buf = bytearray(1)
f.readinto(buf)
self.assertEqual(buf, b'f')
f.write(b'4')
self.assertEqual(f.peek(1), b'h')
f.flush()
self.assertEqual(raw.getvalue(), b'1b2d3f4h')
with self.BytesIO(b'abc') as raw:
with self.tp(raw, 100) as f:
self.assertEqual(f.read(1), b'a')
f.write(b"2")
self.assertEqual(f.read(1), b'c')
f.flush()
self.assertEqual(raw.getvalue(), b'a2c')
def test_interleaved_readline_write(self):
with self.BytesIO(b'ab\ncdef\ng\n') as raw:
with self.tp(raw) as f:
f.write(b'1')
self.assertEqual(f.readline(), b'b\n')
f.write(b'2')
self.assertEqual(f.readline(), b'def\n')
f.write(b'3')
self.assertEqual(f.readline(), b'\n')
f.flush()
self.assertEqual(raw.getvalue(), b'1b\n2def\n3\n')
class CBufferedRandomTest(CBufferedReaderTest, CBufferedWriterTest,
BufferedRandomTest, SizeofTest):
tp = io.BufferedRandom
def test_constructor(self):
BufferedRandomTest.test_constructor(self)
# The allocation can succeed on 32-bit builds, e.g. with more
# than 2GB RAM and a 64-bit kernel.
if sys.maxsize > 0x7FFFFFFF:
rawio = self.MockRawIO()
bufio = self.tp(rawio)
self.assertRaises((OverflowError, MemoryError, ValueError),
bufio.__init__, rawio, sys.maxsize)
def test_garbage_collection(self):
CBufferedReaderTest.test_garbage_collection(self)
CBufferedWriterTest.test_garbage_collection(self)
def test_args_error(self):
# Issue #17275
with self.assertRaisesRegexp(TypeError, "BufferedRandom"):
self.tp(io.BytesIO(), 1024, 1024, 1024)
class PyBufferedRandomTest(BufferedRandomTest):
tp = pyio.BufferedRandom
# To fully exercise seek/tell, the StatefulIncrementalDecoder has these
# properties:
# - A single output character can correspond to many bytes of input.
# - The number of input bytes to complete the character can be
# undetermined until the last input byte is received.
# - The number of input bytes can vary depending on previous input.
# - A single input byte can correspond to many characters of output.
# - The number of output characters can be undetermined until the
# last input byte is received.
# - The number of output characters can vary depending on previous input.
class StatefulIncrementalDecoder(codecs.IncrementalDecoder):
"""
For testing seek/tell behavior with a stateful, buffering decoder.
Input is a sequence of words. Words may be fixed-length (length set
by input) or variable-length (period-terminated). In variable-length
mode, extra periods are ignored. Possible words are:
- 'i' followed by a number sets the input length, I (maximum 99).
When I is set to 0, words are space-terminated.
- 'o' followed by a number sets the output length, O (maximum 99).
- Any other word is converted into a word followed by a period on
the output. The output word consists of the input word truncated
or padded out with hyphens to make its length equal to O. If O
is 0, the word is output verbatim without truncating or padding.
I and O are initially set to 1. When I changes, any buffered input is
re-scanned according to the new I. EOF also terminates the last word.
"""
def __init__(self, errors='strict'):
codecs.IncrementalDecoder.__init__(self, errors)
self.reset()
def __repr__(self):
return '<SID %x>' % id(self)
def reset(self):
self.i = 1
self.o = 1
self.buffer = bytearray()
def getstate(self):
i, o = self.i ^ 1, self.o ^ 1 # so that flags = 0 after reset()
return bytes(self.buffer), i*100 + o
def setstate(self, state):
buffer, io = state
self.buffer = bytearray(buffer)
i, o = divmod(io, 100)
self.i, self.o = i ^ 1, o ^ 1
def decode(self, input, final=False):
output = ''
for b in input:
if self.i == 0: # variable-length, terminated with period
if b == '.':
if self.buffer:
output += self.process_word()
else:
self.buffer.append(b)
else: # fixed-length, terminate after self.i bytes
self.buffer.append(b)
if len(self.buffer) == self.i:
output += self.process_word()
if final and self.buffer: # EOF terminates the last word
output += self.process_word()
return output
def process_word(self):
output = ''
if self.buffer[0] == ord('i'):
self.i = min(99, int(self.buffer[1:] or 0)) # set input length
elif self.buffer[0] == ord('o'):
self.o = min(99, int(self.buffer[1:] or 0)) # set output length
else:
output = self.buffer.decode('ascii')
if len(output) < self.o:
output += '-'*self.o # pad out with hyphens
if self.o:
output = output[:self.o] # truncate to output length
output += '.'
self.buffer = bytearray()
return output
codecEnabled = False
@classmethod
def lookupTestDecoder(cls, name):
if cls.codecEnabled and name == 'test_decoder':
latin1 = codecs.lookup('latin-1')
return codecs.CodecInfo(
name='test_decoder', encode=latin1.encode, decode=None,
incrementalencoder=None,
streamreader=None, streamwriter=None,
incrementaldecoder=cls)
# Register the previous decoder for testing.
# Disabled by default, tests will enable it.
codecs.register(StatefulIncrementalDecoder.lookupTestDecoder)
class StatefulIncrementalDecoderTest(unittest.TestCase):
"""
Make sure the StatefulIncrementalDecoder actually works.
"""
test_cases = [
# I=1, O=1 (fixed-length input == fixed-length output)
(b'abcd', False, 'a.b.c.d.'),
# I=0, O=0 (variable-length input, variable-length output)
(b'oiabcd', True, 'abcd.'),
# I=0, O=0 (should ignore extra periods)
(b'oi...abcd...', True, 'abcd.'),
# I=0, O=6 (variable-length input, fixed-length output)
(b'i.o6.x.xyz.toolongtofit.', False, 'x-----.xyz---.toolon.'),
# I=2, O=6 (fixed-length input < fixed-length output)
(b'i.i2.o6xyz', True, 'xy----.z-----.'),
# I=6, O=3 (fixed-length input > fixed-length output)
(b'i.o3.i6.abcdefghijklmnop', True, 'abc.ghi.mno.'),
# I=0, then 3; O=29, then 15 (with longer output)
(b'i.o29.a.b.cde.o15.abcdefghijabcdefghij.i3.a.b.c.d.ei00k.l.m', True,
'a----------------------------.' +
'b----------------------------.' +
'cde--------------------------.' +
'abcdefghijabcde.' +
'a.b------------.' +
'.c.------------.' +
'd.e------------.' +
'k--------------.' +
'l--------------.' +
'm--------------.')
]
def test_decoder(self):
# Try a few one-shot test cases.
for input, eof, output in self.test_cases:
d = StatefulIncrementalDecoder()
self.assertEqual(d.decode(input, eof), output)
# Also test an unfinished decode, followed by forcing EOF.
d = StatefulIncrementalDecoder()
self.assertEqual(d.decode(b'oiabcd'), '')
self.assertEqual(d.decode(b'', 1), 'abcd.')
class TextIOWrapperTest(unittest.TestCase):
def setUp(self):
self.testdata = b"AAA\r\nBBB\rCCC\r\nDDD\nEEE\r\n"
self.normalized = b"AAA\nBBB\nCCC\nDDD\nEEE\n".decode("ascii")
support.unlink(support.TESTFN)
def tearDown(self):
support.unlink(support.TESTFN)
def test_constructor(self):
r = self.BytesIO(b"\xc3\xa9\n\n")
b = self.BufferedReader(r, 1000)
t = self.TextIOWrapper(b)
t.__init__(b, encoding="latin1", newline="\r\n")
self.assertEqual(t.encoding, "latin1")
self.assertEqual(t.line_buffering, False)
t.__init__(b, encoding="utf8", line_buffering=True)
self.assertEqual(t.encoding, "utf8")
self.assertEqual(t.line_buffering, True)
self.assertEqual("\xe9\n", t.readline())
self.assertRaises(TypeError, t.__init__, b, newline=42)
self.assertRaises(ValueError, t.__init__, b, newline='xyzzy')
def test_uninitialized(self):
t = self.TextIOWrapper.__new__(self.TextIOWrapper)
del t
t = self.TextIOWrapper.__new__(self.TextIOWrapper)
self.assertRaises(Exception, repr, t)
self.assertRaisesRegexp((ValueError, AttributeError),
'uninitialized|has no attribute',
t.read, 0)
t.__init__(self.MockRawIO())
self.assertEqual(t.read(0), u'')
def test_detach(self):
r = self.BytesIO()
b = self.BufferedWriter(r)
t = self.TextIOWrapper(b)
self.assertIs(t.detach(), b)
t = self.TextIOWrapper(b, encoding="ascii")
t.write("howdy")
self.assertFalse(r.getvalue())
t.detach()
self.assertEqual(r.getvalue(), b"howdy")
self.assertRaises(ValueError, t.detach)
# Operations independent of the detached stream should still work
repr(t)
self.assertEqual(t.encoding, "ascii")
self.assertEqual(t.errors, "strict")
self.assertFalse(t.line_buffering)
def test_repr(self):
raw = self.BytesIO("hello".encode("utf-8"))
b = self.BufferedReader(raw)
t = self.TextIOWrapper(b, encoding="utf-8")
modname = self.TextIOWrapper.__module__
self.assertEqual(repr(t),
"<%s.TextIOWrapper encoding='utf-8'>" % modname)
raw.name = "dummy"
self.assertEqual(repr(t),
"<%s.TextIOWrapper name=u'dummy' encoding='utf-8'>" % modname)
raw.name = b"dummy"
self.assertEqual(repr(t),
"<%s.TextIOWrapper name='dummy' encoding='utf-8'>" % modname)
t.buffer.detach()
repr(t) # Should not raise an exception
def test_line_buffering(self):
r = self.BytesIO()
b = self.BufferedWriter(r, 1000)
t = self.TextIOWrapper(b, newline="\n", line_buffering=True)
t.write("X")
self.assertEqual(r.getvalue(), b"") # No flush happened
t.write("Y\nZ")
self.assertEqual(r.getvalue(), b"XY\nZ") # All got flushed
t.write("A\rB")
self.assertEqual(r.getvalue(), b"XY\nZA\rB")
def test_encoding(self):
# Check the encoding attribute is always set, and valid
b = self.BytesIO()
t = self.TextIOWrapper(b, encoding="utf8")
self.assertEqual(t.encoding, "utf8")
t = self.TextIOWrapper(b)
self.assertTrue(t.encoding is not None)
codecs.lookup(t.encoding)
def test_encoding_errors_reading(self):
# (1) default
b = self.BytesIO(b"abc\n\xff\n")
t = self.TextIOWrapper(b, encoding="ascii")
self.assertRaises(UnicodeError, t.read)
# (2) explicit strict
b = self.BytesIO(b"abc\n\xff\n")
t = self.TextIOWrapper(b, encoding="ascii", errors="strict")
self.assertRaises(UnicodeError, t.read)
# (3) ignore
b = self.BytesIO(b"abc\n\xff\n")
t = self.TextIOWrapper(b, encoding="ascii", errors="ignore")
self.assertEqual(t.read(), "abc\n\n")
# (4) replace
b = self.BytesIO(b"abc\n\xff\n")
t = self.TextIOWrapper(b, encoding="ascii", errors="replace")
self.assertEqual(t.read(), "abc\n\ufffd\n")
def test_encoding_errors_writing(self):
# (1) default
b = self.BytesIO()
t = self.TextIOWrapper(b, encoding="ascii")
self.assertRaises(UnicodeError, t.write, "\xff")
# (2) explicit strict
b = self.BytesIO()
t = self.TextIOWrapper(b, encoding="ascii", errors="strict")
self.assertRaises(UnicodeError, t.write, "\xff")
# (3) ignore
b = self.BytesIO()
t = self.TextIOWrapper(b, encoding="ascii", errors="ignore",
newline="\n")
t.write("abc\xffdef\n")
t.flush()
self.assertEqual(b.getvalue(), b"abcdef\n")
# (4) replace
b = self.BytesIO()
t = self.TextIOWrapper(b, encoding="ascii", errors="replace",
newline="\n")
t.write("abc\xffdef\n")
t.flush()
self.assertEqual(b.getvalue(), b"abc?def\n")
def test_newlines(self):
input_lines = [ "unix\n", "windows\r\n", "os9\r", "last\n", "nonl" ]
tests = [
[ None, [ 'unix\n', 'windows\n', 'os9\n', 'last\n', 'nonl' ] ],
[ '', input_lines ],
[ '\n', [ "unix\n", "windows\r\n", "os9\rlast\n", "nonl" ] ],
[ '\r\n', [ "unix\nwindows\r\n", "os9\rlast\nnonl" ] ],
[ '\r', [ "unix\nwindows\r", "\nos9\r", "last\nnonl" ] ],
]
encodings = (
'utf-8', 'latin-1',
'utf-16', 'utf-16-le', 'utf-16-be',
'utf-32', 'utf-32-le', 'utf-32-be',
)
# Try a range of buffer sizes to test the case where \r is the last
# character in TextIOWrapper._pending_line.
for encoding in encodings:
# XXX: str.encode() should return bytes
data = bytes(''.join(input_lines).encode(encoding))
for do_reads in (False, True):
for bufsize in range(1, 10):
for newline, exp_lines in tests:
bufio = self.BufferedReader(self.BytesIO(data), bufsize)
textio = self.TextIOWrapper(bufio, newline=newline,
encoding=encoding)
if do_reads:
got_lines = []
while True:
c2 = textio.read(2)
if c2 == '':
break
self.assertEqual(len(c2), 2)
got_lines.append(c2 + textio.readline())
else:
got_lines = list(textio)
for got_line, exp_line in zip(got_lines, exp_lines):
self.assertEqual(got_line, exp_line)
self.assertEqual(len(got_lines), len(exp_lines))
def test_newlines_input(self):
testdata = b"AAA\nBB\x00B\nCCC\rDDD\rEEE\r\nFFF\r\nGGG"
normalized = testdata.replace(b"\r\n", b"\n").replace(b"\r", b"\n")
for newline, expected in [
(None, normalized.decode("ascii").splitlines(True)),
("", testdata.decode("ascii").splitlines(True)),
("\n", ["AAA\n", "BB\x00B\n", "CCC\rDDD\rEEE\r\n", "FFF\r\n", "GGG"]),
("\r\n", ["AAA\nBB\x00B\nCCC\rDDD\rEEE\r\n", "FFF\r\n", "GGG"]),
("\r", ["AAA\nBB\x00B\nCCC\r", "DDD\r", "EEE\r", "\nFFF\r", "\nGGG"]),
]:
buf = self.BytesIO(testdata)
txt = self.TextIOWrapper(buf, encoding="ascii", newline=newline)
self.assertEqual(txt.readlines(), expected)
txt.seek(0)
self.assertEqual(txt.read(), "".join(expected))
def test_newlines_output(self):
testdict = {
"": b"AAA\nBBB\nCCC\nX\rY\r\nZ",
"\n": b"AAA\nBBB\nCCC\nX\rY\r\nZ",
"\r": b"AAA\rBBB\rCCC\rX\rY\r\rZ",
"\r\n": b"AAA\r\nBBB\r\nCCC\r\nX\rY\r\r\nZ",
}
tests = [(None, testdict[os.linesep])] + sorted(testdict.items())
for newline, expected in tests:
buf = self.BytesIO()
txt = self.TextIOWrapper(buf, encoding="ascii", newline=newline)
txt.write("AAA\nB")
txt.write("BB\nCCC\n")
txt.write("X\rY\r\nZ")
txt.flush()
self.assertEqual(buf.closed, False)
self.assertEqual(buf.getvalue(), expected)
def test_destructor(self):
l = []
base = self.BytesIO
class MyBytesIO(base):
def close(self):
l.append(self.getvalue())
base.close(self)
b = MyBytesIO()
t = self.TextIOWrapper(b, encoding="ascii")
t.write("abc")
del t
support.gc_collect()
self.assertEqual([b"abc"], l)
def test_override_destructor(self):
record = []
class MyTextIO(self.TextIOWrapper):
def __del__(self):
record.append(1)
try:
f = super(MyTextIO, self).__del__
except AttributeError:
pass
else:
f()
def close(self):
record.append(2)
super(MyTextIO, self).close()
def flush(self):
record.append(3)
super(MyTextIO, self).flush()
b = self.BytesIO()
t = MyTextIO(b, encoding="ascii")
del t
support.gc_collect()
self.assertEqual(record, [1, 2, 3])
def test_error_through_destructor(self):
# Test that the exception state is not modified by a destructor,
# even if close() fails.
rawio = self.CloseFailureIO()
def f():
self.TextIOWrapper(rawio).xyzzy
with support.captured_output("stderr") as s:
self.assertRaises(AttributeError, f)
s = s.getvalue().strip()
if s:
# The destructor *may* have printed an unraisable error, check it
self.assertEqual(len(s.splitlines()), 1)
self.assertTrue(s.startswith("Exception IOError: "), s)
self.assertTrue(s.endswith(" ignored"), s)
# Systematic tests of the text I/O API
def test_basic_io(self):
for chunksize in (1, 2, 3, 4, 5, 15, 16, 17, 31, 32, 33, 63, 64, 65):
for enc in "ascii", "latin1", "utf8" :# , "utf-16-be", "utf-16-le":
f = self.open(support.TESTFN, "w+", encoding=enc)
f._CHUNK_SIZE = chunksize
self.assertEqual(f.write("abc"), 3)
f.close()
f = self.open(support.TESTFN, "r+", encoding=enc)
f._CHUNK_SIZE = chunksize
self.assertEqual(f.tell(), 0)
self.assertEqual(f.read(), "abc")
cookie = f.tell()
self.assertEqual(f.seek(0), 0)
self.assertEqual(f.read(None), "abc")
f.seek(0)
self.assertEqual(f.read(2), "ab")
self.assertEqual(f.read(1), "c")
self.assertEqual(f.read(1), "")
self.assertEqual(f.read(), "")
self.assertEqual(f.tell(), cookie)
self.assertEqual(f.seek(0), 0)
self.assertEqual(f.seek(0, 2), cookie)
self.assertEqual(f.write("def"), 3)
self.assertEqual(f.seek(cookie), cookie)
self.assertEqual(f.read(), "def")
if enc.startswith("utf"):
self.multi_line_test(f, enc)
f.close()
def multi_line_test(self, f, enc):
f.seek(0)
f.truncate()
sample = "s\xff\u0fff\uffff"
wlines = []
for size in (0, 1, 2, 3, 4, 5, 30, 31, 32, 33, 62, 63, 64, 65, 1000):
chars = []
for i in range(size):
chars.append(sample[i % len(sample)])
line = "".join(chars) + "\n"
wlines.append((f.tell(), line))
f.write(line)
f.seek(0)
rlines = []
while True:
pos = f.tell()
line = f.readline()
if not line:
break
rlines.append((pos, line))
self.assertEqual(rlines, wlines)
def test_telling(self):
f = self.open(support.TESTFN, "w+", encoding="utf8")
p0 = f.tell()
f.write("\xff\n")
p1 = f.tell()
f.write("\xff\n")
p2 = f.tell()
f.seek(0)
self.assertEqual(f.tell(), p0)
self.assertEqual(f.readline(), "\xff\n")
self.assertEqual(f.tell(), p1)
self.assertEqual(f.readline(), "\xff\n")
self.assertEqual(f.tell(), p2)
f.seek(0)
for line in f:
self.assertEqual(line, "\xff\n")
self.assertRaises(IOError, f.tell)
self.assertEqual(f.tell(), p2)
f.close()
def test_seeking(self):
chunk_size = _default_chunk_size()
prefix_size = chunk_size - 2
u_prefix = "a" * prefix_size
prefix = bytes(u_prefix.encode("utf-8"))
self.assertEqual(len(u_prefix), len(prefix))
u_suffix = "\u8888\n"
suffix = bytes(u_suffix.encode("utf-8"))
line = prefix + suffix
f = self.open(support.TESTFN, "wb")
f.write(line*2)
f.close()
f = self.open(support.TESTFN, "r", encoding="utf-8")
s = f.read(prefix_size)
self.assertEqual(s, prefix.decode("ascii"))
self.assertEqual(f.tell(), prefix_size)
self.assertEqual(f.readline(), u_suffix)
def test_seeking_too(self):
# Regression test for a specific bug
data = b'\xe0\xbf\xbf\n'
f = self.open(support.TESTFN, "wb")
f.write(data)
f.close()
f = self.open(support.TESTFN, "r", encoding="utf-8")
f._CHUNK_SIZE # Just test that it exists
f._CHUNK_SIZE = 2
f.readline()
f.tell()
def test_seek_and_tell(self):
        # Test seek/tell using the StatefulIncrementalDecoder.
# Make test faster by doing smaller seeks
CHUNK_SIZE = 128
def test_seek_and_tell_with_data(data, min_pos=0):
"""Tell/seek to various points within a data stream and ensure
that the decoded data returned by read() is consistent."""
f = self.open(support.TESTFN, 'wb')
f.write(data)
f.close()
f = self.open(support.TESTFN, encoding='test_decoder')
f._CHUNK_SIZE = CHUNK_SIZE
decoded = f.read()
f.close()
for i in range(min_pos, len(decoded) + 1): # seek positions
for j in [1, 5, len(decoded) - i]: # read lengths
f = self.open(support.TESTFN, encoding='test_decoder')
self.assertEqual(f.read(i), decoded[:i])
cookie = f.tell()
self.assertEqual(f.read(j), decoded[i:i + j])
f.seek(cookie)
self.assertEqual(f.read(), decoded[i:])
f.close()
# Enable the test decoder.
StatefulIncrementalDecoder.codecEnabled = 1
# Run the tests.
try:
# Try each test case.
for input, _, _ in StatefulIncrementalDecoderTest.test_cases:
test_seek_and_tell_with_data(input)
# Position each test case so that it crosses a chunk boundary.
for input, _, _ in StatefulIncrementalDecoderTest.test_cases:
offset = CHUNK_SIZE - len(input)//2
prefix = b'.'*offset
# Don't bother seeking into the prefix (takes too long).
min_pos = offset*2
test_seek_and_tell_with_data(prefix + input, min_pos)
# Ensure our test decoder won't interfere with subsequent tests.
finally:
StatefulIncrementalDecoder.codecEnabled = 0
def test_encoded_writes(self):
data = "1234567890"
tests = ("utf-16",
"utf-16-le",
"utf-16-be",
"utf-32",
"utf-32-le",
"utf-32-be")
for encoding in tests:
buf = self.BytesIO()
f = self.TextIOWrapper(buf, encoding=encoding)
# Check if the BOM is written only once (see issue1753).
f.write(data)
f.write(data)
f.seek(0)
self.assertEqual(f.read(), data * 2)
f.seek(0)
self.assertEqual(f.read(), data * 2)
self.assertEqual(buf.getvalue(), (data * 2).encode(encoding))
def test_unreadable(self):
class UnReadable(self.BytesIO):
def readable(self):
return False
txt = self.TextIOWrapper(UnReadable())
self.assertRaises(IOError, txt.read)
def test_read_one_by_one(self):
txt = self.TextIOWrapper(self.BytesIO(b"AA\r\nBB"))
reads = ""
while True:
c = txt.read(1)
if not c:
break
reads += c
self.assertEqual(reads, "AA\nBB")
def test_readlines(self):
txt = self.TextIOWrapper(self.BytesIO(b"AA\nBB\nCC"))
self.assertEqual(txt.readlines(), ["AA\n", "BB\n", "CC"])
txt.seek(0)
self.assertEqual(txt.readlines(None), ["AA\n", "BB\n", "CC"])
txt.seek(0)
self.assertEqual(txt.readlines(5), ["AA\n", "BB\n"])
# read in amounts equal to TextIOWrapper._CHUNK_SIZE which is 128.
def test_read_by_chunk(self):
# make sure "\r\n" straddles 128 char boundary.
txt = self.TextIOWrapper(self.BytesIO(b"A" * 127 + b"\r\nB"))
reads = ""
while True:
c = txt.read(128)
if not c:
break
reads += c
self.assertEqual(reads, "A"*127+"\nB")
def test_writelines(self):
l = ['ab', 'cd', 'ef']
buf = self.BytesIO()
txt = self.TextIOWrapper(buf)
txt.writelines(l)
txt.flush()
self.assertEqual(buf.getvalue(), b'abcdef')
def test_writelines_userlist(self):
l = UserList(['ab', 'cd', 'ef'])
buf = self.BytesIO()
txt = self.TextIOWrapper(buf)
txt.writelines(l)
txt.flush()
self.assertEqual(buf.getvalue(), b'abcdef')
def test_writelines_error(self):
txt = self.TextIOWrapper(self.BytesIO())
self.assertRaises(TypeError, txt.writelines, [1, 2, 3])
self.assertRaises(TypeError, txt.writelines, None)
self.assertRaises(TypeError, txt.writelines, b'abc')
def test_issue1395_1(self):
txt = self.TextIOWrapper(self.BytesIO(self.testdata), encoding="ascii")
# read one char at a time
reads = ""
while True:
c = txt.read(1)
if not c:
break
reads += c
self.assertEqual(reads, self.normalized)
def test_issue1395_2(self):
txt = self.TextIOWrapper(self.BytesIO(self.testdata), encoding="ascii")
txt._CHUNK_SIZE = 4
reads = ""
while True:
c = txt.read(4)
if not c:
break
reads += c
self.assertEqual(reads, self.normalized)
def test_issue1395_3(self):
txt = self.TextIOWrapper(self.BytesIO(self.testdata), encoding="ascii")
txt._CHUNK_SIZE = 4
reads = txt.read(4)
reads += txt.read(4)
reads += txt.readline()
reads += txt.readline()
reads += txt.readline()
self.assertEqual(reads, self.normalized)
def test_issue1395_4(self):
txt = self.TextIOWrapper(self.BytesIO(self.testdata), encoding="ascii")
txt._CHUNK_SIZE = 4
reads = txt.read(4)
reads += txt.read()
self.assertEqual(reads, self.normalized)
def test_issue1395_5(self):
txt = self.TextIOWrapper(self.BytesIO(self.testdata), encoding="ascii")
txt._CHUNK_SIZE = 4
reads = txt.read(4)
pos = txt.tell()
txt.seek(0)
txt.seek(pos)
self.assertEqual(txt.read(4), "BBB\n")
def test_issue2282(self):
buffer = self.BytesIO(self.testdata)
txt = self.TextIOWrapper(buffer, encoding="ascii")
self.assertEqual(buffer.seekable(), txt.seekable())
def test_append_bom(self):
# The BOM is not written again when appending to a non-empty file
filename = support.TESTFN
for charset in ('utf-8-sig', 'utf-16', 'utf-32'):
with self.open(filename, 'w', encoding=charset) as f:
f.write('aaa')
pos = f.tell()
with self.open(filename, 'rb') as f:
self.assertEqual(f.read(), 'aaa'.encode(charset))
with self.open(filename, 'a', encoding=charset) as f:
f.write('xxx')
with self.open(filename, 'rb') as f:
self.assertEqual(f.read(), 'aaaxxx'.encode(charset))
def test_seek_bom(self):
# Same test, but when seeking manually
filename = support.TESTFN
for charset in ('utf-8-sig', 'utf-16', 'utf-32'):
with self.open(filename, 'w', encoding=charset) as f:
f.write('aaa')
pos = f.tell()
with self.open(filename, 'r+', encoding=charset) as f:
f.seek(pos)
f.write('zzz')
f.seek(0)
f.write('bbb')
with self.open(filename, 'rb') as f:
self.assertEqual(f.read(), 'bbbzzz'.encode(charset))
def test_errors_property(self):
with self.open(support.TESTFN, "w") as f:
self.assertEqual(f.errors, "strict")
with self.open(support.TESTFN, "w", errors="replace") as f:
self.assertEqual(f.errors, "replace")
@unittest.skipUnless(threading, 'Threading required for this test.')
def test_threads_write(self):
# Issue6750: concurrent writes could duplicate data
event = threading.Event()
with self.open(support.TESTFN, "w", buffering=1) as f:
def run(n):
text = "Thread%03d\n" % n
event.wait()
f.write(text)
threads = [threading.Thread(target=run, args=(x,))
for x in range(20)]
with support.start_threads(threads, event.set):
time.sleep(0.02)
with self.open(support.TESTFN) as f:
content = f.read()
for n in range(20):
self.assertEqual(content.count("Thread%03d\n" % n), 1)
def test_flush_error_on_close(self):
# Test that text file is closed despite failed flush
# and that flush() is called before file closed.
txt = self.TextIOWrapper(self.BytesIO(self.testdata), encoding="ascii")
closed = []
def bad_flush():
closed[:] = [txt.closed, txt.buffer.closed]
raise IOError()
txt.flush = bad_flush
self.assertRaises(IOError, txt.close) # exception not swallowed
self.assertTrue(txt.closed)
self.assertTrue(txt.buffer.closed)
self.assertTrue(closed) # flush() called
self.assertFalse(closed[0]) # flush() called before file closed
self.assertFalse(closed[1])
txt.flush = lambda: None # break reference loop
def test_multi_close(self):
txt = self.TextIOWrapper(self.BytesIO(self.testdata), encoding="ascii")
txt.close()
txt.close()
txt.close()
self.assertRaises(ValueError, txt.flush)
def test_readonly_attributes(self):
txt = self.TextIOWrapper(self.BytesIO(self.testdata), encoding="ascii")
buf = self.BytesIO(self.testdata)
with self.assertRaises((AttributeError, TypeError)):
txt.buffer = buf
def test_read_nonbytes(self):
# Issue #17106
# Crash when underlying read() returns non-bytes
        class NonbytesStream(self.StringIO):
            read1 = self.StringIO.read
t = self.TextIOWrapper(NonbytesStream('a'))
with self.maybeRaises(TypeError):
t.read(1)
t = self.TextIOWrapper(NonbytesStream('a'))
with self.maybeRaises(TypeError):
t.readline()
t = self.TextIOWrapper(NonbytesStream('a'))
self.assertEqual(t.read(), u'a')
def test_illegal_decoder(self):
# Issue #17106
# Crash when decoder returns non-string
t = self.TextIOWrapper(self.BytesIO(b'aaaaaa'), newline='\n',
encoding='quopri_codec')
with self.maybeRaises(TypeError):
t.read(1)
t = self.TextIOWrapper(self.BytesIO(b'aaaaaa'), newline='\n',
encoding='quopri_codec')
with self.maybeRaises(TypeError):
t.readline()
t = self.TextIOWrapper(self.BytesIO(b'aaaaaa'), newline='\n',
encoding='quopri_codec')
with self.maybeRaises(TypeError):
t.read()
class CTextIOWrapperTest(TextIOWrapperTest):
def test_initialization(self):
r = self.BytesIO(b"\xc3\xa9\n\n")
b = self.BufferedReader(r, 1000)
t = self.TextIOWrapper(b)
self.assertRaises(TypeError, t.__init__, b, newline=42)
self.assertRaises(ValueError, t.read)
self.assertRaises(ValueError, t.__init__, b, newline='xyzzy')
self.assertRaises(ValueError, t.read)
t = self.TextIOWrapper.__new__(self.TextIOWrapper)
self.assertRaises(Exception, repr, t)
def test_garbage_collection(self):
# C TextIOWrapper objects are collected, and collecting them flushes
# all data to disk.
# The Python version has __del__, so it ends in gc.garbage instead.
rawio = io.FileIO(support.TESTFN, "wb")
b = self.BufferedWriter(rawio)
t = self.TextIOWrapper(b, encoding="ascii")
t.write("456def")
t.x = t
wr = weakref.ref(t)
del t
support.gc_collect()
self.assertTrue(wr() is None, wr)
with self.open(support.TESTFN, "rb") as f:
self.assertEqual(f.read(), b"456def")
def test_rwpair_cleared_before_textio(self):
# Issue 13070: TextIOWrapper's finalization would crash when called
# after the reference to the underlying BufferedRWPair's writer got
# cleared by the GC.
for i in range(1000):
b1 = self.BufferedRWPair(self.MockRawIO(), self.MockRawIO())
t1 = self.TextIOWrapper(b1, encoding="ascii")
b2 = self.BufferedRWPair(self.MockRawIO(), self.MockRawIO())
t2 = self.TextIOWrapper(b2, encoding="ascii")
# circular references
t1.buddy = t2
t2.buddy = t1
support.gc_collect()
maybeRaises = unittest.TestCase.assertRaises
class PyTextIOWrapperTest(TextIOWrapperTest):
@contextlib.contextmanager
def maybeRaises(self, *args, **kwds):
yield
class IncrementalNewlineDecoderTest(unittest.TestCase):
def check_newline_decoding_utf8(self, decoder):
# UTF-8 specific tests for a newline decoder
def _check_decode(b, s, **kwargs):
# We exercise getstate() / setstate() as well as decode()
state = decoder.getstate()
self.assertEqual(decoder.decode(b, **kwargs), s)
decoder.setstate(state)
self.assertEqual(decoder.decode(b, **kwargs), s)
_check_decode(b'\xe8\xa2\x88', "\u8888")
_check_decode(b'\xe8', "")
_check_decode(b'\xa2', "")
_check_decode(b'\x88', "\u8888")
_check_decode(b'\xe8', "")
_check_decode(b'\xa2', "")
_check_decode(b'\x88', "\u8888")
_check_decode(b'\xe8', "")
self.assertRaises(UnicodeDecodeError, decoder.decode, b'', final=True)
decoder.reset()
_check_decode(b'\n', "\n")
_check_decode(b'\r', "")
_check_decode(b'', "\n", final=True)
_check_decode(b'\r', "\n", final=True)
_check_decode(b'\r', "")
_check_decode(b'a', "\na")
_check_decode(b'\r\r\n', "\n\n")
_check_decode(b'\r', "")
_check_decode(b'\r', "\n")
_check_decode(b'\na', "\na")
_check_decode(b'\xe8\xa2\x88\r\n', "\u8888\n")
_check_decode(b'\xe8\xa2\x88', "\u8888")
_check_decode(b'\n', "\n")
_check_decode(b'\xe8\xa2\x88\r', "\u8888")
_check_decode(b'\n', "\n")
def check_newline_decoding(self, decoder, encoding):
result = []
if encoding is not None:
encoder = codecs.getincrementalencoder(encoding)()
def _decode_bytewise(s):
# Decode one byte at a time
for b in encoder.encode(s):
result.append(decoder.decode(b))
else:
encoder = None
def _decode_bytewise(s):
# Decode one char at a time
for c in s:
result.append(decoder.decode(c))
self.assertEqual(decoder.newlines, None)
_decode_bytewise("abc\n\r")
self.assertEqual(decoder.newlines, '\n')
_decode_bytewise("\nabc")
self.assertEqual(decoder.newlines, ('\n', '\r\n'))
_decode_bytewise("abc\r")
self.assertEqual(decoder.newlines, ('\n', '\r\n'))
_decode_bytewise("abc")
self.assertEqual(decoder.newlines, ('\r', '\n', '\r\n'))
_decode_bytewise("abc\r")
self.assertEqual("".join(result), "abc\n\nabcabc\nabcabc")
decoder.reset()
input = "abc"
if encoder is not None:
encoder.reset()
input = encoder.encode(input)
self.assertEqual(decoder.decode(input), "abc")
self.assertEqual(decoder.newlines, None)
def test_newline_decoder(self):
encodings = (
# None meaning the IncrementalNewlineDecoder takes unicode input
# rather than bytes input
None, 'utf-8', 'latin-1',
'utf-16', 'utf-16-le', 'utf-16-be',
'utf-32', 'utf-32-le', 'utf-32-be',
)
for enc in encodings:
decoder = enc and codecs.getincrementaldecoder(enc)()
decoder = self.IncrementalNewlineDecoder(decoder, translate=True)
self.check_newline_decoding(decoder, enc)
decoder = codecs.getincrementaldecoder("utf-8")()
decoder = self.IncrementalNewlineDecoder(decoder, translate=True)
self.check_newline_decoding_utf8(decoder)
def test_newline_bytes(self):
# Issue 5433: Excessive optimization in IncrementalNewlineDecoder
def _check(dec):
self.assertEqual(dec.newlines, None)
self.assertEqual(dec.decode("\u0D00"), "\u0D00")
self.assertEqual(dec.newlines, None)
self.assertEqual(dec.decode("\u0A00"), "\u0A00")
self.assertEqual(dec.newlines, None)
dec = self.IncrementalNewlineDecoder(None, translate=False)
_check(dec)
dec = self.IncrementalNewlineDecoder(None, translate=True)
_check(dec)
class CIncrementalNewlineDecoderTest(IncrementalNewlineDecoderTest):
pass
class PyIncrementalNewlineDecoderTest(IncrementalNewlineDecoderTest):
pass
# XXX Tests for open()
class MiscIOTest(unittest.TestCase):
def tearDown(self):
support.unlink(support.TESTFN)
def test___all__(self):
for name in self.io.__all__:
obj = getattr(self.io, name, None)
self.assertTrue(obj is not None, name)
if name == "open":
continue
elif "error" in name.lower() or name == "UnsupportedOperation":
self.assertTrue(issubclass(obj, Exception), name)
elif not name.startswith("SEEK_"):
self.assertTrue(issubclass(obj, self.IOBase))
def test_attributes(self):
f = self.open(support.TESTFN, "wb", buffering=0)
self.assertEqual(f.mode, "wb")
f.close()
f = self.open(support.TESTFN, "U")
self.assertEqual(f.name, support.TESTFN)
self.assertEqual(f.buffer.name, support.TESTFN)
self.assertEqual(f.buffer.raw.name, support.TESTFN)
self.assertEqual(f.mode, "U")
self.assertEqual(f.buffer.mode, "rb")
self.assertEqual(f.buffer.raw.mode, "rb")
f.close()
f = self.open(support.TESTFN, "w+")
self.assertEqual(f.mode, "w+")
self.assertEqual(f.buffer.mode, "rb+") # Does it really matter?
self.assertEqual(f.buffer.raw.mode, "rb+")
g = self.open(f.fileno(), "wb", closefd=False)
self.assertEqual(g.mode, "wb")
self.assertEqual(g.raw.mode, "wb")
self.assertEqual(g.name, f.fileno())
self.assertEqual(g.raw.name, f.fileno())
f.close()
g.close()
def test_io_after_close(self):
for kwargs in [
{"mode": "w"},
{"mode": "wb"},
{"mode": "w", "buffering": 1},
{"mode": "w", "buffering": 2},
{"mode": "wb", "buffering": 0},
{"mode": "r"},
{"mode": "rb"},
{"mode": "r", "buffering": 1},
{"mode": "r", "buffering": 2},
{"mode": "rb", "buffering": 0},
{"mode": "w+"},
{"mode": "w+b"},
{"mode": "w+", "buffering": 1},
{"mode": "w+", "buffering": 2},
{"mode": "w+b", "buffering": 0},
]:
f = self.open(support.TESTFN, **kwargs)
f.close()
self.assertRaises(ValueError, f.flush)
self.assertRaises(ValueError, f.fileno)
self.assertRaises(ValueError, f.isatty)
self.assertRaises(ValueError, f.__iter__)
if hasattr(f, "peek"):
self.assertRaises(ValueError, f.peek, 1)
self.assertRaises(ValueError, f.read)
if hasattr(f, "read1"):
self.assertRaises(ValueError, f.read1, 1024)
if hasattr(f, "readall"):
self.assertRaises(ValueError, f.readall)
if hasattr(f, "readinto"):
self.assertRaises(ValueError, f.readinto, bytearray(1024))
self.assertRaises(ValueError, f.readline)
self.assertRaises(ValueError, f.readlines)
self.assertRaises(ValueError, f.seek, 0)
self.assertRaises(ValueError, f.tell)
self.assertRaises(ValueError, f.truncate)
self.assertRaises(ValueError, f.write,
b"" if "b" in kwargs['mode'] else "")
self.assertRaises(ValueError, f.writelines, [])
self.assertRaises(ValueError, next, f)
def test_blockingioerror(self):
# Various BlockingIOError issues
self.assertRaises(TypeError, self.BlockingIOError)
self.assertRaises(TypeError, self.BlockingIOError, 1)
self.assertRaises(TypeError, self.BlockingIOError, 1, 2, 3, 4)
self.assertRaises(TypeError, self.BlockingIOError, 1, "", None)
b = self.BlockingIOError(1, "")
self.assertEqual(b.characters_written, 0)
class C(unicode):
pass
c = C("")
b = self.BlockingIOError(1, c)
c.b = b
b.c = c
wr = weakref.ref(c)
del c, b
support.gc_collect()
self.assertTrue(wr() is None, wr)
def test_abcs(self):
# Test the visible base classes are ABCs.
self.assertIsInstance(self.IOBase, abc.ABCMeta)
self.assertIsInstance(self.RawIOBase, abc.ABCMeta)
self.assertIsInstance(self.BufferedIOBase, abc.ABCMeta)
self.assertIsInstance(self.TextIOBase, abc.ABCMeta)
def _check_abc_inheritance(self, abcmodule):
with self.open(support.TESTFN, "wb", buffering=0) as f:
self.assertIsInstance(f, abcmodule.IOBase)
self.assertIsInstance(f, abcmodule.RawIOBase)
self.assertNotIsInstance(f, abcmodule.BufferedIOBase)
self.assertNotIsInstance(f, abcmodule.TextIOBase)
with self.open(support.TESTFN, "wb") as f:
self.assertIsInstance(f, abcmodule.IOBase)
self.assertNotIsInstance(f, abcmodule.RawIOBase)
self.assertIsInstance(f, abcmodule.BufferedIOBase)
self.assertNotIsInstance(f, abcmodule.TextIOBase)
with self.open(support.TESTFN, "w") as f:
self.assertIsInstance(f, abcmodule.IOBase)
self.assertNotIsInstance(f, abcmodule.RawIOBase)
self.assertNotIsInstance(f, abcmodule.BufferedIOBase)
self.assertIsInstance(f, abcmodule.TextIOBase)
def test_abc_inheritance(self):
# Test implementations inherit from their respective ABCs
self._check_abc_inheritance(self)
def test_abc_inheritance_official(self):
# Test implementations inherit from the official ABCs of the
# baseline "io" module.
self._check_abc_inheritance(io)
@unittest.skipUnless(fcntl, 'fcntl required for this test')
def test_nonblock_pipe_write_bigbuf(self):
self._test_nonblock_pipe_write(16*1024)
@unittest.skipUnless(fcntl, 'fcntl required for this test')
def test_nonblock_pipe_write_smallbuf(self):
self._test_nonblock_pipe_write(1024)
def _set_non_blocking(self, fd):
flags = fcntl.fcntl(fd, fcntl.F_GETFL)
self.assertNotEqual(flags, -1)
res = fcntl.fcntl(fd, fcntl.F_SETFL, flags | os.O_NONBLOCK)
self.assertEqual(res, 0)
def _test_nonblock_pipe_write(self, bufsize):
sent = []
received = []
r, w = os.pipe()
self._set_non_blocking(r)
self._set_non_blocking(w)
# To exercise all code paths in the C implementation we need
# to play with buffer sizes. For instance, if we choose a
# buffer size less than or equal to _PIPE_BUF (4096 on Linux)
# then we will never get a partial write of the buffer.
rf = self.open(r, mode='rb', closefd=True, buffering=bufsize)
wf = self.open(w, mode='wb', closefd=True, buffering=bufsize)
with rf, wf:
for N in 9999, 73, 7574:
try:
i = 0
while True:
msg = bytes([i % 26 + 97]) * N
sent.append(msg)
wf.write(msg)
i += 1
except self.BlockingIOError as e:
self.assertEqual(e.args[0], errno.EAGAIN)
sent[-1] = sent[-1][:e.characters_written]
received.append(rf.read())
msg = b'BLOCKED'
wf.write(msg)
sent.append(msg)
while True:
try:
wf.flush()
break
except self.BlockingIOError as e:
self.assertEqual(e.args[0], errno.EAGAIN)
self.assertEqual(e.characters_written, 0)
received.append(rf.read())
received += iter(rf.read, None)
sent, received = b''.join(sent), b''.join(received)
self.assertTrue(sent == received)
self.assertTrue(wf.closed)
self.assertTrue(rf.closed)
class CMiscIOTest(MiscIOTest):
io = io
class PyMiscIOTest(MiscIOTest):
io = pyio
@unittest.skipIf(os.name == 'nt', 'POSIX signals required for this test.')
class SignalsTest(unittest.TestCase):
def setUp(self):
self.oldalrm = signal.signal(signal.SIGALRM, self.alarm_interrupt)
def tearDown(self):
signal.signal(signal.SIGALRM, self.oldalrm)
def alarm_interrupt(self, sig, frame):
        1 // 0  # deliberately raise ZeroDivisionError from the signal handler
@unittest.skipUnless(threading, 'Threading required for this test.')
@unittest.skipIf(sys.platform in ('freebsd5', 'freebsd6', 'freebsd7'),
'issue #12429: skip test on FreeBSD <= 7')
def check_interrupted_write(self, item, bytes, **fdopen_kwargs):
"""Check that a partial write, when it gets interrupted, properly
invokes the signal handler, and bubbles up the exception raised
in the latter."""
read_results = []
def _read():
s = os.read(r, 1)
read_results.append(s)
t = threading.Thread(target=_read)
t.daemon = True
r, w = os.pipe()
try:
wio = self.io.open(w, **fdopen_kwargs)
t.start()
signal.alarm(1)
# Fill the pipe enough that the write will be blocking.
# It will be interrupted by the timer armed above. Since the
# other thread has read one byte, the low-level write will
# return with a successful (partial) result rather than an EINTR.
# The buffered IO layer must check for pending signal
# handlers, which in this case will invoke alarm_interrupt().
try:
with self.assertRaises(ZeroDivisionError):
wio.write(item * (support.PIPE_MAX_SIZE // len(item) + 1))
finally:
t.join()
# We got one byte, get another one and check that it isn't a
# repeat of the first one.
read_results.append(os.read(r, 1))
self.assertEqual(read_results, [bytes[0:1], bytes[1:2]])
finally:
os.close(w)
os.close(r)
# This is deliberate. If we didn't close the file descriptor
# before closing wio, wio would try to flush its internal
# buffer, and block again.
try:
wio.close()
except IOError as e:
if e.errno != errno.EBADF:
raise
def test_interrupted_write_unbuffered(self):
self.check_interrupted_write(b"xy", b"xy", mode="wb", buffering=0)
def test_interrupted_write_buffered(self):
self.check_interrupted_write(b"xy", b"xy", mode="wb")
def test_interrupted_write_text(self):
self.check_interrupted_write("xy", b"xy", mode="w", encoding="ascii")
def check_reentrant_write(self, data, **fdopen_kwargs):
def on_alarm(*args):
# Will be called reentrantly from the same thread
wio.write(data)
            1//0  # raise ZeroDivisionError if the reentrant write itself succeeded
signal.signal(signal.SIGALRM, on_alarm)
r, w = os.pipe()
wio = self.io.open(w, **fdopen_kwargs)
try:
signal.alarm(1)
# Either the reentrant call to wio.write() fails with RuntimeError,
# or the signal handler raises ZeroDivisionError.
with self.assertRaises((ZeroDivisionError, RuntimeError)) as cm:
while 1:
for i in range(100):
wio.write(data)
wio.flush()
# Make sure the buffer doesn't fill up and block further writes
os.read(r, len(data) * 100)
exc = cm.exception
if isinstance(exc, RuntimeError):
self.assertTrue(str(exc).startswith("reentrant call"), str(exc))
finally:
wio.close()
os.close(r)
def test_reentrant_write_buffered(self):
self.check_reentrant_write(b"xy", mode="wb")
def test_reentrant_write_text(self):
self.check_reentrant_write("xy", mode="w", encoding="ascii")
def check_interrupted_read_retry(self, decode, **fdopen_kwargs):
"""Check that a buffered read, when it gets interrupted (either
returning a partial result or EINTR), properly invokes the signal
handler and retries if the latter returned successfully."""
r, w = os.pipe()
fdopen_kwargs["closefd"] = False
def alarm_handler(sig, frame):
os.write(w, b"bar")
signal.signal(signal.SIGALRM, alarm_handler)
try:
rio = self.io.open(r, **fdopen_kwargs)
os.write(w, b"foo")
signal.alarm(1)
# Expected behaviour:
# - first raw read() returns partial b"foo"
# - second raw read() returns EINTR
# - third raw read() returns b"bar"
self.assertEqual(decode(rio.read(6)), "foobar")
finally:
rio.close()
os.close(w)
os.close(r)
    def test_interrupted_read_retry_buffered(self):
self.check_interrupted_read_retry(lambda x: x.decode('latin1'),
mode="rb")
    def test_interrupted_read_retry_text(self):
self.check_interrupted_read_retry(lambda x: x,
mode="r")
@unittest.skipUnless(threading, 'Threading required for this test.')
def check_interrupted_write_retry(self, item, **fdopen_kwargs):
"""Check that a buffered write, when it gets interrupted (either
returning a partial result or EINTR), properly invokes the signal
handler and retries if the latter returned successfully."""
select = support.import_module("select")
# A quantity that exceeds the buffer size of an anonymous pipe's
# write end.
N = support.PIPE_MAX_SIZE
r, w = os.pipe()
fdopen_kwargs["closefd"] = False
# We need a separate thread to read from the pipe and allow the
# write() to finish. This thread is started after the SIGALRM is
# received (forcing a first EINTR in write()).
read_results = []
write_finished = False
error = [None]
def _read():
try:
while not write_finished:
while r in select.select([r], [], [], 1.0)[0]:
s = os.read(r, 1024)
read_results.append(s)
except BaseException as exc:
error[0] = exc
t = threading.Thread(target=_read)
t.daemon = True
def alarm1(sig, frame):
signal.signal(signal.SIGALRM, alarm2)
signal.alarm(1)
def alarm2(sig, frame):
t.start()
signal.signal(signal.SIGALRM, alarm1)
try:
wio = self.io.open(w, **fdopen_kwargs)
signal.alarm(1)
# Expected behaviour:
# - first raw write() is partial (because of the limited pipe buffer
# and the first alarm)
# - second raw write() returns EINTR (because of the second alarm)
# - subsequent write()s are successful (either partial or complete)
self.assertEqual(N, wio.write(item * N))
wio.flush()
write_finished = True
t.join()
self.assertIsNone(error[0])
self.assertEqual(N, sum(len(x) for x in read_results))
finally:
write_finished = True
os.close(w)
os.close(r)
# This is deliberate. If we didn't close the file descriptor
# before closing wio, wio would try to flush its internal
# buffer, and could block (in case of failure).
try:
wio.close()
except IOError as e:
if e.errno != errno.EBADF:
raise
    def test_interrupted_write_retry_buffered(self):
self.check_interrupted_write_retry(b"x", mode="wb")
    def test_interrupted_write_retry_text(self):
self.check_interrupted_write_retry("x", mode="w", encoding="latin1")
class CSignalsTest(SignalsTest):
io = io
class PySignalsTest(SignalsTest):
io = pyio
# Handling reentrancy issues would slow down _pyio even more, so the
# tests are disabled.
test_reentrant_write_buffered = None
test_reentrant_write_text = None
def test_main():
tests = (CIOTest, PyIOTest,
CBufferedReaderTest, PyBufferedReaderTest,
CBufferedWriterTest, PyBufferedWriterTest,
CBufferedRWPairTest, PyBufferedRWPairTest,
CBufferedRandomTest, PyBufferedRandomTest,
StatefulIncrementalDecoderTest,
CIncrementalNewlineDecoderTest, PyIncrementalNewlineDecoderTest,
CTextIOWrapperTest, PyTextIOWrapperTest,
CMiscIOTest, PyMiscIOTest,
CSignalsTest, PySignalsTest,
)
# Put the namespaces of the IO module we are testing and some useful mock
# classes in the __dict__ of each test.
mocks = (MockRawIO, MisbehavedRawIO, MockFileIO, CloseFailureIO,
MockNonBlockWriterIO, MockRawIOWithoutRead)
all_members = io.__all__ + ["IncrementalNewlineDecoder"]
c_io_ns = dict((name, getattr(io, name)) for name in all_members)
py_io_ns = dict((name, getattr(pyio, name)) for name in all_members)
globs = globals()
c_io_ns.update((x.__name__, globs["C" + x.__name__]) for x in mocks)
py_io_ns.update((x.__name__, globs["Py" + x.__name__]) for x in mocks)
# Avoid turning open into a bound method.
py_io_ns["open"] = pyio.OpenWrapper
for test in tests:
if test.__name__.startswith("C"):
for name, obj in c_io_ns.items():
setattr(test, name, obj)
elif test.__name__.startswith("Py"):
for name, obj in py_io_ns.items():
setattr(test, name, obj)
support.run_unittest(*tests)
if __name__ == "__main__":
test_main()
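# ---------------------------------------------------------------------------
# Illustrative sketch only, not part of the original file: test_main() above
# is the supported entry point, but a single C-implementation test class can
# be wired up and run on its own the same way test_main() parametrizes it.
# The helper below is never called by the suite.
def _example_run_single_c_class():
    mocks = (MockRawIO, MisbehavedRawIO, MockFileIO, CloseFailureIO,
             MockNonBlockWriterIO, MockRawIOWithoutRead)
    # Attach the C io namespace and the C variants of the mock classes, then
    # run just the BufferedRandom tests.
    for name in io.__all__ + ["IncrementalNewlineDecoder"]:
        setattr(CBufferedRandomTest, name, getattr(io, name))
    for mock in mocks:
        setattr(CBufferedRandomTest, mock.__name__, globals()["C" + mock.__name__])
    support.run_unittest(CBufferedRandomTest)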
|
svanschalkwyk/datafari
|
windows/python/Lib/test/test_io.py
|
Python
|
apache-2.0
| 120,213 | 0.001406 |
class NipapError(Exception):
""" NIPAP base error class.
"""
error_code = 1000
class NipapInputError(NipapError):
""" Erroneous input.
A general input error.
"""
error_code = 1100
class NipapMissingInputError(NipapInputError):
""" Missing input.
    Most input is passed in dicts; this could mean a missing key in a dict.
"""
error_code = 1110
class NipapExtraneousInputError(NipapInputError):
""" Extraneous input.
    Most input is passed in dicts; this could mean an unknown key in a dict.
"""
error_code = 1120
class NipapNoSuchOperatorError(NipapInputError):
""" A non existent operator was specified.
"""
error_code = 1130
class NipapValueError(NipapError):
""" Something wrong with a value
For example, trying to send an integer when an IP address is expected.
"""
error_code = 1200
class NipapNonExistentError(NipapError):
""" A non existent object was specified
For example, try to get a prefix from a pool which doesn't exist.
"""
error_code = 1300
class NipapDuplicateError(NipapError):
""" The passed object violates unique constraints
For example, create a VRF with a name of an already existing one.
"""
error_code = 1400
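# ---------------------------------------------------------------------------
# Illustrative sketch only, not part of the original module: one way a caller
# could translate this exception hierarchy into an error payload. The
# `operation` callable is purely hypothetical and exists for the example.
def _example_error_payload(operation):
    """ Run `operation` and return its result, or a NIPAP error dict.
    """
    try:
        return {'result': operation()}
    except NipapError as exc:
        # error_code is defined on every class in the hierarchy above.
        return {'error_code': exc.error_code, 'message': str(exc)}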
|
SoundGoof/NIPAP
|
nipap/nipap/errors.py
|
Python
|
mit
| 1,301 | 0.000769 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from msrest import Serializer
from .. import models as _models
from .._vendor import _convert_request, _format_url_section
T = TypeVar('T')
JSONType = Any
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False
def build_create_or_update_request_initial(
subscription_id: str,
resource_group_name: str,
snapshot_name: str,
*,
json: JSONType = None,
content: Any = None,
**kwargs: Any
) -> HttpRequest:
content_type = kwargs.pop('content_type', None) # type: Optional[str]
api_version = "2019-07-01"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/snapshots/{snapshotName}')
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
"snapshotName": _SERIALIZER.url("snapshot_name", snapshot_name, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
if content_type is not None:
header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="PUT",
url=url,
params=query_parameters,
headers=header_parameters,
json=json,
content=content,
**kwargs
)
def build_update_request_initial(
subscription_id: str,
resource_group_name: str,
snapshot_name: str,
*,
json: JSONType = None,
content: Any = None,
**kwargs: Any
) -> HttpRequest:
content_type = kwargs.pop('content_type', None) # type: Optional[str]
api_version = "2019-07-01"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/snapshots/{snapshotName}')
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
"snapshotName": _SERIALIZER.url("snapshot_name", snapshot_name, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
if content_type is not None:
header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="PATCH",
url=url,
params=query_parameters,
headers=header_parameters,
json=json,
content=content,
**kwargs
)
def build_get_request(
subscription_id: str,
resource_group_name: str,
snapshot_name: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2019-07-01"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/snapshots/{snapshotName}')
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
"snapshotName": _SERIALIZER.url("snapshot_name", snapshot_name, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_delete_request_initial(
subscription_id: str,
resource_group_name: str,
snapshot_name: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2019-07-01"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/snapshots/{snapshotName}')
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
"snapshotName": _SERIALIZER.url("snapshot_name", snapshot_name, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
return HttpRequest(
method="DELETE",
url=url,
params=query_parameters,
**kwargs
)
def build_list_by_resource_group_request(
subscription_id: str,
resource_group_name: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2019-07-01"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/snapshots')
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_list_request(
subscription_id: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2019-07-01"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/providers/Microsoft.Compute/snapshots')
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_grant_access_request_initial(
subscription_id: str,
resource_group_name: str,
snapshot_name: str,
*,
json: JSONType = None,
content: Any = None,
**kwargs: Any
) -> HttpRequest:
content_type = kwargs.pop('content_type', None) # type: Optional[str]
api_version = "2019-07-01"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/snapshots/{snapshotName}/beginGetAccess')
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
"snapshotName": _SERIALIZER.url("snapshot_name", snapshot_name, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
if content_type is not None:
header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="POST",
url=url,
params=query_parameters,
headers=header_parameters,
json=json,
content=content,
**kwargs
)
def build_revoke_access_request_initial(
subscription_id: str,
resource_group_name: str,
snapshot_name: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2019-07-01"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/snapshots/{snapshotName}/endGetAccess')
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
"snapshotName": _SERIALIZER.url("snapshot_name", snapshot_name, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
return HttpRequest(
method="POST",
url=url,
params=query_parameters,
**kwargs
)
class SnapshotsOperations(object):
"""SnapshotsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.compute.v2019_07_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def _create_or_update_initial(
self,
resource_group_name: str,
snapshot_name: str,
snapshot: "_models.Snapshot",
**kwargs: Any
) -> "_models.Snapshot":
cls = kwargs.pop('cls', None) # type: ClsType["_models.Snapshot"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(snapshot, 'Snapshot')
request = build_create_or_update_request_initial(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
snapshot_name=snapshot_name,
content_type=content_type,
json=_json,
template_url=self._create_or_update_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('Snapshot', pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize('Snapshot', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/snapshots/{snapshotName}'} # type: ignore
@distributed_trace
def begin_create_or_update(
self,
resource_group_name: str,
snapshot_name: str,
snapshot: "_models.Snapshot",
**kwargs: Any
) -> LROPoller["_models.Snapshot"]:
"""Creates or updates a snapshot.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param snapshot_name: The name of the snapshot that is being created. The name can't be changed
after the snapshot is created. Supported characters for the name are a-z, A-Z, 0-9 and _. The
max name length is 80 characters.
:type snapshot_name: str
:param snapshot: Snapshot object supplied in the body of the Put disk operation.
:type snapshot: ~azure.mgmt.compute.v2019_07_01.models.Snapshot
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either Snapshot or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.compute.v2019_07_01.models.Snapshot]
:raises: ~azure.core.exceptions.HttpResponseError
"""
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.Snapshot"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._create_or_update_initial(
resource_group_name=resource_group_name,
snapshot_name=snapshot_name,
snapshot=snapshot,
content_type=content_type,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('Snapshot', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/snapshots/{snapshotName}'} # type: ignore
def _update_initial(
self,
resource_group_name: str,
snapshot_name: str,
snapshot: "_models.SnapshotUpdate",
**kwargs: Any
) -> "_models.Snapshot":
cls = kwargs.pop('cls', None) # type: ClsType["_models.Snapshot"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(snapshot, 'SnapshotUpdate')
request = build_update_request_initial(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
snapshot_name=snapshot_name,
content_type=content_type,
json=_json,
template_url=self._update_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('Snapshot', pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize('Snapshot', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/snapshots/{snapshotName}'} # type: ignore
@distributed_trace
def begin_update(
self,
resource_group_name: str,
snapshot_name: str,
snapshot: "_models.SnapshotUpdate",
**kwargs: Any
) -> LROPoller["_models.Snapshot"]:
"""Updates (patches) a snapshot.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param snapshot_name: The name of the snapshot that is being created. The name can't be changed
after the snapshot is created. Supported characters for the name are a-z, A-Z, 0-9 and _. The
max name length is 80 characters.
:type snapshot_name: str
:param snapshot: Snapshot object supplied in the body of the Patch snapshot operation.
:type snapshot: ~azure.mgmt.compute.v2019_07_01.models.SnapshotUpdate
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either Snapshot or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.compute.v2019_07_01.models.Snapshot]
:raises: ~azure.core.exceptions.HttpResponseError
"""
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.Snapshot"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._update_initial(
resource_group_name=resource_group_name,
snapshot_name=snapshot_name,
snapshot=snapshot,
content_type=content_type,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('Snapshot', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/snapshots/{snapshotName}'} # type: ignore
@distributed_trace
def get(
self,
resource_group_name: str,
snapshot_name: str,
**kwargs: Any
) -> "_models.Snapshot":
"""Gets information about a snapshot.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param snapshot_name: The name of the snapshot that is being created. The name can't be changed
after the snapshot is created. Supported characters for the name are a-z, A-Z, 0-9 and _. The
max name length is 80 characters.
:type snapshot_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: Snapshot, or the result of cls(response)
:rtype: ~azure.mgmt.compute.v2019_07_01.models.Snapshot
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.Snapshot"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_request(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
snapshot_name=snapshot_name,
template_url=self.get.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('Snapshot', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/snapshots/{snapshotName}'} # type: ignore
def _delete_initial(
self,
resource_group_name: str,
snapshot_name: str,
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_delete_request_initial(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
snapshot_name=snapshot_name,
template_url=self._delete_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/snapshots/{snapshotName}'} # type: ignore
@distributed_trace
def begin_delete(
self,
resource_group_name: str,
snapshot_name: str,
**kwargs: Any
) -> LROPoller[None]:
"""Deletes a snapshot.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param snapshot_name: The name of the snapshot that is being created. The name can't be changed
after the snapshot is created. Supported characters for the name are a-z, A-Z, 0-9 and _. The
max name length is 80 characters.
:type snapshot_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises: ~azure.core.exceptions.HttpResponseError
"""
polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete_initial(
resource_group_name=resource_group_name,
snapshot_name=snapshot_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
if polling is True: polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/snapshots/{snapshotName}'} # type: ignore
@distributed_trace
def list_by_resource_group(
self,
resource_group_name: str,
**kwargs: Any
) -> Iterable["_models.SnapshotList"]:
"""Lists snapshots under a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either SnapshotList or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.compute.v2019_07_01.models.SnapshotList]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.SnapshotList"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_list_by_resource_group_request(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
template_url=self.list_by_resource_group.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_list_by_resource_group_request(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("SnapshotList", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/snapshots'} # type: ignore
@distributed_trace
def list(
self,
**kwargs: Any
) -> Iterable["_models.SnapshotList"]:
"""Lists snapshots under a subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either SnapshotList or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.compute.v2019_07_01.models.SnapshotList]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.SnapshotList"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_list_request(
subscription_id=self._config.subscription_id,
template_url=self.list.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_list_request(
subscription_id=self._config.subscription_id,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("SnapshotList", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Compute/snapshots'} # type: ignore
def _grant_access_initial(
self,
resource_group_name: str,
snapshot_name: str,
grant_access_data: "_models.GrantAccessData",
**kwargs: Any
) -> Optional["_models.AccessUri"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.AccessUri"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(grant_access_data, 'GrantAccessData')
request = build_grant_access_request_initial(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
snapshot_name=snapshot_name,
content_type=content_type,
json=_json,
template_url=self._grant_access_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('AccessUri', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_grant_access_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/snapshots/{snapshotName}/beginGetAccess'} # type: ignore
@distributed_trace
def begin_grant_access(
self,
resource_group_name: str,
snapshot_name: str,
grant_access_data: "_models.GrantAccessData",
**kwargs: Any
) -> LROPoller["_models.AccessUri"]:
"""Grants access to a snapshot.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param snapshot_name: The name of the snapshot that is being created. The name can't be changed
after the snapshot is created. Supported characters for the name are a-z, A-Z, 0-9 and _. The
max name length is 80 characters.
:type snapshot_name: str
:param grant_access_data: Access data object supplied in the body of the get snapshot access
operation.
:type grant_access_data: ~azure.mgmt.compute.v2019_07_01.models.GrantAccessData
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either AccessUri or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.compute.v2019_07_01.models.AccessUri]
:raises: ~azure.core.exceptions.HttpResponseError
"""
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.AccessUri"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._grant_access_initial(
resource_group_name=resource_group_name,
snapshot_name=snapshot_name,
grant_access_data=grant_access_data,
content_type=content_type,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('AccessUri', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_grant_access.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/snapshots/{snapshotName}/beginGetAccess'} # type: ignore
def _revoke_access_initial(
self,
resource_group_name: str,
snapshot_name: str,
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_revoke_access_request_initial(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
snapshot_name=snapshot_name,
template_url=self._revoke_access_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_revoke_access_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/snapshots/{snapshotName}/endGetAccess'} # type: ignore
@distributed_trace
def begin_revoke_access(
self,
resource_group_name: str,
snapshot_name: str,
**kwargs: Any
) -> LROPoller[None]:
"""Revokes access to a snapshot.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param snapshot_name: The name of the snapshot that is being created. The name can't be changed
after the snapshot is created. Supported characters for the name are a-z, A-Z, 0-9 and _. The
max name length is 80 characters.
:type snapshot_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises: ~azure.core.exceptions.HttpResponseError
"""
polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._revoke_access_initial(
resource_group_name=resource_group_name,
snapshot_name=snapshot_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_revoke_access.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/snapshots/{snapshotName}/endGetAccess'} # type: ignore
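# Hedged usage sketch (not part of the generated module): these operations are
# normally reached through the aggregate ComputeManagementClient rather than by
# instantiating SnapshotsOperations directly. The azure-identity credential, the
# resource names and the source disk id below are placeholders, and passing plain
# dicts for the request bodies is an assumption that the serializer accepts them.
if __name__ == "__main__":
    from azure.identity import DefaultAzureCredential
    from azure.mgmt.compute import ComputeManagementClient
    client = ComputeManagementClient(DefaultAzureCredential(), "<subscription-id>")
    # Create (or update) a snapshot by copying an existing managed disk.
    snapshot = client.snapshots.begin_create_or_update(
        "my-resource-group",
        "my-snapshot",
        {
            "location": "eastus",
            "creation_data": {
                "create_option": "Copy",
                "source_resource_id": "<managed-disk-resource-id>",
            },
        },
    ).result()
    # Grant temporary read access, print the SAS URI, then revoke it again.
    access = client.snapshots.begin_grant_access(
        "my-resource-group",
        "my-snapshot",
        {"access": "Read", "duration_in_seconds": 3600},
    ).result()
    print(access.access_sas)
    client.snapshots.begin_revoke_access("my-resource-group", "my-snapshot").result()
    # Snapshots in a resource group can be enumerated with the paged list operation.
    for snap in client.snapshots.list_by_resource_group("my-resource-group"):
        print(snap.name)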
|
Azure/azure-sdk-for-python
|
sdk/compute/azure-mgmt-compute/azure/mgmt/compute/v2019_07_01/operations/_snapshots_operations.py
|
Python
|
mit
| 46,204 | 0.004697 |
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
# Copyright (c) 2012-2015 Christian Schwarz
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
def optimized(fn):
"""Decorator that will call the optimized c++ version
    of a pycast function, if available, rather than the
    original pycast function.
:param function fn: original pycast function
:return: return the wrapped function
:rtype: function
"""
def _optimized(self, *args, **kwargs):
""" This method calls the pycastC function if
optimization is enabled and the pycastC function
is available.
        :param PyCastObject self: reference to the calling object.
            Needs to be passed to the pycastC function,
            so that all its members are available.
        :param list *args: list of arguments the function is called with.
        :param dict **kwargs: dictionary of parameter names and values the function has been called with.
        :return: result of the function call, either from the pycast or the pycastC module.
:rtype: function
"""
if self.optimizationEnabled:
class_name = self.__class__.__name__
module = self.__module__.replace("pycast", "pycastC")
try:
imported = __import__("%s.%s" % (module, class_name), globals(), locals(), [fn.__name__])
function = getattr(imported, fn.__name__)
return function(self, *args, **kwargs)
except ImportError:
print "[WARNING] Could not enable optimization for %s, %s" % (fn.__name__, self)
return fn(self, *args, **kwargs)
else:
return fn(self, *args, **kwargs)
setattr(_optimized, "__name__", fn.__name__)
setattr(_optimized, "__repr__", fn.__repr__)
setattr(_optimized, "__str__", fn.__str__)
setattr(_optimized, "__doc__", fn.__doc__)
return _optimized
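# Hedged usage sketch (not part of the original module): it only illustrates the
# calling convention of @optimized. The decorated class must expose an
# ``optimizationEnabled`` attribute; with it set to False the pure-Python body runs,
# and with True the decorator would try to import the pycastC counterpart of a
# class living in a ``pycast.*`` module.
if __name__ == "__main__":
    class _Demo(object):
        optimizationEnabled = False
        @optimized
        def double_values(self, values):
            return [2 * v for v in values]
    print(_Demo().double_values([1, 2, 3]))  # -> [2, 4, 6]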
|
T-002/pycast
|
pycast/common/decorators.py
|
Python
|
mit
| 2,987 | 0.002009 |
# -*- coding: utf-8 -*-
"""
pygments.plugin
~~~~~~~~~~~~~~~
Pygments setuptools plugin interface. The methods defined
here also work if setuptools isn't installed but they just
return nothing.
lexer plugins::
[pygments.lexers]
yourlexer = yourmodule:YourLexer
formatter plugins::
[pygments.formatters]
yourformatter = yourformatter:YourFormatter
/.ext = yourformatter:YourFormatter
As you can see, you can define extensions for the formatter
with a leading slash.
syntax plugins::
[pygments.styles]
yourstyle = yourstyle:YourStyle
    filter plugins::
        [pygments.filters]
yourfilter = yourfilter:YourFilter
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from __future__ import unicode_literals
try:
import pkg_resources
except ImportError:
pkg_resources = None
LEXER_ENTRY_POINT = 'pygments.lexers'
FORMATTER_ENTRY_POINT = 'pygments.formatters'
STYLE_ENTRY_POINT = 'pygments.styles'
FILTER_ENTRY_POINT = 'pygments.filters'
def find_plugin_lexers():
if pkg_resources is None:
return
for entrypoint in pkg_resources.iter_entry_points(LEXER_ENTRY_POINT):
yield entrypoint.load()
def find_plugin_formatters():
if pkg_resources is None:
return
for entrypoint in pkg_resources.iter_entry_points(FORMATTER_ENTRY_POINT):
yield entrypoint.name, entrypoint.load()
def find_plugin_styles():
if pkg_resources is None:
return
for entrypoint in pkg_resources.iter_entry_points(STYLE_ENTRY_POINT):
yield entrypoint.name, entrypoint.load()
def find_plugin_filters():
if pkg_resources is None:
return
for entrypoint in pkg_resources.iter_entry_points(FILTER_ENTRY_POINT):
yield entrypoint.name, entrypoint.load()
|
davy39/eric
|
ThirdParty/Pygments/pygments/plugin.py
|
Python
|
gpl-3.0
| 1,903 | 0 |
import os
import time
from config import ComponentBase
from transcode import Transcoder
class MediaDiscovery(ComponentBase):
DURATION_FORMAT = '%H:%M:%S'
MAX_DEPTH = 4
def __init__(self, library):
super(MediaDiscovery, self).__init__()
self.library = library
def search(self, paths, depth=0):
"""Search the given paths for media files"""
num_items = 0
sub_paths = []
tcoder = Transcoder()
if len(paths) == 0 or depth >= self.MAX_DEPTH:
return 0
for path in paths:
try:
for entry in os.listdir(path):
abspath = os.path.join(path, entry)
if os.path.isdir(abspath):
sub_paths.append(abspath)
continue
name, ext = os.path.splitext(entry)
ext = ext[1:]
if ext in tcoder.MIME_MAP:
info = tcoder.get_media_info(abspath)
if info is None:
continue
size = os.stat(abspath).st_size
length = self._duration_to_secs(info['duration'])
self.library.insert(name, abspath, length, size,
tcoder.MIME_MAP[ext], info, ignore_duplicates=True)
num_items += 1
except OSError as e:
self.logger.warning(str(e))
self.library.save()
return self.search(sub_paths, depth + 1) + num_items
def _duration_to_secs(self, duration):
"""Converts a duration string into seconds"""
# TODO - Support sub second precision
ts = time.strptime(duration, self.DURATION_FORMAT)
return ts.tm_hour * 3600 + ts.tm_min * 60 + ts.tm_sec
def start_watching(self):
"""Watch the filesystem for any new media files
and add them to the database automatically.
"""
pass
# TODO - Implement file system watching
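# Hedged usage sketch (illustrative only, not part of the module): MediaDiscovery
# needs a library object exposing insert(...) and save(); the MediaLibrary import
# below is an assumed name from the surrounding project and may differ.
if __name__ == '__main__':
    from library import MediaLibrary  # assumed module/class name
    lib = MediaLibrary('media.db')
    found = MediaDiscovery(lib).search(['/srv/media'])
    print('Indexed %d media files' % found)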
|
s-knibbs/py-web-player
|
pywebplayer/discover.py
|
Python
|
gpl-2.0
| 2,070 | 0.000483 |
# -*- coding: utf-8 -*-
import time
from datetime import timedelta
class CookieJar:
def __init__(self, pluginname, account=None):
self.cookies = {}
self.plugin = pluginname
self.account = account
def add_cookies(self, clist):
for c in clist:
name = c.split("\t")[5]
self.cookies[name] = c
def get_cookies(self):
return list(self.cookies.values())
def parse_cookie(self, name):
if name in self.cookies:
return self.cookies[name].split("\t")[6]
else:
return None
def get_cookie(self, name):
return self.parse_cookie(name)
def set_cookie(
self,
domain,
name,
value,
path="/",
        exp=None,
    ):
        if exp is None:
            #: 31 days retention, computed at call time rather than at import time
            exp = time.time() + timedelta(hours=744).total_seconds()
        self.cookies[
            name
        ] = f".{domain}\tTRUE\t{path}\tFALSE\t{exp}\t{name}\t{value}"
def clear(self):
self.cookies = {}
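# Hedged usage sketch (illustrative only, not part of the module): set_cookie()
# stores a Netscape-style, tab-separated cookie line keyed by name, and
# get_cookie() parses the value back out of the seventh field of that line.
if __name__ == "__main__":
    jar = CookieJar("SomePlugin")
    jar.set_cookie("example.com", "session", "abc123")
    assert jar.get_cookie("session") == "abc123"
    print(jar.get_cookies()[0])  # ".example.com\tTRUE\t/\tFALSE\t<expiry>\tsession\tabc123"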
|
vuolter/pyload
|
src/pyload/core/network/cookie_jar.py
|
Python
|
agpl-3.0
| 1,007 | 0.000993 |
# -*- coding: utf-8 -*-
#!/usr/bin/env python
|
caulagi/hubot-py-wtf
|
test/code/bad.py
|
Python
|
mit
| 46 | 0.021739 |
import logging
from dummy.models import Test
from dummy.utils import git
from dummy.storage import StorageProvider
from dummy import config
logger = logging.getLogger( __name__ )
def discover_targets( args ):
targets = []
if args.alltargets:
for t in config.TARGETS.keys():
targets.append( t )
elif len( args.target ) == 0:
targets.append( config.DEFAULT_TARGET )
else:
for target in args.target:
targets.append( target )
return targets
def discover_tests( args ):
tests = []
# try to find the suites and append the testnames
# of the suite
for name in args.suite:
logger.info( "Loading tests from suite `%s`" % name )
# make sure to have a valid test suite name
try:
suite = config.SUITES[ name ]
for descr in suite:
for fname in Test.glob( descr ):
logger.debug( "Adding test `%s` to tests." % fname )
tests.append( Test( fname ))
except KeyError:
logger.error( "We looked, but a test suite with name `%s` was not found." % name )
# queue the named tests
for names in [ Test.glob( name ) for name in args.tests ]:
for name in names:
tests.append( Test( name ))
# expand excludes using globbing
excludes = []
for ex in args.exclude:
excludes += Test.glob( ex )
# unqueue excluded tests
tests = [ t for t in tests if t.name not in excludes ]
# unqueue tests that already have results
# if complement option is given
if args.complement:
targets = discover_targets( args )
commit = args.commit or git.describe()
# assume tested
filtered = []
for test in tests:
tested = True
for t in targets:
if not StorageProvider.exists( commit, t, test ):
tested = False
if not tested:
filtered.append( test )
tests = filtered
return tests
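# Hedged usage sketch (illustrative only, not part of the module): discover_tests()
# expects an argparse-style namespace carrying the attributes used above; the suite
# name and glob pattern below are placeholders that must exist in the local config.
if __name__ == '__main__':
    import argparse
    args = argparse.Namespace(
        suite=['unit'],            # keys of config.SUITES
        tests=['tests/spam_*'],    # patterns resolved through Test.glob
        exclude=[],
        complement=False,
        alltargets=False,
        target=[],
        commit=None,
    )
    for test in discover_tests(args):
        print(test.name)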
|
ElessarWebb/dummy
|
src/dummy/utils/argparser.py
|
Python
|
mit
| 1,745 | 0.05616 |
# Copyright 2015-2016 Open Source Robotics Foundation, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import git
import os
import string
def latest_commit_sha(repo, path):
"""That the last commit sha for a given path in repo"""
log_message = repo.git.log("-1", path)
commit_sha = log_message.split('\n')[0].split(' ')[1]
return commit_sha
def parse_manifest(manifest, repo, repo_name):
# For each release
for release_name, release_data in list(manifest['release_names'].items()):
print('release_name: ', release_name)
# For each os supported
at_least_one_tag = False
for os_name, os_data in list(release_data['os_names'].items()):
print('os_name: ', os_name)
# For each os code name supported
for os_code_name, os_code_data in list(os_data['os_code_names'].items()):
print('os_code_name: ', os_code_name)
if os_code_data['tag_names']:
at_least_one_tag = True
for tag_name, tag_data in os_code_data['tag_names'].items():
print('tag_name: ', tag_name)
tags = []
for alias_pattern in tag_data['aliases']:
alias_template = string.Template(alias_pattern)
alias = alias_template.substitute(
release_name=release_name,
os_name=os_name,
os_code_name=os_code_name)
tags.append(alias)
commit_path = os.path.join(
repo_name, release_name,
os_name, os_code_name, tag_name)
commit_sha = latest_commit_sha(repo, commit_path)
print('tags: ', tags)
tag_data['Tags'] = tags
tag_data['Architectures'] = os_code_data['archs']
tag_data['GitCommit'] = commit_sha
tag_data['Directory'] = commit_path
if not at_least_one_tag:
del manifest['release_names'][release_name]
return manifest
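# Hedged usage sketch (illustrative only, not part of the module): parse_manifest()
# takes an already-loaded manifest dict and a GitPython repo; the manifest path,
# repo name and the use of PyYAML below are assumptions about the calling code.
if __name__ == '__main__':
    import yaml
    repo = git.Repo('.')
    with open('manifest.yaml') as f:
        manifest = yaml.safe_load(f)
    parsed = parse_manifest(manifest, repo, 'ros')
    print(list(parsed['release_names'].keys()))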
|
osrf/docker_templates
|
docker_templates/library.py
|
Python
|
apache-2.0
| 2,737 | 0.000731 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, models
class ProductTemplate(models.Model):
_inherit = 'product.template'
def _default_visible_expense_policy(self):
visibility = self.user_has_groups('hr_expense.group_hr_expense_user')
return visibility or super(ProductTemplate, self)._default_visible_expense_policy()
@api.depends('can_be_expensed')
def _compute_visible_expense_policy(self):
expense_products = self.filtered(lambda p: p.can_be_expensed)
for product_template in self - expense_products:
product_template.visible_expense_policy = False
super(ProductTemplate, expense_products)._compute_visible_expense_policy()
visibility = self.user_has_groups('hr_expense.group_hr_expense_user')
for product_template in expense_products:
if not product_template.visible_expense_policy:
product_template.visible_expense_policy = visibility
|
ddico/odoo
|
addons/sale_expense/models/product_template.py
|
Python
|
agpl-3.0
| 1,032 | 0.001938 |
#!/usr/bin/python3
# @begin:license
#
# Copyright (c) 2015-2019, Benjamin Niemann <pink@odahoda.de>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# @end:license
import fractions
import functools
import logging
import os.path
from typing import Any, Dict, List, Set, Sequence, Tuple
from PyQt5.QtCore import Qt
from PyQt5 import QtCore
from PyQt5 import QtGui
from PyQt5 import QtWidgets
from noisicaa.core.typing_extra import down_cast
from noisicaa import constants
from noisicaa import audioproc
from noisicaa import core
from noisicaa import music
from noisicaa.ui import ui_base
from noisicaa.ui import clipboard
from noisicaa.ui import pianoroll
from noisicaa.ui import slots
from noisicaa.ui import int_dial
from noisicaa.ui.track_list import tools
from noisicaa.ui.track_list import base_track_editor
from noisicaa.ui.track_list import time_view_mixin
from noisicaa.builtin_nodes.pianoroll import processor_messages
from . import model
from . import clipboard_pb2
logger = logging.getLogger(__name__)
class PianoRollToolMixin(tools.ToolBase): # pylint: disable=abstract-method
track = None # type: PianoRollTrackEditor
def activateSegment(self, segment: 'SegmentEditor') -> None:
pass
def activated(self) -> None:
for segment in self.track.segments:
self.activateSegment(segment)
super().activated()
def __changeRowHeight(
self,
delta: int,
label: QtWidgets.QLabel,
increase_button: QtWidgets.QToolButton,
decrease_button: QtWidgets.QToolButton
) -> None:
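        """Change the grid row height by ``delta`` pixels, clamped to the allowed range,
        while keeping the vertical center of the visible area in place, then update the
        zoom label and enable/disable the zoom buttons at the range limits.
        """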
tr = self.track
pos = (tr.yOffset() + tr.height() / 2) / tr.gridHeight()
tr.setGridYSize(
max(tr.MIN_GRID_Y_SIZE, min(tr.MAX_GRID_Y_SIZE, tr.gridYSize() + delta)))
tr.setYOffset(
max(0, min(tr.gridHeight() - tr.height(),
int(pos * tr.gridHeight() - tr.height() / 2))))
label.setText("%dpx" % tr.gridYSize())
increase_button.setEnabled(tr.gridYSize() < tr.MAX_GRID_Y_SIZE)
decrease_button.setEnabled(tr.gridYSize() > tr.MIN_GRID_Y_SIZE)
def buildContextMenu(self, menu: QtWidgets.QMenu, evt: QtGui.QContextMenuEvent) -> None:
view_menu = menu.addMenu("View")
increase_row_height_button = QtWidgets.QToolButton()
increase_row_height_button.setObjectName('incr-row-height')
increase_row_height_button.setAutoRaise(True)
increase_row_height_button.setIcon(QtGui.QIcon(
os.path.join(constants.DATA_DIR, 'icons', 'zoom-in.svg')))
increase_row_height_button.setEnabled(self.track.gridYSize() < self.track.MAX_GRID_Y_SIZE)
decrease_row_height_button = QtWidgets.QToolButton()
decrease_row_height_button.setObjectName('decr-row-height')
decrease_row_height_button.setAutoRaise(True)
decrease_row_height_button.setIcon(QtGui.QIcon(
os.path.join(constants.DATA_DIR, 'icons', 'zoom-out.svg')))
decrease_row_height_button.setEnabled(self.track.gridYSize() > self.track.MIN_GRID_Y_SIZE)
row_height_label = QtWidgets.QLabel("%dpx" % self.track.gridYSize())
increase_row_height_button.clicked.connect(functools.partial(
self.__changeRowHeight,
1, row_height_label, increase_row_height_button, decrease_row_height_button))
decrease_row_height_button.clicked.connect(functools.partial(
self.__changeRowHeight,
-1, row_height_label, increase_row_height_button, decrease_row_height_button))
row_height_widget = QtWidgets.QWidget()
l = QtWidgets.QHBoxLayout()
l.setContentsMargins(10, 2, 10, 2)
l.setSpacing(4)
l.addWidget(QtWidgets.QLabel("Row height:"))
l.addWidget(decrease_row_height_button)
l.addWidget(row_height_label)
l.addWidget(increase_row_height_button)
l.addStretch(1)
row_height_widget.setLayout(l)
row_height_action = QtWidgets.QWidgetAction(self)
row_height_action.setDefaultWidget(row_height_widget)
view_menu.addAction(row_height_action)
current_channel_menu = menu.addMenu("Current MIDI Channel")
for ch in range(16):
current_channel_menu.addAction(
self.track.set_current_channel_actions[ch])
def contextMenuEvent(self, evt: QtGui.QContextMenuEvent) -> None:
menu = QtWidgets.QMenu(self.track)
menu.setObjectName('context-menu')
self.buildContextMenu(menu, evt)
menu.popup(evt.globalPos())
evt.accept()
class ArrangeSegmentsTool(PianoRollToolMixin, tools.ToolBase):
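    """Tool for arranging piano-roll segments on the timeline: segments can be
    selected, created, deleted, dragged, resized at either end and split at the
    playback position.
    """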
def __init__(self, **kwargs: Any) -> None:
super().__init__(
type=tools.ToolType.PIANOROLL_ARRANGE_SEGMENTS,
group=tools.ToolGroup.EDIT,
**kwargs)
self.__action = None # type: str
self.__resize_segment = None # type: SegmentEditor
self.__drag_segments = None # type: List[SegmentEditor]
self.__handle_offset = None # type: int
self.__ref_time = None # type: audioproc.MusicalTime
self.__time = None # type: audioproc.MusicalTime
self.__select_all_action = QtWidgets.QAction(self)
self.__select_all_action.setObjectName('select-all')
self.__select_all_action.setText("Select All")
self.__select_all_action.setShortcut('ctrl+a')
self.__select_all_action.setShortcutContext(Qt.WidgetWithChildrenShortcut)
self.__select_all_action.triggered.connect(self.__selectAll)
self.__clear_selection_action = QtWidgets.QAction(self)
self.__clear_selection_action.setObjectName('clear-selection')
self.__clear_selection_action.setText("Clear Selection")
self.__clear_selection_action.setShortcut('ctrl+shift+a')
self.__clear_selection_action.setShortcutContext(Qt.WidgetWithChildrenShortcut)
self.__clear_selection_action.triggered.connect(self.__clearSelection)
self.__add_segment_action = QtWidgets.QAction(self)
self.__add_segment_action.setObjectName('add-segment')
self.__add_segment_action.setText("Add Segment")
self.__add_segment_action.setIcon(QtGui.QIcon(
os.path.join(constants.DATA_DIR, 'icons', 'list-add.svg')))
self.__add_segment_action.setShortcut('ins')
self.__add_segment_action.setShortcutContext(Qt.WidgetWithChildrenShortcut)
self.__add_segment_action.triggered.connect(self.__createSegment)
self.__delete_segment_action = QtWidgets.QAction(self)
self.__delete_segment_action.setObjectName('delete-segment')
self.__delete_segment_action.setText("Delete Segment(s)")
self.__delete_segment_action.setIcon(QtGui.QIcon(
os.path.join(constants.DATA_DIR, 'icons', 'list-remove.svg')))
self.__delete_segment_action.setShortcut('del')
self.__delete_segment_action.setShortcutContext(Qt.WidgetWithChildrenShortcut)
self.__delete_segment_action.triggered.connect(self.__deleteSegments)
def iconName(self) -> str:
return 'pianoroll-arrange-segments'
def keySequence(self) -> QtGui.QKeySequence:
return QtGui.QKeySequence('a')
def activateSegment(self, segment: 'SegmentEditor') -> None:
segment.setAttribute(Qt.WA_TransparentForMouseEvents, True)
segment.setReadOnly(True)
def activated(self) -> None:
self.track.addAction(self.__select_all_action)
self.track.addAction(self.__clear_selection_action)
self.track.addAction(self.__add_segment_action)
self.track.addAction(self.__delete_segment_action)
super().activated()
def deactivated(self) -> None:
self.track.removeAction(self.__select_all_action)
self.track.removeAction(self.__clear_selection_action)
self.track.removeAction(self.__add_segment_action)
self.track.removeAction(self.__delete_segment_action)
self.track.setInsertTime(audioproc.MusicalTime(-1, 1))
self.track.clearSelection()
self.track.unsetCursor()
super().deactivated()
def __selectAll(self) -> None:
for segment in self.track.segments:
self.track.addToSelection(segment)
def __clearSelection(self) -> None:
self.track.clearSelection()
def __createSegment(self) -> None:
time = self.track.insertTime()
if time < audioproc.MusicalTime(0, 1):
time = audioproc.MusicalTime(0, 1)
tr = self.track
with tr.project.apply_mutations('%s: Add segment' % tr.track.name):
tr.track.create_segment(
time, audioproc.MusicalDuration(16, 4))
def __deleteSegments(self) -> None:
segments = self.track.selection()
tr = self.track
with tr.project.apply_mutations('%s: Remove segment(s)' % tr.track.name):
for segment in segments:
tr.track.remove_segment(segment.segmentRef())
def __splitSegment(self, segment: 'SegmentEditor', split_time: audioproc.MusicalTime) -> None:
assert segment.startTime() < split_time < segment.endTime()
tr = self.track
with tr.project.apply_mutations('%s: Split segment' % tr.track.name):
tr.track.split_segment(segment.segmentRef(), split_time)
def buildContextMenu(self, menu: QtWidgets.QMenu, evt: QtGui.QContextMenuEvent) -> None:
super().buildContextMenu(menu, evt)
menu.addSeparator()
menu.addAction(self.app.clipboard.cut_action)
menu.addAction(self.app.clipboard.copy_action)
menu.addAction(self.app.clipboard.paste_action)
menu.addAction(self.app.clipboard.paste_as_link_action)
menu.addSeparator()
menu.addAction(self.__select_all_action)
menu.addAction(self.__clear_selection_action)
menu.addSeparator()
menu.addAction(self.__add_segment_action)
menu.addAction(self.__delete_segment_action)
playback_position = self.track.playbackPosition()
split_segment = self.track.segmentAtTime(playback_position)
if (split_segment is not None
and not split_segment.startTime() < playback_position < split_segment.endTime()):
split_segment = None
split_segment_action = QtWidgets.QAction(menu)
split_segment_action.setObjectName('split-segment')
split_segment_action.setText("Split Segment")
split_segment_action.setIcon(QtGui.QIcon(
os.path.join(constants.DATA_DIR, 'icons', 'pianoroll-split-segment.svg')))
if split_segment is not None:
split_segment_action.triggered.connect(
functools.partial(self.__splitSegment, split_segment, playback_position))
else:
split_segment_action.setEnabled(False)
menu.addAction(split_segment_action)
def contextMenuEvent(self, evt: QtGui.QContextMenuEvent) -> None:
if self.__action is not None:
evt.accept()
return
if self.track.insertTime() < audioproc.MusicalTime(0, 1):
self.track.setInsertTime(self.track.xToTime(evt.pos().x()))
segment = self.track.segmentAt(evt.pos().x())
if segment is not None and not segment.selected():
self.track.clearSelection()
self.track.addToSelection(segment)
super().contextMenuEvent(evt)
def mousePressEvent(self, evt: QtGui.QMouseEvent) -> None:
if evt.button() == Qt.LeftButton:
click_segment = self.track.segmentAt(evt.pos().x())
if click_segment is not None:
if evt.modifiers() == Qt.NoModifier and not click_segment.selected():
self.track.clearSelection()
self.track.addToSelection(click_segment)
elif evt.modifiers() == Qt.ControlModifier:
if click_segment.selected():
self.track.removeFromSelection(click_segment)
else:
self.track.addToSelection(click_segment)
elif evt.modifiers() == Qt.ShiftModifier and self.track.lastSelected() is not None:
start_time = min(
click_segment.startTime(), self.track.lastSelected().startTime())
end_time = max(click_segment.endTime(), self.track.lastSelected().endTime())
for segment in self.track.segments:
if segment.startTime() >= start_time and segment.endTime() <= end_time:
self.track.addToSelection(segment)
elif evt.modifiers() == Qt.NoModifier:
self.track.clearSelection()
if evt.button() == Qt.LeftButton and evt.modifiers() == Qt.NoModifier:
for seditor in reversed(self.track.segments):
x1 = self.track.timeToX(seditor.startTime())
x2 = self.track.timeToX(seditor.endTime())
if abs(x2 - evt.pos().x()) < 4:
self.track.setInsertTime(audioproc.MusicalTime(-1, 1))
self.track.clearSelection()
self.track.addToSelection(seditor)
self.__action = 'move-end'
self.__resize_segment = seditor
self.__handle_offset = evt.pos().x() - x2
self.__time = seditor.endTime()
evt.accept()
return
if abs(x1 - evt.pos().x()) < 4:
self.track.setInsertTime(audioproc.MusicalTime(-1, 1))
self.track.clearSelection()
self.track.addToSelection(seditor)
self.__action = 'move-start'
self.__resize_segment = seditor
self.__handle_offset = evt.pos().x() - x1
self.__time = seditor.startTime()
evt.accept()
return
if x1 <= evt.pos().x() < x2:
self.track.setInsertTime(audioproc.MusicalTime(-1, 1))
self.__action = 'drag'
if seditor.selected():
self.__drag_segments = self.track.selection()
else:
self.__drag_segments = [seditor]
self.__ref_time = min(s.startTime() for s in self.__drag_segments)
self.__handle_offset = evt.pos().x() - self.track.timeToX(self.__ref_time)
self.__time = self.__ref_time
evt.accept()
return
self.track.setInsertTime(self.track.xToTime(evt.pos().x()))
evt.accept()
return
super().mousePressEvent(evt)
def mouseMoveEvent(self, evt: QtGui.QMouseEvent) -> None:
if self.__action == 'drag':
self.__time = self.track.xToTime(evt.pos().x() - self.__handle_offset)
if self.track.shouldSnap(evt):
self.__time = self.track.snapTime(self.__time)
self.__time = max(audioproc.MusicalTime(0, 1), self.__time)
for segment in self.__drag_segments:
segment.setShowPlaybackPosition(False)
time = self.__time + (segment.startTime() - self.__ref_time)
self.track.repositionSegment(
segment, time, time + segment.duration())
evt.accept()
return
if self.__action == 'move-end':
self.__resize_segment.setShowPlaybackPosition(False)
self.__time = self.track.xToTime(evt.pos().x() - self.__handle_offset)
if self.track.shouldSnap(evt):
self.__time = self.track.snapTime(self.__time)
self.__time = max(
self.__resize_segment.startTime() + audioproc.MusicalDuration(1, 16),
self.__time)
self.track.repositionSegment(
self.__resize_segment, self.__resize_segment.startTime(), self.__time)
self.__resize_segment.setDuration(self.__time - self.__resize_segment.startTime())
evt.accept()
return
if self.__action == 'move-start':
self.__resize_segment.setShowPlaybackPosition(False)
self.__time = self.track.xToTime(evt.pos().x() - self.__handle_offset)
if self.track.shouldSnap(evt):
self.__time = self.track.snapTime(self.__time)
self.__time = min(
self.__resize_segment.endTime() - audioproc.MusicalDuration(1, 16),
self.__time)
self.track.repositionSegment(
self.__resize_segment, self.__time, self.__resize_segment.endTime())
self.__resize_segment.setDuration(self.__resize_segment.endTime() - self.__time)
evt.accept()
return
for seditor in reversed(self.track.segments):
x1 = self.track.timeToX(seditor.startTime())
x2 = self.track.timeToX(seditor.endTime())
if abs(x2 - evt.pos().x()) < 4:
self.track.setCursor(Qt.SizeHorCursor)
break
elif abs(x1 - evt.pos().x()) < 4:
self.track.setCursor(Qt.SizeHorCursor)
break
elif x1 <= evt.pos().x() < x2:
self.track.setCursor(Qt.DragMoveCursor)
break
else:
self.track.unsetCursor()
super().mouseMoveEvent(evt)
def mouseReleaseEvent(self, evt: QtGui.QMouseEvent) -> None:
if evt.button() == Qt.LeftButton and self.__action == 'drag':
with self.project.apply_mutations('%s: Move segment' % self.track.track.name):
for segment in self.__drag_segments:
segment.setShowPlaybackPosition(True)
segment.segmentRef().time += self.__time - self.__ref_time
self.track.updatePlaybackPosition()
self.__drag_segments.clear()
self.__action = None
evt.accept()
return
if evt.button() == Qt.LeftButton and self.__action == 'move-start':
self.__resize_segment.setShowPlaybackPosition(True)
with self.project.apply_mutations('%s: Resize segment' % self.track.track.name):
delta_time = self.__time - self.__resize_segment.startTime()
self.__resize_segment.segmentRef().time = self.__time
self.__resize_segment.segment().duration -= delta_time
self.track.updatePlaybackPosition()
self.__resize_segment = None
self.__action = None
evt.accept()
return
if evt.button() == Qt.LeftButton and self.__action == 'move-end':
self.__resize_segment.setShowPlaybackPosition(True)
with self.project.apply_mutations('%s: Resize segment' % self.track.track.name):
delta_time = self.__time - self.__resize_segment.endTime()
self.__resize_segment.segment().duration += delta_time
self.track.updatePlaybackPosition()
self.__resize_segment = None
self.__action = None
evt.accept()
return
if evt.button() == Qt.RightButton and self.__action == 'drag':
for segment in self.__drag_segments:
segment.setShowPlaybackPosition(True)
self.track.repositionSegment(segment, segment.startTime(), segment.endTime())
self.track.updatePlaybackPosition()
self.__resize_segment = None
self.__action = None
evt.accept()
return
if evt.button() == Qt.RightButton and self.__action in ('move-start', 'move-end'):
self.__resize_segment.setShowPlaybackPosition(True)
self.track.repositionSegment(
self.__resize_segment,
self.__resize_segment.startTime(), self.__resize_segment.endTime())
self.__resize_segment.setDuration(self.__resize_segment.duration())
self.track.updatePlaybackPosition()
self.__resize_segment = None
self.__action = None
evt.accept()
return
super().mouseReleaseEvent(evt)
def mouseDoubleClickEvent(self, evt: QtGui.QMouseEvent) -> None:
if evt.button() == Qt.LeftButton and evt.modifiers() == Qt.NoModifier:
seditor = self.track.segmentAt(evt.pos().x())
if seditor is not None:
self.track.setCurrentToolType(tools.ToolType.PIANOROLL_EDIT_EVENTS)
seditor.activate()
evt.accept()
return
super().mouseDoubleClickEvent(evt)
class EditEventsTool(PianoRollToolMixin, tools.ToolBase):
def __init__(self, **kwargs: Any) -> None:
super().__init__(
type=tools.ToolType.PIANOROLL_EDIT_EVENTS,
group=tools.ToolGroup.EDIT,
**kwargs)
def iconName(self) -> str:
return 'pianoroll-edit-events'
def keySequence(self) -> QtGui.QKeySequence:
return QtGui.QKeySequence('e')
def activateSegment(self, segment: 'SegmentEditor') -> None:
segment.setAttribute(Qt.WA_TransparentForMouseEvents, False)
segment.setReadOnly(False)
segment.setEditMode(pianoroll.EditMode.AddInterval)
def activated(self) -> None:
self.track.setShowVelocity(True)
super().activated()
def deactivated(self) -> None:
self.track.setShowVelocity(False)
super().deactivated()
class SelectEventsTool(PianoRollToolMixin, tools.ToolBase):
def __init__(self, **kwargs: Any) -> None:
super().__init__(
type=tools.ToolType.PIANOROLL_SELECT_EVENTS,
group=tools.ToolGroup.EDIT,
**kwargs)
def iconName(self) -> str:
return 'pianoroll-select-events'
def keySequence(self) -> QtGui.QKeySequence:
return QtGui.QKeySequence('s')
def activateSegment(self, segment: 'SegmentEditor') -> None:
segment.setAttribute(Qt.WA_TransparentForMouseEvents, False)
segment.setReadOnly(False)
segment.setEditMode(pianoroll.EditMode.SelectRect)
class EditVelocityTool(PianoRollToolMixin, tools.ToolBase):
def __init__(self, **kwargs: Any) -> None:
super().__init__(
type=tools.ToolType.PIANOROLL_EDIT_VELOCITY,
group=tools.ToolGroup.EDIT,
**kwargs)
def iconName(self) -> str:
return 'pianoroll-edit-velocity'
def keySequence(self) -> QtGui.QKeySequence:
return QtGui.QKeySequence('v')
def activateSegment(self, segment: 'SegmentEditor') -> None:
segment.setAttribute(Qt.WA_TransparentForMouseEvents, False)
segment.setReadOnly(False)
segment.setEditMode(pianoroll.EditMode.EditVelocity)
class SegmentEditor(
slots.SlotContainer, core.AutoCleanupMixin, ui_base.ProjectMixin, QtWidgets.QWidget):
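    """Widget editing a single piano-roll segment.
    It wraps a PianoRollGrid, mirrors the segment's MIDI events into the grid and
    keeps time, duration, selection and playback-position state in sync with the
    owning PianoRollTrackEditor.
    """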
playNotes = QtCore.pyqtSignal(pianoroll.PlayNotes)
xOffset, setXOffset, xOffsetChanged = slots.slot(int, 'xOffset', default=0)
yOffset, setYOffset, yOffsetChanged = slots.slot(int, 'yOffset', default=0)
scaleX, setScaleX, scaleXChanged = slots.slot(
fractions.Fraction, 'scaleX', default=fractions.Fraction(4*80))
gridYSize, setGridYSize, gridYSizeChanged = slots.slot(int, 'gridYSize', default=15)
readOnly, setReadOnly, readOnlyChanged = slots.slot(bool, 'readOnly', default=True)
editMode, setEditMode, editModeChanged = slots.slot(
pianoroll.EditMode, 'editMode', default=pianoroll.EditMode.AddInterval)
currentChannel, setCurrentChannel, currentChannelChanged = slots.slot(
int, 'currentChannel', default=0)
playbackPosition, setPlaybackPosition, playbackPositionChanged = slots.slot(
audioproc.MusicalTime, 'playbackPosition', default=audioproc.MusicalTime(-1, 1))
insertVelocity, setInsertVelocity, insertVelocityChanged = slots.slot(
int, 'insertVelocity', default=100)
selected, setSelected, selectedChanged = slots.slot(bool, 'selected', default=False)
showPlaybackPosition, setShowPlaybackPosition, showPlaybackPositionChanged = slots.slot(
bool, 'showPlaybackPosition', default=True)
def __init__(
self, *,
track_editor: 'PianoRollTrackEditor',
segment_ref: model.PianoRollSegmentRef,
**kwargs: Any
) -> None:
super().__init__(parent=track_editor, **kwargs)
self.setObjectName('segment-editor[%016x]' % segment_ref.id)
self.__listeners = core.ListenerList()
self.add_cleanup_function(self.__listeners.cleanup)
self.__track_editor = track_editor
self.__segment_ref = segment_ref
self.__segment = segment_ref.segment
self.__listeners.add(self.__segment_ref.time_changed.add(self.__timeChanged))
self.__listeners.add(self.__segment.duration_changed.add(self.__durationChanged))
self.__grid = pianoroll.PianoRollGrid(parent=self)
self.__grid.setObjectName('grid')
self.__grid.move(0, 0)
self.__grid.setDuration(self.__segment.duration)
self.__grid.setXOffset(self.xOffset())
self.xOffsetChanged.connect(self.__grid.setXOffset)
self.__grid.setYOffset(self.yOffset())
self.yOffsetChanged.connect(self.__grid.setYOffset)
self.__grid.setGridXSize(self.scaleX())
self.scaleXChanged.connect(self.__grid.setGridXSize)
self.__grid.setGridYSize(self.gridYSize())
self.gridYSizeChanged.connect(self.__grid.setGridYSize)
self.__grid.setReadOnly(self.readOnly())
self.readOnlyChanged.connect(self.__grid.setReadOnly)
self.__grid.setEditMode(self.editMode())
self.editModeChanged.connect(self.__grid.setEditMode)
self.__grid.setCurrentChannel(self.currentChannel())
self.currentChannelChanged.connect(self.__grid.setCurrentChannel)
self.__grid.setInsertVelocity(self.insertVelocity())
self.insertVelocityChanged.connect(self.__grid.setInsertVelocity)
self.__grid.hoverPitchChanged.connect(self.__track_editor.setHoverPitch)
self.__grid.playNotes.connect(self.playNotes.emit)
self.__listeners.add(self.__grid.mutations.add(self.__gridMutations))
self.selectedChanged.connect(self.__selectedChanged)
self.playbackPositionChanged.connect(lambda _: self.__updatePlaybackPosition())
self.showPlaybackPositionChanged.connect(lambda _: self.__updatePlaybackPosition())
self.__ignore_model_mutations = False
self.__obj_to_grid_map = {} # type: Dict[int, int]
self.__grid_to_obj_map = {} # type: Dict[int, int]
for event in self.__segment.events:
event_id = self.__grid.addEvent(event.midi_event)
self.__grid_to_obj_map[event_id] = event
self.__obj_to_grid_map[event.id] = event_id
self.__listeners.add(self.__segment.events_changed.add(self.__eventsChanged))
def __selectedChanged(self, selected: bool) -> None:
if selected:
self.__grid.setOverlayColor(QtGui.QColor(150, 150, 255, 150))
else:
self.__grid.setOverlayColor(QtGui.QColor(0, 0, 0, 0))
def __updatePlaybackPosition(self) -> None:
if self.showPlaybackPosition():
self.__grid.setPlaybackPosition(self.playbackPosition())
else:
self.__grid.setPlaybackPosition(audioproc.MusicalTime(-1, 1))
def __timeChanged(self, change: music.PropertyValueChange[audioproc.MusicalTime]) -> None:
self.__track_editor.repositionSegment(
self, change.new_value, change.new_value + self.__segment.duration)
def __durationChanged(
self, change: music.PropertyValueChange[audioproc.MusicalDuration]) -> None:
self.__track_editor.repositionSegment(
self, self.__segment_ref.time, self.__segment_ref.time + change.new_value)
self.__grid.setDuration(change.new_value)
def __eventsChanged(self, change: music.PropertyListChange[model.PianoRollEvent]) -> None:
if self.__ignore_model_mutations:
return
if isinstance(change, music.PropertyListInsert):
event = change.new_value
grid_id = self.__grid.addEvent(event.midi_event)
self.__grid_to_obj_map[grid_id] = event
self.__obj_to_grid_map[event.id] = grid_id
elif isinstance(change, music.PropertyListDelete):
event = change.old_value
grid_id = self.__obj_to_grid_map[event.id]
self.__grid.removeEvent(grid_id)
del self.__grid_to_obj_map[grid_id]
del self.__obj_to_grid_map[event.id]
else:
raise ValueError(type(change))
def __gridMutations(self, mutations: Sequence[pianoroll.Mutation]) -> None:
self.__ignore_model_mutations = True
try:
with self.project.apply_mutations(
'%s: Edit MIDI events' % self.__track_editor.track.name):
for mutation in mutations:
if isinstance(mutation, pianoroll.AddEvent):
event = self.__segment.add_event(mutation.event)
self.__grid_to_obj_map[mutation.event_id] = event
self.__obj_to_grid_map[event.id] = mutation.event_id
elif isinstance(mutation, pianoroll.RemoveEvent):
event = self.__grid_to_obj_map[mutation.event_id]
self.__segment.remove_event(event)
del self.__grid_to_obj_map[mutation.event_id]
del self.__obj_to_grid_map[event.id]
else:
raise ValueError(type(mutation))
finally:
self.__ignore_model_mutations = False
def segmentRef(self) -> model.PianoRollSegmentRef:
return self.__segment_ref
def segment(self) -> model.PianoRollSegment:
return self.__segment
def startTime(self) -> audioproc.MusicalTime:
return self.__segment_ref.time
def endTime(self) -> audioproc.MusicalTime:
return self.__segment_ref.time + self.__segment.duration
def duration(self) -> audioproc.MusicalDuration:
return self.__segment.duration
def setDuration(self, duration: audioproc.MusicalDuration) -> None:
self.__grid.setDuration(duration)
def activate(self) -> None:
self.__grid.setFocus()
def resizeEvent(self, evt: QtGui.QResizeEvent) -> None:
self.__grid.resize(self.width(), self.height())
super().resizeEvent(evt)
class InsertCursor(QtWidgets.QWidget):
def paintEvent(self, evt: QtGui.QPaintEvent) -> None:
painter = QtGui.QPainter(self)
painter.fillRect(0, 0, 1, self.height(), QtGui.QColor(160, 160, 255))
painter.fillRect(1, 0, 1, self.height(), QtGui.QColor(0, 0, 255))
painter.fillRect(2, 0, 1, self.height(), QtGui.QColor(160, 160, 255))
class PianoRollTrackEditor(
clipboard.CopyableMixin,
time_view_mixin.ContinuousTimeMixin,
base_track_editor.BaseTrackEditor):
yOffset, setYOffset, yOffsetChanged = slots.slot(int, 'yOffset', default=0)
gridYSize, setGridYSize, gridYSizeChanged = slots.slot(int, 'gridYSize', default=15)
effectiveGridYSize, setEffectiveGridYSize, effectiveGridYSizeChanged = slots.slot(
int, 'effectiveGridYSize', default=15)
hoverPitch, setHoverPitch, hoverPitchChanged = slots.slot(int, 'hoverPitch', default=-1)
currentChannel, setCurrentChannel, currentChannelChanged = slots.slot(
int, 'currentChannel', default=0)
showKeys, setShowKeys, showKeysChanged = slots.slot(
        bool, 'showKeys', default=False)
showVelocity, setShowVelocity, showVelocityChanged = slots.slot(
bool, 'showVelocity', default=False)
insertTime, setInsertTime, insertTimeChanged = slots.slot(
audioproc.MusicalTime, 'insertTime', default=audioproc.MusicalTime(-1, 1))
MIN_GRID_Y_SIZE = 2
MAX_GRID_Y_SIZE = 64
def __init__(self, **kwargs: Any) -> None:
self.segments = [] # type: List[SegmentEditor]
self.__segment_map = {} # type: Dict[int, SegmentEditor]
self.__selection = set() # type: Set[int]
self.__last_selected = None # type: SegmentEditor
super().__init__(**kwargs)
self.__session_prefix = 'pianoroll-track:%016x:' % self.track.id
self.__first_show = True
self.__listeners = core.ListenerList()
self.add_cleanup_function(self.__listeners.cleanup)
self.__active_notes = set() # type: Set[Tuple[int, int]]
self.__hover_pitch = -1
self.__keys = pianoroll.PianoKeys(parent=self)
self.__keys.setVisible(self.showKeys())
self.showKeysChanged.connect(self.__keys.setVisible)
self.__keys.setPlayable(True)
self.__keys.setPlaybackChannel(self.currentChannel())
self.currentChannelChanged.connect(self.__keys.setPlaybackChannel)
self.__keys.playNotes.connect(self.playNotes)
self.__keys.setScrollable(True)
self.__keys.setGridYSize(self.effectiveGridYSize())
self.effectiveGridYSizeChanged.connect(self.__keys.setGridYSize)
self.__keys.setYOffset(self.yOffset())
self.__keys.yOffsetChanged.connect(self.setYOffset)
self.yOffsetChanged.connect(self.__keys.setYOffset)
self.hoverPitchChanged.connect(self.__hoverPitchChanged)
self.__y_scrollbar = QtWidgets.QScrollBar(orientation=Qt.Vertical, parent=self)
self.__y_scrollbar.setFixedWidth(16)
self.__y_scrollbar.setRange(0, 500)
self.__y_scrollbar.setSingleStep(20)
self.__y_scrollbar.setPageStep(self.height())
self.__y_scrollbar.setValue(self.yOffset())
self.yOffsetChanged.connect(self.__y_scrollbar.setValue)
self.__y_scrollbar.valueChanged.connect(self.setYOffset)
self.__velocity = int_dial.IntDial(self)
self.__velocity.setFixedSize(48, 48)
self.__velocity.setValue(
self.get_session_value(self.__session_prefix + 'new-interval-velocity', 100))
self.__velocity.valueChanged.connect(functools.partial(
self.set_session_value, self.__session_prefix + 'new-interval-velocity'))
self.__velocity.setRange(1, 127)
label = QtWidgets.QLabel("Velocity")
font = QtGui.QFont(label.font())
font.setPointSizeF(font.pointSizeF() / 1.2)
label.setFont(font)
l = QtWidgets.QVBoxLayout()
l.setContentsMargins(2, 0, 0, 0)
l.setSpacing(0)
l.addWidget(self.__velocity, 0, Qt.AlignHCenter)
l.addWidget(label, 0, Qt.AlignHCenter)
self.__velocity_group = QtWidgets.QWidget(self)
self.__velocity_group.setLayout(l)
self.__velocity_group.setVisible(self.showVelocity())
self.showVelocityChanged.connect(self.__velocity_group.setVisible)
self.__insert_cursor = InsertCursor(self)
self.updateInsertTime()
for segment_ref in self.track.segments:
self.__addSegment(len(self.segments), segment_ref)
self.__listeners.add(self.track.segments_changed.add(self.__segmentsChanged))
self.setAutoScroll(False)
self.setDefaultHeight(240)
self.isCurrentChanged.connect(self.__isCurrentChanged)
self.isCurrentChanged.connect(lambda _: self.__updateShowKeys())
self.xOffsetChanged.connect(lambda _: self.__repositionSegments())
self.xOffsetChanged.connect(lambda _: self.update())
self.scaleXChanged.connect(lambda _: self.__repositionSegments())
self.effectiveGridYSizeChanged.connect(lambda _: self.__updateYScrollbar())
self.effectiveGridYSizeChanged.connect(lambda _: self.__updateShowKeys())
self.playbackPositionChanged.connect(lambda _: self.updatePlaybackPosition())
self.insertTimeChanged.connect(lambda _: self.updateInsertTime())
self.xOffsetChanged.connect(lambda _: self.updateInsertTime())
self.scaleXChanged.connect(lambda _: self.updateInsertTime())
self.gridYSizeChanged.connect(lambda _: self.__updateEffectiveGridSize())
self.zoomChanged.connect(lambda _: self.__updateEffectiveGridSize())
self.__updateEffectiveGridSize()
self.__updateShowKeys()
self.setCurrentChannel(
self.get_session_value(self.__session_prefix + 'current-channel', 0))
self.currentChannelChanged.connect(
functools.partial(self.set_session_value, self.__session_prefix + 'current-channel'))
self.setFocusPolicy(Qt.StrongFocus)
self.__current_channel_action_group = QtWidgets.QActionGroup(self)
self.__current_channel_action_group.setExclusive(True)
self.__current_channel_action_group.triggered.connect(
lambda action: self.setCurrentChannel(action.data()))
self.set_current_channel_actions = [] # type: List[QtWidgets.QAction]
for ch in range(16):
action = QtWidgets.QAction(self)
action.setData(ch)
action.setCheckable(True)
action.setText("Channel %d" % (ch + 1))
pixmap = QtGui.QPixmap(16, 16)
pixmap.fill(pianoroll.PianoRollGrid.channel_base_colors[ch])
icon = QtGui.QIcon(pixmap)
action.setIcon(icon)
action.setShortcut(QtGui.QKeySequence(
['1', '2', '3', '4', '5', '6', '7', '8', '9', '0',
'shift+1', 'shift+2', 'shift+3', 'shift+4', 'shift+5', 'shift+6',
][ch]))
action.setShortcutContext(Qt.WidgetWithChildrenShortcut)
action.setShortcutVisibleInContextMenu(True)
self.__current_channel_action_group.addAction(action)
self.set_current_channel_actions.append(action)
self.addAction(action)
self.set_current_channel_actions[self.currentChannel()].setChecked(True)
self.currentChannelChanged.connect(
lambda ch: self.set_current_channel_actions[ch].setChecked(True))
selected_ids = {
int(segment_id)
for segment_id in self.get_session_value(
self.__session_prefix + 'selected-segments', '').split(',')
if segment_id
}
for segment in self.segments:
if segment.segmentRef().id in selected_ids:
segment.setSelected(True)
self.__selection.add(segment.segmentRef().id)
self.setCanCopy(bool(self.__selection))
self.setCanCut(bool(self.__selection))
@property
def track(self) -> model.PianoRollTrack:
return down_cast(model.PianoRollTrack, super().track)
def createToolBox(self) -> tools.ToolBox:
toolbox = tools.ToolBox(track=self, context=self.context)
toolbox.addTool(ArrangeSegmentsTool)
toolbox.addTool(EditEventsTool)
toolbox.addTool(SelectEventsTool)
toolbox.addTool(EditVelocityTool)
return toolbox
def __addSegment(self, insert_index: int, segment_ref: model.PianoRollSegmentRef) -> None:
seditor = SegmentEditor(track_editor=self, segment_ref=segment_ref, context=self.context)
self.__segment_map[segment_ref.id] = seditor
self.segments.insert(insert_index, seditor)
seditor.setEnabled(self.isCurrent())
seditor.setScaleX(self.scaleX())
self.scaleXChanged.connect(seditor.setScaleX)
seditor.setYOffset(self.yOffset())
self.yOffsetChanged.connect(seditor.setYOffset)
seditor.setGridYSize(self.effectiveGridYSize())
self.effectiveGridYSizeChanged.connect(seditor.setGridYSize)
seditor.setCurrentChannel(self.currentChannel())
self.currentChannelChanged.connect(seditor.setCurrentChannel)
seditor.setInsertVelocity(self.__velocity.value())
self.__velocity.valueChanged.connect(seditor.setInsertVelocity)
seditor.playNotes.connect(self.playNotes)
self.repositionSegment(seditor, seditor.startTime(), seditor.endTime())
seditor.setSelected(segment_ref.id in self.__selection)
down_cast(PianoRollToolMixin, self.currentTool()).activateSegment(seditor)
for segment in self.segments:
segment.raise_()
self.__insert_cursor.raise_()
self.__keys.raise_()
self.__velocity_group.raise_()
self.__y_scrollbar.raise_()
self.update()
def __removeSegment(self, remove_index: int, segment_ref: model.PianoRollSegmentRef) -> None:
seditor = self.segments.pop(remove_index)
del self.__segment_map[seditor.segmentRef().id]
seditor.cleanup()
seditor.hide()
seditor.setParent(None)
self.update()
def __segmentsChanged(
self, change: music.PropertyListChange[model.PianoRollSegmentRef]) -> None:
if isinstance(change, music.PropertyListInsert):
self.__addSegment(change.index, change.new_value)
elif isinstance(change, music.PropertyListDelete):
self.__removeSegment(change.index, change.old_value)
else:
raise TypeError(type(change))
def __hoverPitchChanged(self, pitch: int) -> None:
if self.__hover_pitch >= 0:
self.__keys.noteOff(self.__hover_pitch)
self.__hover_pitch = pitch
if self.__hover_pitch >= 0:
self.__keys.noteOn(self.__hover_pitch)
def __isCurrentChanged(self, is_current: bool) -> None:
for segment in self.segments:
segment.setEnabled(is_current)
def __selectionChanged(self) -> None:
self.set_session_value(
self.__session_prefix + 'selected-segments',
','.join(str(segment_id) for segment_id in sorted(self.__selection)))
self.setCanCut(bool(self.__selection))
self.setCanCopy(bool(self.__selection))
def addToSelection(self, segment: SegmentEditor) -> None:
self.__selection.add(segment.segmentRef().id)
self.__last_selected = segment
segment.setSelected(True)
self.__selectionChanged()
def removeFromSelection(self, segment: SegmentEditor) -> None:
self.__selection.discard(segment.segmentRef().id)
if segment is self.__last_selected:
self.__last_selected = None
segment.setSelected(False)
self.__selectionChanged()
def clearSelection(self) -> None:
for segment in self.selection():
segment.setSelected(False)
self.__selection.clear()
self.__last_selected = None
self.__selectionChanged()
def lastSelected(self) -> SegmentEditor:
return self.__last_selected
def numSelected(self) -> int:
return len(self.__selection)
def selection(self) -> List[SegmentEditor]:
segments = [] # type: List[SegmentEditor]
for segment in self.segments:
if segment.segmentRef().id in self.__selection:
segments.append(segment)
return segments
def copyToClipboard(self) -> music.ClipboardContents:
segments = self.selection()
assert len(segments) > 0
segment_data = self.track.copy_segments(
[segment.segmentRef() for segment in segments])
self.setInsertTime(max(segment.endTime() for segment in segments))
data = music.ClipboardContents()
data.Extensions[clipboard_pb2.pianoroll_segments].CopyFrom(segment_data)
return data
def cutToClipboard(self) -> music.ClipboardContents:
segments = self.selection()
assert len(segments) > 0
with self.project.apply_mutations('%s: cut segment(s)' % self.track.name):
segment_data = self.track.cut_segments(
[segment.segmentRef() for segment in segments])
self.clearSelection()
self.setInsertTime(min(segment.startTime() for segment in segments))
data = music.ClipboardContents()
data.Extensions[clipboard_pb2.pianoroll_segments].CopyFrom(segment_data)
return data
def canPaste(self, data: music.ClipboardContents) -> bool:
return data.HasExtension(clipboard_pb2.pianoroll_segments)
def pasteFromClipboard(self, data: music.ClipboardContents) -> None:
assert data.HasExtension(clipboard_pb2.pianoroll_segments)
segment_data = data.Extensions[clipboard_pb2.pianoroll_segments]
time = self.insertTime()
if time < audioproc.MusicalTime(0, 1):
time = audioproc.MusicalTime(0, 1)
with self.project.apply_mutations('%s: paste segment(s)' % self.track.name):
segments = self.track.paste_segments(segment_data, time)
self.setInsertTime(max(segment.end_time for segment in segments))
self.clearSelection()
for segment in segments:
self.addToSelection(self.__segment_map[segment.id])
def canPasteAsLink(self, data: music.ClipboardContents) -> bool:
if not data.HasExtension(clipboard_pb2.pianoroll_segments):
return False
existing_segments = {segment.id for segment in self.track.segment_heap}
segment_data = data.Extensions[clipboard_pb2.pianoroll_segments]
for serialized_ref in segment_data.segment_refs:
if serialized_ref.segment not in existing_segments:
return False
return True
def pasteAsLinkFromClipboard(self, data: music.ClipboardContents) -> None:
assert data.HasExtension(clipboard_pb2.pianoroll_segments)
segment_data = data.Extensions[clipboard_pb2.pianoroll_segments]
time = self.insertTime()
if time < audioproc.MusicalTime(0, 1):
time = audioproc.MusicalTime(0, 1)
with self.project.apply_mutations('%s: link segment(s)' % self.track.name):
segments = self.track.link_segments(segment_data, time)
self.setInsertTime(max(segment.end_time for segment in segments))
self.clearSelection()
for segment in segments:
self.addToSelection(self.__segment_map[segment.id])
def updatePlaybackPosition(self) -> None:
time = self.playbackPosition()
for segment in self.segments:
if segment.startTime() <= time < segment.endTime():
segment.setPlaybackPosition(time.relative_to(segment.startTime()))
else:
segment.setPlaybackPosition(audioproc.MusicalTime(-1, 1))
def updateInsertTime(self) -> None:
time = self.insertTime()
if time < audioproc.MusicalTime(0, 1):
self.__insert_cursor.hide()
return
x = self.timeToX(time) - self.xOffset() - 1
if not -3 < x <= self.width():
self.__insert_cursor.hide()
return
self.__insert_cursor.setGeometry(x, 0, 3, self.height())
self.__insert_cursor.show()
def gridHeight(self) -> int:
return 128 * self.effectiveGridYSize() + 1
def __updateEffectiveGridSize(self) -> None:
grid_y_size = max(1, int(self.zoom() * self.gridYSize()))
pos = (self.yOffset() + self.height() / 2) / self.gridHeight()
self.setEffectiveGridYSize(grid_y_size)
self.setYOffset(
max(0, min(self.gridHeight() - self.height(),
int(pos * self.gridHeight() - self.height() / 2))))
def __updateShowKeys(self) -> None:
self.setShowKeys(self.isCurrent() and self.effectiveGridYSize() > 3)
def repositionSegment(
self,
segment: SegmentEditor,
start_time: audioproc.MusicalTime,
end_time: audioproc.MusicalTime
) -> None:
x1 = self.timeToX(start_time)
x2 = self.timeToX(end_time) + 1
rect = QtCore.QRect(x1, 0, x2 - x1, self.height())
rect.translate(-self.xOffset(), 0)
clipped_rect = rect.intersected(QtCore.QRect(0, 0, self.width(), self.height()))
if not clipped_rect.isEmpty():
segment.setXOffset(max(0, -rect.left()))
segment.setGeometry(clipped_rect)
segment.show()
else:
segment.hide()
def __repositionSegments(self) -> None:
for segment in self.segments:
self.repositionSegment(segment, segment.startTime(), segment.endTime())
def segmentAt(self, x: int) -> 'SegmentEditor':
return self.segmentAtTime(self.xToTime(x))
def segmentAtTime(self, time: audioproc.MusicalTime) -> 'SegmentEditor':
for seditor in reversed(self.segments):
if seditor.startTime() <= time < seditor.endTime():
return seditor
return None
def __updateYScrollbar(self) -> None:
self.__y_scrollbar.setRange(0, max(0, self.gridHeight() - self.height()))
self.__y_scrollbar.setPageStep(self.height())
def resizeEvent(self, evt: QtGui.QResizeEvent) -> None:
super().resizeEvent(evt)
self.__keys.move(0, 0)
self.__keys.resize(self.__keys.width(), self.height())
self.__velocity_group.move(self.__keys.width(), 0)
pos = self.yOffset() + evt.oldSize().height() // 2
self.setYOffset(max(0, min(
max(0, self.gridHeight() - self.height()),
pos - evt.size().height() // 2)))
self.__y_scrollbar.move(self.width() - self.__y_scrollbar.width(), 0)
self.__y_scrollbar.resize(self.__y_scrollbar.width(), self.height())
self.__updateYScrollbar()
self.__repositionSegments()
def showEvent(self, evt: QtGui.QShowEvent) -> None:
super().showEvent(evt)
if self.__first_show:
self.setGridYSize(self.get_session_value(self.__session_prefix + 'grid-y-size', 15))
self.gridYSizeChanged.connect(
functools.partial(self.set_session_value, self.__session_prefix + 'grid-y-size'))
default_y_offset = max(0, min(self.gridHeight() - self.height(),
self.gridHeight() - self.height()) // 2)
self.setYOffset(self.get_session_value(
self.__session_prefix + 'y-offset', default_y_offset))
self.yOffsetChanged.connect(
functools.partial(self.set_session_value, self.__session_prefix + 'y-offset'))
self.__first_show = False
def wheelEvent(self, evt: QtGui.QWheelEvent) -> None:
if evt.modifiers() == Qt.NoModifier:
offset = self.yOffset()
if evt.angleDelta().y() > 0:
offset -= 3 * self.effectiveGridYSize()
elif evt.angleDelta().y() < 0:
offset += 3 * self.effectiveGridYSize()
offset = min(self.gridHeight() - self.height(), offset)
offset = max(0, offset)
if offset != self.yOffset():
self.setYOffset(offset)
evt.accept()
return
super().wheelEvent(evt)
def _paint(self, painter: QtGui.QPainter, paint_rect: QtCore.QRect) -> None:
self.renderTimeGrid(painter, paint_rect)
def playNotes(self, play_notes: pianoroll.PlayNotes) -> None:
if self.playerState().playerID():
for channel, pitch in play_notes.note_off:
if (channel, pitch) in self.__active_notes:
self.call_async(self.project_view.sendNodeMessage(
processor_messages.note_off_event(
self.track.pipeline_node_id, channel, pitch)))
self.__active_notes.discard((channel, pitch))
if play_notes.all_notes_off:
for channel, pitch in self.__active_notes:
self.call_async(self.project_view.sendNodeMessage(
processor_messages.note_off_event(
self.track.pipeline_node_id, channel, pitch)))
self.__active_notes.clear()
for channel, pitch in play_notes.note_on:
self.call_async(self.project_view.sendNodeMessage(
processor_messages.note_on_event(
self.track.pipeline_node_id, channel, pitch, 100)))
self.__active_notes.add((channel, pitch))
|
odahoda/noisicaa
|
noisicaa/builtin_nodes/pianoroll_track/track_ui.py
|
Python
|
gpl-2.0
| 53,058 | 0.001809 |
"""Accessors for an app's local configuration
The local configuration is loaded from a YAML file. The default
configuration is "local.yaml", in the app's root.
An app's local configuration can change depending on the current
environment, i.e., development and production.
For example,
pirate: ninja
robot:
dev: zombie
prod: monkey
In development, this app's local config will be,
{'pirate': 'ninja', 'robot': 'zombie'}
In production, the app's local config will be,
{'pirate': 'ninja', 'robot': 'monkey'}
"""
import yaml
import os
from env import branch
from google.appengine.api import memcache
def config(filename='local.yaml'):
"""Return the config (dict) for the current environment."""
cachekey = 'config:%s' % filename
# check memcache
try:
config = memcache.get(cachekey)
if config:
return config
except AssertionError: pass
if os.path.exists(filename):
config = yaml.load(file(filename).read())
# branch each value by environment
config = dict([(key, branch(value)) for key, value in config.iteritems()])
try:
memcache.set(cachekey, config)
except AssertionError: pass
return config
return dict()
def config_get(key):
"""Return the value for the given key from the default config."""
return config()[key]
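# --- Illustrative usage below: a hedged sketch, not part of the original module ---
# Assuming a local.yaml like the one in the module docstring sits in the app
# root, config() returns the environment-branched dict and config_get() reads
# a single key from it.
if __name__ == '__main__':
    print(config())              # e.g. {'pirate': 'ninja', 'robot': 'zombie'} in development
    print(config_get('pirate'))  # -> 'ninja'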
|
tantalor/megaera
|
megaera/local.py
|
Python
|
mit
| 1,305 | 0.010728 |
"""
=========================================================================
Non-parametric between conditions cluster statistic on single trial power
=========================================================================
This script shows how to compare clusters in time-frequency
power estimates between conditions. It uses a non-parametric
statistical procedure based on permutations and cluster
level statistics.
The procedure consists of:
  - extracting epochs for 2 conditions
  - computing single-trial power estimates
  - baseline-correcting the power estimates (power ratios)
  - computing stats to see if the power estimates are significantly different
    between conditions.
"""
# Authors: Alexandre Gramfort <alexandre.gramfort@telecom-paristech.fr>
#
# License: BSD (3-clause)
import numpy as np
import matplotlib.pyplot as plt
import mne
from mne.time_frequency import tfr_morlet
from mne.stats import permutation_cluster_test
from mne.datasets import sample
print(__doc__)
###############################################################################
# Set parameters
data_path = sample.data_path()
raw_fname = data_path + '/MEG/sample/sample_audvis_raw.fif'
event_fname = data_path + '/MEG/sample/sample_audvis_raw-eve.fif'
tmin, tmax = -0.2, 0.5
# Setup for reading the raw data
raw = mne.io.read_raw_fif(raw_fname)
events = mne.read_events(event_fname)
include = []
raw.info['bads'] += ['MEG 2443', 'EEG 053'] # bads + 2 more
# picks MEG gradiometers
picks = mne.pick_types(raw.info, meg='grad', eeg=False, eog=True,
stim=False, include=include, exclude='bads')
ch_name = 'MEG 1332' # restrict example to one channel
# Load condition 1
reject = dict(grad=4000e-13, eog=150e-6)
event_id = 1
epochs_condition_1 = mne.Epochs(raw, events, event_id, tmin, tmax,
picks=picks, baseline=(None, 0),
reject=reject, preload=True)
epochs_condition_1.pick_channels([ch_name])
# Load condition 2
event_id = 2
epochs_condition_2 = mne.Epochs(raw, events, event_id, tmin, tmax,
picks=picks, baseline=(None, 0),
reject=reject, preload=True)
epochs_condition_2.pick_channels([ch_name])
###############################################################################
# Factor to downsample the temporal dimension of the TFR computed by
# tfr_morlet. Decimation occurs after frequency decomposition and can
# be used to reduce memory usage (and possibly computational time of downstream
# operations such as nonparametric statistics) if you don't need high
# spectrotemporal resolution.
decim = 2
freqs = np.arange(7, 30, 3) # define frequencies of interest
n_cycles = 1.5
tfr_epochs_1 = tfr_morlet(epochs_condition_1, freqs,
n_cycles=n_cycles, decim=decim,
return_itc=False, average=False)
tfr_epochs_2 = tfr_morlet(epochs_condition_2, freqs,
n_cycles=n_cycles, decim=decim,
return_itc=False, average=False)
tfr_epochs_1.apply_baseline(mode='ratio', baseline=(None, 0))
tfr_epochs_2.apply_baseline(mode='ratio', baseline=(None, 0))
epochs_power_1 = tfr_epochs_1.data[:, 0, :, :] # only 1 channel as 3D matrix
epochs_power_2 = tfr_epochs_2.data[:, 0, :, :] # only 1 channel as 3D matrix
###############################################################################
# Compute statistic
# -----------------
threshold = 6.0
T_obs, clusters, cluster_p_values, H0 = \
permutation_cluster_test([epochs_power_1, epochs_power_2],
n_permutations=100, threshold=threshold, tail=0)
###############################################################################
# View time-frequency plots
# -------------------------
times = 1e3 * epochs_condition_1.times # change unit to ms
evoked_condition_1 = epochs_condition_1.average()
evoked_condition_2 = epochs_condition_2.average()
plt.figure()
plt.subplots_adjust(0.12, 0.08, 0.96, 0.94, 0.2, 0.43)
plt.subplot(2, 1, 1)
# Create new stats image with only significant clusters
T_obs_plot = np.nan * np.ones_like(T_obs)
for c, p_val in zip(clusters, cluster_p_values):
if p_val <= 0.05:
T_obs_plot[c] = T_obs[c]
plt.imshow(T_obs,
extent=[times[0], times[-1], freqs[0], freqs[-1]],
aspect='auto', origin='lower', cmap='gray')
plt.imshow(T_obs_plot,
extent=[times[0], times[-1], freqs[0], freqs[-1]],
aspect='auto', origin='lower', cmap='RdBu_r')
plt.xlabel('Time (ms)')
plt.ylabel('Frequency (Hz)')
plt.title('Induced power (%s)' % ch_name)
ax2 = plt.subplot(2, 1, 2)
evoked_contrast = mne.combine_evoked([evoked_condition_1, evoked_condition_2],
weights=[1, -1])
evoked_contrast.plot(axes=ax2, time_unit='s')
plt.show()
|
adykstra/mne-python
|
tutorials/stats-sensor-space/plot_stats_cluster_time_frequency.py
|
Python
|
bsd-3-clause
| 4,873 | 0 |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import logging
from .render import render_tablature
__all__ = ['render_tablature']
|
pignacio/chorddb
|
chorddb/terminal/__init__.py
|
Python
|
gpl-3.0
| 190 | 0 |
import functools
import os
import numpy as np
import pygmo as pg
from simulation import simulate, statistical
from solving import value_function_list
from util import constants as cs, param_type
class HumanCapitalSearchProblem(object):
def fitness(self, params_nparray, gradient_eval=False):
params_paramtype = param_type.transform_array_to_paramstype(params_nparray)
optimal_value_functions = value_function_list.backwards_iterate(params_paramtype)
# if display_plots:
# plotting.plot_valuefunctions(optimal_value_functions)
panel_data = simulate.simulate_data(params_paramtype, optimal_value_functions)
# if save_panel_data:
# np.savetxt('simulated_data.csv', panel_data.T, delimiter=',')
simulated_coefficients, _, _, _ = statistical.calculate_coefficients(panel_data)
criterion_value = calculate_criterion(
simulated_coefficients, cs.data_coeffs, cs.weights)
if not gradient_eval:
print('within_val {0}:'.format(os.getpid()), repr(params_nparray), repr(np.array([criterion_value])))
return np.array([criterion_value])
def get_name(self):
return 'Human Capital Search Problem'
def get_bounds(self):
lowerbounds, upperbounds, _ = param_type.gen_initial_point()
return (lowerbounds, upperbounds)
def gradient(self, x):
grad_fitness = functools.partial(self.fitness, gradient_eval=True)
return pg.estimate_gradient(grad_fitness, x)
def calculate_criterion(simulate_coefficients,
data_coefficients, weights):
cutoff = 155
try:
squared_coeff_diffs = (data_coefficients[:cutoff]
- simulate_coefficients[:cutoff])**2
return np.sum(squared_coeff_diffs * weights[:cutoff]**2)
except ValueError:
return 1000000.0
return None
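# Illustrative check of the criterion above (toy numbers, not the model's data):
# with data_coefficients = [1.0, 2.0], simulate_coefficients = [1.5, 1.0] and
# weights = [1.0, 2.0], the weighted squared differences sum to
# (1.0 - 1.5)**2 * 1.0**2 + (2.0 - 1.0)**2 * 2.0**2 = 0.25 + 4.0 = 4.25.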
def convert_hours_to_seconds(hours):
seconds = 60 * 60 * hours
return int(seconds)
if __name__ == '__main__':
x0 = np.array([ 3.50002199e-03, 6.51848176e-03, 1.51129690e-02,
5.44408669e-01, 4.00993663e-01, 6.55844833e-02,
6.07802957e+00, 1.60167206e+00, 5.01869425e+00,
4.72961572e+00, 9.38466921e+00, 5.05588161e+00,
8.19033636e+00, 2.75929445e+00, 2.85635433e+00,
1.75737616e+00, 7.80585097e-01, 7.75955256e-01,
7.84082645e-01, 7.84472240e-01, 7.88595353e-01,
7.56837829e-01, 7.95899147e-01, 1.00607895e-01,
9.54173933e-02, 1.01830970e-01, 2.35455817e-01,
4.34618429e-01, 5.05177886e-01, 4.97754216e-01,
1.33424724e+00, 1.33335481e+00, 5.14048248e-01,
5.31256998e-01, 2.72639929e-02, 2.06973106e-01,
7.44039604e-01, 3.35103286e-01, 8.64058736e-01,
2.01314260e-01, 7.48161453e-01, 1.98923666e-01,
7.49378943e-01, 1.96135026e-01, 7.52297629e-01,
2.03848678e-01, 7.48561095e-01, 1.98618489e-01,
9.17364498e-01, 1.97851509e-01, 7.42171336e-01,
1.98302575e-01, 7.41711271e-01]) #array([ 149.86359966])
hc_problem = HumanCapitalSearchProblem()
hc_problem.fitness(x0)
np.set_printoptions(threshold=10000)
np.random.seed(128)
prob = pg.problem(hc_problem)
pop = pg.population(prob=prob, size=0)
pop.push_back(x0, f=[149.86359966])
archi = pg.archipelago(pop=pop)
methods = ["bobyqa", 'neldermead', 'praxis', "newuoa_bound", "sbplx"]
algo_list = []
for method in methods:
nl= pg.nlopt(method)
nl.stopval = 1e-8
nl.ftol_rel = 1e-8
nl.xtol_rel = 1e-8
nl.xtol_abs = 1e-8
nl.ftol_abs = 1e-8
nl.maxtime = convert_hours_to_seconds(12)
algo = pg.algorithm(nl)
archi.push_back(algo=algo, pop=pop)
print(archi)
archi.evolve()
import time
while archi.status is pg.core._evolve_status.busy:
time.sleep(120)
print(archi)
archi.wait_check()
print(archi.get_champions_x())
print(archi.get_champions_f())
|
mishpat/human-capital-search
|
humancapitalsearch.py
|
Python
|
mit
| 4,080 | 0.007108 |
"""Sourcecounts
s is flux in Jy and n is the number of sources brighter than s per steradian
"""
import numpy as N
s=N.array([ 9.9999997e-05, 0.00010328281, 0.00010667340, 0.00011017529, 0.00011379215, 0.00011752774, 0.00012138595, \
0.00012537083, 0.00012948645, 0.00013373725, 0.00013812761, 0.00014266209, 0.00014734542, 0.00015218249, 0.00015717837, \
0.00016233824, 0.00016766752, 0.00017317173, 0.00017885664, 0.00018472817, 0.00019079246, 0.00019705582, 0.00020352470, \
0.00021020604, 0.00021710672, 0.00022423393, 0.00023159511, 0.00023919797, 0.00024705040, 0.00025516062, 0.00026353705, \
0.00027218851, 0.00028112394, 0.00029035273, 0.00029988447, 0.00030972913, 0.00031989696, 0.00033039862, 0.00034124497, \
0.00035244724, 0.00036401744, 0.00037596744, 0.00038830977, 0.00040105727, 0.00041422324, 0.00042782145, 0.00044186602, \
0.00045637166, 0.00047135353, 0.00048682719, 0.00050280854, 0.00051931484, 0.00053636299, 0.00055397081, 0.00057215663, \
0.00059093948, 0.00061033899, 0.00063037529, 0.00065106933, 0.00067244272, 0.00069451780, 0.00071731757, 0.00074086577, \
0.00076518703, 0.00079030672, 0.00081625103, 0.00084304705, 0.00087072275, 0.00089930650, 0.00092882907, 0.00095932081, \
0.00099081360, 0.0010233402, 0.0010569346, 0.0010916317, 0.0011274681, 0.0011644807, 0.0012027085, 0.0012421905, \
0.0012829694, 0.0013250869, 0.0013685870, 0.0014135153, 0.0014599183, 0.0015078448, 0.0015573446, 0.0016084694, \
0.0016612725, 0.0017158090, 0.0017721358, 0.0018303118, 0.0018903976, 0.0019524558, 0.0020165513, 0.0020827511, \
0.0021511239, 0.0022217415, 0.0022946771, 0.0023700071, 0.0024478103, 0.0025281659, 0.0026111610, 0.0026968806, \
0.0027854142, 0.0028768543, 0.0029712960, 0.0030688383, 0.0031695808, 0.0032736324, 0.0033810998, 0.0034920950, \
0.0036067341, 0.0037251366, 0.0038474260, 0.0039737299, 0.0041041803, 0.0042389128, 0.0043780687, 0.0045217923, \
0.0046702349, 0.0048235501, 0.0049818982, 0.0051454445, 0.0053143604, 0.0054888208, 0.0056690089, 0.0058551119, \
0.0060473247, 0.0062458473, 0.0064508831, 0.0066626542, 0.0068813767, 0.0071072797, 0.0073405989, 0.0075815772, \
0.0078304661, 0.0080875214, 0.0083530201, 0.0086272340, 0.0089104511, 0.0092029646, 0.0095050810, 0.0098171150, \
0.010139393, 0.010472251, 0.010816036, 0.011171106, 0.011537833, 0.011916599, 0.012307799, 0.012711842, 0.013129148, \
0.013560154, 0.014005309, 0.014465077, 0.014939931, 0.015430382, 0.015936933, 0.016460113, 0.017000468, 0.017558562, \
0.018134978, 0.018730316, 0.019345198, 0.019980265, 0.020636180, 0.021313628, 0.022013316, 0.022735972, 0.023482339, \
0.024253221, 0.025049411, 0.025871737, 0.026721058, 0.027598262, 0.028504262, 0.029440004, 0.030406466, 0.031404655, \
0.032435611, 0.033500414, 0.034600168, 0.035736032, 0.036909178, 0.038120817, 0.039372254, 0.040664773, 0.041999724, \
0.043378498, 0.044802535, 0.046273317, 0.047792386, 0.049361322, 0.050981764, 0.052655403, 0.054383982, 0.056169309, \
0.058013245, 0.059917714, 0.061884668, 0.063916229, 0.066014484, 0.068181612, 0.070419893, 0.072731644, 0.075119294, \
0.077585325, 0.080132306, 0.082762904, 0.085479856, 0.088286005, 0.091184273, 0.094177686, 0.097269312, 0.10046248, \
0.10376048, 0.10716674, 0.11068483, 0.11431842, 0.11807128, 0.12194734, 0.12595065, 0.13008538, 0.13435584, 0.13876650, \
0.14332195, 0.14802694, 0.15288639, 0.15790530, 0.16308904, 0.16844295, 0.17397262, 0.17968382, 0.18558250, 0.19167484, \
0.19796717, 0.20446607, 0.21117832, 0.21811092, 0.22527111, 0.23266634, 0.24030435, 0.24819310, 0.25634068, 0.26475587, \
0.27344733, 0.28242409, 0.29169556, 0.30127138, 0.31116158, 0.32137644, 0.33192664, 0.34282318, 0.35407743, 0.36570114, \
0.37770644, 0.39010584, 0.40291208, 0.41613895, 0.42980003, 0.44390959, 0.45848233, 0.47353345, 0.48907870, 0.50513422, \
0.52171689, 0.53884387, 0.55653316, 0.57480311, 0.59367281, 0.61316204, 0.63329101, 0.65408045, 0.67555267, 0.69772983, \
0.72063506, 0.74429214, 0.76872587, 0.79396176, 0.82002604, 0.84694600, 0.87474972, 0.90346611, 0.93312526, 0.96375805, \
0.99539644, 1.0280730, 1.0618227, 1.0966804, 1.1326823, 1.1698662, 1.2082708, 1.2479361, 1.2889036, 1.3312160, 1.3749173, \
1.4200534, 1.4666711, 1.5148191, 1.5645479, 1.6159091, 1.6689565, 1.7237452, 1.7803327, 1.8387777, 1.8991414, 1.9614867, \
2.0258787, 2.0923846, 2.1610713, 2.2320154, 2.3052883, 2.3809667, 2.4591296, 2.5398583, 2.6232371, 2.7093532, 2.7982962, \
2.8901591, 2.9850378, 3.0830312, 3.1842413, 3.2887743, 3.3967385, 3.5082474, 3.6234167, 3.7423668, 3.8652217, 3.9921098, \
4.1231637, 4.2585196, 4.3983188, 4.5427074, 4.6918364, 4.8458605, 5.0049415, 5.1692443, 5.3389411, 5.5142026, 5.6952238, \
5.8821878, 6.0752892, 6.2747297, 6.4807177, 6.6934676, 6.9132018, 7.1401496, 7.3745475, 7.6166406, 7.8666806, 8.1249294, \
8.3916559, 8.6671391, 8.9516649, 9.2455320, 9.5490456, 9.8625231, 10.186292, 10.520689, 10.866064, 11.222776, 11.591200, \
11.971718, 12.364727, 12.770638, 13.189876, 13.622874, 14.070073, 14.531968, 15.009026, 15.501744, 16.010639, 16.536238, \
17.079092, 17.639769, 18.218849, 18.816940, 19.434666, 20.072670, 20.731619, 21.412201, 22.115124, 22.841122, 23.590954, \
24.365402, 25.165274, 25.991404, 26.844654, 27.725914, 28.636105, 29.576176, 30.547108, 31.549913, 32.585640, 33.655365, \
34.760208, 35.901321, 37.079857, 38.297119, 39.554344, 40.852840, 42.193966, 43.579117, 45.009739, 46.487324, 48.013420, \
49.589611, 51.217548, 52.898926, 54.635498, 56.429081, 58.281548, 60.194820, 62.170906, 64.211861, 66.319824, 68.496979, \
70.745613, 73.068062, 75.466751, 77.944183, 80.502945, 83.145714, 85.875237, 88.694359, 91.606033, 94.613190, 97.719162, \
100.92711, 104.24036, 107.66238, 111.19673, 114.84712, 118.61734, 122.51133, 126.53315, 130.68700, 134.97722, 139.40826, \
143.98479, 148.71155, 153.59348, 158.63567, 163.84338, 169.22206, 174.77731, 180.51492, 186.44090, 192.56142, 198.88284, \
205.41180, 212.15511, 219.11977, 226.31306, 233.74251, 241.41557, 249.34081, 257.52621, 265.98032, 274.71198, 283.73026, \
293.04462, 302.66473, 312.60065, 322.86276, 333.46173, 344.40869, 355.71500, 367.39246, 379.45328, 391.91003, 404.77573, \
418.06375, 431.78802, 445.96283, 460.60297, 475.72372, 491.34085, 507.47067, 524.13000, 541.33624, 559.10730, 577.46179, \
596.41876, 615.99811, 636.21954, 657.10541, 678.67700, 700.95673, 723.96783, 747.73438, 772.28113, 797.63373, 823.81854, \
850.86298, 878.79529, 907.64453, 937.44080, 968.21527, 1000.0000])
n=N.array([ 3.7709775e+10, 3.6065767e+10, 3.4493432e+10, 3.2989649e+10, 3.1551425e+10, 3.0175900e+10, \
2.8860342e+10, 2.7602137e+10, \
2.6398808e+10, 2.5247922e+10, 2.4147204e+10, 2.3094475e+10, 2.2087643e+10, 2.1124704e+10, 2.0203747e+10, 1.9322939e+10, \
1.8480527e+10, 1.7674846e+10, 1.6904289e+10, 1.6167328e+10, 1.5462490e+10, 1.4788384e+10, 1.4143675e+10, 1.3527065e+10, \
1.2937335e+10, 1.2373316e+10, 1.1833886e+10, 1.1317971e+10, 1.0824550e+10, 1.0352640e+10, 9.9013028e+09, 9.4696428e+09, \
9.0568028e+09, 8.6619587e+09, 8.2843305e+09, 7.9231647e+09, 7.5777439e+09, 7.2473825e+09, 6.9314243e+09, 6.6292444e+09, \
6.3402342e+09, 6.0638244e+09, 5.7994639e+09, 5.5466291e+09, 5.3048166e+09, 5.0735457e+09, 4.8523587e+09, 4.6408141e+09, \
4.4384916e+09, 4.2449897e+09, 4.0599278e+09, 3.8829297e+09, 3.7136481e+09, 3.5517468e+09, 3.3969042e+09, 3.2488120e+09, \
3.1071754e+09, 2.9717143e+09, 2.8421588e+09, 2.7182515e+09, 2.5997458e+09, 2.4864064e+09, 2.3780086e+09, 2.2743360e+09, \
2.1751834e+09, 2.0803535e+09, 1.9896579e+09, 1.9029162e+09, 1.8199575e+09, 1.7406141e+09, 1.6647299e+09, 1.5921536e+09, \
1.5227420e+09, 1.4563558e+09, 1.3928644e+09, 1.3321405e+09, 1.2740643e+09, 1.2185199e+09, 1.1653979e+09, 1.1145907e+09, \
1.0659987e+09, 1.0195252e+09, 9.7507763e+08, 9.3256806e+08, 8.9191149e+08, 8.5302746e+08, 8.1583853e+08, 7.8027117e+08, \
7.4625421e+08, 7.1372032e+08, 6.8260474e+08, 6.5284576e+08, 6.2438406e+08, 5.9716326e+08, 5.7112922e+08, 5.4623008e+08, \
5.2241651e+08, 4.9964106e+08, 4.7785866e+08, 4.5702573e+08, 4.3710147e+08, 4.1804544e+08, 3.9982026e+08, 3.8238954e+08, \
3.6571878e+08, 3.4977482e+08, 3.3452595e+08, 3.1994208e+08, 3.0599382e+08, 2.9265363e+08, 2.7989501e+08, 2.6769266e+08, \
2.5602224e+08, 2.4486062e+08, 2.3418562e+08, 2.2397598e+08, 2.1421147e+08, 2.0487264e+08, 1.9594099e+08, 1.8739867e+08, \
1.7922877e+08, 1.7141509e+08, 1.6394203e+08, 1.5679477e+08, 1.4995909e+08, 1.4342146e+08, 1.3716880e+08, 1.3118874e+08, \
1.2546940e+08, 1.1999951e+08, 1.1476796e+08, 1.0976452e+08, 1.0497919e+08, 1.0040248e+08, 96025304., 91838968., \
87835200., 84005912., 80343576., 76840880., 73490912., 70286984., 67222736., 64292076., 61489172., 58808476., \
56244648., 53792588., 51447432., 49204512., 47059380., 45007768., 43045600., 41168972., 39374160., 37657620., \
36015888., 34445724., 32944024., 31507790., 30134168., 28820430., 27563966., 26362278., 25212982., 24113790., \
23062518., 22057078., 21095472., 20175804., 19296216., 18454972., 17650402., 16880912., 16144966., 15441105., \
14767931., 14124105., 13508346., 12919433., 12356192., 11817510., 11302309., 10809571., 10338324., 9887611.0, \
9456547.0, 9044277.0, 8649980.0, 8272873.0, 7912207.0, 7567264.5, 7237360.0, 6921837.5, 6620071.0, 6331461.0, \
6055433.0, 5791438.5, 5538953.0, 5297479.5, 5066528.5, 4845647.0, 4634395.5, 4432353.0, 4239119.0, 4054309.2, \
3877556.2, 3708509.5, 3546832.0, 3392203.5, 3244316.0, 3102876.0, 2967602.0, 2838228.0, 2729847.5, 2624870.5, \
2524750.2, 2429229.0, 2338061.0, 2251017.0, 2167880.5, 2088448.4, 2012529.5, 1939942.6, 1870518.1, 1804095.8, \
1740523.8, 1679660.2, 1621370.6, 1565526.9, 1512157.9, 1460823.1, 1411600.0, 1364385.6, 1319083.4, 1275602.0, \
1233855.0, 1193760.2, 1155241.0, 1118223.9, 1082639.1, 1048421.7, 1015509.1, 983842.56, 953365.38, 924024.94, \
895770.81, 868555.00, 842332.44, 817144.38, 792764.06, 769256.56, 746584.44, 724711.62, 703604.50, 683230.62, \
663559.44, 644562.06, 626210.06, 608477.38, 591338.81, 574770.50, 558749.50, 543254.06, 528263.38, 513757.69, \
499717.94, 486126.28, 473019.56, 460262.88, 447906.47, 435935.03, 424334.22, 413089.53, 402187.88, 391616.53, \
381363.44, 371416.84, 361765.66, 352399.28, 343307.47, 334480.50, 325909.12, 317584.28, 309497.50, 301640.47, \
294005.56, 286584.88, 279402.72, 272383.66, 265559.03, 258922.31, 252467.16, 246187.56, 240077.75, 234132.17, \
228345.47, 222712.61, 217228.62, 211888.83, 206688.67, 201623.84, 196690.11, 191883.45, 187200.03, 182636.05, \
178187.92, 173852.23, 169645.80, 165521.64, 161500.73, 157580.05, 153756.70, 150027.80, 146390.59, 142842.50, \
139380.91, 136003.44, 132707.70, 129491.38, 126352.36, 123288.48, 120297.67, 117378.02, 114527.58, 111744.49, \
109027.01, 106373.41, 103781.99, 101262.79, 98789.008, 96373.047, 94013.438, 91708.680, 89457.398, 87258.211, \
85109.805, 83010.930, 80960.391, 78956.891, 76999.320, 75086.586, 73217.594, 71391.312, 69606.703, 67862.789, \
66158.609, 64493.254, 62865.801, 61275.387, 59728.344, 58208.258, 56722.930, 55271.520, 53853.266, 52467.410, \
51113.223, 49789.961, 48496.941, 47233.500, 45998.977, 44792.723, 43614.117, 42462.578, 41337.504, 40238.328, \
39164.488, 38115.469, 37090.699, 36089.668, 35111.887, 34156.848, 33228.004, 32316.406, 31426.256, 30557.111, \
29708.504, 28880.010, 28071.193, 27281.650, 26510.949, 25758.721, 25024.562, 24308.115, 23608.990, 22926.832, \
22261.293, 21612.029, 20978.699, 20360.971, 19758.527, 19171.037, 18598.217, 18039.732, 17495.309, 16966.436, \
16448.930, 15944.685, 15453.382, 14974.762, 14508.550, 14054.481, 13612.296, 13181.744, 12762.577, 12354.543, \
11957.408, 11570.935, 11194.892, 10829.060, 10473.206, 10127.119, 9790.5850, 9463.3916, 9145.3301, 8836.2021, \
8535.8027, 8243.9434, 7961.2437, 7685.7393, 7418.2314, 7158.5264, 6906.4458, 6661.8105, 6424.4482, 6194.1807, \
5970.8477, 5754.2710, 5544.2944, 5340.7573, 5143.5054, 4952.3828, 4767.2373, 4587.9229, 4414.2944, 4246.2085, \
4083.5212, 3926.0977, 3773.8032, 3626.5049, 3484.0715, 3346.3752, 3213.5771, 3084.9297, 2960.6602, 2840.6472, \
2724.7744, 2612.9258, 2504.9900, 2400.8569, 2300.4167, 2203.5654, 2110.1995, 2020.2166, 1933.5188, 1850.0120, \
1769.5944, 1692.1769, 1617.6688, 1545.9810, 1477.0260, 1410.7202, 1346.9801, 1285.7245, 1226.8739, 1170.3518, \
1116.1688, 1064.0614, 1014.0633, 966.10516, 920.11682, 876.03217, 833.78497, 793.31201, 754.55164, 717.44275, \
681.92755, 647.94806, 615.44952, 584.37762, 554.67981, 526.30505, 499.20432, 473.32895, 448.63220, 425.07007, \
402.59656, 381.16980, 360.74893, 341.31854, 322.78470, 305.14084, 288.35059, 272.37881, 257.19098, 242.75432, \
229.03673, 216.00752, 203.63695, 191.89633])
s=s/1000.0
|
lofar-astron/PyBDSF
|
bdsf/sourcecounts.py
|
Python
|
gpl-3.0
| 12,587 | 0.025582 |
from __future__ import print_function, division, absolute_import
from fontTools.misc.py23 import *
def _makeunicodes(f):
import re
lines = iter(f.readlines())
unicodes = {}
for line in lines:
if not line: continue
num, name = line.split(';')[:2]
if name[0] == '<': continue # "<control>", etc.
num = int(num, 16)
unicodes[num] = name
return unicodes
class _UnicodeCustom(object):
def __init__(self, f):
if isinstance(f, basestring):
f = open(f)
self.codes = _makeunicodes(f)
def __getitem__(self, charCode):
try:
return self.codes[charCode]
except KeyError:
return "????"
class _UnicodeBuiltin(object):
def __getitem__(self, charCode):
try:
# use unicodedata backport to python2, if available:
# https://github.com/mikekap/unicodedata2
import unicodedata2 as unicodedata
except ImportError:
import unicodedata
try:
return unicodedata.name(unichr(charCode))
except ValueError:
return "????"
Unicode = _UnicodeBuiltin()
def setUnicodeData(f):
global Unicode
Unicode = _UnicodeCustom(f)
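# --- Illustrative usage below: a hedged sketch, not part of the original module ---
if __name__ == '__main__':
    # Default lookup goes through the unicodedata backend.
    print(Unicode[0x41])  # -> 'LATIN CAPITAL LETTER A'
    # A custom UnicodeData.txt-style file (code;name;... lines) can be swapped in:
    # setUnicodeData('UnicodeData.txt')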
|
MitchTalmadge/Emoji-Tools
|
src/main/resources/PythonScripts/fontTools/unicode.py
|
Python
|
gpl-3.0
| 1,057 | 0.037843 |
# @file get_svn_revision.py
# Fetch the subversion revision number from the repository
#
# @copyright (c) 2006,2014 CSIRO
# Australia Telescope National Facility (ATNF)
# Commonwealth Scientific and Industrial Research Organisation (CSIRO)
# PO Box 76, Epping NSW 1710, Australia
# atnf-enquiries@csiro.au
#
# This file is part of the ASKAP software distribution.
#
# The ASKAP software distribution is free software: you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the License
# or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.
#
# @author Robert Crida <robert.crida@ska.ac.za>
#
from runcmd import runcmd
from get_vcs_type import is_git
import os
def get_svn_revision():
try:
(stdout, stderr, returncode) = runcmd('svnversion', shell=True)
if returncode == 0 and stdout and stdout[0].isdigit():
return stdout.rstrip()
else:
if is_git():
return get_git_revision()
return "unknown"
except:
return "unknown"
def get_git_revision():
try:
(stdout, stderr, returncode) = runcmd('git describe --tags --always', shell=True)
if returncode == 0:
return stdout.rstrip()
else:
return "unknown"
except:
return "unknown"
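# --- Illustrative usage below: a hedged sketch, not part of the original module ---
if __name__ == '__main__':
    # Prints the svn revision inside a working copy, falling back to a
    # "git describe" string (or "unknown") otherwise.
    print(get_svn_revision())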
|
ATNF/askapsdp
|
Tools/Dev/rbuild/askapdev/rbuild/utils/get_svn_revision.py
|
Python
|
gpl-2.0
| 1,823 | 0.002743 |
import os
import rospy, rospkg
import sys
import math
import yaml
from itertools import izip_longest
from operator import add, sub
from qt_gui.plugin import Plugin
from python_qt_binding import loadUi
from python_qt_binding.QtWidgets import QWidget
from PyQt5 import QtGui, QtWidgets, QtCore
from rqt_plot.rosplot import ROSData, RosPlotException
from std_msgs.msg import Float32
from last_letter_msgs.msg import RefCommands
class DashboardGrid(QtWidgets.QWidget):
def __init__(self):
super(DashboardGrid, self).__init__()
self.setFixedSize(900, 900)
self.setWindowTitle('UAV Dashboard')
self.setAutoFillBackground(True)
prefix = '{}'.format(rospy.get_namespace())
rospack = rospkg.RosPack()
filename = rospack.get_path('last_letter') + '/data/parameters/aircraft' + rospy.get_namespace() + 'dashboard.yaml'
data = yaml.load(open(filename).read())
gauges = []
self.line = QtWidgets.QHBoxLayout()
self.commands = RefCommands()
initPos = rospy.get_param('init/position',[0])
self.commands.altitude = -initPos[-1]
initVa = rospy.get_param('init/velLin',[0,0,0])
self.commands.airspeed = math.sqrt(initVa[0]*initVa[0] + initVa[1]*initVa[1] + initVa[2]*initVa[2])
self.pub = rospy.Publisher('refCommands', RefCommands, queue_size=1)
self.pubTimer = QtCore.QTimer()
self.pubTimer.timeout.connect(self.publishCommands)
self.pubTimer.start(1000)
for name in sorted(data.keys()): #sort based on name
values = data[name]
# print 'Adding: {}'.format(name)
values['topic'] = prefix + values['topic']
values['warning'] = zip(*[iter(values['warning'])]*2)
values['danger'] = zip(*[iter(values['danger'])]*2)
gauges.append(GaugeSimple(**values))
gauges[-1].marker_set.connect(self.onClick)
grouped_gauges = list(izip_longest(*(iter(gauges),)*3))
for i in xrange(len(grouped_gauges)):
setattr(self, 'column{}'.format(i), QtWidgets.QVBoxLayout())
curr_column = getattr(self, 'column{}'.format(i))
for g in grouped_gauges[i]:
if g is not None:
curr_column.addWidget(g)
# curr_column.append(g.topic)
self.line.addLayout(curr_column)
self.setLayout(self.line)
def onClick(self,comList):
member = comList[0]
value = comList[1]
if member == 'Roll':
self.commands.euler.x = value*math.pi/180
if member == 'Yaw':
self.commands.euler.z = value*math.pi/180
if member == 'Pitch':
self.commands.euler.y = value*math.pi/180
elif member == 'Airspeed':
self.commands.airspeed = value
elif member == 'Geometric Altitude':
self.commands.altitude = value
def publishCommands(self):
self.commands.header.stamp = rospy.Time.now()
self.pub.publish(self.commands)
self.pubTimer.start(1000)
#By Fadi
class GaugeSimple(QtWidgets.QWidget):
    ''' Gauge pointer movement:
        the pointer rotates clockwise as the value goes from minimum to maximum,
        and the maximum value must be greater than the minimum value.
    '''
marker_set = QtCore.pyqtSignal(list)
def __init__(self, topic='/HUD', length=300.0, end_angle=300.0, min=0.0, max=100.0, main_points=11,
warning=[], danger=[], multiplier='', units='', description=''):
super(GaugeSimple, self).__init__()
self.setFixedSize(300, 300)
self.setWindowTitle('A Magnificent Gauge')
self.setAutoFillBackground(True)
self.redraw_interval = 40
self.update_plot_timer = QtCore.QTimer()
self.update_plot_timer.timeout.connect(self.update_plot)
self._start_time = rospy.get_time()
self._rosdata = ROSData(topic, self._start_time)
self.min = min
self.curr_value = min
self.value_uncap = 0
self.max = max
self.length = length
self.main_points = main_points
self.start_angle = (end_angle + length) % 360
self.end_angle = end_angle % 360
self.is_circle = self.start_angle == self.end_angle
self.gauge_ticks = []
self.bounding_rect = QtCore.QRectF(25.0, 25.0, 250.0, 250.0)
self.center = QtCore.QPointF(150.0, 150.0)
self.warning = warning #zones
self.danger = danger #zones
self.center_radius = 5.0
self.margin = 12
self.units = units
self.multiplier = multiplier
self.description = description
self.update_plot_timer.start(self.redraw_interval)
#Various ui colors
self.marker_tick_color = QtGui.QPen(QtGui.QColor('#FF9900'), 1.8)
self.ui_color = QtGui.QPen(QtCore.Qt.green, 2.5)
self.ui_color_tick = QtGui.QPen(QtCore.Qt.green, 1.5)
self.gauge_color = QtGui.QPen(QtCore.Qt.lightGray, 2)
self.warning_color = QtGui.QPen(QtCore.Qt.yellow, 2)
self.warning_bg = QtGui.QRadialGradient(self.center, self.width()/3)
self.warning_bg.setColorAt(0.0, QtCore.Qt.yellow)
self.warning_bg.setColorAt(1.0, QtCore.Qt.black)
self.danger_color = QtGui.QPen(QtCore.Qt.red, 2)
self.danger_bg = QtGui.QRadialGradient(self.center, self.width()/3)
self.danger_bg.setColorAt(0.0, QtCore.Qt.red)
self.danger_bg.setColorAt(1.0, QtCore.Qt.black)
self.current_bg = QtCore.Qt.black
self.create_gauge()
def detect_safe_zones(self):
r = [(self.min, self.max)]
unsafe = sorted(self.warning+self.danger, key=lambda i:i[0])
for i in unsafe:
temp = []
for y in r:
if i[0] > y[1] or i[1] < y[0]:
temp.append(y)
elif i[0]==y[0] and i[1]==y[1]:
continue
elif i[0]>y[0] and i[1]<y[1]:
temp.append((y[0], i[0]))
temp.append((i[1], y[1]))
elif i[0]>y[0] and i[1]==y[1]:
                    temp.append((y[0], i[0]))
elif i[0]==y[0] and i[1]<y[1]:
temp.append((i[1], y[1]))
r = temp
return r
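    # Illustrative example (assumed values): with min=0, max=100, warning=[(20, 30)]
    # and danger=[(80, 100)], the unsafe intervals are cut out of (0, 100), leaving
    # the safe zones [(0, 20), (30, 80)].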
def create_gauge(self):
def text_width(text):
font = QtGui.QFont()
metrics = QtGui.QFontMetrics(font)
return metrics.width(text)
#Main points
divisor = self.main_points
if self.start_angle != self.end_angle:
divisor -= 1
angle_step = self.length/divisor
value_step = abs(self.max-self.min)/divisor
#Gauge main line(the circular path)
#Safe zones
zones = map(self.val2deg_tuple, self.detect_safe_zones())
self.gauge_safe = []
for zone in zones:
path = QtGui.QPainterPath()
path.arcMoveTo(self.bounding_rect, self.start_angle-zone[0])
path.arcTo(self.bounding_rect, self.start_angle-zone[0], -(zone[1]-zone[0]))
self.gauge_safe.append(path)
#Warning zones
warning_zones = map(self.val2deg_tuple, self.warning)
self.gauge_warning = []
for w in warning_zones:
path = QtGui.QPainterPath()
path.arcMoveTo(self.bounding_rect, self.start_angle-w[0])
path.arcTo(self.bounding_rect, self.start_angle-w[0], -(w[1]-w[0]))
self.gauge_warning.append(path)
#Danger zones
danger_zones = map(self.val2deg_tuple, self.danger)
self.gauge_danger = []
for d in danger_zones:
path = QtGui.QPainterPath()
path.arcMoveTo(self.bounding_rect, self.start_angle-d[0])
path.arcTo(self.bounding_rect, self.start_angle-d[0], -(d[1]-d[0]))
self.gauge_danger.append(path)
#Initial gauge position
self.set_gauge(self.curr_value)
for i in xrange(self.main_points):
#Find the point on the curve
angle = self.start_angle -i*angle_step
value = self.min + i*value_step
p = QtGui.QPainterPath()
p.arcMoveTo(self.bounding_rect, angle)
x, y = p.currentPosition().x(), p.currentPosition().y()
x_new = x*0.9 + self.center.x()*0.1
y_new = y*0.9 + self.center.y()*0.1
x_text = x*0.8 + self.center.x()*0.2 - (text_width(str(round(value, 1)))-10)/2
y_text = y*0.8 + self.center.y()*0.2 + 4
tick_path = QtGui.QPainterPath()
tick_path.moveTo(x_new, y_new)
tick_path.lineTo(x, y)
#Store the tick_length for the marker area
self.gauge_ticks.append([QtCore.QPointF(x_text, y_text), value, tick_path])
#Store the tick_length for the marker area
self.tick_length = math.sqrt((x-x_new)**2+(y-y_new)**2)
def val2deg(self, value):
return self.length*((value-self.min)/abs(self.max-self.min))
def val2deg_tuple(self, t):
return map(self.val2deg, t)
def deg2val(self, degrees):
#Convert the given degress relative to the start_angle to the respective value
return abs(self.max-self.min)*(degrees/self.length)+self.min
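    # Illustrative round trip (assumed values): with min=0, max=100 and length=300
    # degrees, val2deg(50) maps the mid-scale value to 150 degrees along the arc,
    # and deg2val(150) maps those 150 degrees back to the value 50.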
def mouseReleaseEvent(self, e):
self.mouseMoveEvent(e)
def mouseMoveEvent(self, e):
        #marker_line and marker_value don't exist before the first call of this function
click_pos = e.posF()
x_coeff = (click_pos.x() - self.center.x())**2
y_coeff = (click_pos.y() - self.center.y())**2
dist = math.sqrt(x_coeff + y_coeff)
w = self.bounding_rect.width()/2
if w - self.tick_length <= dist <= w:
#Find the angle between the start angle and the click point
angle = self.angle_from_zero(self.center, click_pos, self.start_angle)
#Return if the user clicked outside of the allowed range
if self.deg2val(angle) > self.max or self.deg2val(angle) < self.min:
return
self.set_marker(self.deg2val(angle))
def angle_from_zero(self, p1, p2, offset):
angle = math.degrees(math.atan2(p1.y()-p2.y(), p2.x()-p1.x()))
if angle < 0:
angle += 360
angle = offset - angle
if angle < 0:
angle += 360
return angle
def set_marker(self, value):
#New values: marker_point
#Emit the new value
self.marker_value = max(min(value, self.max), self.min)
self.marker_set.emit([self.description, self.marker_value])
#Round it for presentation purposes
self.marker_value = round(self.marker_value, 2)
#Create the marker line
p = QtGui.QPainterPath()
p.arcMoveTo(self.bounding_rect, self.start_angle - self.val2deg(value))
self.marker_point = p.currentPosition()
self.draw_marker(y=3)
def compute_marker_rotation(self):
#Marker_point is already set and ready for use
return self.angle_from_zero(self.center, self.marker_point, 90)
def draw_marker(self, x=0, y=0, size=10):
poly = QtGui.QPolygonF()
poly.append(QtCore.QPointF(x-size, y))
poly.append(QtCore.QPointF(x+size, y))
poly.append(QtCore.QPointF(x+size, y-size))
poly.append(QtCore.QPointF(x, y))
poly.append(QtCore.QPointF(x-size, y-size))
poly.append(QtCore.QPointF(x-size, y))
self.marker_line = QtGui.QPainterPath()
self.marker_line.addPolygon(poly)
self.update()
def update_plot(self):
try:
dump, value = self._rosdata.next()
if value:
self.value_uncap = round(value.pop(),1)
#print value
if len(value)>0:
self.set_gauge(self.value_uncap)
self.update_plot_timer.start(self.redraw_interval)
except RosPlotException as e:
QtCore.qWarning('PlotWidget.update_plot(): error in rosplot: %s' % e)
def set_gauge(self, value):
#Clamp between [min, max]
self.curr_value = round(max(min(value, self.max), self.min),1)
p = QtGui.QPainterPath()
p.arcMoveTo(self.bounding_rect, self.start_angle-self.val2deg(self.curr_value))
x, y = p.currentPosition().x(), p.currentPosition().y()
self.gauge_line = QtGui.QPainterPath()
self.gauge_line.moveTo(self.center)
self.gauge_line.lineTo(x, y)
self.update()
def increment_gauge(self, step):
#Clamp between (min, max)
self.curr_value = max(min(self.curr_value + step, self.max), self.min)
p = QtGui.QPainterPath()
p.arcMoveTo(self.bounding_rect, self.start_angle-self.val2deg(self.curr_value))
x, y = p.currentPosition().x(), p.currentPosition().y()
self.gauge_line = QtGui.QPainterPath()
self.gauge_line.moveTo(self.center)
self.gauge_line.lineTo(x, y)
self.update()
def set_bg_color(self):
#Determine the zone that the gauge arrow is inside
#Is it in a warning zone?
for w in self.warning:
if w[0] <= self.curr_value <= w[1]:
self.current_bg = self.warning_bg
return
#Or a danger zone?
for d in self.danger:
if d[0] <= self.curr_value <= d[1]:
self.current_bg = self.danger_bg
return
#Don't be scared, you're safe!
self.current_bg = QtCore.Qt.black
def paintEvent(self, event):
def center_text(text):
rect = painter.boundingRect(self.bounding_rect, QtCore.Qt.AlignHCenter, text)
return rect.width()/2
painter = QtGui.QPainter(self)
painter.setRenderHint(QtGui.QPainter.Antialiasing)
#Draw the background
self.set_bg_color()
painter.fillRect(event.rect(), self.current_bg)
painter.setBrush(QtCore.Qt.transparent)
painter.setPen(self.gauge_color)
painter.drawPath(self.gauge_line)
#Draw the safe zones
painter.setPen(self.ui_color)
for s in self.gauge_safe:
painter.drawPath(s)
#Draw the warning zones
painter.setPen(self.warning_color)
for w in self.gauge_warning:
painter.drawPath(w)
#Draw the danger zones
painter.setPen(self.danger_color)
for d in self.gauge_danger:
painter.drawPath(d)
#Draw the marker line
if getattr(self, 'marker_line', None):
painter.setPen(self.marker_tick_color)
painter.translate(self.marker_point)
painter.rotate(self.compute_marker_rotation())
painter.drawPath(self.marker_line)
painter.resetTransform()
#Draw the center circle
painter.setPen(self.ui_color)
painter.drawEllipse(self.center.x()-self.center_radius/2, self.center.y()-self.center_radius/2, self.center_radius, self.center_radius)
#Draw the paths
painter.setPen(self.ui_color_tick)
for i, path in enumerate(self.gauge_ticks):
if not (self.is_circle and i == (len(self.gauge_ticks) - 1)):
painter.drawText(path[0], str(int(path[1])))
painter.drawPath(path[2])
#Draw the text labels
painter.drawText(QtCore.QPointF(self.center.x()-center_text(str(self.value_uncap)), self.center.y()-40), str(self.value_uncap))
painter.drawText(QtCore.QPointF(self.center.x()-center_text(self.multiplier), self.center.y()+20+self.margin), self.multiplier)
painter.drawText(QtCore.QPointF(self.center.x()-center_text(self.units), self.center.y()+20+self.margin*2), self.units)
painter.drawText(QtCore.QPointF(self.center.x()-center_text(self.description), self.center.y()+20+self.margin*3), self.description)
painter.setPen(self.marker_tick_color)
if getattr(self, 'marker_value', None):
painter.drawText(QtCore.QPointF(self.center.x()-center_text(str(self.marker_value)), self.center.y()-20), str(self.marker_value))
QtWidgets.QWidget.paintEvent(self, event)
class Dashboard(Plugin):
def __init__(self, context):
super(Dashboard, self).__init__(context)
# Give QObjects reasonable names
self.setObjectName('Dashboard')
# Process standalone plugin command-line arguments
from argparse import ArgumentParser
parser = ArgumentParser()
# Add argument(s) to the parser.
parser.add_argument("-q", "--quiet", action="store_true",
dest="quiet", help="Put plugin in silent mode")
args, unknowns = parser.parse_known_args(context.argv())
if not args.quiet:
print 'arguments: ', args
print 'unknowns: ', unknowns
rospy.sleep(2.)
self._layout = DashboardGrid()
# Get path to UI file which is a sibling of this file
# in this example the .ui and .py file are in the same folder
##ui_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'MyPlugin.ui')
# Extend the widget with all attributes and children from UI file
##loadUi(ui_file, self._widget)
# Give QObjects reasonable names
self._layout.setObjectName('MyPluginUi')
# Show _widget.windowTitle on the top-left of each plugin (when
# it's set in _widget). This is useful when you open multiple
# plugins at once. Also, if you open multiple instances of your
# plugin at once, these lines add a number to make it easy to
# tell one pane from another.
if context.serial_number() > 1:
self._layout.setWindowTitle(self._layout.windowTitle() + (' (%d)' % context.serial_number()))
# Add widget to the user interface
context.add_widget(self._layout)
# foo = yaml.load("- Hesperiidae - Papilionidae - Apatelodidae - Epiplemidae")
# print foo
def shutdown_plugin(self):
# TODO unregister all publishers here
pass
def save_settings(self, plugin_settings, instance_settings):
# TODO save intrinsic configuration, usually using:
# instance_settings.set_value(k, v)
pass
def restore_settings(self, plugin_settings, instance_settings):
# TODO restore intrinsic configuration, usually using:
# v = instance_settings.value(k)
pass
#def trigger_configuration(self):
# Comment in to signal that the plugin has a way to configure
# This will enable a setting button (gear icon) in each dock widget title bar
# Usually used to open a modal configuration dialog
|
Georacer/last_letter
|
rqt_dashboard/src/rqt_dashboard/dashboard.py
|
Python
|
gpl-3.0
| 16,075 | 0.029425 |
import ctypes
class C_struct:
"""Decorator to convert the given class into a C struct."""
# contains a dict of all known translatable types
types = ctypes.__dict__
@classmethod
def register_type(cls, typename, obj):
"""Adds the new class to the dict of understood types."""
cls.types[typename] = obj
def __call__(self, cls):
"""Converts the given class into a C struct.
Usage:
>>> @C_struct()
... class Account:
... first_name = "c_char_p"
... last_name = "c_char_p"
... balance = "c_float"
...
>>> a = Account()
>>> a
<cstruct.Account object at 0xb7c0ee84>
A very important note: while it *is* possible to
instantiate these classes as follows:
>>> a = Account("Geremy", "Condra", 0.42)
This is strongly discouraged, because there is at
present no way to ensure what order the field names
will be read in.
"""
# build the field mapping (names -> types)
fields = []
for k, v in vars(cls).items():
# don't wrap private variables
if not k.startswith("_"):
# if its a pointer
if v.startswith("*"):
field_type = ctypes.POINTER(self.types[v[1:]])
else:
field_type = self.types[v]
new_field = (k, field_type)
fields.append(new_field)
# make our bases tuple
bases = (ctypes.Structure,) + tuple((base for base in cls.__bases__))
# finish up our wrapping dict
class_attrs = {"_fields_": fields, "__doc__": cls.__doc__}
# now create our class
return type(cls.__name__, bases, class_attrs)
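# Illustrative usage sketch, not part of the original recipe; the Point and
# Segment classes below are invented purely for demonstration:
#
#   @C_struct()
#   class Point:
#       x = "c_int"
#       y = "c_int"
#
#   C_struct.register_type("Point", Point)   # make "*Point" resolvable
#
#   @C_struct()
#   class Segment:
#       start = "*Point"   # pointer to the registered Point struct
#       end = "*Point"
#
# Because field order is not guaranteed, prefer keyword construction,
# e.g. Point(x=1, y=2), over positional construction.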
|
ActiveState/code
|
recipes/Python/576734_C_struct_decorator/recipe-576734.py
|
Python
|
mit
| 1,507 | 0.033842 |
#!/usr/bin/env python
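# Note: 'command' and 'testshade' are provided by the OSL testsuite driver
# (runtest.py) that executes this run.py; 'outputs' is read back by the same
# driver after the test command has run.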
command += testshade("-g 256 256 --center -od uint8 -o Cout out.tif test")
outputs = [ "out.txt", "out.tif" ]
|
svenstaro/OpenShadingLanguage
|
testsuite/texture-withderivs/run.py
|
Python
|
bsd-3-clause
| 133 | 0.015038 |
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe.utils import flt
from frappe import msgprint, _
def execute(filters=None):
if not filters: filters = {}
invoice_list = get_invoices(filters)
columns, income_accounts, tax_accounts = get_columns(invoice_list)
if not invoice_list:
msgprint(_("No record found"))
return columns, invoice_list
invoice_income_map = get_invoice_income_map(invoice_list)
invoice_income_map, invoice_tax_map = get_invoice_tax_map(invoice_list,
invoice_income_map, income_accounts)
invoice_so_dn_map = get_invoice_so_dn_map(invoice_list)
customer_map = get_customer_deatils(invoice_list)
data = []
for inv in invoice_list:
# invoice details
sales_order = list(set(invoice_so_dn_map.get(inv.name, {}).get("sales_order", [])))
delivery_note = list(set(invoice_so_dn_map.get(inv.name, {}).get("delivery_note", [])))
row = [inv.name, inv.posting_date, inv.customer, inv.customer_name,
customer_map.get(inv.customer, {}).get("customer_group"),
customer_map.get(inv.customer, {}).get("territory"),
inv.debit_to, inv.project_name, inv.remarks, ", ".join(sales_order), ", ".join(delivery_note)]
# map income values
base_net_total = 0
for income_acc in income_accounts:
income_amount = flt(invoice_income_map.get(inv.name, {}).get(income_acc))
base_net_total += income_amount
row.append(income_amount)
# net total
row.append(base_net_total or inv.base_net_total)
# tax account
total_tax = 0
for tax_acc in tax_accounts:
if tax_acc not in income_accounts:
tax_amount = flt(invoice_tax_map.get(inv.name, {}).get(tax_acc))
total_tax += tax_amount
row.append(tax_amount)
# total tax, grand total, outstanding amount & rounded total
row += [total_tax, inv.base_grand_total, inv.base_rounded_total, inv.outstanding_amount]
data.append(row)
return columns, data
def get_columns(invoice_list):
"""return columns based on filters"""
columns = [
_("Invoice") + ":Link/Sales Invoice:120", _("Posting Date") + ":Date:80", _("Customer Id") + "::120",
_("Customer Name") + "::120", _("Customer Group") + ":Link/Customer Group:120", _("Territory") + ":Link/Territory:80",
_("Receivable Account") + ":Link/Account:120", _("Project") +":Link/Project:80", _("Remarks") + "::150",
_("Sales Order") + ":Link/Sales Order:100", _("Delivery Note") + ":Link/Delivery Note:100"
]
income_accounts = tax_accounts = income_columns = tax_columns = []
if invoice_list:
income_accounts = frappe.db.sql_list("""select distinct income_account
from `tabSales Invoice Item` where docstatus = 1 and parent in (%s)
order by income_account""" %
', '.join(['%s']*len(invoice_list)), tuple([inv.name for inv in invoice_list]))
tax_accounts = frappe.db.sql_list("""select distinct account_head
from `tabSales Taxes and Charges` where parenttype = 'Sales Invoice'
and docstatus = 1 and ifnull(base_tax_amount_after_discount_amount, 0) != 0
and parent in (%s) order by account_head""" %
', '.join(['%s']*len(invoice_list)), tuple([inv.name for inv in invoice_list]))
income_columns = [(account + ":Currency:120") for account in income_accounts]
for account in tax_accounts:
if account not in income_accounts:
tax_columns.append(account + ":Currency:120")
columns = columns + income_columns + [_("Net Total") + ":Currency:120"] + tax_columns + \
[_("Total Tax") + ":Currency:120", _("Grand Total") + ":Currency:120",
_("Rounded Total") + ":Currency:120", _("Outstanding Amount") + ":Currency:120"]
return columns, income_accounts, tax_accounts
def get_conditions(filters):
conditions = ""
if filters.get("company"): conditions += " and company=%(company)s"
if filters.get("customer"): conditions += " and customer = %(customer)s"
if filters.get("from_date"): conditions += " and posting_date >= %(from_date)s"
if filters.get("to_date"): conditions += " and posting_date <= %(to_date)s"
return conditions
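# Illustrative example (hypothetical filter values): for
# filters = {"company": "Wind Power LLC", "from_date": "2015-01-01"}
# get_conditions returns " and company=%(company)s and posting_date >= %(from_date)s",
# which get_invoices appends to its WHERE clause and renders with the same
# filters dict as query parameters.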
def get_invoices(filters):
conditions = get_conditions(filters)
return frappe.db.sql("""select name, posting_date, debit_to, project_name, customer,
customer_name, remarks, base_net_total, base_grand_total, base_rounded_total, outstanding_amount
from `tabSales Invoice`
where docstatus = 1 %s order by posting_date desc, name desc""" %
conditions, filters, as_dict=1)
def get_invoice_income_map(invoice_list):
income_details = frappe.db.sql("""select parent, income_account, sum(base_net_amount) as amount
from `tabSales Invoice Item` where parent in (%s) group by parent, income_account""" %
', '.join(['%s']*len(invoice_list)), tuple([inv.name for inv in invoice_list]), as_dict=1)
invoice_income_map = {}
for d in income_details:
invoice_income_map.setdefault(d.parent, frappe._dict()).setdefault(d.income_account, [])
invoice_income_map[d.parent][d.income_account] = flt(d.amount)
return invoice_income_map
def get_invoice_tax_map(invoice_list, invoice_income_map, income_accounts):
tax_details = frappe.db.sql("""select parent, account_head,
sum(base_tax_amount_after_discount_amount) as tax_amount
from `tabSales Taxes and Charges` where parent in (%s) group by parent, account_head""" %
', '.join(['%s']*len(invoice_list)), tuple([inv.name for inv in invoice_list]), as_dict=1)
invoice_tax_map = {}
for d in tax_details:
if d.account_head in income_accounts:
if invoice_income_map[d.parent].has_key(d.account_head):
invoice_income_map[d.parent][d.account_head] += flt(d.tax_amount)
else:
invoice_income_map[d.parent][d.account_head] = flt(d.tax_amount)
else:
invoice_tax_map.setdefault(d.parent, frappe._dict()).setdefault(d.account_head, [])
invoice_tax_map[d.parent][d.account_head] = flt(d.tax_amount)
return invoice_income_map, invoice_tax_map
def get_invoice_so_dn_map(invoice_list):
si_items = frappe.db.sql("""select parent, sales_order, delivery_note, so_detail
from `tabSales Invoice Item` where parent in (%s)
and (ifnull(sales_order, '') != '' or ifnull(delivery_note, '') != '')""" %
', '.join(['%s']*len(invoice_list)), tuple([inv.name for inv in invoice_list]), as_dict=1)
invoice_so_dn_map = {}
for d in si_items:
if d.sales_order:
invoice_so_dn_map.setdefault(d.parent, frappe._dict()).setdefault(
"sales_order", []).append(d.sales_order)
delivery_note_list = None
if d.delivery_note:
delivery_note_list = [d.delivery_note]
elif d.sales_order:
delivery_note_list = frappe.db.sql_list("""select distinct parent from `tabDelivery Note Item`
where docstatus=1 and so_detail=%s""", d.so_detail)
if delivery_note_list:
invoice_so_dn_map.setdefault(d.parent, frappe._dict()).setdefault("delivery_note", delivery_note_list)
return invoice_so_dn_map
def get_customer_deatils(invoice_list):
customer_map = {}
customers = list(set([inv.customer for inv in invoice_list]))
for cust in frappe.db.sql("""select name, territory, customer_group from `tabCustomer`
where name in (%s)""" % ", ".join(["%s"]*len(customers)), tuple(customers), as_dict=1):
customer_map.setdefault(cust.name, cust)
return customer_map
|
ThiagoGarciaAlves/erpnext
|
erpnext/accounts/report/sales_register/sales_register.py
|
Python
|
agpl-3.0
| 7,220 | 0.024931 |
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for object_detection.meta_architectures.faster_rcnn_meta_arch."""
from absl.testing import parameterized
import numpy as np
import tensorflow as tf
from object_detection.meta_architectures import faster_rcnn_meta_arch_test_lib
class FasterRCNNMetaArchTest(
faster_rcnn_meta_arch_test_lib.FasterRCNNMetaArchTestBase,
parameterized.TestCase):
def test_postprocess_second_stage_only_inference_mode_with_masks(self):
model = self._build_model(
is_training=False, number_of_stages=2, second_stage_batch_size=6)
batch_size = 2
total_num_padded_proposals = batch_size * model.max_num_proposals
proposal_boxes = tf.constant(
[[[1, 1, 2, 3],
[0, 0, 1, 1],
[.5, .5, .6, .6],
4*[0], 4*[0], 4*[0], 4*[0], 4*[0]],
[[2, 3, 6, 8],
[1, 2, 5, 3],
4*[0], 4*[0], 4*[0], 4*[0], 4*[0], 4*[0]]], dtype=tf.float32)
num_proposals = tf.constant([3, 2], dtype=tf.int32)
refined_box_encodings = tf.zeros(
[total_num_padded_proposals, model.num_classes, 4], dtype=tf.float32)
class_predictions_with_background = tf.ones(
[total_num_padded_proposals, model.num_classes+1], dtype=tf.float32)
image_shape = tf.constant([batch_size, 36, 48, 3], dtype=tf.int32)
mask_height = 2
mask_width = 2
mask_predictions = 30. * tf.ones(
[total_num_padded_proposals, model.num_classes,
mask_height, mask_width], dtype=tf.float32)
exp_detection_masks = np.array([[[[1, 1], [1, 1]],
[[1, 1], [1, 1]],
[[1, 1], [1, 1]],
[[1, 1], [1, 1]],
[[1, 1], [1, 1]]],
[[[1, 1], [1, 1]],
[[1, 1], [1, 1]],
[[1, 1], [1, 1]],
[[1, 1], [1, 1]],
[[0, 0], [0, 0]]]])
_, true_image_shapes = model.preprocess(tf.zeros(image_shape))
detections = model.postprocess({
'refined_box_encodings': refined_box_encodings,
'class_predictions_with_background': class_predictions_with_background,
'num_proposals': num_proposals,
'proposal_boxes': proposal_boxes,
'image_shape': image_shape,
'mask_predictions': mask_predictions
}, true_image_shapes)
with self.test_session() as sess:
detections_out = sess.run(detections)
self.assertAllEqual(detections_out['detection_boxes'].shape, [2, 5, 4])
self.assertAllClose(detections_out['detection_scores'],
[[1, 1, 1, 1, 1], [1, 1, 1, 1, 0]])
self.assertAllClose(detections_out['detection_classes'],
[[0, 0, 0, 1, 1], [0, 0, 1, 1, 0]])
self.assertAllClose(detections_out['num_detections'], [5, 4])
self.assertAllClose(detections_out['detection_masks'],
exp_detection_masks)
self.assertTrue(np.amax(detections_out['detection_masks'] <= 1.0))
self.assertTrue(np.amin(detections_out['detection_masks'] >= 0.0))
def test_postprocess_second_stage_only_inference_mode_with_shared_boxes(self):
model = self._build_model(
is_training=False, number_of_stages=2, second_stage_batch_size=6)
batch_size = 2
total_num_padded_proposals = batch_size * model.max_num_proposals
proposal_boxes = tf.constant(
[[[1, 1, 2, 3],
[0, 0, 1, 1],
[.5, .5, .6, .6],
4*[0], 4*[0], 4*[0], 4*[0], 4*[0]],
[[2, 3, 6, 8],
[1, 2, 5, 3],
4*[0], 4*[0], 4*[0], 4*[0], 4*[0], 4*[0]]], dtype=tf.float32)
num_proposals = tf.constant([3, 2], dtype=tf.int32)
# This has 1 box instead of one for each class.
refined_box_encodings = tf.zeros(
[total_num_padded_proposals, 1, 4], dtype=tf.float32)
class_predictions_with_background = tf.ones(
[total_num_padded_proposals, model.num_classes+1], dtype=tf.float32)
image_shape = tf.constant([batch_size, 36, 48, 3], dtype=tf.int32)
_, true_image_shapes = model.preprocess(tf.zeros(image_shape))
detections = model.postprocess({
'refined_box_encodings': refined_box_encodings,
'class_predictions_with_background': class_predictions_with_background,
'num_proposals': num_proposals,
'proposal_boxes': proposal_boxes,
'image_shape': image_shape,
}, true_image_shapes)
with self.test_session() as sess:
detections_out = sess.run(detections)
self.assertAllEqual(detections_out['detection_boxes'].shape, [2, 5, 4])
self.assertAllClose(detections_out['detection_scores'],
[[1, 1, 1, 1, 1], [1, 1, 1, 1, 0]])
self.assertAllClose(detections_out['detection_classes'],
[[0, 0, 0, 1, 1], [0, 0, 1, 1, 0]])
self.assertAllClose(detections_out['num_detections'], [5, 4])
@parameterized.parameters(
{'masks_are_class_agnostic': False},
{'masks_are_class_agnostic': True},
)
def test_predict_correct_shapes_in_inference_mode_three_stages_with_masks(
self, masks_are_class_agnostic):
batch_size = 2
image_size = 10
max_num_proposals = 8
initial_crop_size = 3
maxpool_stride = 1
input_shapes = [(batch_size, image_size, image_size, 3),
(None, image_size, image_size, 3),
(batch_size, None, None, 3),
(None, None, None, 3)]
expected_num_anchors = image_size * image_size * 3 * 3
expected_shapes = {
'rpn_box_predictor_features':
(2, image_size, image_size, 512),
'rpn_features_to_crop': (2, image_size, image_size, 3),
'image_shape': (4,),
'rpn_box_encodings': (2, expected_num_anchors, 4),
'rpn_objectness_predictions_with_background':
(2, expected_num_anchors, 2),
'anchors': (expected_num_anchors, 4),
'refined_box_encodings': (2 * max_num_proposals, 2, 4),
'class_predictions_with_background': (2 * max_num_proposals, 2 + 1),
'num_proposals': (2,),
'proposal_boxes': (2, max_num_proposals, 4),
'proposal_boxes_normalized': (2, max_num_proposals, 4),
'box_classifier_features':
self._get_box_classifier_features_shape(image_size,
batch_size,
max_num_proposals,
initial_crop_size,
maxpool_stride,
3)
}
for input_shape in input_shapes:
test_graph = tf.Graph()
with test_graph.as_default():
model = self._build_model(
is_training=False,
number_of_stages=3,
second_stage_batch_size=2,
predict_masks=True,
masks_are_class_agnostic=masks_are_class_agnostic)
preprocessed_inputs = tf.placeholder(tf.float32, shape=input_shape)
_, true_image_shapes = model.preprocess(preprocessed_inputs)
result_tensor_dict = model.predict(preprocessed_inputs,
true_image_shapes)
init_op = tf.global_variables_initializer()
with self.test_session(graph=test_graph) as sess:
sess.run(init_op)
tensor_dict_out = sess.run(result_tensor_dict, feed_dict={
preprocessed_inputs:
np.zeros((batch_size, image_size, image_size, 3))})
self.assertEqual(
set(tensor_dict_out.keys()),
set(expected_shapes.keys()).union(
set([
'detection_boxes', 'detection_scores', 'detection_classes',
'detection_masks', 'num_detections', 'mask_predictions',
])))
for key in expected_shapes:
self.assertAllEqual(tensor_dict_out[key].shape, expected_shapes[key])
self.assertAllEqual(tensor_dict_out['detection_boxes'].shape, [2, 5, 4])
self.assertAllEqual(tensor_dict_out['detection_masks'].shape,
[2, 5, 14, 14])
self.assertAllEqual(tensor_dict_out['detection_classes'].shape, [2, 5])
self.assertAllEqual(tensor_dict_out['detection_scores'].shape, [2, 5])
self.assertAllEqual(tensor_dict_out['num_detections'].shape, [2])
num_classes = 1 if masks_are_class_agnostic else 2
self.assertAllEqual(tensor_dict_out['mask_predictions'].shape,
[10, num_classes, 14, 14])
@parameterized.parameters(
{'masks_are_class_agnostic': False},
{'masks_are_class_agnostic': True},
)
def test_predict_gives_correct_shapes_in_train_mode_both_stages_with_masks(
self, masks_are_class_agnostic):
test_graph = tf.Graph()
with test_graph.as_default():
model = self._build_model(
is_training=True,
number_of_stages=3,
second_stage_batch_size=7,
predict_masks=True,
masks_are_class_agnostic=masks_are_class_agnostic)
batch_size = 2
image_size = 10
max_num_proposals = 7
initial_crop_size = 3
maxpool_stride = 1
image_shape = (batch_size, image_size, image_size, 3)
preprocessed_inputs = tf.zeros(image_shape, dtype=tf.float32)
groundtruth_boxes_list = [
tf.constant([[0, 0, .5, .5], [.5, .5, 1, 1]], dtype=tf.float32),
tf.constant([[0, .5, .5, 1], [.5, 0, 1, .5]], dtype=tf.float32)
]
groundtruth_classes_list = [
tf.constant([[1, 0], [0, 1]], dtype=tf.float32),
tf.constant([[1, 0], [1, 0]], dtype=tf.float32)
]
groundtruth_weights_list = [
tf.constant([1, 1], dtype=tf.float32),
tf.constant([1, 1], dtype=tf.float32)]
_, true_image_shapes = model.preprocess(tf.zeros(image_shape))
model.provide_groundtruth(
groundtruth_boxes_list,
groundtruth_classes_list,
groundtruth_weights_list=groundtruth_weights_list)
result_tensor_dict = model.predict(preprocessed_inputs, true_image_shapes)
mask_shape_1 = 1 if masks_are_class_agnostic else model._num_classes
expected_shapes = {
'rpn_box_predictor_features': (2, image_size, image_size, 512),
'rpn_features_to_crop': (2, image_size, image_size, 3),
'image_shape': (4,),
'refined_box_encodings': (2 * max_num_proposals, 2, 4),
'class_predictions_with_background': (2 * max_num_proposals, 2 + 1),
'num_proposals': (2,),
'proposal_boxes': (2, max_num_proposals, 4),
'proposal_boxes_normalized': (2, max_num_proposals, 4),
'box_classifier_features':
self._get_box_classifier_features_shape(
image_size, batch_size, max_num_proposals, initial_crop_size,
maxpool_stride, 3),
'mask_predictions': (2 * max_num_proposals, mask_shape_1, 14, 14)
}
init_op = tf.global_variables_initializer()
with self.test_session(graph=test_graph) as sess:
sess.run(init_op)
tensor_dict_out = sess.run(result_tensor_dict)
self.assertEqual(
set(tensor_dict_out.keys()),
set(expected_shapes.keys()).union(
set([
'rpn_box_encodings',
'rpn_objectness_predictions_with_background',
'anchors',
])))
for key in expected_shapes:
self.assertAllEqual(tensor_dict_out[key].shape, expected_shapes[key])
anchors_shape_out = tensor_dict_out['anchors'].shape
self.assertEqual(2, len(anchors_shape_out))
self.assertEqual(4, anchors_shape_out[1])
num_anchors_out = anchors_shape_out[0]
self.assertAllEqual(tensor_dict_out['rpn_box_encodings'].shape,
(2, num_anchors_out, 4))
self.assertAllEqual(
tensor_dict_out['rpn_objectness_predictions_with_background'].shape,
(2, num_anchors_out, 2))
def test_postprocess_third_stage_only_inference_mode(self):
num_proposals_shapes = [(2), (None)]
refined_box_encodings_shapes = [(16, 2, 4), (None, 2, 4)]
class_predictions_with_background_shapes = [(16, 3), (None, 3)]
proposal_boxes_shapes = [(2, 8, 4), (None, 8, 4)]
batch_size = 2
image_shape = np.array((2, 36, 48, 3), dtype=np.int32)
for (num_proposals_shape, refined_box_encoding_shape,
class_predictions_with_background_shape,
proposal_boxes_shape) in zip(num_proposals_shapes,
refined_box_encodings_shapes,
class_predictions_with_background_shapes,
proposal_boxes_shapes):
tf_graph = tf.Graph()
with tf_graph.as_default():
model = self._build_model(
is_training=False, number_of_stages=3,
second_stage_batch_size=6, predict_masks=True)
total_num_padded_proposals = batch_size * model.max_num_proposals
proposal_boxes = np.array(
[[[1, 1, 2, 3],
[0, 0, 1, 1],
[.5, .5, .6, .6],
4*[0], 4*[0], 4*[0], 4*[0], 4*[0]],
[[2, 3, 6, 8],
[1, 2, 5, 3],
4*[0], 4*[0], 4*[0], 4*[0], 4*[0], 4*[0]]])
num_proposals = np.array([3, 2], dtype=np.int32)
refined_box_encodings = np.zeros(
[total_num_padded_proposals, model.num_classes, 4])
class_predictions_with_background = np.ones(
[total_num_padded_proposals, model.num_classes+1])
num_proposals_placeholder = tf.placeholder(tf.int32,
shape=num_proposals_shape)
refined_box_encodings_placeholder = tf.placeholder(
tf.float32, shape=refined_box_encoding_shape)
class_predictions_with_background_placeholder = tf.placeholder(
tf.float32, shape=class_predictions_with_background_shape)
proposal_boxes_placeholder = tf.placeholder(
tf.float32, shape=proposal_boxes_shape)
image_shape_placeholder = tf.placeholder(tf.int32, shape=(4))
_, true_image_shapes = model.preprocess(
tf.zeros(image_shape_placeholder))
detections = model.postprocess({
'refined_box_encodings': refined_box_encodings_placeholder,
'class_predictions_with_background':
class_predictions_with_background_placeholder,
'num_proposals': num_proposals_placeholder,
'proposal_boxes': proposal_boxes_placeholder,
'image_shape': image_shape_placeholder,
'detection_boxes': tf.zeros([2, 5, 4]),
'detection_masks': tf.zeros([2, 5, 14, 14]),
'detection_scores': tf.zeros([2, 5]),
'detection_classes': tf.zeros([2, 5]),
'num_detections': tf.zeros([2]),
}, true_image_shapes)
with self.test_session(graph=tf_graph) as sess:
detections_out = sess.run(
detections,
feed_dict={
refined_box_encodings_placeholder: refined_box_encodings,
class_predictions_with_background_placeholder:
class_predictions_with_background,
num_proposals_placeholder: num_proposals,
proposal_boxes_placeholder: proposal_boxes,
image_shape_placeholder: image_shape
})
self.assertAllEqual(detections_out['detection_boxes'].shape, [2, 5, 4])
self.assertAllEqual(detections_out['detection_masks'].shape,
[2, 5, 14, 14])
self.assertAllClose(detections_out['detection_scores'].shape, [2, 5])
self.assertAllClose(detections_out['detection_classes'].shape, [2, 5])
self.assertAllClose(detections_out['num_detections'].shape, [2])
self.assertTrue(np.amax(detections_out['detection_masks'] <= 1.0))
self.assertTrue(np.amin(detections_out['detection_masks'] >= 0.0))
def _get_box_classifier_features_shape(self,
image_size,
batch_size,
max_num_proposals,
initial_crop_size,
maxpool_stride,
num_features):
return (batch_size * max_num_proposals,
initial_crop_size/maxpool_stride,
initial_crop_size/maxpool_stride,
num_features)
if __name__ == '__main__':
tf.test.main()
|
cshallue/models
|
research/object_detection/meta_architectures/faster_rcnn_meta_arch_test.py
|
Python
|
apache-2.0
| 17,423 | 0.004018 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models
class SaleOrderCancel(models.TransientModel):
_inherit = 'sale.order.cancel'
display_delivery_alert = fields.Boolean('Delivery Alert', compute='_compute_display_delivery_alert')
@api.depends('order_id')
def _compute_display_delivery_alert(self):
for wizard in self:
wizard.display_delivery_alert = bool(any(picking.state == 'done' for picking in wizard.order_id.picking_ids))
|
ygol/odoo
|
addons/sale_stock/wizard/sale_order_cancel.py
|
Python
|
agpl-3.0
| 553 | 0.003617 |
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""## Constant Value Tensors
TensorFlow provides several operations that you can use to generate constants.
@@zeros
@@zeros_like
@@ones
@@ones_like
@@fill
@@constant
## Sequences
@@linspace
@@range
## Random Tensors
TensorFlow has several ops that create random tensors with different
distributions. The random ops are stateful, and create new random values each
time they are evaluated.
The `seed` keyword argument in these functions acts in conjunction with
the graph-level random seed. Changing either the graph-level seed using
[`set_random_seed`](../../api_docs/python/constant_op.md#set_random_seed) or the
op-level seed will change the underlying seed of these operations. Setting
neither a graph-level nor an op-level seed results in a random seed for all
operations.
See [`set_random_seed`](../../api_docs/python/constant_op.md#set_random_seed)
for details on the interaction between operation-level and graph-level random
seeds.
### Examples:
```python
# Create a tensor of shape [2, 3] consisting of random normal values, with mean
# -1 and standard deviation 4.
norm = tf.random_normal([2, 3], mean=-1, stddev=4)
# Shuffle the first dimension of a tensor
c = tf.constant([[1, 2], [3, 4], [5, 6]])
shuff = tf.random_shuffle(c)
# Each time we run these ops, different results are generated
sess = tf.Session()
print(sess.run(norm))
print(sess.run(norm))
# Set an op-level seed to generate repeatable sequences across sessions.
norm = tf.random_normal([2, 3], seed=1234)
sess = tf.Session()
print(sess.run(norm))
print(sess.run(norm))
```
Another common use of random values is the initialization of variables. Also see
the [Variables How To](../../how_tos/variables/index.md).
```python
# Use random uniform values in [0, 1) as the initializer for a variable of shape
# [2, 3]. The default type is float32.
var = tf.Variable(tf.random_uniform([2, 3]), name="var")
init = tf.initialize_all_variables()
sess = tf.Session()
sess.run(init)
print(sess.run(var))
```
@@random_normal
@@truncated_normal
@@random_uniform
@@random_shuffle
@@set_random_seed
"""
# Must be separate from array_ops to avoid a cyclic dependency.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow.python.platform
import numpy as np
from tensorflow.core.framework import attr_value_pb2
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import tensor_util
def constant(value, dtype=None, shape=None, name="Const"):
"""Creates a constant tensor.
The resulting tensor is populated with values of type `dtype`, as
specified by arguments `value` and (optionally) `shape` (see examples
below).
The argument `value` can be a constant value, or a list of values of type
`dtype`. If `value` is a list, then the length of the list must be less
than or equal to the number of elements implied by the `shape` argument (if
specified). In the case where the list length is less than the number of
elements specified by `shape`, the last element in the list will be used
to fill the remaining entries.
The argument `shape` is optional. If present, it specifies the dimensions
of the resulting tensor. If not present, then the tensor is a scalar (0-D)
if `value` is a scalar, or 1-D otherwise.
If the argument `dtype` is not specified, then the type is inferred from
the type of `value`.
For example:
```python
# Constant 1-D Tensor populated with value list.
tensor = tf.constant([1, 2, 3, 4, 5, 6, 7]) => [1 2 3 4 5 6 7]
# Constant 2-D tensor populated with scalar value -1.
tensor = tf.constant(-1.0, shape=[2, 3]) => [[-1. -1. -1.]
[-1. -1. -1.]]
```
Args:
value: A constant value (or list) of output type `dtype`.
dtype: The type of the elements of the resulting tensor.
shape: Optional dimensions of resulting tensor.
name: Optional name for the tensor.
Returns:
A Constant Tensor.
"""
g = ops.get_default_graph()
tensor_value = attr_value_pb2.AttrValue()
tensor_value.tensor.CopyFrom(
tensor_util.make_tensor_proto(value, dtype=dtype, shape=shape))
dtype_value = attr_value_pb2.AttrValue(type=tensor_value.tensor.dtype)
const_tensor = g.create_op(
"Const", [], [dtype_value.type],
attrs={"value": tensor_value, "dtype": dtype_value}, name=name).outputs[0]
return const_tensor
@ops.RegisterShape("Const")
def _ConstantShape(op):
return [tensor_shape.TensorShape(
[d.size for d in op.get_attr("value").tensor_shape.dim])]
def _constant_tensor_conversion_function(v, dtype=None, name=None,
as_ref=False):
_ = as_ref
return constant(v, dtype=dtype, name=name)
ops.register_tensor_conversion_function(
(list, tuple), _constant_tensor_conversion_function, 100)
ops.register_tensor_conversion_function(
np.ndarray, _constant_tensor_conversion_function, 100)
ops.register_tensor_conversion_function(
np.generic, _constant_tensor_conversion_function, 100)
ops.register_tensor_conversion_function(
object, _constant_tensor_conversion_function, 200)
def _tensor_shape_tensor_conversion_function(s, dtype=None, name=None,
as_ref=False):
_ = as_ref
if not s.is_fully_defined():
raise ValueError(
"Cannot convert a partially known TensorShape to a Tensor: %s" % s)
if dtype is not None:
if dtype not in (dtypes.int32, dtypes.int64):
raise TypeError("Cannot convert a TensorShape to dtype: %s" % dtype)
else:
dtype = dtypes.int32
if name is None:
name = "shape_as_tensor"
return constant(s.as_list(), dtype=dtype, name=name)
ops.register_tensor_conversion_function(
tensor_shape.TensorShape, _tensor_shape_tensor_conversion_function, 100)
def _dimension_tensor_conversion_function(d, dtype=None, name=None,
as_ref=False):
_ = as_ref
if d.value is None:
raise ValueError("Cannot convert an unknown Dimension to a Tensor: %s" % d)
if dtype is not None:
if dtype not in (dtypes.int32, dtypes.int64):
raise TypeError("Cannot convert a Dimension to dtype: %s" % dtype)
else:
dtype = dtypes.int32
if name is None:
name = "shape_as_tensor"
return constant(d.value, dtype=dtype, name=name)
ops.register_tensor_conversion_function(
tensor_shape.Dimension, _dimension_tensor_conversion_function, 100)
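# Illustrative sketch, not part of this module: user code can register a
# conversion function for its own types in the same way, e.g.
#
#   class MyWrapper(object):
#       def __init__(self, values):
#           self.values = values
#
#   def _my_wrapper_to_tensor(value, dtype=None, name=None, as_ref=False):
#       return constant(value.values, dtype=dtype, name=name)
#
#   ops.register_tensor_conversion_function(MyWrapper, _my_wrapper_to_tensor, 100)
#
# after which MyWrapper instances are accepted wherever a Tensor is expected.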
|
DeepThoughtTeam/tensorflow
|
tensorflow/python/ops/constant_op.py
|
Python
|
apache-2.0
| 7,338 | 0.004361 |
"""
==========================
PySpecKit ASCII Reader
==========================
Routines for reading in ASCII format spectra. If atpy is not installed,
will use a very simple routine for reading in the data.
.. moduleauthor:: Adam Ginsburg <adam.g.ginsburg@gmail.com>
.. moduleauthor:: Jordan Mirocha <mirochaj@gmail.com>
"""
try:
import atpy
atpyOK = True
except ImportError:
atpyOK = False
from .. import units
import numpy as np
from pyspeckit.specwarnings import warn
import readcol
def open_1d_txt(filename, xaxcol=0, datacol=1, errorcol=2,
text_reader='simple', atpytype='ascii', **kwargs):
"""
Attempt to read a 1D spectrum from a text file assuming wavelength as the
first column, data as the second, and (optionally) error as the third.
Reading can be done either with atpy or a 'simple' reader. If you have an
IPAC, CDS, or formally formatted table, you'll want to use atpy.
If you have a simply formatted file of the form, e.g.
# name name
# unit unit
data data
data data
kwargs are passed to atpy.Table
"""
if text_reader in ('simple','readcol') or not atpyOK:
if not atpyOK:
warn("WARNING: atpy not installed; will use simple reader instead.")
if text_reader == 'simple':
data, error, XAxis, T = simple_txt(filename, xaxcol = xaxcol,
datacol = datacol, errorcol = errorcol, **kwargs)
elif text_reader == 'readcol':
Tlist = readcol.readcol(filename, twod=False, **kwargs)
XAxis = units.SpectroscopicAxis(Tlist[xaxcol])
data = Tlist[datacol]
error = Tlist[errorcol]
T = dummy_class()
Tdict = readcol.readcol(filename, asDict=True, **kwargs)
T.data = dummy_class()
T.data.dtype = dummy_class()
T.data.dtype.names = hdr
T.columns = {}
T.columns[T.data.dtype.names[xaxcol]] = dummy_class()
T.columns[T.data.dtype.names[xaxcol]].unit = colunits[xaxcol]
T.columns[T.data.dtype.names[datacol]] = dummy_class()
T.columns[T.data.dtype.names[datacol]].unit = colunits[datacol]
elif text_reader in ('atpy','asciitable'):
T = atpy.Table(filename, type=atpytype, masked=True, **kwargs)
xarr = T.data[T.data.dtype.names[xaxcol]]
data = T.data[T.data.dtype.names[datacol]]
if len(T.columns) > errorcol:
error = T.data[T.data.dtype.names[errorcol]]
else:
# assume uniform, zero error
error = data*0
if 'xunits' in T.keywords:
xunits = T.keywords['xunits']
else:
xunits = 'unknown'
XAxis = units.SpectroscopicAxis(xarr,xunits)
# Need this in Spectrum class to correctly parse header
T.xaxcol = xaxcol
T.datacol = datacol
return data, error, XAxis, T
def simple_txt(filename, xaxcol=0, datacol=1, errorcol=2, skiplines=0, **kwargs):
"""
Very simple method for reading columns from ASCII file.
"""
f = open(filename, 'r')
hdr = None
colunits = []
coldata = []
for ii, line in enumerate(f):
# Ignore blank lines
if not line.strip():
continue
# Possibly read in header
if line.split()[0][0] == '#':
if (ii) == (0+skiplines):
hdr = line[1:].split()
if (ii) == (1+skiplines):
colunits = line[1:].split()
continue
if ii < skiplines:
continue
coldata.append(line.split())
for j, element in enumerate(coldata[-1]):
try:
coldata[-1][j] = float(element)
except ValueError:
coldata[-1][j] = str(element)
f.close()
coldata = zip(*coldata)
if not colunits:
colunits = ['unknown'] * len(coldata)
if not hdr:
hdr = ['unknown'] * len(coldata)
N = len(hdr)
# Prepare to return data
data = coldata[datacol]
xarr = coldata[xaxcol]
if errorcol > len(coldata) - 1:
error = np.array(data)*0
else:
error = coldata[errorcol]
if len(error) != len(data):
raise ValueError("Data and Error lengths do not match.")
XAxis = units.SpectroscopicAxis(xarr, colunits[xaxcol])
# Create atPy style Table instance
T = dummy_class()
T.data = dummy_class()
T.data.dtype = dummy_class()
T.data.dtype.names = hdr
T.columns = {}
T.columns[T.data.dtype.names[xaxcol]] = dummy_class()
T.columns[T.data.dtype.names[xaxcol]].unit = colunits[xaxcol]
T.columns[T.data.dtype.names[datacol]] = dummy_class()
T.columns[T.data.dtype.names[datacol]].unit = colunits[datacol]
return np.array(data), np.array(error), XAxis, T
class dummy_class:
def __init__(self):
pass
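# Illustrative usage sketch (the file name and column labels are hypothetical):
# given a whitespace-separated file "spectrum.txt" of the form
#   # wavelength flux err
#   # angstroms Jy Jy
#   6540.0 1.2 0.1
#   6541.0 1.3 0.1
# the simple reader can be invoked as
#   data, error, XAxis, T = open_1d_txt("spectrum.txt", text_reader="simple")
# where data and error are numpy arrays and XAxis is a SpectroscopicAxis built
# from the first column and its unit line.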
|
keflavich/pyspeckit-obsolete
|
pyspeckit/spectrum/readers/txt_reader.py
|
Python
|
mit
| 5,045 | 0.010109 |
import pygame
# Import the android module. If we can't import it, set it to None - this
# lets us test it, and check to see if we want android-specific behavior.
try:
import android
except ImportError:
android = None
# Event constant.
TIMEREVENT = pygame.USEREVENT
# The FPS the game runs at.
FPS = 30
# Color constants.
RED = (255, 0, 0, 255)
GREEN = (0, 255, 0, 255)
def main():
pygame.init()
# Set the screen size.
screen = pygame.display.set_mode((480, 800))
# Map the back button to the escape key.
if android:
android.init()
android.map_key(android.KEYCODE_BACK, pygame.K_ESCAPE)
# Use a timer to control FPS.
pygame.time.set_timer(TIMEREVENT, 1000 / FPS)
# The color of the screen.
color = RED
while True:
ev = pygame.event.wait()
# Android-specific:
if android:
if android.check_pause():
android.wait_for_resume()
# Draw the screen based on the timer.
if ev.type == TIMEREVENT:
screen.fill(color)
pygame.display.flip()
# When the touchscreen is pressed, change the color to green.
elif ev.type == pygame.MOUSEBUTTONDOWN:
color = GREEN
# When it's released, change the color to RED.
elif ev.type == pygame.MOUSEBUTTONUP:
color = RED
# When the user hits back, ESCAPE is sent. Handle it and end
# the game.
elif ev.type == pygame.KEYDOWN and ev.key == pygame.K_ESCAPE:
break
# This isn't run on Android.
if __name__ == "__main__":
main()
|
kallimachos/archive
|
andpygame/android_example.py
|
Python
|
gpl-3.0
| 1,612 | 0.001861 |
import os
import unittest
from vsg.rules import iteration_scheme
from vsg import vhdlFile
from vsg.tests import utils
sTestDir = os.path.dirname(__file__)
lFile, eError =vhdlFile.utils.read_vhdlfile(os.path.join(sTestDir,'rule_300_test_input.vhd'))
dIndentMap = utils.read_indent_file()
lExpected = []
lExpected.append('')
utils.read_file(os.path.join(sTestDir, 'rule_300_test_input.fixed.vhd'), lExpected)
class test_iteration_scheme_rule(unittest.TestCase):
def setUp(self):
self.oFile = vhdlFile.vhdlFile(lFile)
self.assertIsNone(eError)
self.oFile.set_indent_map(dIndentMap)
def test_rule_300(self):
oRule = iteration_scheme.rule_300()
self.assertTrue(oRule)
self.assertEqual(oRule.name, 'iteration_scheme')
self.assertEqual(oRule.identifier, '300')
lExpected = [13, 17]
oRule.analyze(self.oFile)
self.assertEqual(lExpected, utils.extract_violation_lines_from_violation_object(oRule.violations))
def test_fix_rule_300(self):
oRule = iteration_scheme.rule_300()
oRule.fix(self.oFile)
lActual = self.oFile.get_lines()
self.assertEqual(lExpected, lActual)
oRule.analyze(self.oFile)
self.assertEqual(oRule.violations, [])
|
jeremiah-c-leary/vhdl-style-guide
|
vsg/tests/iteration_scheme/test_rule_300.py
|
Python
|
gpl-3.0
| 1,279 | 0.003909 |
from datetime import datetime, timedelta
from pprint import pprint
from django import forms
from utils.functions import shift_years
from .models import (
NfEntrada,
PosicaoCarga,
)
class NotafiscalChaveForm(forms.Form):
chave = forms.CharField(
widget=forms.TextInput())
class NotafiscalRelForm(forms.Form):
def data_ini():
return (datetime.now().replace(day=1)-timedelta(days=1)).replace(day=1)
data_de = forms.DateField(
label='Data do Faturamento: De',
initial=data_ini,
widget=forms.DateInput(attrs={'type': 'date',
'autofocus': 'autofocus'}))
data_ate = forms.DateField(
label='Até', required=False,
widget=forms.DateInput(attrs={'type': 'date'}))
uf = forms.CharField(
label='UF', max_length=2, min_length=2, required=False,
widget=forms.TextInput(attrs={'size': 2}))
nf = forms.CharField(
label='Número da NF', required=False,
widget=forms.TextInput(attrs={'type': 'number'}))
transportadora = forms.CharField(
label='Transportadora', required=False,
help_text='Sigla da transportadora.',
widget=forms.TextInput())
cliente = forms.CharField(
label='Cliente', required=False,
help_text='Parte do nome ou início do CNPJ.',
widget=forms.TextInput())
pedido = forms.CharField(
label='Pedido Tussor', required=False,
widget=forms.TextInput(attrs={'type': 'number'}))
ped_cliente = forms.CharField(
label='Pedido de cliente', required=False,
widget=forms.TextInput(attrs={'type': 'string'}))
CHOICES = [('N', 'Não filtra'),
('C', 'Com data de saída informada'),
('S', 'Sem data de saída')]
data_saida = forms.ChoiceField(
label='Quanto a data de saída', choices=CHOICES, initial='S')
CHOICES = [('T', 'Todos (Sim ou Não)'),
('S', 'Sim'),
('N', 'Não')]
entregue = forms.ChoiceField(
choices=CHOICES, initial='T')
CHOICES = [('N', 'Número da nota fiscal (decrescente)'),
('P', 'Número do pedido (crescente)'),
('A', 'Atraso (maior primeiro)')]
ordem = forms.ChoiceField(
label='Ordem de apresentação', choices=CHOICES, initial='A')
CHOICES = [('V', 'Apenas NF de venda e ativas (não canceladas)'),
('T', 'Totas as notas fiscais')]
listadas = forms.ChoiceField(
label='Notas listadas', choices=CHOICES, initial='V')
posicao = forms.ModelChoiceField(
label='Posição', required=False,
queryset=PosicaoCarga.objects.all().order_by('id'),
empty_label='--Todas--')
CHOICES = [('-', 'Todas'),
('a', 'Atacado'),
('v', 'Varejo'),
('o', 'Outras')]
tipo = forms.ChoiceField(
choices=CHOICES, initial='-')
por_pagina = forms.IntegerField(
label='NF por página', required=True, initial=100,
widget=forms.TextInput(attrs={'type': 'number'}))
page = forms.IntegerField(
required=False, widget=forms.HiddenInput())
def clean_uf(self):
uf = self.cleaned_data['uf'].upper()
data = self.data.copy()
data['uf'] = uf
self.data = data
return uf
def clean_data_de(self):
data_de = self.cleaned_data['data_de']
if data_de:
if data_de.year < 100:
data_de = shift_years(2000, data_de)
return data_de
class NfPosicaoForm(forms.Form):
data = forms.DateField(
label='Data de movimento da carga',
help_text='Só pode ficar vazia se posição for "Entregue ao apoio".',
initial=datetime.now(), required=False,
widget=forms.DateInput(attrs={'type': 'date',
'autofocus': 'autofocus'}))
posicao = forms.ModelChoiceField(
label='Posição', required=False,
queryset=PosicaoCarga.objects.all().order_by('id'),
initial=2, empty_label='--Todas--')
class EntradaNfForm(forms.ModelForm):
cadastro = forms.CharField(
label='CNPJ',
widget=forms.TextInput(
attrs={'size': 20, 'autofocus': 'autofocus'}))
emissor = forms.CharField(
widget=forms.TextInput(attrs={'size': 80}))
descricao = forms.CharField(
widget=forms.TextInput(attrs={'size': 80}))
transportadora = forms.CharField(
widget=forms.TextInput(attrs={'size': 60}))
motorista = forms.CharField(
widget=forms.TextInput(attrs={'size': 60}))
class EntradaNfSemXmlForm(EntradaNfForm):
class Meta:
model = NfEntrada
fields = [
'cadastro', 'numero', 'emissor', 'descricao', 'volumes',
'chegada', 'transportadora', 'motorista', 'placa',
'responsavel'
]
class ListaForm(forms.Form):
numero = forms.CharField(
label='Número da NF', required=False,
widget=forms.TextInput(attrs={
'type': 'number',
'size': 8,
'autofocus': 'autofocus',
}))
data = forms.DateField(
label='Data de chegada', required=False,
widget=forms.DateInput(attrs={'type': 'date'}))
pagina = forms.IntegerField(
required=False, widget=forms.HiddenInput())
|
anselmobd/fo2
|
src/logistica/forms.py
|
Python
|
mit
| 5,361 | 0.000187 |
# -*- encoding: utf-8 -*-
# $Id: __init__.py,v 1.8.2.10 2012/02/03 23:04:01 customdesigned Exp $
#
# This file is part of the pydns project.
# Homepage: http://pydns.sourceforge.net
#
# This code is covered by the standard Python License. See LICENSE for details.
#
# __init__.py for DNS class.
__version__ = '2.3.6'
from . import Type,Opcode,Status,Class
from .Base import DnsRequest, DNSError
from .Lib import DnsResult
from .Base import *
from .Lib import *
Error=DNSError
from .lazy import *
Request = DnsRequest
Result = DnsResult
#
# $Log: __init__.py,v $
# Revision 1.8.2.10 2012/02/03 23:04:01 customdesigned
# Release 2.3.6
#
# Revision 1.8.2.9 2011/03/16 20:06:39 customdesigned
# Refer to explicit LICENSE file.
#
# Revision 1.8.2.8 2011/03/03 21:57:15 customdesigned
# Release 2.3.5
#
# Revision 1.8.2.7 2009/06/09 18:05:29 customdesigned
# Release 2.3.4
#
# Revision 1.8.2.6 2008/08/01 04:01:25 customdesigned
# Release 2.3.3
#
# Revision 1.8.2.5 2008/07/28 02:11:07 customdesigned
# Bump version.
#
# Revision 1.8.2.4 2008/07/28 00:17:10 customdesigned
# Randomize source ports.
#
# Revision 1.8.2.3 2008/07/24 20:10:55 customdesigned
# Randomize tid in requests, and check in response.
#
# Revision 1.8.2.2 2007/05/22 21:06:52 customdesigned
# utf-8 in __init__.py
#
# Revision 1.8.2.1 2007/05/22 20:39:20 customdesigned
# Release 2.3.1
#
# Revision 1.8 2002/05/06 06:17:49 anthonybaxter
# found that the old README file called itself release 2.2. So make
# this one 2.3...
#
# Revision 1.7 2002/05/06 06:16:15 anthonybaxter
# make some sort of reasonable version string. releasewards ho!
#
# Revision 1.6 2002/03/19 13:05:02 anthonybaxter
# converted to class based exceptions (there goes the python1.4 compatibility :)
#
# removed a quite gross use of 'eval()'.
#
# Revision 1.5 2002/03/19 12:41:33 anthonybaxter
# tabnannied and reindented everything. 4 space indent, no tabs.
# yay.
#
# Revision 1.4 2001/11/26 17:57:51 stroeder
# Added __version__
#
# Revision 1.3 2001/08/09 09:08:55 anthonybaxter
# added identifying header to top of each file
#
# Revision 1.2 2001/07/19 06:57:07 anthony
# cvs keywords added
#
#
|
hansroh/aquests
|
aquests/protocols/dns/pydns/__init__.py
|
Python
|
mit
| 2,174 | 0.00276 |
# Copyright 2012 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from eventlet import tpool
from nova import exception
from nova.openstack.common.gettextutils import _
from nova.openstack.common import log as logging
from nova.virt.disk.vfs import api as vfs
LOG = logging.getLogger(__name__)
guestfs = None
class VFSGuestFS(vfs.VFS):
"""This class implements a VFS module that uses the libguestfs APIs
to access the disk image. The disk image is never mapped into
the host filesystem, thus avoiding any potential for symlink
attacks from the guest filesystem.
"""
def __init__(self, imgfile, imgfmt='raw', partition=None):
super(VFSGuestFS, self).__init__(imgfile, imgfmt, partition)
global guestfs
if guestfs is None:
guestfs = __import__('guestfs')
self.handle = None
def setup_os(self):
if self.partition == -1:
self.setup_os_inspect()
else:
self.setup_os_static()
def setup_os_static(self):
LOG.debug(_("Mount guest OS image %(imgfile)s partition %(part)s"),
{'imgfile': self.imgfile, 'part': str(self.partition)})
if self.partition:
self.handle.mount_options("", "/dev/sda%d" % self.partition, "/")
else:
self.handle.mount_options("", "/dev/sda", "/")
def setup_os_inspect(self):
LOG.debug(_("Inspecting guest OS image %s"), self.imgfile)
roots = self.handle.inspect_os()
if len(roots) == 0:
raise exception.NovaException(_("No operating system found in %s")
% self.imgfile)
if len(roots) != 1:
LOG.debug(_("Multi-boot OS %(roots)s") % {'roots': str(roots)})
raise exception.NovaException(
_("Multi-boot operating system found in %s") %
self.imgfile)
self.setup_os_root(roots[0])
def setup_os_root(self, root):
LOG.debug(_("Inspecting guest OS root filesystem %s"), root)
mounts = self.handle.inspect_get_mountpoints(root)
if len(mounts) == 0:
raise exception.NovaException(
_("No mount points found in %(root)s of %(imgfile)s") %
{'root': root, 'imgfile': self.imgfile})
# the root directory must be mounted first
mounts.sort(key=lambda mount: mount[0])
root_mounted = False
for mount in mounts:
LOG.debug(_("Mounting %(dev)s at %(dir)s") %
{'dev': mount[1], 'dir': mount[0]})
try:
self.handle.mount_options("", mount[1], mount[0])
root_mounted = True
except RuntimeError as e:
msg = _("Error mounting %(device)s to %(dir)s in image"
" %(imgfile)s with libguestfs (%(e)s)") % \
{'imgfile': self.imgfile, 'device': mount[1],
'dir': mount[0], 'e': e}
if root_mounted:
LOG.debug(msg)
else:
raise exception.NovaException(msg)
def setup(self):
LOG.debug(_("Setting up appliance for %(imgfile)s %(imgfmt)s") %
{'imgfile': self.imgfile, 'imgfmt': self.imgfmt})
try:
self.handle = tpool.Proxy(guestfs.GuestFS(close_on_exit=False))
except TypeError as e:
if 'close_on_exit' in str(e):
# NOTE(russellb) In case we're not using a version of
# libguestfs new enough to support the close_on_exit parameter,
# which was added in libguestfs 1.20.
self.handle = tpool.Proxy(guestfs.GuestFS())
else:
raise
try:
self.handle.add_drive_opts(self.imgfile, format=self.imgfmt)
self.handle.launch()
self.setup_os()
self.handle.aug_init("/", 0)
except RuntimeError as e:
# explicitly teardown instead of implicit close()
# to prevent orphaned VMs in cases when an implicit
# close() is not enough
self.teardown()
raise exception.NovaException(
_("Error mounting %(imgfile)s with libguestfs (%(e)s)") %
{'imgfile': self.imgfile, 'e': e})
except Exception:
# explicitly teardown instead of implicit close()
# to prevent orphaned VMs in cases when an implicit
# close() is not enough
self.teardown()
raise
def teardown(self):
LOG.debug(_("Tearing down appliance"))
try:
try:
self.handle.aug_close()
except RuntimeError as e:
LOG.warn(_("Failed to close augeas %s"), e)
try:
self.handle.shutdown()
except AttributeError:
# Older libguestfs versions don't have an explicit shutdown
pass
except RuntimeError as e:
LOG.warn(_("Failed to shutdown appliance %s"), e)
try:
self.handle.close()
except AttributeError:
# Older libguestfs versions don't have an explicit close
pass
except RuntimeError as e:
LOG.warn(_("Failed to close guest handle %s"), e)
finally:
# dereference object and implicitly close()
self.handle = None
@staticmethod
def _canonicalize_path(path):
if path[0] != '/':
return '/' + path
return path
def make_path(self, path):
LOG.debug(_("Make directory path=%s"), path)
path = self._canonicalize_path(path)
self.handle.mkdir_p(path)
def append_file(self, path, content):
LOG.debug(_("Append file path=%s"), path)
path = self._canonicalize_path(path)
self.handle.write_append(path, content)
def replace_file(self, path, content):
LOG.debug(_("Replace file path=%s"), path)
path = self._canonicalize_path(path)
self.handle.write(path, content)
def read_file(self, path):
LOG.debug(_("Read file path=%s"), path)
path = self._canonicalize_path(path)
return self.handle.read_file(path)
def has_file(self, path):
LOG.debug(_("Has file path=%s"), path)
path = self._canonicalize_path(path)
try:
self.handle.stat(path)
return True
except RuntimeError:
return False
def set_permissions(self, path, mode):
LOG.debug(_("Set permissions path=%(path)s mode=%(mode)s"),
{'path': path, 'mode': mode})
path = self._canonicalize_path(path)
self.handle.chmod(mode, path)
def set_ownership(self, path, user, group):
LOG.debug(_("Set ownership path=%(path)s "
"user=%(user)s group=%(group)s"),
{'path': path, 'user': user, 'group': group})
path = self._canonicalize_path(path)
uid = -1
gid = -1
if user is not None:
uid = int(self.handle.aug_get(
"/files/etc/passwd/" + user + "/uid"))
if group is not None:
gid = int(self.handle.aug_get(
"/files/etc/group/" + group + "/gid"))
LOG.debug(_("chown uid=%(uid)d gid=%(gid)s"),
{'uid': uid, 'gid': gid})
self.handle.chown(uid, gid, path)
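# Illustrative usage sketch, not part of nova itself (the image path is
# hypothetical):
#   vfs = VFSGuestFS("/var/lib/nova/instances/xyz/disk", imgfmt="qcow2", partition=-1)
#   vfs.setup()
#   try:
#       if vfs.has_file("/etc/hostname"):
#           hostname = vfs.read_file("/etc/hostname")
#   finally:
#       vfs.teardown()
# Passing partition=-1 asks libguestfs to inspect the image for an installed OS
# instead of mounting a fixed partition number.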
|
tanglei528/nova
|
nova/virt/disk/vfs/guestfs.py
|
Python
|
apache-2.0
| 8,034 | 0 |
from pathlib import Path
import pytest
from loguru import logger
from libretime_shared.logging import (
DEBUG,
INFO,
create_task_logger,
level_from_name,
setup_logger,
)
@pytest.mark.parametrize(
"name,level_name,level_no",
[
("error", "error", 40),
("warning", "warning", 30),
("info", "info", 20),
("debug", "debug", 10),
("trace", "trace", 5),
],
)
def test_level_from_name(name, level_name, level_no):
level = level_from_name(name)
assert level.name == level_name
assert level.no == level_no
def test_level_from_name_invalid():
with pytest.raises(ValueError):
level_from_name("invalid")
def test_setup_logger(tmp_path: Path):
log_filepath = tmp_path / "test.log"
extra_log_filepath = tmp_path / "extra.log"
setup_logger(INFO, log_filepath)
extra_logger = create_task_logger(DEBUG, extra_log_filepath, True)
logger.info("test info")
extra_logger.info("extra info")
logger.debug("test debug")
extra_logger.complete()
logger.complete()
assert len(log_filepath.read_text(encoding="utf-8").splitlines()) == 1
assert len(extra_log_filepath.read_text(encoding="utf-8").splitlines()) == 1
|
LibreTime/libretime
|
shared/tests/logging_test.py
|
Python
|
agpl-3.0
| 1,238 | 0.000808 |
# Based on STScI's JWST calibration pipeline.
from __future__ import print_function
import os
import subprocess
import sys
from setuptools import setup, find_packages, Extension, Command
from glob import glob
# Open the README as the package long description
readme = open('README.rst', 'r')
README_TEXT = readme.read()
readme.close()
NAME = 'Nifty4NIFS'
SCRIPTS = glob('scripts/*')
PACKAGE_DATA = {
'': ['*.dat', '*.cfg', '*.fits', '*.txt']
}
setup(
name=NAME,
version="1.0b5",
author='ncomeau',
author_email='ncomeau@gemini.edu',
description='The Gemini NIFS data reduction pipeline.',
long_description = README_TEXT,
url='http://www.gemini.edu',
license='MIT',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: MacOS X',
'Intended Audience :: Science/Research',
'Intended Audience :: Education',
'License :: OSI Approved :: MIT License',
'Operating System :: MacOS',
'Programming Language :: Python :: 2.7',
'Topic :: Scientific/Engineering :: Astronomy',
'Topic :: Scientific/Engineering :: Physics',
],
keywords='Gemini NIFS nifs pipeline reduction data IRAF iraf PYRAF pyraf astronomy integral field spectroscopy ifs ifu',
python_requires='~=2.7',
scripts=SCRIPTS, # TODO(nat): Update this to use entry_points instead of scripts for better cross-platform performance
packages=find_packages(),
package_data=PACKAGE_DATA
)
|
Nat1405/newer-nifty
|
setup.py
|
Python
|
mit
| 1,483 | 0.003372 |
# (c) 2013, Michael DeHaan <michael.dehaan@gmail.com>
# Stephen Fromm <sfromm@gmail.com>
# Brian Coca <briancoca+dev@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
import base64
import os
import os.path
import pipes
import shutil
import tempfile
from ansible import utils
from ansible.runner.return_data import ReturnData
class ActionModule(object):
TRANSFERS_FILES = True
def __init__(self, runner):
self.runner = runner
def _assemble_from_fragments(self, src_path, delimiter=None):
''' assemble a file from a directory of fragments '''
tmpfd, temp_path = tempfile.mkstemp()
tmp = os.fdopen(tmpfd,'w')
delimit_me = False
for f in sorted(os.listdir(src_path)):
fragment = "%s/%s" % (src_path, f)
if delimit_me and delimiter:
tmp.write(delimiter)
if os.path.isfile(fragment):
tmp.write(file(fragment).read())
delimit_me = True
tmp.close()
return temp_path
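    # Illustrative sketch (not part of the original plugin): given a hypothetical
    # fragment directory containing "00-header" and "10-body", the helper above
    # concatenates the files in sorted filename order, writing the delimiter
    # between fragments (never before the first one, since delimit_me starts False):
    #
    #   path = self._assemble_from_fragments('/etc/myapp/conf.d', '# ---\n')
    #   # temp file contents: <00-header> '# ---\n' <10-body>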
def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs):
# load up options
options = {}
if complex_args:
options.update(complex_args)
options.update(utils.parse_kv(module_args))
src = options.get('src', None)
dest = options.get('dest', None)
delimiter = options.get('delimiter', None)
remote_src = options.get('remote_src', True)
if src is None or dest is None:
result = dict(failed=True, msg="src and dest are required")
return ReturnData(conn=conn, comm_ok=False, result=result)
if remote_src:
return self.runner._execute_module(conn, tmp, 'assemble', module_args, inject=inject, complex_args=complex_args)
# Does all work assembling the file
path = self._assemble_from_fragments(src, delimiter)
pathmd5 = utils.md5s(path)
remote_md5 = self.runner._remote_md5(conn, tmp, dest)
if pathmd5 != remote_md5:
resultant = file(path).read()
if self.runner.diff:
dest_result = self.runner._execute_module(conn, tmp, 'slurp', "path=%s" % dest, inject=inject, persist_files=True)
if 'content' in dest_result.result:
dest_contents = dest_result.result['content']
if dest_result.result['encoding'] == 'base64':
dest_contents = base64.b64decode(dest_contents)
else:
raise Exception("unknown encoding, failed: %s" % dest_result.result)
xfered = self.runner._transfer_str(conn, tmp, 'src', resultant)
# fix file permissions when the copy is done as a different user
if self.runner.sudo and self.runner.sudo_user != 'root':
self.runner._low_level_exec_command(conn, "chmod a+r %s" % xfered, tmp)
# run the copy module
module_args = "%s src=%s dest=%s original_basename=%s" % (module_args, pipes.quote(xfered), pipes.quote(dest), pipes.quote(os.path.basename(src)))
if self.runner.noop_on_check(inject):
return ReturnData(conn=conn, comm_ok=True, result=dict(changed=True), diff=dict(before_header=dest, after_header=src, after=resultant))
else:
res = self.runner._execute_module(conn, tmp, 'copy', module_args, inject=inject)
res.diff = dict(after=resultant)
return res
        else:
            # 'xfered' is only defined in the md5-mismatch branch above; use the
            # locally assembled file path to avoid a NameError when checksums match.
            module_args = "%s src=%s dest=%s original_basename=%s" % (module_args, pipes.quote(path), pipes.quote(dest), pipes.quote(os.path.basename(src)))
            return self.runner._execute_module(conn, tmp, 'file', module_args, inject=inject)
|
bezhermoso/home
|
lib/ansible/runner/action_plugins/assemble.py
|
Python
|
gpl-3.0
| 4,340 | 0.002995 |
from collections import namedtuple
Datapoint = namedtuple("Datapoint", "phrase sentiment")
|
ahmedshabib/evergreen-gainsight-hack
|
sentiment Analyser/samr/data.py
|
Python
|
mit
| 93 | 0 |
import pytest
from hangups import channel
@pytest.mark.parametrize('input_,expected', [
(b'79\n[[0,["c","98803CAAD92268E8","",8]\n]\n,[1,[{"gsid":"7tCoFHumSL-IT6BHpCaxLA"}]]\n]\n',
('98803CAAD92268E8', '7tCoFHumSL-IT6BHpCaxLA')
),
])
def test_parse_sid_response(input_, expected):
assert channel._parse_sid_response(input_) == expected
@pytest.mark.parametrize('input_,expected', [
# '€' is 3 bytes in UTF-8.
('€€'.encode()[:6], '€€'),
('€€'.encode()[:5], '€'),
('€€'.encode()[:4], '€'),
('€€'.encode()[:3], '€'),
('€€'.encode()[:2], ''),
('€€'.encode()[:1], ''),
('€€'.encode()[:0], ''),
])
def test_best_effort_decode(input_, expected):
assert channel._best_effort_decode(input_) == expected
def test_simple():
p = channel.PushDataParser()
assert list(p.get_submissions('10\n01234567893\nabc'.encode())) == [
'0123456789',
'abc',
]
def test_truncated_message():
p = channel.PushDataParser()
assert list(p.get_submissions('12\n012345678'.encode())) == []
def test_truncated_length():
p = channel.PushDataParser()
assert list(p.get_submissions('13'.encode())) == []
def test_malformed_length():
p = channel.PushDataParser()
# TODO: could detect errors like these with some extra work
assert list(p.get_submissions('11\n0123456789\n5e\n"abc"'.encode())) == [
'0123456789\n'
]
def test_incremental():
p = channel.PushDataParser()
assert list(p.get_submissions(''.encode())) == []
assert list(p.get_submissions('5'.encode())) == []
assert list(p.get_submissions('\n'.encode())) == []
assert list(p.get_submissions('abc'.encode())) == []
assert list(p.get_submissions('de'.encode())) == ['abcde']
assert list(p.get_submissions(''.encode())) == []
def test_unicode():
p = channel.PushDataParser()
# smile is actually 2 code units
assert list(p.get_submissions('3\na😀'.encode())) == ['a😀']
def test_split_characters():
p = channel.PushDataParser()
assert list(p.get_submissions(b'1\n\xe2\x82')) == []
assert list(p.get_submissions(b'\xac')) == ['€']
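# Reading note (illustrative, not part of the original tests): submissions on the
# wire are length-prefixed as "<length>\n<payload>", where the length counts
# UTF-16 code units rather than bytes -- e.g. '3\na😀' yields the single
# submission 'a😀' (1 + 2 code units), and split UTF-8 byte sequences must be
# buffered across calls, as test_split_characters exercises.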
|
j16sdiz/hangups
|
hangups/test/test_channel.py
|
Python
|
mit
| 2,184 | 0.000936 |
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Unique element dataset transformations."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.data.python.ops import contrib_op_loader # pylint: disable=unused-import
from tensorflow.contrib.data.python.ops import gen_dataset_ops
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.framework import dtypes
def unique():
"""Creates a `Dataset` from another `Dataset`, discarding duplicates.
Use this transformation to produce a dataset that contains one instance of
each unique element in the input. For example:
```python
dataset = tf.data.Dataset.from_tensor_slices([1, 37, 2, 37, 2, 1])
# Using `unique()` will drop the duplicate elements.
dataset = dataset.apply(tf.contrib.data.unique()) # ==> { 1, 37, 2 }
```
Returns:
A `Dataset` transformation function, which can be passed to
@{tf.data.Dataset.apply}.
"""
def _apply_fn(dataset):
return _UniqueDataset(dataset)
return _apply_fn
class _UniqueDataset(dataset_ops.Dataset):
"""A `Dataset` contains the unique elements from its input."""
def __init__(self, input_dataset):
"""See `unique()` for details."""
super(_UniqueDataset, self).__init__()
self._input_dataset = input_dataset
if input_dataset.output_types not in (dtypes.int32, dtypes.int64,
dtypes.string):
raise TypeError(
"`tf.contrib.data.unique()` only supports inputs with a single "
"`tf.int32`, `tf.int64`, or `tf.string` component.")
def _as_variant_tensor(self):
return gen_dataset_ops.unique_dataset(
self._input_dataset._as_variant_tensor(), # pylint: disable=protected-access
**dataset_ops.flat_structure(self))
@property
def output_classes(self):
return self._input_dataset.output_classes
@property
def output_shapes(self):
return self._input_dataset.output_shapes
@property
def output_types(self):
return self._input_dataset.output_types
|
drpngx/tensorflow
|
tensorflow/contrib/data/python/ops/unique.py
|
Python
|
apache-2.0
| 2,748 | 0.005459 |
from django import forms
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Layout,Submit
from .models import Details, Feedback
from crispy_forms.bootstrap import TabHolder, Tab
from crispy_forms.bootstrap import AppendedText, PrependedText, FormActions
class AddmeForm(forms.ModelForm):
class Meta:
model = Details
exclude = ['']
"""Forms for the ``feedback_form`` app."""
class FeedbackForm(forms.ModelForm):
class Meta:
model = Feedback
fields = ('email', 'message')
|
Thuruv/pilgrim
|
blood/forms.py
|
Python
|
mit
| 537 | 0.007449 |
'''
common XBMC Module
Copyright (C) 2011 t0mm0
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import random
import cookielib
import gzip
import re
import StringIO
import urllib
import urllib2
import socket
import time
import kodi
# Set Global timeout - Useful for slow connections and Putlocker.
socket.setdefaulttimeout(10)
BR_VERS = [
['%s.0' % i for i in xrange(18, 50)],
['37.0.2062.103', '37.0.2062.120', '37.0.2062.124', '38.0.2125.101', '38.0.2125.104', '38.0.2125.111', '39.0.2171.71', '39.0.2171.95', '39.0.2171.99', '40.0.2214.93', '40.0.2214.111',
'40.0.2214.115', '42.0.2311.90', '42.0.2311.135', '42.0.2311.152', '43.0.2357.81', '43.0.2357.124', '44.0.2403.155', '44.0.2403.157', '45.0.2454.101', '45.0.2454.85', '46.0.2490.71',
'46.0.2490.80', '46.0.2490.86', '47.0.2526.73', '47.0.2526.80', '48.0.2564.116', '49.0.2623.112', '50.0.2661.86'],
['11.0'],
['8.0', '9.0', '10.0', '10.6']]
WIN_VERS = ['Windows NT 10.0', 'Windows NT 7.0', 'Windows NT 6.3', 'Windows NT 6.2', 'Windows NT 6.1', 'Windows NT 6.0', 'Windows NT 5.1', 'Windows NT 5.0']
FEATURES = ['; WOW64', '; Win64; IA64', '; Win64; x64', '']
RAND_UAS = ['Mozilla/5.0 ({win_ver}{feature}; rv:{br_ver}) Gecko/20100101 Firefox/{br_ver}',
'Mozilla/5.0 ({win_ver}{feature}) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/{br_ver} Safari/537.36',
'Mozilla/5.0 ({win_ver}{feature}; Trident/7.0; rv:{br_ver}) like Gecko',
'Mozilla/5.0 (compatible; MSIE {br_ver}; {win_ver}{feature}; Trident/6.0)']
def get_ua():
try: last_gen = int(kodi.get_setting('last_ua_create'))
except: last_gen = 0
if not kodi.get_setting('current_ua') or last_gen < (time.time() - (7 * 24 * 60 * 60)):
index = random.randrange(len(RAND_UAS))
versions = {'win_ver': random.choice(WIN_VERS), 'feature': random.choice(FEATURES), 'br_ver': random.choice(BR_VERS[index])}
user_agent = RAND_UAS[index].format(**versions)
# log_utils.log('Creating New User Agent: %s' % (user_agent), log_utils.LOGDEBUG)
kodi.set_setting('current_ua', user_agent)
kodi.set_setting('last_ua_create', str(int(time.time())))
else:
user_agent = kodi.get_setting('current_ua')
return user_agent
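# Illustrative sketch (not part of the original module): get_ua() renders one of
# the RAND_UAS templates with a random WIN_VERS/FEATURES/BR_VERS combination and
# caches the result in the add-on settings for a week. One possible value:
#
#   'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36'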
class Net:
'''
This class wraps :mod:`urllib2` and provides an easy way to make http
requests while taking care of cookies, proxies, gzip compression and
character encoding.
Example::
from addon.common.net import Net
net = Net()
response = net.http_GET('http://xbmc.org')
print response.content
'''
_cj = cookielib.LWPCookieJar()
_proxy = None
_user_agent = 'Mozilla/5.0 (Windows NT 6.3; rv:36.0) Gecko/20100101 Firefox/36.0'
_http_debug = False
def __init__(self, cookie_file='', proxy='', user_agent='', http_debug=False):
'''
Kwargs:
cookie_file (str): Full path to a file to be used to load and save
cookies to.
proxy (str): Proxy setting (eg.
``'http://user:pass@example.com:1234'``)
user_agent (str): String to use as the User Agent header. If not
supplied the class will use a default user agent (chrome)
http_debug (bool): Set ``True`` to have HTTP header info written to
the XBMC log for all requests.
'''
if cookie_file:
self.set_cookies(cookie_file)
if proxy:
self.set_proxy(proxy)
if user_agent:
self.set_user_agent(user_agent)
self._http_debug = http_debug
self._update_opener()
def set_cookies(self, cookie_file):
'''
Set the cookie file and try to load cookies from it if it exists.
Args:
cookie_file (str): Full path to a file to be used to load and save
cookies to.
'''
try:
self._cj.load(cookie_file, ignore_discard=True)
self._update_opener()
return True
except:
return False
def get_cookies(self):
'''Returns A dictionary containing all cookie information by domain.'''
return self._cj._cookies
def save_cookies(self, cookie_file):
'''
Saves cookies to a file.
Args:
cookie_file (str): Full path to a file to save cookies to.
'''
self._cj.save(cookie_file, ignore_discard=True)
def set_proxy(self, proxy):
'''
Args:
proxy (str): Proxy setting (eg.
``'http://user:pass@example.com:1234'``)
'''
self._proxy = proxy
self._update_opener()
def get_proxy(self):
'''Returns string containing proxy details.'''
return self._proxy
def set_user_agent(self, user_agent):
'''
Args:
user_agent (str): String to use as the User Agent header.
'''
self._user_agent = user_agent
def get_user_agent(self):
'''Returns user agent string.'''
return self._user_agent
def _update_opener(self):
'''
Builds and installs a new opener to be used by all future calls to
:func:`urllib2.urlopen`.
'''
if self._http_debug:
http = urllib2.HTTPHandler(debuglevel=1)
else:
http = urllib2.HTTPHandler()
if self._proxy:
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self._cj),
urllib2.ProxyHandler({'http':
self._proxy}),
urllib2.HTTPBasicAuthHandler(),
http)
else:
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self._cj),
urllib2.HTTPBasicAuthHandler(),
http)
urllib2.install_opener(opener)
def http_GET(self, url, headers={}, compression=True):
'''
Perform an HTTP GET request.
Args:
url (str): The URL to GET.
Kwargs:
headers (dict): A dictionary describing any headers you would like
to add to the request. (eg. ``{'X-Test': 'testing'}``)
compression (bool): If ``True`` (default), try to use gzip
compression.
Returns:
An :class:`HttpResponse` object containing headers and other
meta-information about the page and the page content.
'''
return self._fetch(url, headers=headers, compression=compression)
def http_POST(self, url, form_data, headers={}, compression=True):
'''
Perform an HTTP POST request.
Args:
url (str): The URL to POST.
form_data (dict): A dictionary of form data to POST.
Kwargs:
headers (dict): A dictionary describing any headers you would like
to add to the request. (eg. ``{'X-Test': 'testing'}``)
compression (bool): If ``True`` (default), try to use gzip
compression.
Returns:
An :class:`HttpResponse` object containing headers and other
meta-information about the page and the page content.
'''
return self._fetch(url, form_data, headers=headers, compression=compression)
def http_HEAD(self, url, headers={}):
'''
Perform an HTTP HEAD request.
Args:
url (str): The URL to GET.
Kwargs:
headers (dict): A dictionary describing any headers you would like
to add to the request. (eg. ``{'X-Test': 'testing'}``)
Returns:
An :class:`HttpResponse` object containing headers and other
meta-information about the page.
'''
request = urllib2.Request(url)
request.get_method = lambda: 'HEAD'
request.add_header('User-Agent', self._user_agent)
for key in headers:
request.add_header(key, headers[key])
response = urllib2.urlopen(request)
return HttpResponse(response)
def _fetch(self, url, form_data={}, headers={}, compression=True):
'''
Perform an HTTP GET or POST request.
Args:
url (str): The URL to GET or POST.
form_data (dict): A dictionary of form data to POST. If empty, the
request will be a GET, if it contains form data it will be a POST.
Kwargs:
headers (dict): A dictionary describing any headers you would like
to add to the request. (eg. ``{'X-Test': 'testing'}``)
compression (bool): If ``True`` (default), try to use gzip
compression.
Returns:
An :class:`HttpResponse` object containing headers and other
meta-information about the page and the page content.
'''
req = urllib2.Request(url)
if form_data:
if isinstance(form_data, basestring):
form_data = form_data
else:
form_data = urllib.urlencode(form_data, True)
req = urllib2.Request(url, form_data)
req.add_header('User-Agent', self._user_agent)
for key in headers:
req.add_header(key, headers[key])
if compression:
req.add_header('Accept-Encoding', 'gzip')
req.add_unredirected_header('Host', req.get_host())
response = urllib2.urlopen(req)
return HttpResponse(response)
class HttpResponse:
'''
    This class represents a response from an HTTP request.
The content is examined and every attempt is made to properly encode it to
Unicode.
.. seealso::
:meth:`Net.http_GET`, :meth:`Net.http_HEAD` and :meth:`Net.http_POST`
'''
content = ''
    '''Unicode encoded string containing the body of the response.'''
def __init__(self, response):
'''
Args:
response (:class:`mimetools.Message`): The object returned by a call
to :func:`urllib2.urlopen`.
'''
self._response = response
@property
def content(self):
html = self._response.read()
encoding = None
try:
if self._response.headers['content-encoding'].lower() == 'gzip':
html = gzip.GzipFile(fileobj=StringIO.StringIO(html)).read()
except:
pass
try:
content_type = self._response.headers['content-type']
if 'charset=' in content_type:
encoding = content_type.split('charset=')[-1]
except:
pass
r = re.search('<meta\s+http-equiv="Content-Type"\s+content="(?:.+?);\s+charset=(.+?)"', html, re.IGNORECASE)
if r:
encoding = r.group(1)
if encoding is not None:
try: html = html.decode(encoding)
except: pass
return html
def get_headers(self, as_dict=False):
'''Returns headers returned by the server.
        If as_dict is True, headers are returned as a dictionary, otherwise as a list.'''
if as_dict:
return dict([(item[0].title(), item[1]) for item in self._response.info().items()])
else:
return self._response.info().headers
def get_url(self):
'''
Return the URL of the resource retrieved, commonly used to determine if
a redirect was followed.
'''
return self._response.geturl()
|
mrknow/filmkodi
|
script.mrknow.urlresolver/lib/urlresolver9/lib/net.py
|
Python
|
apache-2.0
| 12,168 | 0.002959 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Utilities to start simulator."""
import ctypes
import json
import tvm
from ..environment import get_env
from ..libinfo import find_libvta
def _load_sw():
"""Load software library, assuming they are simulator."""
lib_sw = find_libvta("libvta", optional=True)
if not lib_sw:
return []
try:
return [ctypes.CDLL(lib_sw[0], ctypes.RTLD_GLOBAL)]
except OSError:
return []
def _load_all():
"""Load hardware library for tsim."""
lib = _load_sw()
env = get_env()
if env.TARGET == "tsim":
lib = find_libvta("libvta_hw", optional=True)
f = tvm.get_global_func("vta.tsim.init")
m = tvm.module.load(lib[0], "vta-tsim")
f(m)
return lib
def enabled():
"""Check if simulator is enabled."""
f = tvm.get_global_func("vta.simulator.profiler_clear", True)
return f is not None
def clear_stats():
"""Clear profiler statistics."""
env = get_env()
if env.TARGET == "sim":
f = tvm.get_global_func("vta.simulator.profiler_clear", True)
else:
f = tvm.get_global_func("vta.tsim.profiler_clear", True)
if f:
f()
def stats():
"""Get profiler statistics
Returns
-------
stats : dict
Current profiler statistics
"""
env = get_env()
if env.TARGET == "sim":
x = tvm.get_global_func("vta.simulator.profiler_status")()
else:
x = tvm.get_global_func("vta.tsim.profiler_status")()
return json.loads(x)
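# Typical profiling flow (illustrative sketch, assuming this module is importable
# as vta.testing.simulator and run_workload() stands in for user code):
#
#   from vta.testing import simulator
#   if simulator.enabled():
#       simulator.clear_stats()
#       run_workload()               # hypothetical user function
#       print(simulator.stats())     # dict of counters for the active target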
# debug flag to skip execution.
DEBUG_SKIP_EXEC = 1
def debug_mode(flag):
"""Set debug mode
    Parameters
----------
flag : int
The debug flag, 0 means clear all flags.
"""
tvm.get_global_func("vta.simulator.profiler_debug_mode")(flag)
LIBS = _load_all()
|
mlperf/training_results_v0.7
|
Fujitsu/benchmarks/resnet/implementations/implementation_open/mxnet/3rdparty/tvm/vta/python/vta/testing/simulator.py
|
Python
|
apache-2.0
| 2,565 | 0.00039 |
#!/usr/bin/env python3
# Copyright (c) 2014 Pawel Rozlach, Brainly.com sp. z o.o.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
import logging
from inventory_tool.exception import MalformedInputException, GenericException
# For Python3 < 3.3, ipaddress module is available as an extra module,
# under a different name:
try:
from ipaddress import ip_address
from ipaddress import ip_network
from ipaddress import IPv4Address
from ipaddress import IPv6Address
ipaddress_name_network = "network_address"
ipaddress_name_broadcast = "broadcast_address"
except ImportError:
from ipaddr import IPAddress as ip_address
from ipaddr import IPNetwork as ip_network
from ipaddr import IPv4Address
from ipaddr import IPv6Address
ipaddress_name_network = "network"
ipaddress_name_broadcast = "broadcast"
class IPPool:
"""IP pool representation and manipulation
This class takes care of managing ip pools available, and this includes:
- assigning and releasing IPs, both manually and automatically
- booking and canceling IPs for special use
- serialization of IP pools for storage in YAML documents
- human readable representation of ip pools
"""
__slots__ = ['_network', '_allocated', '_reserved']
def __init__(self, network, allocated=[], reserved=[]):
"""Init IPPool
Args:
network: network from which ip addresses should be allocated
allocated: list of ip addres strings that are already allocated
reserved: list of ip address strings that should not be available
for allocation.
Raises:
ValueError: ip address or network is invalid or malformed.
"""
self._network = ip_network(network)
self._allocated = [ip_address(x) for x in allocated]
self._reserved = [ip_address(x) for x in reserved]
def get_hash(self):
"""Extract data from object in a way suitable for serializing
Returns:
Method returns data necessary for re-initializing the same object in
a form suitable for serialization using YAML/JSON. Normally, this
object contains other objects which can not be easily serialized or
are not very readable after serializing.
"""
tmp = {"network": str(self._network),
"allocated": sorted([str(x) for x in self._allocated]),
"reserved": sorted([str(x) for x in self._reserved]),
}
return tmp
def allocate(self, ip=None):
"""Allocate an IP from the pool.
        Method allocates the next free address from the pool if ip is None, or
        marks the given ip as already allocated.
Args:
ip: either None or ipaddress.ip_address object
Returns:
An ip that has been allocated. In case when "ip" argument is not
none, then the object pointed by it is returned.
Raises:
MalformedInputException - user provided data is invalid
GenericException - pool has run out of free ip adresses
"""
if ip is not None:
if ip not in self._network:
msg = "Attempt to allocate IP from outside of the pool: "
msg += "{0} is not in {1}.".format(ip, self._network)
raise MalformedInputException(msg)
if ip in self._allocated:
msg = "Attempt to allocate already allocated IP: " + str(ip)
raise MalformedInputException(msg)
elif ip in self._reserved:
msg = "Attempt to allocate from reserved pool: " + str(ip)
raise MalformedInputException(msg)
else:
self._allocated.append(ip)
return ip
else:
for candidate in [x for x in self._network
if x != self._network.__getattribute__(ipaddress_name_broadcast) and
x != self._network.__getattribute__(ipaddress_name_network)]:
if candidate not in self._allocated and \
candidate not in self._reserved:
logging.info(
"IP {0} has been auto-assigned.".format(candidate))
self._allocated.append(candidate)
return candidate
msg = "The pool has run out of free ip addresses."
raise GenericException(msg)
def release(self, ip):
"""Mark given IP as free, available for allocation.
Args:
ip: ip to deallocate
Raises:
MalformedInputException: provided ip has not been alocated yet.
"""
if ip in self._allocated:
self._allocated.remove(ip_address(ip))
else:
msg = "An attempt to release an ip {0} ".format(ip)
msg += "which has not been allocated yet."
raise MalformedInputException(msg)
def release_all(self):
"""Mark all ip addresses in the pool as available"""
self._allocated = []
def overlaps(self, other):
"""Check if IP pools overlap
Args:
other: ip pool to check for overlap with this pool
"""
return self._network.overlaps(other._network)
def book(self, ip):
"""Prevent IP from being allocated.
Marks given IP as reserved/unavailable for allocation.
Args:
ip: ip to book.
Raises:
MalformedInputException: ip does not belong to this pool
"""
if ip not in self._network:
msg = "IP {0} does not belong to network {1}".format(ip, self._network)
raise MalformedInputException(msg)
elif ip in self._reserved:
msg = "IP {0} has already been booked".format(ip)
raise MalformedInputException(msg)
else:
self._reserved.append(ip)
def cancel(self, ip):
"""Remove reservation of an IP address
Marks given IP as available for allocation.
Args:
ip: ip to release
Raises:
MalformedInputException: ip has not been reserved yet.
"""
if ip in self._reserved:
self._reserved.remove(ip)
else:
msg = "IP {0} has not been reserved yet".format(ip)
raise MalformedInputException(msg)
def __contains__(self, other):
"""Check if ip belongs to the pool.
Args:
other: ip, either as a string or an ipaddress.ip_address object
to check the membership for.
"""
if isinstance(other, str):
tmp = ip_address(other)
return tmp in self._network
elif isinstance(other, IPv4Address) or \
isinstance(other, IPv6Address):
return other in self._network
else:
msg = "Could not determine membership of the object {0}".format(other)
raise MalformedInputException(msg)
def __str__(self):
"""Present object in human-readable form"""
msg = "Network: {0}\n".format(self._network)
msg += "Allocated:\n"
if self._allocated:
for tmp in self._allocated:
msg += "\t- {0}\n".format(tmp)
else:
msg += "\t<None>\n"
msg += "Reserved:\n"
if self._reserved:
for tmp in self._reserved:
msg += "\t- {0}\n".format(tmp)
else:
msg += "\t<None>\n"
return msg
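# Usage sketch (illustrative only, not part of the original module):
#
#   >>> pool = IPPool("192.168.0.0/30")
#   >>> str(pool.allocate())                     # first usable host address
#   '192.168.0.1'
#   >>> pool.book(ip_address("192.168.0.2"))
#   >>> pool.allocate()                          # raises GenericException
#
# A /30 network has two usable hosts; with .1 allocated and .2 reserved, the
# next automatic allocation fails as documented in allocate().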
|
vespian/inventory_tool
|
inventory_tool/object/ippool.py
|
Python
|
apache-2.0
| 8,065 | 0.00062 |
from __future__ import absolute_import
from __future__ import print_function
import ujson
from django.http import HttpResponse
from mock import patch
from typing import Any, Dict, List, Text, Union
from zerver.lib.actions import (
do_change_is_admin,
do_set_realm_property,
do_deactivate_realm,
)
from zerver.lib.test_classes import ZulipTestCase
from zerver.lib.test_helpers import tornado_redirected_to_list
from zerver.models import get_realm, get_user_profile_by_email, Realm
class RealmTest(ZulipTestCase):
def assert_user_profile_cache_gets_new_name(self, email, new_realm_name):
# type: (Text, Text) -> None
user_profile = get_user_profile_by_email(email)
self.assertEqual(user_profile.realm.name, new_realm_name)
def test_do_set_realm_name_caching(self):
# type: () -> None
"""The main complicated thing about setting realm names is fighting the
cache, and we start by populating the cache for Hamlet, and we end
by checking the cache to ensure that the new value is there."""
self.example_user('hamlet')
realm = get_realm('zulip')
new_name = u'Zed You Elle Eye Pea'
do_set_realm_property(realm, 'name', new_name)
self.assertEqual(get_realm(realm.string_id).name, new_name)
self.assert_user_profile_cache_gets_new_name('hamlet@zulip.com', new_name)
def test_update_realm_name_events(self):
# type: () -> None
realm = get_realm('zulip')
new_name = u'Puliz'
events = [] # type: List[Dict[str, Any]]
with tornado_redirected_to_list(events):
do_set_realm_property(realm, 'name', new_name)
event = events[0]['event']
self.assertEqual(event, dict(
type='realm',
op='update',
property='name',
value=new_name,
))
def test_update_realm_description_events(self):
# type: () -> None
realm = get_realm('zulip')
new_description = u'zulip dev group'
events = [] # type: List[Dict[str, Any]]
with tornado_redirected_to_list(events):
do_set_realm_property(realm, 'description', new_description)
event = events[0]['event']
self.assertEqual(event, dict(
type='realm',
op='update',
property='description',
value=new_description,
))
def test_update_realm_description(self):
# type: () -> None
email = 'iago@zulip.com'
self.login(email)
realm = get_realm('zulip')
new_description = u'zulip dev group'
data = dict(description=ujson.dumps(new_description))
events = [] # type: List[Dict[str, Any]]
with tornado_redirected_to_list(events):
result = self.client_patch('/json/realm', data)
self.assert_json_success(result)
realm = get_realm('zulip')
self.assertEqual(realm.description, new_description)
event = events[0]['event']
self.assertEqual(event, dict(
type='realm',
op='update',
property='description',
value=new_description,
))
def test_realm_description_length(self):
# type: () -> None
new_description = u'A' * 1001
data = dict(description=ujson.dumps(new_description))
# create an admin user
email = 'iago@zulip.com'
self.login(email)
result = self.client_patch('/json/realm', data)
self.assert_json_error(result, 'Realm description is too long.')
realm = get_realm('zulip')
self.assertNotEqual(realm.description, new_description)
def test_admin_restrictions_for_changing_realm_name(self):
# type: () -> None
new_name = 'Mice will play while the cat is away'
user_profile = self.example_user('othello')
email = user_profile.email
self.login(email)
do_change_is_admin(user_profile, False)
req = dict(name=ujson.dumps(new_name))
result = self.client_patch('/json/realm', req)
self.assert_json_error(result, 'Must be a realm administrator')
def test_unauthorized_name_change(self):
# type: () -> None
data = {'full_name': 'Sir Hamlet'}
user_profile = self.example_user('hamlet')
email = user_profile.email
self.login(email)
do_set_realm_property(user_profile.realm, 'name_changes_disabled', True)
url = '/json/settings/change'
result = self.client_post(url, data)
self.assertEqual(result.status_code, 200)
# Since the setting fails silently, no message is returned
self.assert_in_response("", result)
def test_do_deactivate_realm_clears_user_realm_cache(self):
# type: () -> None
"""The main complicated thing about deactivating realm names is
updating the cache, and we start by populating the cache for
Hamlet, and we end by checking the cache to ensure that his
realm appears to be deactivated. You can make this test fail
by disabling cache.flush_realm()."""
self.example_user('hamlet')
realm = get_realm('zulip')
do_deactivate_realm(realm)
user = self.example_user('hamlet')
self.assertTrue(user.realm.deactivated)
def test_do_deactivate_realm_on_deactived_realm(self):
# type: () -> None
"""Ensure early exit is working in realm deactivation"""
realm = get_realm('zulip')
self.assertFalse(realm.deactivated)
do_deactivate_realm(realm)
self.assertTrue(realm.deactivated)
do_deactivate_realm(realm)
self.assertTrue(realm.deactivated)
def test_change_realm_default_language(self):
# type: () -> None
new_lang = "de"
realm = get_realm('zulip')
self.assertNotEqual(realm.default_language, new_lang)
# we need an admin user.
email = 'iago@zulip.com'
self.login(email)
req = dict(default_language=ujson.dumps(new_lang))
result = self.client_patch('/json/realm', req)
self.assert_json_success(result)
realm = get_realm('zulip')
self.assertEqual(realm.default_language, new_lang)
# Test to make sure that when invalid languages are passed
# as the default realm language, correct validation error is
# raised and the invalid language is not saved in db
invalid_lang = "invalid_lang"
req = dict(default_language=ujson.dumps(invalid_lang))
result = self.client_patch('/json/realm', req)
self.assert_json_error(result, "Invalid language '%s'" % (invalid_lang,))
realm = get_realm('zulip')
self.assertNotEqual(realm.default_language, invalid_lang)
class RealmAPITest(ZulipTestCase):
def setUp(self):
# type: () -> None
user_profile = self.example_user('cordelia')
email = user_profile.email
self.login(email)
do_change_is_admin(user_profile, True)
def set_up_db(self, attr, value):
# type: (str, Any) -> None
realm = get_realm('zulip')
setattr(realm, attr, value)
realm.save()
def update_with_api(self, name, value):
# type: (str, Union[Text, int, bool]) -> Realm
result = self.client_patch('/json/realm', {name: ujson.dumps(value)})
self.assert_json_success(result)
return get_realm('zulip') # refresh data
def do_test_realm_update_api(self, name):
# type: (str) -> None
"""Test updating realm properties.
If new realm properties have been added to the Realm model but the
test_values dict below has not been updated, this will raise an
assertion error.
"""
bool_tests = [False, True] # type: List[bool]
test_values = dict(
add_emoji_by_admins_only=bool_tests,
create_stream_by_admins_only=bool_tests,
default_language=[u'de', u'en'],
description=[u'Realm description', u'New description'],
email_changes_disabled=bool_tests,
invite_required=bool_tests,
invite_by_admins_only=bool_tests,
inline_image_preview=bool_tests,
inline_url_embed_preview=bool_tests,
message_retention_days=[10, 20],
name=[u'Zulip', u'New Name'],
name_changes_disabled=bool_tests,
restricted_to_domain=bool_tests,
waiting_period_threshold=[10, 20],
) # type: Dict[str, Any]
vals = test_values.get(name)
if vals is None:
raise AssertionError('No test created for %s' % (name))
self.set_up_db(name, vals[0])
realm = self.update_with_api(name, vals[1])
self.assertEqual(getattr(realm, name), vals[1])
realm = self.update_with_api(name, vals[0])
self.assertEqual(getattr(realm, name), vals[0])
def test_update_realm_properties(self):
# type: () -> None
for prop in Realm.property_types:
self.do_test_realm_update_api(prop)
def test_update_realm_allow_message_editing(self):
# type: () -> None
"""Tests updating the realm property 'allow_message_editing'."""
self.set_up_db('allow_message_editing', False)
self.set_up_db('message_content_edit_limit_seconds', 0)
realm = self.update_with_api('allow_message_editing', True)
realm = self.update_with_api('message_content_edit_limit_seconds', 100)
self.assertEqual(realm.allow_message_editing, True)
self.assertEqual(realm.message_content_edit_limit_seconds, 100)
realm = self.update_with_api('allow_message_editing', False)
self.assertEqual(realm.allow_message_editing, False)
self.assertEqual(realm.message_content_edit_limit_seconds, 100)
realm = self.update_with_api('message_content_edit_limit_seconds', 200)
self.assertEqual(realm.allow_message_editing, False)
self.assertEqual(realm.message_content_edit_limit_seconds, 200)
|
christi3k/zulip
|
zerver/tests/test_realm.py
|
Python
|
apache-2.0
| 10,099 | 0.000594 |
import copy
from django.utils import six
class MergeDict(object):
"""
A simple class for creating new "virtual" dictionaries that actually look
up values in more than one dictionary, passed in the constructor.
If a key appears in more than one of the given dictionaries, only the
first occurrence will be used.
"""
def __init__(self, *dicts):
self.dicts = dicts
def __bool__(self):
return any(self.dicts)
def __nonzero__(self):
return type(self).__bool__(self)
def __getitem__(self, key):
for dict_ in self.dicts:
try:
return dict_[key]
except KeyError:
pass
raise KeyError(key)
def __copy__(self):
return self.__class__(*self.dicts)
def get(self, key, default=None):
try:
return self[key]
except KeyError:
return default
# This is used by MergeDicts of MultiValueDicts.
def getlist(self, key):
for dict_ in self.dicts:
if key in dict_:
return dict_.getlist(key)
return []
def _iteritems(self):
seen = set()
for dict_ in self.dicts:
for item in six.iteritems(dict_):
k = item[0]
if k in seen:
continue
seen.add(k)
yield item
def _iterkeys(self):
for k, v in self._iteritems():
yield k
def _itervalues(self):
for k, v in self._iteritems():
yield v
if six.PY3:
items = _iteritems
keys = _iterkeys
values = _itervalues
else:
iteritems = _iteritems
iterkeys = _iterkeys
itervalues = _itervalues
def items(self):
return list(self.iteritems())
def keys(self):
return list(self.iterkeys())
def values(self):
return list(self.itervalues())
def has_key(self, key):
for dict_ in self.dicts:
if key in dict_:
return True
return False
__contains__ = has_key
__iter__ = _iterkeys
def copy(self):
"""Returns a copy of this object."""
return self.__copy__()
def __str__(self):
'''
Returns something like
"{'key1': 'val1', 'key2': 'val2', 'key3': 'val3'}"
instead of the generic "<object meta-data>" inherited from object.
'''
return str(dict(self.items()))
def __repr__(self):
'''
Returns something like
MergeDict({'key1': 'val1', 'key2': 'val2'}, {'key3': 'val3'})
instead of generic "<object meta-data>" inherited from object.
'''
dictreprs = ', '.join(repr(d) for d in self.dicts)
return '%s(%s)' % (self.__class__.__name__, dictreprs)
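# Usage sketch for MergeDict (illustrative, not part of the original module):
#
#   >>> md = MergeDict({'a': 1}, {'a': 2, 'b': 3})
#   >>> md['a']     # the first dictionary wins for duplicate keys
#   1
#   >>> md['b']
#   3
#   >>> md.get('missing', 'default')
#   'default'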
class SortedDict(dict):
"""
A dictionary that keeps its keys in the order in which they're inserted.
"""
def __new__(cls, *args, **kwargs):
instance = super(SortedDict, cls).__new__(cls, *args, **kwargs)
instance.keyOrder = []
return instance
def __init__(self, data=None):
if data is None or isinstance(data, dict):
data = data or []
super(SortedDict, self).__init__(data)
self.keyOrder = list(data) if data else []
else:
super(SortedDict, self).__init__()
super_set = super(SortedDict, self).__setitem__
for key, value in data:
# Take the ordering from first key
if key not in self:
self.keyOrder.append(key)
# But override with last value in data (dict() does this)
super_set(key, value)
def __deepcopy__(self, memo):
return self.__class__([(key, copy.deepcopy(value, memo))
for key, value in self.items()])
def __copy__(self):
# The Python's default copy implementation will alter the state
# of self. The reason for this seems complex but is likely related to
# subclassing dict.
return self.copy()
def __setitem__(self, key, value):
if key not in self:
self.keyOrder.append(key)
super(SortedDict, self).__setitem__(key, value)
def __delitem__(self, key):
super(SortedDict, self).__delitem__(key)
self.keyOrder.remove(key)
def __iter__(self):
return iter(self.keyOrder)
def __reversed__(self):
return reversed(self.keyOrder)
def pop(self, k, *args):
result = super(SortedDict, self).pop(k, *args)
try:
self.keyOrder.remove(k)
except ValueError:
# Key wasn't in the dictionary in the first place. No problem.
pass
return result
def popitem(self):
result = super(SortedDict, self).popitem()
self.keyOrder.remove(result[0])
return result
def _iteritems(self):
for key in self.keyOrder:
yield key, self[key]
def _iterkeys(self):
for key in self.keyOrder:
yield key
def _itervalues(self):
for key in self.keyOrder:
yield self[key]
if six.PY3:
items = _iteritems
keys = _iterkeys
values = _itervalues
else:
iteritems = _iteritems
iterkeys = _iterkeys
itervalues = _itervalues
def items(self):
return [(k, self[k]) for k in self.keyOrder]
def keys(self):
return self.keyOrder[:]
def values(self):
return [self[k] for k in self.keyOrder]
def update(self, dict_):
for k, v in six.iteritems(dict_):
self[k] = v
def setdefault(self, key, default):
if key not in self:
self.keyOrder.append(key)
return super(SortedDict, self).setdefault(key, default)
def copy(self):
"""Returns a copy of this object."""
# This way of initializing the copy means it works for subclasses, too.
return self.__class__(self)
def __repr__(self):
"""
Replaces the normal dict.__repr__ with a version that returns the keys
in their sorted order.
"""
return '{%s}' % ', '.join(['%r: %r' % (k, v) for k, v in six.iteritems(self)])
def clear(self):
super(SortedDict, self).clear()
self.keyOrder = []
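# Usage sketch for SortedDict (illustrative, not part of the original module):
#
#   >>> sd = SortedDict()
#   >>> sd['b'] = 1
#   >>> sd['a'] = 2
#   >>> sd.keys()    # insertion order is preserved
#   ['b', 'a']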
class MultiValueDictKeyError(KeyError):
pass
class MultiValueDict(dict):
"""
A subclass of dictionary customized to handle multiple values for the
same key.
>>> d = MultiValueDict({'name': ['Adrian', 'Simon'], 'position': ['Developer']})
>>> d['name']
'Simon'
>>> d.getlist('name')
['Adrian', 'Simon']
>>> d.getlist('doesnotexist')
[]
>>> d.getlist('doesnotexist', ['Adrian', 'Simon'])
['Adrian', 'Simon']
>>> d.get('lastname', 'nonexistent')
'nonexistent'
>>> d.setlist('lastname', ['Holovaty', 'Willison'])
This class exists to solve the irritating problem raised by cgi.parse_qs,
which returns a list for every key, even though most Web forms submit
single name-value pairs.
"""
def __init__(self, key_to_list_mapping=()):
super(MultiValueDict, self).__init__(key_to_list_mapping)
def __repr__(self):
return "<%s: %s>" % (self.__class__.__name__,
super(MultiValueDict, self).__repr__())
def __getitem__(self, key):
"""
Returns the last data value for this key, or [] if it's an empty list;
raises KeyError if not found.
"""
try:
list_ = super(MultiValueDict, self).__getitem__(key)
except KeyError:
raise MultiValueDictKeyError("Key %r not found in %r" % (key, self))
try:
return list_[-1]
except IndexError:
return []
def __setitem__(self, key, value):
super(MultiValueDict, self).__setitem__(key, [value])
def __copy__(self):
return self.__class__([
(k, v[:])
for k, v in self.lists()
])
def __deepcopy__(self, memo=None):
if memo is None:
memo = {}
result = self.__class__()
memo[id(self)] = result
for key, value in dict.items(self):
dict.__setitem__(result, copy.deepcopy(key, memo),
copy.deepcopy(value, memo))
return result
def __getstate__(self):
obj_dict = self.__dict__.copy()
obj_dict['_data'] = dict([(k, self.getlist(k)) for k in self])
return obj_dict
def __setstate__(self, obj_dict):
data = obj_dict.pop('_data', {})
for k, v in data.items():
self.setlist(k, v)
self.__dict__.update(obj_dict)
def get(self, key, default=None):
"""
Returns the last data value for the passed key. If key doesn't exist
or value is an empty list, then default is returned.
"""
try:
val = self[key]
except KeyError:
return default
if val == []:
return default
return val
def getlist(self, key, default=None):
"""
Returns the list of values for the passed key. If key doesn't exist,
then a default value is returned.
"""
try:
return super(MultiValueDict, self).__getitem__(key)
except KeyError:
if default is None:
return []
return default
def setlist(self, key, list_):
super(MultiValueDict, self).__setitem__(key, list_)
def setdefault(self, key, default=None):
if key not in self:
self[key] = default
# Do not return default here because __setitem__() may store
# another value -- QueryDict.__setitem__() does. Look it up.
return self[key]
def setlistdefault(self, key, default_list=None):
if key not in self:
if default_list is None:
default_list = []
self.setlist(key, default_list)
# Do not return default_list here because setlist() may store
# another value -- QueryDict.setlist() does. Look it up.
return self.getlist(key)
def appendlist(self, key, value):
"""Appends an item to the internal list associated with key."""
self.setlistdefault(key).append(value)
def _iteritems(self):
"""
Yields (key, value) pairs, where value is the last item in the list
associated with the key.
"""
for key in self:
yield key, self[key]
def _iterlists(self):
"""Yields (key, list) pairs."""
return six.iteritems(super(MultiValueDict, self))
def _itervalues(self):
"""Yield the last value on every key list."""
for key in self:
yield self[key]
if six.PY3:
items = _iteritems
lists = _iterlists
values = _itervalues
else:
iteritems = _iteritems
iterlists = _iterlists
itervalues = _itervalues
def items(self):
return list(self.iteritems())
def lists(self):
return list(self.iterlists())
def values(self):
return list(self.itervalues())
def copy(self):
"""Returns a shallow copy of this object."""
return copy.copy(self)
def update(self, *args, **kwargs):
"""
update() extends rather than replaces existing key lists.
Also accepts keyword args.
"""
if len(args) > 1:
raise TypeError("update expected at most 1 arguments, got %d" % len(args))
if args:
other_dict = args[0]
if isinstance(other_dict, MultiValueDict):
for key, value_list in other_dict.lists():
self.setlistdefault(key).extend(value_list)
else:
try:
for key, value in other_dict.items():
self.setlistdefault(key).append(value)
except TypeError:
raise ValueError("MultiValueDict.update() takes either a MultiValueDict or dictionary")
for key, value in six.iteritems(kwargs):
self.setlistdefault(key).append(value)
def dict(self):
"""
Returns current object as a dict with singular values.
"""
return dict((key, self[key]) for key in self)
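# Note on MultiValueDict.update() (illustrative, not part of the original
# module): unlike dict.update(), it extends existing value lists instead of
# replacing them:
#
#   >>> d = MultiValueDict({'name': ['Adrian']})
#   >>> d.update({'name': 'Simon'})
#   >>> d.getlist('name')
#   ['Adrian', 'Simon']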
class ImmutableList(tuple):
"""
A tuple-like object that raises useful errors when it is asked to mutate.
Example::
>>> a = ImmutableList(range(5), warning="You cannot mutate this.")
>>> a[3] = '4'
Traceback (most recent call last):
...
AttributeError: You cannot mutate this.
"""
def __new__(cls, *args, **kwargs):
if 'warning' in kwargs:
warning = kwargs['warning']
del kwargs['warning']
else:
warning = 'ImmutableList object is immutable.'
self = tuple.__new__(cls, *args, **kwargs)
self.warning = warning
return self
def complain(self, *wargs, **kwargs):
if isinstance(self.warning, Exception):
raise self.warning
else:
raise AttributeError(self.warning)
# All list mutation functions complain.
__delitem__ = complain
__delslice__ = complain
__iadd__ = complain
__imul__ = complain
__setitem__ = complain
__setslice__ = complain
append = complain
extend = complain
insert = complain
pop = complain
remove = complain
sort = complain
reverse = complain
class DictWrapper(dict):
"""
Wraps accesses to a dictionary so that certain values (those starting with
the specified prefix) are passed through a function before being returned.
The prefix is removed before looking up the real value.
Used by the SQL construction code to ensure that values are correctly
quoted before being used.
"""
def __init__(self, data, func, prefix):
super(DictWrapper, self).__init__(data)
self.func = func
self.prefix = prefix
def __getitem__(self, key):
"""
Retrieves the real value after stripping the prefix string (if
present). If the prefix is present, pass the value through self.func
before returning, otherwise return the raw value.
"""
if key.startswith(self.prefix):
use_func = True
key = key[len(self.prefix):]
else:
use_func = False
value = super(DictWrapper, self).__getitem__(key)
if use_func:
return self.func(value)
return value
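# Usage sketch for DictWrapper (illustrative, not part of the original module):
#
#   >>> quoted = DictWrapper({'name': "O'Reilly"}, lambda v: v.replace("'", "''"), 'qn_')
#   >>> quoted['qn_name']    # prefixed lookup passes the value through func
#   "O''Reilly"
#   >>> quoted['name']       # unprefixed lookup returns the raw value
#   "O'Reilly"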
|
makinacorpus/django
|
django/utils/datastructures.py
|
Python
|
bsd-3-clause
| 14,882 | 0.001344 |
#!/usr/bin/env python
#-----------------------------------------------------------------------------
# Copyright (c) 2016--, Evguenia Kopylova, Jad Kanbar, SevenBridges dev team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
"""
Create tasks for tcga-workflow-fasta-input-full-kraken-test workflow.
"""
from __future__ import print_function
import logging, yaml
import click
import sevenbridges as sb
from sevenbridges.errors import SbgError
from os.path import join, splitext, basename
from collections import OrderedDict
import re
def load_config(yaml_fp):
"""Load CGC API configuration file.
Parameters
----------
yaml_fp: str
Filepath to CGC API configuration file
Return
------
logger: logger instance
Log
"""
try:
fp = open(yaml_fp)
config = yaml.load(fp)
except:
raise SbgError('%s file missing!' % yaml_fp)
logger = logging.getLogger('log')
log_handler = logging.FileHandler(config['log_file'])
formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
log_handler.setFormatter(formatter)
logger.addHandler(log_handler)
logger.setLevel(logging.DEBUG)
return logger, config
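# Minimal configuration sketch (hypothetical values; only the keys read
# elsewhere in this script are shown):
#
#   log_file: cgc_tasks.log
#   project: username/tcga-project
#   disease: Lung Adenocarcinoma
#   app-workflow: username/tcga-project/tcga-workflow-fasta-input-full-kraken-test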
def create_task_workflow_cgc(local_mapping_fp,
all_files,
task_name,
api,
config,
logger):
"""Create CGC task for tcga_fasta_input_disease_type_workflow workflow.
Parameters
----------
local_mapping_fp: str
Filepath to master QIIME mapping file
all_files: list
TCGA file IDs
task_name: str
CGC task name
api: SevenBridges API instance
Api
config: dict
YAML configuration file
logger: logger instance
Log
Returns
-------
all_files: list
TCGA file IDs
total_size_gb: float
Total size of all TCGA files
"""
project = config['project']
# Upload local mapping file to project
try:
api.files.upload(project=project, path=local_mapping_fp)
# File already exists
except SbgError as e:
logger.error(
"Could not upload file, trying to query for it", exc_info=e)
pass
# Retrieve File object for mapping file
local_mapping_file = list(
api.files.query(
project=project, names=[basename(local_mapping_fp)]).all())
if len(local_mapping_file) > 1:
raise ValueError(
'List contains >1 files: %s' % len(local_mapping_file))
# Retrieve File objects for all bacterial and viral database files.
# We're not calling files directly by their ID because this can change,
# whereas file names are expected to stay the same.
input_index_files = list(api.files.query(
project=project,
names=['bacterial_database.idx',
'bacterial_nodes.dmp',
'bacterial_names.dmp',
'bacterial_database.kdb',
'viral_database.idx',
'viral_names.dmp',
'viral_nodes.dmp',
'viral_database.kdb']).all())
inputs = {}
inputs['qiime_mapping_file'] = local_mapping_file[0]
inputs['fasta_file_input'] = all_files
for _file in input_index_files:
name = _file.name
if name == 'bacterial_database.idx':
inputs['bacterial_database_idx'] = _file
elif name == 'bacterial_nodes.dmp':
inputs['bacterial_nodes_dmp'] = _file
elif name == 'bacterial_names.dmp':
inputs['bacterial_names_dmp'] = _file
elif name == 'bacterial_database.kdb':
inputs['bacterial_database_kdb'] = _file
elif name == 'viral_database.idx':
inputs['viral_database_idx'] = _file
elif name == 'viral_names.dmp':
inputs['viral_names_dmp'] = _file
elif name == 'viral_nodes.dmp':
inputs['viral_nodes_dmp'] = _file
elif name == 'viral_database.kdb':
inputs['viral_database_kdb'] = _file
else:
raise ValueError(
"File %s not assigned to any input argument." % name)
task_name = "workflow_%s" % task_name
my_project = api.projects.get(id = config['project'])
try:
api.tasks.create(name=task_name,
project=my_project.id,
app=config['app-workflow'],
inputs=inputs,
description=task_name)
except SbgError as e:
logger.error("Draft task was not created!", exc_info=e)
raise SbgError("Draft task was not created!")
# Initialize files array and total size
all_files = []
total_size_gb = 0.0
return all_files, total_size_gb
def generate_mapping_file(mapping_fp,
all_files,
config,
total_tasks_created,
output_dp,
sampleID_count,
logger,
fasta_files_workflow):
"""Create mini mapping file based on defined sample IDs.
Parameters
----------
mapping_fp: str
Filepath to master QIIME mapping file
all_files: list
List of CGC file IDs for which to generate mini-mapping file
config: dict
YAML configuration file
total_tasks_created: int
Number of task
output_dp: str
Output directory path
sampleID_count: int
Begin naming sample IDs from this integer
logger: logger instance
Log
fasta_files_workflow: list
FASTA file names
Returns
-------
output_fp: str
Filepath to mini-mapping file
sampleID_count: int
Updated sampleID count start
all_files: list
List of updated CGC file IDs (duplicates removed)
"""
disease_type = config['disease'].split()
filename = "%s_cgc_qiime_mapping_file_%s.txt" % (
'_'.join(disease_type), total_tasks_created)
output_fp = join(output_dp, filename)
all_files_names = [file.name for file in all_files]
all_files_names_added = []
with open(output_fp, 'w') as output_f:
with open(mapping_fp) as mapping_f:
for line in mapping_f:
if line.startswith('#SampleID'):
output_f.write(line)
else:
line = line.strip().split('\t')
# FASTA file name
filename = line[4]
if filename in all_files_names:
# update sampleID count
output_f.write('s%s\t' % sampleID_count)
sampleID_count += 1
output_f.write('\t'.join(line[1:]))
output_f.write('\n')
all_files_names_added.append(filename)
files_not_added = set(all_files_names) - set(all_files_names_added)
all_files_updated = list(all_files)
# At least one FASTA file analyzed not found in mapping file
if len(files_not_added) > 0:
logger.error(
'Following files missing in mapping file:\n')
# Check missing files are duplicates of those that have been added
files_accounted_for = 0
for _file in files_not_added:
# Remove prefix _*_ which signifies duplicate
regex = re.compile('_._')
prefix = _file[0:3]
if re.match(regex, prefix):
original_file_name = _file[3:]
if original_file_name not in fasta_files_workflow:
                    logger.error('\t%s, [status] missing file' % _file)
else:
files_accounted_for += 1
logger.info('\t%s, [status] duplicate' % _file)
# File does not have prefix _*_ which signifies it is not a
# duplicate
else:
logger.error('\t%s [status] missing file' % _file)
# Reassure user all missing files are duplicates
if files_accounted_for == len(files_not_added):
logger.info('All missing files are duplicates and original files '
'exists in analysis.')
# Remove duplicate FASTA files from analysis
for __file in all_files:
if __file.name in files_not_added:
all_files_updated.remove(__file)
# Non-duplicate files do not exist in mapping file, raise error
else:
logger.error('Not all missing files have been accounted for.')
raise ValueError('Not all missing files have been accounted for.')
return output_fp, sampleID_count, all_files_updated
def create_tasks(api,
mapping_fp,
logger,
config,
lower_bound_group_size,
upper_bound_group_size,
output_dp,
count_start):
"""Create draft tasks for tcga-workflow-fasta-input-full-kraken-test
workflow.
Parameters
----------
api: SevenBridges Api instance
Api
mapping_fp: str
Filepath to master QIIME mapping file
logger: logger instance
Log
config: dict
YAML configuration file
lower_bound_group_size: int
Lower bound on total size of input files to pass to workflow
upper_bound_group_size: int
Upper bound on total size of input files to pass to workflow
output_dp: str
Directory path to output QIIME mini mapping files
count_start: int
Count from which to start SampleID generation
"""
logger.info('Creating draft tasks.')
    # Retrieve BAM and FASTA files associated with the project, disease type,
    # data format, experimental strategy and data type
file_list = list(
api.files.query(
project=config['project'],
metadata={'disease_type': config['disease'],
'data_format': ['BAM', 'FASTA'],
'experimental_strategy': ['RNA-Seq', 'WGS'],
'data_type': ['Raw sequencing data']}).all())
bam_inputs = [_file.name for _file in file_list
if _file.name.lower().endswith('bam')]
# FASTA files
fasta_files = {}
for _file in file_list:
if _file.name.lower().endswith('fasta'):
if _file.name not in fasta_files:
fasta_files[_file.name] = _file
else:
raise ValueError('%s already exists' % _file.name)
# Check BAM associated FASTA file exists
for _file in bam_inputs:
file_name, file_ext = splitext(_file)
if "%s.fasta" % file_name not in fasta_files:
raise ValueError(
'%s.fasta is missing from FASTA files' % file_name)
# Remove all non BAM associated FASTA files from further analysis
fasta_files_workflow = OrderedDict(fasta_files)
for key, value in fasta_files.iteritems():
file_name, file_ext = splitext(key)
if "%s.bam" % file_name not in bam_inputs:
del fasta_files_workflow[key]
    # Check the number of BAM files equals the number of bam2fasta FASTA files
if len(fasta_files_workflow) != len(bam_inputs):
raise ValueError('%s != %s' % (
len(fasta_files_workflow), len(bam_inputs)))
# Loop through FASTA files computing total size, create task if size
# within lower and upper bounds
total_size_gb = 0.0
all_files = []
total_files_tasked = 0
total_tasks_created = 0
sampleID_count = count_start
for i, key in enumerate(fasta_files_workflow):
file = fasta_files_workflow[key]
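        # Convert the file size from bytes to gigabytes (1 GiB = 2**30 bytes)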
file_size_gb = file.size/float(1073741824)
# If:
# (1) File will cause total file size to exceed upper limit, then
# Create task and add file to next task
if (total_size_gb + file_size_gb > upper_bound_group_size and
len(all_files) > 1):
# Create local mapping file
local_mapping_fp, sampleID_count, all_files =\
generate_mapping_file(
mapping_fp, all_files, config, total_tasks_created,
output_dp, sampleID_count, logger,
fasta_files_workflow.keys())
total_files_tasked += len(all_files)
total_tasks_created += 1
task_name = "%s_%s_task_%s_files_%.2fGb" % (
config['disease'],
str(total_tasks_created),
str(len(all_files)),
total_size_gb)
# Add info to logger
logger.info('Task %s: %s files, %.2f Gb' % (total_tasks_created,
len(all_files),
total_size_gb))
# Create draft tasks for tcga_fasta_input_disease_type_workflow
# workflow
all_files, total_size_gb = create_task_workflow_cgc(
local_mapping_fp, all_files, task_name, api, config, logger)
# Add new file to next task
all_files.append(file)
total_size_gb += file_size_gb
        # If:
        # (1) a single file is larger than the upper bound limit, or
        # (2) the group of files falls within the defined limits, or
        # (3) the last file has been encountered, then
        # create a task.
        if ((len(all_files) == 1 and
                total_size_gb >= upper_bound_group_size) or
                (total_size_gb > lower_bound_group_size and
                    total_size_gb < upper_bound_group_size) or
                i + 1 == len(bam_inputs)):
# Create local mapping file
local_mapping_fp, sampleID_count, all_files =\
generate_mapping_file(
mapping_fp, all_files, config, total_tasks_created,
output_dp, sampleID_count, logger,
fasta_files_workflow.keys())
total_files_tasked += len(all_files)
total_tasks_created += 1
task_name = "%s_%s_task_%s_files_%.2fGb" % (
config['disease'],
str(total_tasks_created),
str(len(all_files)),
total_size_gb)
# Add info to logger
logger.info('Task %s: %s files, %.2f Gb' % (total_tasks_created,
len(all_files),
total_size_gb))
# Create draft tasks for tcga_fasta_input_disease_type_workflow
# workflow
all_files, total_size_gb = create_task_workflow_cgc(
local_mapping_fp, all_files, task_name, api, config, logger)
logger.info('Total tasks created: %s' % str(total_tasks_created))
logger.info('Total files tasked: %s' % str(total_files_tasked))
logger.info('Total files for disease type: %s' % str(len(bam_inputs)))
def run_tasks(api,
logger,
config):
"""Launch CGC tasks.
Parameters
----------
api: SevenBridges API instance
Api
logger: logger instance
Log
config: dict
YAML configuration file
"""
logger.info('Running tasks!')
project = config['project']
max_task_number = config['task_max_per_run']
app = config['app-bam2fasta']
running_tasks = list(
api.tasks.query(project=project, limit=100, status='RUNNING').all()
)
queued_tasks = list(
api.tasks.query(project=project, limit=100, status='QUEUED').all()
)
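    # Both running and queued tasks count towards the per-run task limit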
if len(running_tasks) + len(queued_tasks) >= max_task_number:
logger.info("Maximum number of active tasks reached!")
        raise SbgError(
            'Unable to run! You already have {active} active tasks. '
            'Please try later!'.format(
                active=len(running_tasks) + len(queued_tasks)))
draft_tasks = list(
api.tasks.query(project=project,
limit=100,
status='DRAFT').all())
if len(draft_tasks) == 0:
print('No draft tasks left to be run!')
return
# Remove draft tasks that weren't created by current app
draft_tasks_app = list(draft_tasks)
for task in draft_tasks:
if app not in task.app:
draft_tasks_app.remove(task)
    executable_tasks = draft_tasks_app[
        0:max_task_number - len(running_tasks) - len(queued_tasks)]
for task in executable_tasks:
# Sanity check only current app draft tasks are run
if app in task.app:
try:
task.run()
except SbgError as e:
logger.error("Task was not started! Error happened ", exc_info=e)
raise SbgError('Task was not started! Error happened')
if task.status == 'DRAFT':
logger.error("Task was not started! Task state is DRAFT!")
raise SbgError("Task was not started! Task state is DRAFT!")
def show_status(api, logger, config):
    """Show CGC task status.
    Parameters
    ----------
    api: SevenBridges API instance
        Api
    logger: logger instance
        Log
    config: dict
        YAML configuration file
    """
logger.info('Fetching task statuses!')
project = config['project']
queued = api.tasks.query(project=project, status='QUEUED').total
running = api.tasks.query(project=project, status='RUNNING').total
completed = api.tasks.query(project=project, status='COMPLETED').total
draft = api.tasks.query(project=project, status='DRAFT').total
failed = api.tasks.query(project=project, status='FAILED').total
aborted = api.tasks.query(project=project, status='ABORTED').total
print("Draft={}, Queued={}, Running={}, Completed={},"
" Failed={}, Aborted={} ".format(draft, queued,
running, completed,
failed, aborted)
)
@click.command()
@click.option('--mapping-fp', required=True,
type=click.Path(resolve_path=True, readable=True, exists=True,
file_okay=True),
help='Filepath to QIIME mapping file')
@click.option('--yaml-fp', required=True,
type=click.Path(resolve_path=True, readable=True, exists=False,
file_okay=True),
help='Filepath to output CGC API yaml file')
@click.option('--create-draft-tasks', required=True, type=bool, default=True,
show_default=True, help='Create CGC draft tasks')
@click.option('--run-draft-tasks', required=False, type=bool, default=False,
show_default=False, help='Run CGC draft tasks')
@click.option('--check-status', required=False, type=bool, default=False,
show_default=True, help='Show CGC task status')
@click.option('--lower-bound-group-size', required=False, type=int,
default=300, show_default=True,
help='Lower bound on total size of input files to pass to '
'workflow')
@click.option('--upper-bound-group-size', required=False, type=int,
default=400, show_default=True,
help='Upper bound on total size of input files to pass to '
'workflow')
@click.option('--output-dp', required=True,
type=click.Path(resolve_path=True, readable=True, exists=False,
file_okay=True),
help='Directory path to output QIIME mini-mapping files')
@click.option('--count-start', required=True, type=int,
help='Count from which to start SampleID generation')
def main(mapping_fp,
yaml_fp,
create_draft_tasks,
run_draft_tasks,
check_status,
lower_bound_group_size,
upper_bound_group_size,
output_dp,
count_start):
logger, config = load_config(yaml_fp)
sb_config = sb.Config(url=config['api-url'], token=config['token'])
api = sb.Api(config=sb_config)
if create_draft_tasks:
create_tasks(api, mapping_fp, logger, config, lower_bound_group_size,
upper_bound_group_size, output_dp, count_start)
elif run_draft_tasks:
run_tasks(api, logger, config)
elif check_status:
        show_status(api, logger, config)
else:
raise ValueError('Please select one of --create-draft-tasks, '
'--run-draft-tasks or --check-status')
if __name__ == "__main__":
main()
|
ekopylova/tcga-1
|
python_scripts/cgc_create_tcga_workflow_task.py
|
Python
|
bsd-3-clause
| 20,620 | 0.000533 |
"""Unit tests for wx.Gauge.
Methods yet to test:
__init__, Create, Pulse"""
import unittest
import wx
import wxtest
import testControl
class GaugeTest(testControl.ControlTest):
def setUp(self):
self.app = wx.PySimpleApp()
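        # A wx.App instance must exist before any frames or controls are created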
self.frame = wx.Frame(parent=None)
self.testControl = wx.Gauge(parent=self.frame)
# C++ docs state:
# This method is not implemented (returns 0) for most platforms.
def testBezelFace(self):
"""SetBezelFace, GetBezelFace"""
if wxtest.PlatformIsMac() or wxtest.PlatformIsGtk() or \
wxtest.PlatformIsWindows():
for i in range(self.testControl.GetRange()):
self.testControl.SetBezelFace(i)
self.assertEquals(0, self.testControl.GetBezelFace())
else:
# this can't happen.
# TODO: what platforms does it work on?
raise EnvironmentError("Current platform not represented in wxtest")
def testIsVertical(self):
"""IsVertical"""
vert = wx.Gauge(self.frame, style=wx.GA_VERTICAL)
horiz = wx.Gauge(self.frame, style=wx.GA_HORIZONTAL)
self.assert_(not self.testControl.IsVertical()) # default
self.assert_(vert.IsVertical())
self.assert_(not horiz.IsVertical())
def testRange(self):
"""SetRange, GetRange"""
for i in range(0,1000,10):
self.testControl.SetRange(i)
self.assertEquals(i, self.testControl.GetRange())
# C++ docs state:
# This method is not implemented (returns 0) for most platforms.
def testShadowWidth(self):
"""SetShadowWidth, GetShadowWidth"""
if wxtest.PlatformIsMac() or wxtest.PlatformIsGtk() or \
wxtest.PlatformIsWindows():
for i in range(self.testControl.GetRange()):
self.testControl.SetShadowWidth(i)
self.assertEquals(0, self.testControl.GetShadowWidth())
else:
# this can't happen.
# TODO: what platforms does it work on?
raise EnvironmentError("Current platform not represented in wxtest")
def testValue(self):
"""SetValue, GetValue"""
for i in range(self.testControl.GetRange()):
self.testControl.SetValue(i)
self.assertEquals(i, self.testControl.GetValue())
if __name__ == '__main__':
unittest.main()
|
ifwe/wxpy
|
src/tests/wxPythonTests/testGauge.py
|
Python
|
mit
| 2,410 | 0.005809 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import division, print_function
from tagassess.dao.helpers import FilteredUserItemAnnotations
from tagassess.dao.pytables.annotations import AnnotReader
from tagassess.index_creator import create_occurrence_index
from tagassess.probability_estimates.precomputed import PrecomputedEstimator
import os
import plac
import sys
def get_baselines(annot_filter, reader, user_to_tags):
annotations = annot_filter.annotations(reader.iterate())
user_to_item = create_occurrence_index(annotations, 'user', 'item')
annotations = annot_filter.annotations(reader.iterate())
item_to_tags = create_occurrence_index(annotations, 'item', 'tag')
overlap = {}
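    # overlap[user, tag]: number of items carrying `tag` that the user has
    # not applied `tag` to themselves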
for user in user_to_tags:
        for item in user_to_item.get(user, []):
for tag in item_to_tags[item]:
if (user, tag) not in overlap:
overlap[user, tag] = 0
if tag not in user_to_tags[user]:
overlap[user, tag] += 1
idf = {}
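    # idf[tag]: inverse frequency, 1 / (number of annotations that use the tag)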
annotations = annot_filter.annotations(reader.iterate())
for annot in annotations:
tag = annot['tag']
if tag not in idf:
idf[tag] = 0
idf[tag] += 1
for tag in idf.keys():
idf[tag] = 1.0 / idf[tag]
return idf, overlap
def run_exp(user_validation_tags, user_test_tags, user_test_items, est,
annot_filter, reader):
user_to_tags = {}
for user in est.get_valid_users():
        # Remove validation tags; the script focuses on test tags
tags_to_compute = []
tags = est.tags_for_user(user)
for tag in tags:
if tag not in user_validation_tags[user]:
tags_to_compute.append(tag)
user_to_tags[user] = tags_to_compute
annotations = annot_filter.annotations(reader.iterate())
tag_to_items = create_occurrence_index(annotations, 'tag', 'item')
# item_to_tags = create_occurrence_index(annotations, 'item', 'tag')
print('#user', 'tag', 'precision', 'recall', 'hidden')
for user in est.get_valid_users():
tags = user_to_tags[user]
for tag in tags:
hidden = tag in user_test_tags[user]
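            # Score the tag as a retrieval query: items retrieved by the tag
            # versus the user's held-out test items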
relevant = user_test_items[user]
retrieved = tag_to_items[tag]
intersect = retrieved.intersection(relevant)
precision = len(intersect) / len(retrieved)
recall = len(intersect) / len(relevant)
# tags_for_relevant = set()
# for item in relevant:
# tags_for_relevant.update(item_to_tags[item])
print(user, tag, precision, recall, hidden)
def load_dict_from_file(fpath):
'''Loads dictionary from file'''
return_val = {}
with open(fpath) as in_file:
for line in in_file:
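            # Expected line format: '<key> - <id1> <id2> ...'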
spl = line.split('-')
key = int(spl[0].strip())
value = set(int(x.strip()) for x in spl[1].split())
return_val[key] = value
return return_val
def load_train_test_validation(cross_val_folder):
'''Loads cross validation dictionaries used for the experiment'''
filter_fpath = os.path.join(cross_val_folder, 'user_item_filter.dat')
user_items_to_filter = load_dict_from_file(filter_fpath)
val_tags_fpath = os.path.join(cross_val_folder, 'user_val_tags.dat')
user_validation_tags = load_dict_from_file(val_tags_fpath)
test_tags_fpath = os.path.join(cross_val_folder, 'user_test_tags.dat')
user_test_tags = load_dict_from_file(test_tags_fpath)
test_items_fpath = os.path.join(cross_val_folder, 'user_test_items.dat')
user_test_items = load_dict_from_file(test_items_fpath)
return user_items_to_filter, user_validation_tags, user_test_tags, \
user_test_items
@plac.annotations(
db_fpath = plac.Annotation('H5 database file', type=str),
db_name = plac.Annotation('H5 database name', type=str),
cross_val_folder = plac.Annotation('Folder with cross validation files',
type=str),
probs_folder = plac.Annotation('Probabilities Folder', type=str))
def main(db_fpath, db_name, cross_val_folder, probs_folder):
#get cross validation dicts
user_items_to_filter, user_validation_tags, user_test_tags, \
user_test_items = load_train_test_validation(cross_val_folder)
with AnnotReader(db_fpath) as reader:
reader.change_table(db_name)
annot_filter = FilteredUserItemAnnotations(user_items_to_filter)
est = PrecomputedEstimator(probs_folder)
run_exp(user_validation_tags, user_test_tags, user_test_items, est,
annot_filter, reader)
if __name__ == '__main__':
sys.exit(plac.call(main))
|
flaviovdf/tag_assess
|
src/scripts/PrecisionRecall.py
|
Python
|
bsd-3-clause
| 4,909 | 0.010593 |
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'HistoricalArticle'
db.create_table(u'core_historicalarticle', (
(u'baseitem_ptr_id', self.gf('django.db.models.fields.IntegerField')(db_index=True, null=True, blank=True)),
(u'id', self.gf('django.db.models.fields.IntegerField')(db_index=True, blank=True)),
('polymorphic_ctype_id', self.gf('django.db.models.fields.IntegerField')(db_index=True, null=True, blank=True)),
('duplicate_of_id', self.gf('django.db.models.fields.IntegerField')(db_index=True, null=True, blank=True)),
('duplicate_status', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True)),
('language_id', self.gf('django.db.models.fields.IntegerField')(db_index=True, null=True, blank=True)),
('name', self.gf('django.db.models.fields.CharField')(max_length=1024)),
('slug', self.gf('django.db.models.fields.CharField')(max_length=1024, null=True, blank=True)),
('user_id', self.gf('django.db.models.fields.IntegerField')(db_index=True, null=True, blank=True)),
('is_restricted', self.gf('django.db.models.fields.BooleanField')(default=False)),
('date_created', self.gf('django.db.models.fields.DateTimeField')(db_index=True, blank=True)),
('date_updated', self.gf('django.db.models.fields.DateTimeField')(blank=True)),
('date_published', self.gf('django.db.models.fields.DateTimeField')(db_index=True, null=True, blank=True)),
('default_rating', self.gf('django.db.models.fields.FloatField')(default=0.0, blank=True)),
('text_direction', self.gf('django.db.models.fields.CharField')(default=u'ltr', max_length=3, db_index=True)),
('origin', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True)),
('url', self.gf('django.db.models.fields.URLField')(max_length=512, db_index=True)),
('comments_feed_url', self.gf('django.db.models.fields.URLField')(max_length=512, null=True, blank=True)),
('url_absolute', self.gf('django.db.models.fields.BooleanField')(default=False)),
('url_error', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
('is_orphaned', self.gf('django.db.models.fields.BooleanField')(default=False)),
('image_url', self.gf('django.db.models.fields.URLField')(max_length=200, null=True, blank=True)),
('excerpt', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
('content', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
('content_type', self.gf('django.db.models.fields.IntegerField')(db_index=True, null=True, blank=True)),
('content_error', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
('word_count', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True)),
(u'history_id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
(u'history_date', self.gf('django.db.models.fields.DateTimeField')()),
(u'history_user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['base.User'], null=True)),
(u'history_type', self.gf('django.db.models.fields.CharField')(max_length=1)),
))
db.send_create_signal(u'core', ['HistoricalArticle'])
def backwards(self, orm):
# Deleting model 'HistoricalArticle'
db.delete_table(u'core_historicalarticle')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'base.user': {
'Meta': {'object_name': 'User'},
'address_book': ('json_field.fields.JSONField', [], {'default': '[]', 'blank': 'True'}),
'avatar': ('django.db.models.fields.files.ImageField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'avatar_url': ('django.db.models.fields.URLField', [], {'max_length': '384', 'null': 'True', 'blank': 'True'}),
'data': ('jsonfield.fields.JSONField', [], {'default': '{}', 'blank': 'True'}),
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'unique': 'True', 'max_length': '254', 'db_index': 'True'}),
'email_announcements': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
'hash_codes': ('jsonfield.fields.JSONField', [], {'default': "{'unsubscribe': '3a9b390dc65a4381a1adb78af170cdcd'}", 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'register_data': ('jsonfield.fields.JSONField', [], {'default': '{}', 'blank': 'True'}),
'sent_emails': ('jsonfield.fields.JSONField', [], {'default': '{}', 'blank': 'True'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '254', 'db_index': 'True'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'core.article': {
'Meta': {'object_name': 'Article', '_ormbases': ['core.BaseItem']},
u'baseitem_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['core.BaseItem']", 'unique': 'True', 'primary_key': 'True'}),
'comments_feed_url': ('django.db.models.fields.URLField', [], {'max_length': '512', 'null': 'True', 'blank': 'True'}),
'content': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'content_error': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'content_type': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'excerpt': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'image_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'is_orphaned': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'publishers': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'publications'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['base.User']"}),
'url': ('django.db.models.fields.URLField', [], {'unique': 'True', 'max_length': '512'}),
'url_absolute': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'url_error': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'word_count': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
},
'core.author': {
'Meta': {'unique_together': "(('origin_name', 'website'),)", 'object_name': 'Author'},
'duplicate_of': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.Author']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'identities': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'identities_rel_+'", 'null': 'True', 'to': "orm['core.Author']"}),
'is_unsure': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '7168', 'null': 'True', 'blank': 'True'}),
'origin_id': ('django.db.models.fields.BigIntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'origin_id_str': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '64', 'null': 'True', 'blank': 'True'}),
'origin_name': ('django.db.models.fields.CharField', [], {'max_length': '7168', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['base.User']", 'null': 'True', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'users': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'authors'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['base.User']"}),
'website': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.WebSite']", 'null': 'True', 'blank': 'True'}),
'website_data': ('json_field.fields.JSONField', [], {'default': '{}', 'blank': 'True'})
},
'core.baseaccount': {
'Meta': {'object_name': 'BaseAccount'},
'conn_error': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_last_conn': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'image_url': ('django.db.models.fields.URLField', [], {'max_length': '384', 'null': 'True', 'blank': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_usable': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'options': ('json_field.fields.JSONField', [], {'default': '{}', 'blank': 'True'}),
'polymorphic_ctype': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'polymorphic_core.baseaccount_set'", 'null': 'True', 'to': u"orm['contenttypes.ContentType']"}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'accounts'", 'to': u"orm['base.User']"})
},
'core.basefeed': {
'Meta': {'object_name': 'BaseFeed'},
'closed_reason': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'date_closed': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_last_fetch': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'description_en': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'description_fr': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'description_nt': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'duplicate_of': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.BaseFeed']", 'null': 'True', 'blank': 'True'}),
'duplicate_status': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'errors': ('json_field.fields.JSONField', [], {'default': '[]', 'blank': 'True'}),
'fetch_interval': ('django.db.models.fields.IntegerField', [], {'default': '43200', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_good': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'is_internal': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_restricted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'items': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'feeds'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['core.BaseItem']"}),
'languages': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['core.Language']", 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'options': ('json_field.fields.JSONField', [], {'default': '{}', 'blank': 'True'}),
'polymorphic_ctype': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'polymorphic_core.basefeed_set'", 'null': 'True', 'to': u"orm['contenttypes.ContentType']"}),
'short_description_en': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'short_description_fr': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'short_description_nt': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['core.SimpleTag']", 'null': 'True', 'blank': 'True'}),
'thumbnail': ('django.db.models.fields.files.ImageField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'thumbnail_url': ('django.db.models.fields.URLField', [], {'max_length': '384', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'feeds'", 'null': 'True', 'to': u"orm['base.User']"})
},
'core.baseitem': {
'Meta': {'object_name': 'BaseItem'},
'authors': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'authored_items'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['core.Author']"}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'date_published': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'date_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'default_rating': ('django.db.models.fields.FloatField', [], {'default': '0.0', 'blank': 'True'}),
'duplicate_of': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.BaseItem']", 'null': 'True', 'blank': 'True'}),
'duplicate_status': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_restricted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'language': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.Language']", 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '1024'}),
'origin': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'polymorphic_ctype': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'polymorphic_core.baseitem_set'", 'null': 'True', 'to': u"orm['contenttypes.ContentType']"}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True', 'blank': 'True'}),
'sources': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'sources_rel_+'", 'null': 'True', 'to': "orm['core.BaseItem']"}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['core.SimpleTag']", 'null': 'True', 'blank': 'True'}),
'text_direction': ('django.db.models.fields.CharField', [], {'default': "u'ltr'", 'max_length': '3', 'db_index': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['base.User']", 'null': 'True', 'blank': 'True'})
},
'core.combinedfeed': {
'Meta': {'object_name': 'CombinedFeed'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_restricted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['base.User']"})
},
'core.combinedfeedrule': {
'Meta': {'ordering': "('position',)", 'object_name': 'CombinedFeedRule'},
'check_error': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '255', 'blank': 'True'}),
'clone_of': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.MailFeedRule']", 'null': 'True', 'blank': 'True'}),
'combinedfeed': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.CombinedFeed']"}),
'feeds': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['core.BaseFeed']", 'symmetrical': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_valid': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'position': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'})
},
'core.folder': {
'Meta': {'unique_together': "(('name', 'user', 'parent'),)", 'object_name': 'Folder'},
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'image_url': ('django.db.models.fields.URLField', [], {'max_length': '384', 'null': 'True', 'blank': 'True'}),
u'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
u'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'parent': ('mptt.fields.TreeForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['core.Folder']"}),
u'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
u'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'folders'", 'to': u"orm['base.User']"})
},
'core.helpcontent': {
'Meta': {'ordering': "['ordering', 'id']", 'object_name': 'HelpContent'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'content_en': ('django.db.models.fields.TextField', [], {}),
'content_fr': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'content_nt': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'}),
'name_en': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'name_fr': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'name_nt': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'ordering': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'core.helpwizards': {
'Meta': {'object_name': 'HelpWizards'},
'preferences': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'wizards'", 'unique': 'True', 'primary_key': 'True', 'to': "orm['core.Preferences']"}),
'show_all': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'welcome_beta_shown': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
u'core.historicalarticle': {
'Meta': {'ordering': "(u'-history_date', u'-history_id')", 'object_name': 'HistoricalArticle'},
u'baseitem_ptr_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'comments_feed_url': ('django.db.models.fields.URLField', [], {'max_length': '512', 'null': 'True', 'blank': 'True'}),
'content': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'content_error': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'content_type': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'blank': 'True'}),
'date_published': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'date_updated': ('django.db.models.fields.DateTimeField', [], {'blank': 'True'}),
'default_rating': ('django.db.models.fields.FloatField', [], {'default': '0.0', 'blank': 'True'}),
'duplicate_of_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'duplicate_status': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'excerpt': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
u'history_date': ('django.db.models.fields.DateTimeField', [], {}),
u'history_id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
u'history_type': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
u'history_user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['base.User']", 'null': 'True'}),
u'id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'blank': 'True'}),
'image_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'is_orphaned': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_restricted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'language_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '1024'}),
'origin': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'polymorphic_ctype_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True', 'blank': 'True'}),
'text_direction': ('django.db.models.fields.CharField', [], {'default': "u'ltr'", 'max_length': '3', 'db_index': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '512', 'db_index': 'True'}),
'url_absolute': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'url_error': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'user_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'word_count': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
},
'core.historyentry': {
'Meta': {'object_name': 'HistoryEntry'},
'date_created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'polymorphic_ctype': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'polymorphic_core.historyentry_set'", 'null': 'True', 'to': u"orm['contenttypes.ContentType']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['base.User']"})
},
'core.homepreferences': {
'Meta': {'object_name': 'HomePreferences'},
'experimental_features': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'preferences': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'home'", 'unique': 'True', 'primary_key': 'True', 'to': "orm['core.Preferences']"}),
'read_shows': ('django.db.models.fields.IntegerField', [], {'default': '2', 'blank': 'True'}),
'show_advanced_preferences': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'style': ('django.db.models.fields.CharField', [], {'default': "u'RL'", 'max_length': '2', 'blank': 'True'})
},
'core.language': {
'Meta': {'object_name': 'Language'},
'dj_code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '16'}),
'duplicate_of': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.Language']", 'null': 'True', 'blank': 'True'}),
'duplicate_status': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'iso639_1': ('django.db.models.fields.CharField', [], {'max_length': '16', 'null': 'True', 'blank': 'True'}),
'iso639_2': ('django.db.models.fields.CharField', [], {'max_length': '16', 'null': 'True', 'blank': 'True'}),
'iso639_3': ('django.db.models.fields.CharField', [], {'max_length': '16', 'null': 'True', 'blank': 'True'}),
u'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
u'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'parent': ('mptt.fields.TreeForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['core.Language']"}),
u'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
u'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
},
'core.mailaccount': {
'Meta': {'object_name': 'MailAccount', '_ormbases': ['core.BaseAccount']},
u'baseaccount_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['core.BaseAccount']", 'unique': 'True', 'primary_key': 'True'}),
'hostname': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'password': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '255', 'blank': 'True'}),
'port': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'use_ssl': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'})
},
'core.mailfeed': {
'Meta': {'object_name': 'MailFeed', '_ormbases': ['core.BaseFeed']},
'account': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'mail_feeds'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['core.MailAccount']"}),
u'basefeed_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['core.BaseFeed']", 'unique': 'True', 'primary_key': 'True'}),
'finish_action': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
'match_action': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
'rules_operation': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'scrape_blacklist': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'scrape_whitelist': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True', 'blank': 'True'})
},
'core.mailfeedrule': {
'Meta': {'ordering': "('group', 'position')", 'object_name': 'MailFeedRule'},
'check_error': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'clone_of': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.MailFeedRule']", 'null': 'True', 'blank': 'True'}),
'group': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'group_operation': ('django.db.models.fields.IntegerField', [], {'default': '1', 'blank': 'True'}),
'header_field': ('django.db.models.fields.IntegerField', [], {'default': '4', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_valid': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'mailfeed': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'rules'", 'to': "orm['core.MailFeed']"}),
'match_case': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'match_type': ('django.db.models.fields.IntegerField', [], {'default': '1', 'blank': 'True'}),
'match_value': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True', 'blank': 'True'}),
'other_header': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'position': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'})
},
'core.nodepermissions': {
'Meta': {'object_name': 'NodePermissions'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'node': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.SyncNode']", 'null': 'True', 'blank': 'True'}),
'permission': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
'token': ('django.db.models.fields.CharField', [], {'default': "'3fc87c9299344bda9233ca5a219dd146'", 'max_length': '32', 'blank': 'True'})
},
'core.originaldata': {
'Meta': {'object_name': 'OriginalData'},
'feedparser': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'feedparser_processed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'google_reader': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'google_reader_processed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'item': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'original_data'", 'unique': 'True', 'primary_key': 'True', 'to': "orm['core.BaseItem']"}),
'raw_email': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'raw_email_processed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'twitter': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'twitter_processed': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'core.preferences': {
'Meta': {'object_name': 'Preferences'},
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['base.User']", 'unique': 'True', 'primary_key': 'True'})
},
'core.read': {
'Meta': {'unique_together': "(('user', 'item'),)", 'object_name': 'Read'},
'bookmark_type': ('django.db.models.fields.CharField', [], {'default': "u'U'", 'max_length': '2'}),
'check_set_subscriptions_131004_done': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'date_analysis': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_archived': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_auto_read': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_bookmarked': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'date_fact': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_fun': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_knowhow': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_knowledge': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_number': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_prospective': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_quote': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_read': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_rules': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_starred': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_analysis': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_archived': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_auto_read': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_bookmarked': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'is_fact': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_fun': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_good': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'is_knowhow': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_knowledge': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_number': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_prospective': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_quote': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_read': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'is_rules': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_starred': ('django.db.models.fields.NullBooleanField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'item': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'reads'", 'to': "orm['core.BaseItem']"}),
'knowledge_type': ('django.db.models.fields.CharField', [], {'max_length': '2', 'null': 'True', 'blank': 'True'}),
'rating': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'senders': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'reads_sent'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['base.User']"}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['core.SimpleTag']", 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'all_reads'", 'to': u"orm['base.User']"})
},
'core.readpreferences': {
'Meta': {'object_name': 'ReadPreferences'},
'auto_mark_read_delay': ('django.db.models.fields.IntegerField', [], {'default': '4500', 'blank': 'True'}),
'bookmarked_marks_archived': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'bookmarked_marks_unread': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'preferences': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'read'", 'unique': 'True', 'primary_key': 'True', 'to': "orm['core.Preferences']"}),
'read_switches_to_fullscreen': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'reading_speed': ('django.db.models.fields.IntegerField', [], {'default': '200', 'blank': 'True'}),
'show_bottom_navbar': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'starred_marks_archived': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'starred_marks_read': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'starred_removes_bookmarked': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'watch_attributes_mark_archived': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'core.rssatomfeed': {
'Meta': {'object_name': 'RssAtomFeed', '_ormbases': ['core.BaseFeed']},
u'basefeed_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['core.BaseFeed']", 'unique': 'True', 'primary_key': 'True'}),
'last_etag': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'last_modified': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'unique': 'True', 'max_length': '512'}),
'website': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'feeds'", 'null': 'True', 'to': "orm['core.WebSite']"})
},
'core.selectorpreferences': {
'Meta': {'object_name': 'SelectorPreferences'},
'extended_folders_depth': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'folders_show_unread_count': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'lists_show_unread_count': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'preferences': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'selector'", 'unique': 'True', 'primary_key': 'True', 'to': "orm['core.Preferences']"}),
'show_closed_streams': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'subscriptions_in_multiple_folders': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'titles_show_unread_count': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'core.sharepreferences': {
'Meta': {'object_name': 'SharePreferences'},
'default_message': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'preferences': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'share'", 'unique': 'True', 'primary_key': 'True', 'to': "orm['core.Preferences']"})
},
'core.simpletag': {
'Meta': {'unique_together': "(('name', 'language'),)", 'object_name': 'SimpleTag'},
'duplicate_of': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.SimpleTag']", 'null': 'True', 'blank': 'True'}),
'duplicate_status': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.Language']", 'null': 'True', 'blank': 'True'}),
u'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
u'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '512'}),
'origin_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'origin_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']", 'null': 'True', 'blank': 'True'}),
'parent': ('mptt.fields.TreeForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['core.SimpleTag']"}),
u'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '512', 'null': 'True', 'blank': 'True'}),
u'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
},
'core.snappreferences': {
'Meta': {'object_name': 'SnapPreferences'},
'default_public': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'preferences': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'snap'", 'unique': 'True', 'primary_key': 'True', 'to': "orm['core.Preferences']"}),
'select_paragraph': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'core.staffpreferences': {
'Meta': {'object_name': 'StaffPreferences'},
'no_home_redirect': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'preferences': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'staff'", 'unique': 'True', 'primary_key': 'True', 'to': "orm['core.Preferences']"}),
'reading_lists_show_bad_articles': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'selector_shows_admin_links': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'super_powers_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
},
'core.subscription': {
'Meta': {'unique_together': "(('feed', 'user'),)", 'object_name': 'Subscription'},
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'feed': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'subscriptions'", 'blank': 'True', 'to': "orm['core.BaseFeed']"}),
'folders': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'subscriptions'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['core.Folder']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'reads': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'subscriptions'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['core.Read']"}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['core.SimpleTag']", 'null': 'True', 'blank': 'True'}),
'thumbnail': ('django.db.models.fields.files.ImageField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'thumbnail_url': ('django.db.models.fields.URLField', [], {'max_length': '384', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'all_subscriptions'", 'blank': 'True', 'to': u"orm['base.User']"})
},
'core.syncnode': {
'Meta': {'object_name': 'SyncNode'},
'broadcast': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_last_seen': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_local_instance': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'local_token': ('django.db.models.fields.CharField', [], {'default': "'7494b58a639f4ce9a76a47ec0c5a3fda'", 'max_length': '32', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '384', 'null': 'True', 'blank': 'True'}),
'permission': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
'remote_token': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '-1', 'blank': 'True'}),
'strategy': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
'sync_error': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'uri': ('django.db.models.fields.CharField', [], {'max_length': '384', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['base.User']", 'null': 'True', 'blank': 'True'}),
'uuid': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '32', 'null': 'True', 'blank': 'True'})
},
'core.tweet': {
'Meta': {'object_name': 'Tweet', '_ormbases': ['core.BaseItem']},
u'baseitem_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['core.BaseItem']", 'unique': 'True', 'primary_key': 'True'}),
'entities': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'tweets'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['core.BaseItem']"}),
'entities_fetched': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'mentions': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'mentions'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['core.Author']"}),
'tweet_id': ('django.db.models.fields.BigIntegerField', [], {'db_index': 'True', 'unique': 'True', 'blank': 'True'})
},
'core.twitteraccount': {
'Meta': {'object_name': 'TwitterAccount', '_ormbases': ['core.BaseAccount']},
u'baseaccount_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['core.BaseAccount']", 'unique': 'True', 'primary_key': 'True'}),
'fetch_owned_lists': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'fetch_subscribed_lists': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'social_auth': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'twitter_account'", 'unique': 'True', 'to': u"orm['default.UserSocialAuth']"}),
'timeline': ('django.db.models.fields.related.OneToOneField', [], {'blank': 'True', 'related_name': "'twitter_account'", 'unique': 'True', 'null': 'True', 'to': "orm['core.TwitterFeed']"})
},
'core.twitterfeed': {
'Meta': {'object_name': 'TwitterFeed', '_ormbases': ['core.BaseFeed']},
'account': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'twitter_feeds'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['core.TwitterAccount']"}),
'backfill_completed': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
u'basefeed_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['core.BaseFeed']", 'unique': 'True', 'primary_key': 'True'}),
'finish_action': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'is_backfilled': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_timeline': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'match_action': ('django.db.models.fields.IntegerField', [], {'default': '3'}),
'rules_operation': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'scrape_blacklist': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'scrape_whitelist': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True', 'blank': 'True'}),
'track_locations': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'track_terms': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'uri': ('django.db.models.fields.CharField', [], {'max_length': '255', 'unique': 'True', 'null': 'True', 'blank': 'True'})
},
'core.twitterfeedrule': {
'Meta': {'ordering': "('group', 'position')", 'object_name': 'TwitterFeedRule'},
'check_error': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'clone_of': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.TwitterFeedRule']", 'null': 'True', 'blank': 'True'}),
'group': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'group_operation': ('django.db.models.fields.IntegerField', [], {'default': '1', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_valid': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'match_case': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'match_field': ('django.db.models.fields.IntegerField', [], {'default': '1', 'blank': 'True'}),
'match_type': ('django.db.models.fields.IntegerField', [], {'default': '1', 'blank': 'True'}),
'match_value': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True', 'blank': 'True'}),
'other_field': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'position': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
'twitterfeed': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'rules'", 'to': "orm['core.TwitterFeed']"})
},
'core.usercounters': {
'Meta': {'object_name': 'UserCounters'},
'placeholder': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'user_counters'", 'unique': 'True', 'primary_key': 'True', 'to': u"orm['base.User']"})
},
'core.userfeeds': {
'Meta': {'object_name': 'UserFeeds'},
'blogs': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['core.BaseFeed']", 'null': 'True', 'blank': 'True'}),
'imported_items': ('django.db.models.fields.related.OneToOneField', [], {'blank': 'True', 'related_name': "'imported_items_user_feed'", 'unique': 'True', 'null': 'True', 'to': "orm['core.BaseFeed']"}),
'received_items': ('django.db.models.fields.related.OneToOneField', [], {'blank': 'True', 'related_name': "'received_items_user_feed'", 'unique': 'True', 'null': 'True', 'to': "orm['core.BaseFeed']"}),
'sent_items': ('django.db.models.fields.related.OneToOneField', [], {'blank': 'True', 'related_name': "'sent_items_user_feed'", 'unique': 'True', 'null': 'True', 'to': "orm['core.BaseFeed']"}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'user_feeds'", 'unique': 'True', 'primary_key': 'True', 'to': u"orm['base.User']"}),
'written_items': ('django.db.models.fields.related.OneToOneField', [], {'blank': 'True', 'related_name': "'written_items_user_feed'", 'unique': 'True', 'null': 'True', 'to': "orm['core.BaseFeed']"})
},
'core.userimport': {
'Meta': {'object_name': 'UserImport', '_ormbases': ['core.HistoryEntry']},
'date_finished': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_started': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'historyentry_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['core.HistoryEntry']", 'unique': 'True', 'primary_key': 'True'}),
'lines': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'results': ('json_field.fields.JSONField', [], {'default': '{}', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
'urls': ('django.db.models.fields.TextField', [], {})
},
'core.usersubscriptions': {
'Meta': {'object_name': 'UserSubscriptions'},
'blogs': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'blogs'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['core.Subscription']"}),
'imported_items': ('django.db.models.fields.related.OneToOneField', [], {'blank': 'True', 'related_name': "'imported_items_user_subscriptions'", 'unique': 'True', 'null': 'True', 'to': "orm['core.Subscription']"}),
'received_items': ('django.db.models.fields.related.OneToOneField', [], {'blank': 'True', 'related_name': "'received_items_user_subscriptions'", 'unique': 'True', 'null': 'True', 'to': "orm['core.Subscription']"}),
'sent_items': ('django.db.models.fields.related.OneToOneField', [], {'blank': 'True', 'related_name': "'sent_items_user_subscriptions'", 'unique': 'True', 'null': 'True', 'to': "orm['core.Subscription']"}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'user_subscriptions'", 'unique': 'True', 'primary_key': 'True', 'to': u"orm['base.User']"}),
'written_items': ('django.db.models.fields.related.OneToOneField', [], {'blank': 'True', 'related_name': "'written_items_user_subscriptions'", 'unique': 'True', 'null': 'True', 'to': "orm['core.Subscription']"})
},
'core.website': {
'Meta': {'object_name': 'WebSite'},
'description_en': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'description_fr': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'description_nt': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'duplicate_of': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.WebSite']", 'null': 'True', 'blank': 'True'}),
'duplicate_status': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'fetch_limit_nr': ('django.db.models.fields.IntegerField', [], {'default': '16', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'image_url': ('django.db.models.fields.URLField', [], {'max_length': '384', 'null': 'True', 'blank': 'True'}),
u'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
u'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'mail_warned': ('json_field.fields.JSONField', [], {'default': '[]', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'parent': ('mptt.fields.TreeForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['core.WebSite']"}),
u'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'short_description_en': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'short_description_fr': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'short_description_nt': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
u'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'unique': 'True', 'max_length': '200', 'blank': 'True'})
},
u'default.usersocialauth': {
'Meta': {'unique_together': "(('provider', 'uid'),)", 'object_name': 'UserSocialAuth', 'db_table': "'social_auth_usersocialauth'"},
'extra_data': ('social.apps.django_app.default.fields.JSONField', [], {'default': "'{}'"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'provider': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'uid': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'social_auth'", 'to': u"orm['base.User']"})
}
}
complete_apps = ['core']
|
1flow/1flow
|
oneflow/core/migrations/0101_auto__add_historicalarticle.py
|
Python
|
agpl-3.0
| 62,707 | 0.007926 |
"""
Color definitions are used as per CSS3 specification:
http://www.w3.org/TR/css3-color/#svg-color
A few colors have multiple names referring to the same colors, eg. `grey` and `gray` or `aqua` and `cyan`.
In these cases the LAST color when sorted alphabetically takes precedence,
eg. Color((0, 255, 255)).as_named() == 'cyan' because "cyan" comes after "aqua".
"""
import math
import re
from colorsys import hls_to_rgb, rgb_to_hls
from typing import TYPE_CHECKING, Any, Dict, Optional, Tuple, Union, cast
from .errors import ColorError
from .utils import Representation, almost_equal_floats
if TYPE_CHECKING:
from .typing import CallableGenerator, ReprArgs
ColorTuple = Union[Tuple[int, int, int], Tuple[int, int, int, float]]
ColorType = Union[ColorTuple, str]
HslColorTuple = Union[Tuple[float, float, float], Tuple[float, float, float, float]]
class RGBA:
"""
Internal use only as a representation of a color.
"""
__slots__ = 'r', 'g', 'b', 'alpha', '_tuple'
def __init__(self, r: float, g: float, b: float, alpha: Optional[float]):
self.r = r
self.g = g
self.b = b
self.alpha = alpha
self._tuple: Tuple[float, float, float, Optional[float]] = (r, g, b, alpha)
def __getitem__(self, item: Any) -> Any:
return self._tuple[item]
# these are not compiled here to avoid import slowdown, they'll be compiled the first time they're used, then cached
r_hex_short = r'\s*(?:#|0x)?([0-9a-f])([0-9a-f])([0-9a-f])([0-9a-f])?\s*'
r_hex_long = r'\s*(?:#|0x)?([0-9a-f]{2})([0-9a-f]{2})([0-9a-f]{2})([0-9a-f]{2})?\s*'
_r_255 = r'(\d{1,3}(?:\.\d+)?)'
_r_comma = r'\s*,\s*'
r_rgb = fr'\s*rgb\(\s*{_r_255}{_r_comma}{_r_255}{_r_comma}{_r_255}\)\s*'
_r_alpha = r'(\d(?:\.\d+)?|\.\d+|\d{1,2}%)'
r_rgba = fr'\s*rgba\(\s*{_r_255}{_r_comma}{_r_255}{_r_comma}{_r_255}{_r_comma}{_r_alpha}\s*\)\s*'
_r_h = r'(-?\d+(?:\.\d+)?|-?\.\d+)(deg|rad|turn)?'
_r_sl = r'(\d{1,3}(?:\.\d+)?)%'
r_hsl = fr'\s*hsl\(\s*{_r_h}{_r_comma}{_r_sl}{_r_comma}{_r_sl}\s*\)\s*'
r_hsla = fr'\s*hsl\(\s*{_r_h}{_r_comma}{_r_sl}{_r_comma}{_r_sl}{_r_comma}{_r_alpha}\s*\)\s*'
# channel values whose two hex characters are the same; if all of a color's channels match this, the short hex form can be used
repeat_colors = {int(c * 2, 16) for c in '0123456789abcdef'}
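# (editor's note, not part of the original module) worked example of the rule above:
# (255, 0, 0) serialises to 'ff0000' and every pair repeats its digit, so as_hex() gives '#f00';
# (255, 10, 0) contains 0x0a, which is not in repeat_colors, so the long form '#ff0a00' is kept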
rads = 2 * math.pi
class Color(Representation):
__slots__ = '_original', '_rgba'
def __init__(self, value: ColorType) -> None:
self._rgba: RGBA
self._original: ColorType
if isinstance(value, (tuple, list)):
self._rgba = parse_tuple(value)
elif isinstance(value, str):
self._rgba = parse_str(value)
elif isinstance(value, Color):
self._rgba = value._rgba
value = value._original
else:
raise ColorError(reason='value must be a tuple, list or string')
# if we've got here value must be a valid color
self._original = value
@classmethod
def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
field_schema.update(type='string', format='color')
def original(self) -> ColorType:
"""
Original value passed to Color
"""
return self._original
def as_named(self, *, fallback: bool = False) -> str:
if self._rgba.alpha is None:
rgb = cast(Tuple[int, int, int], self.as_rgb_tuple())
try:
return COLORS_BY_VALUE[rgb]
except KeyError as e:
if fallback:
return self.as_hex()
else:
raise ValueError('no named color found, use fallback=True, as_hex() or as_rgb()') from e
else:
return self.as_hex()
def as_hex(self) -> str:
"""
        Hex string representing the color; it can be 3, 4, 6 or 8 characters long depending on whether
        a "short" representation of the color is possible and whether there's an alpha channel.
"""
values = [float_to_255(c) for c in self._rgba[:3]]
if self._rgba.alpha is not None:
values.append(float_to_255(self._rgba.alpha))
as_hex = ''.join(f'{v:02x}' for v in values)
if all(c in repeat_colors for c in values):
as_hex = ''.join(as_hex[c] for c in range(0, len(as_hex), 2))
return '#' + as_hex
def as_rgb(self) -> str:
"""
Color as an rgb(<r>, <g>, <b>) or rgba(<r>, <g>, <b>, <a>) string.
"""
if self._rgba.alpha is None:
return f'rgb({float_to_255(self._rgba.r)}, {float_to_255(self._rgba.g)}, {float_to_255(self._rgba.b)})'
else:
return (
f'rgba({float_to_255(self._rgba.r)}, {float_to_255(self._rgba.g)}, {float_to_255(self._rgba.b)}, '
f'{round(self._alpha_float(), 2)})'
)
def as_rgb_tuple(self, *, alpha: Optional[bool] = None) -> ColorTuple:
"""
Color as an RGB or RGBA tuple; red, green and blue are in the range 0 to 255, alpha if included is
in the range 0 to 1.
:param alpha: whether to include the alpha channel, options are
          None - (default) include alpha only if it's set (i.e. not None)
True - always include alpha,
False - always omit alpha,
"""
r, g, b = [float_to_255(c) for c in self._rgba[:3]]
if alpha is None:
if self._rgba.alpha is None:
return r, g, b
else:
return r, g, b, self._alpha_float()
elif alpha:
return r, g, b, self._alpha_float()
else:
# alpha is False
return r, g, b
def as_hsl(self) -> str:
"""
Color as an hsl(<h>, <s>, <l>) or hsl(<h>, <s>, <l>, <a>) string.
"""
if self._rgba.alpha is None:
h, s, li = self.as_hsl_tuple(alpha=False) # type: ignore
return f'hsl({h * 360:0.0f}, {s:0.0%}, {li:0.0%})'
else:
h, s, li, a = self.as_hsl_tuple(alpha=True) # type: ignore
return f'hsl({h * 360:0.0f}, {s:0.0%}, {li:0.0%}, {round(a, 2)})'
def as_hsl_tuple(self, *, alpha: Optional[bool] = None) -> HslColorTuple:
"""
        Color as an HSL or HSLA tuple, i.e. hue, saturation, lightness and optionally alpha; all elements are in
the range 0 to 1.
NOTE: this is HSL as used in HTML and most other places, not HLS as used in python's colorsys.
:param alpha: whether to include the alpha channel, options are
          None - (default) include alpha only if it's set (i.e. not None)
True - always include alpha,
False - always omit alpha,
"""
h, l, s = rgb_to_hls(self._rgba.r, self._rgba.g, self._rgba.b)
if alpha is None:
if self._rgba.alpha is None:
return h, s, l
else:
return h, s, l, self._alpha_float()
if alpha:
return h, s, l, self._alpha_float()
else:
# alpha is False
return h, s, l
def _alpha_float(self) -> float:
return 1 if self._rgba.alpha is None else self._rgba.alpha
@classmethod
def __get_validators__(cls) -> 'CallableGenerator':
yield cls
def __str__(self) -> str:
return self.as_named(fallback=True)
def __repr_args__(self) -> 'ReprArgs':
return [(None, self.as_named(fallback=True))] + [('rgb', self.as_rgb_tuple())] # type: ignore
def parse_tuple(value: Tuple[Any, ...]) -> RGBA:
"""
Parse a tuple or list as a color.
"""
if len(value) == 3:
r, g, b = [parse_color_value(v) for v in value]
return RGBA(r, g, b, None)
elif len(value) == 4:
r, g, b = [parse_color_value(v) for v in value[:3]]
return RGBA(r, g, b, parse_float_alpha(value[3]))
else:
raise ColorError(reason='tuples must have length 3 or 4')
def parse_str(value: str) -> RGBA:
"""
Parse a string to an RGBA tuple, trying the following formats (in this order):
* named color, see COLORS_BY_NAME below
* hex short eg. `<prefix>fff` (prefix can be `#`, `0x` or nothing)
* hex long eg. `<prefix>ffffff` (prefix can be `#`, `0x` or nothing)
    * `rgb(<r>, <g>, <b>)`
    * `rgba(<r>, <g>, <b>, <a>)`
    * `hsl(<h>, <s>, <l>)`
    * `hsl(<h>, <s>, <l>, <a>)`
"""
value_lower = value.lower()
try:
r, g, b = COLORS_BY_NAME[value_lower]
except KeyError:
pass
else:
return ints_to_rgba(r, g, b, None)
m = re.fullmatch(r_hex_short, value_lower)
if m:
*rgb, a = m.groups()
r, g, b = [int(v * 2, 16) for v in rgb]
if a:
alpha: Optional[float] = int(a * 2, 16) / 255
else:
alpha = None
return ints_to_rgba(r, g, b, alpha)
m = re.fullmatch(r_hex_long, value_lower)
if m:
*rgb, a = m.groups()
r, g, b = [int(v, 16) for v in rgb]
if a:
alpha = int(a, 16) / 255
else:
alpha = None
return ints_to_rgba(r, g, b, alpha)
m = re.fullmatch(r_rgb, value_lower)
if m:
return ints_to_rgba(*m.groups(), None) # type: ignore
m = re.fullmatch(r_rgba, value_lower)
if m:
return ints_to_rgba(*m.groups()) # type: ignore
m = re.fullmatch(r_hsl, value_lower)
if m:
h, h_units, s, l_ = m.groups()
return parse_hsl(h, h_units, s, l_)
m = re.fullmatch(r_hsla, value_lower)
if m:
h, h_units, s, l_, a = m.groups()
return parse_hsl(h, h_units, s, l_, parse_float_alpha(a))
raise ColorError(reason='string not recognised as a valid color')
def ints_to_rgba(r: Union[int, str], g: Union[int, str], b: Union[int, str], alpha: Optional[float]) -> RGBA:
return RGBA(parse_color_value(r), parse_color_value(g), parse_color_value(b), parse_float_alpha(alpha))
def parse_color_value(value: Union[int, str], max_val: int = 255) -> float:
"""
    Parse a value, checking it is a valid number in the range 0 to max_val, and divide by max_val to give
    a float in the range 0 to 1
"""
try:
color = float(value)
except ValueError:
raise ColorError(reason='color values must be a valid number')
if 0 <= color <= max_val:
return color / max_val
else:
raise ColorError(reason=f'color values must be in the range 0 to {max_val}')
def parse_float_alpha(value: Union[None, str, float, int]) -> Optional[float]:
"""
Parse a value checking it's a valid float in the range 0 to 1
"""
if value is None:
return None
try:
if isinstance(value, str) and value.endswith('%'):
alpha = float(value[:-1]) / 100
else:
alpha = float(value)
except ValueError:
raise ColorError(reason='alpha values must be a valid float')
if almost_equal_floats(alpha, 1):
return None
elif 0 <= alpha <= 1:
return alpha
else:
raise ColorError(reason='alpha values must be in the range 0 to 1')
def parse_hsl(h: str, h_units: str, sat: str, light: str, alpha: Optional[float] = None) -> RGBA:
"""
Parse raw hue, saturation, lightness and alpha values and convert to RGBA.
"""
s_value, l_value = parse_color_value(sat, 100), parse_color_value(light, 100)
h_value = float(h)
if h_units in {None, 'deg'}:
h_value = h_value % 360 / 360
elif h_units == 'rad':
h_value = h_value % rads / rads
else:
# turns
h_value = h_value % 1
r, g, b = hls_to_rgb(h_value, l_value, s_value)
return RGBA(r, g, b, alpha)
def float_to_255(c: float) -> int:
return int(round(c * 255))
COLORS_BY_NAME = {
'aliceblue': (240, 248, 255),
'antiquewhite': (250, 235, 215),
'aqua': (0, 255, 255),
'aquamarine': (127, 255, 212),
'azure': (240, 255, 255),
'beige': (245, 245, 220),
'bisque': (255, 228, 196),
'black': (0, 0, 0),
'blanchedalmond': (255, 235, 205),
'blue': (0, 0, 255),
'blueviolet': (138, 43, 226),
'brown': (165, 42, 42),
'burlywood': (222, 184, 135),
'cadetblue': (95, 158, 160),
'chartreuse': (127, 255, 0),
'chocolate': (210, 105, 30),
'coral': (255, 127, 80),
'cornflowerblue': (100, 149, 237),
'cornsilk': (255, 248, 220),
'crimson': (220, 20, 60),
'cyan': (0, 255, 255),
'darkblue': (0, 0, 139),
'darkcyan': (0, 139, 139),
'darkgoldenrod': (184, 134, 11),
'darkgray': (169, 169, 169),
'darkgreen': (0, 100, 0),
'darkgrey': (169, 169, 169),
'darkkhaki': (189, 183, 107),
'darkmagenta': (139, 0, 139),
'darkolivegreen': (85, 107, 47),
'darkorange': (255, 140, 0),
'darkorchid': (153, 50, 204),
'darkred': (139, 0, 0),
'darksalmon': (233, 150, 122),
'darkseagreen': (143, 188, 143),
'darkslateblue': (72, 61, 139),
'darkslategray': (47, 79, 79),
'darkslategrey': (47, 79, 79),
'darkturquoise': (0, 206, 209),
'darkviolet': (148, 0, 211),
'deeppink': (255, 20, 147),
'deepskyblue': (0, 191, 255),
'dimgray': (105, 105, 105),
'dimgrey': (105, 105, 105),
'dodgerblue': (30, 144, 255),
'firebrick': (178, 34, 34),
'floralwhite': (255, 250, 240),
'forestgreen': (34, 139, 34),
'fuchsia': (255, 0, 255),
'gainsboro': (220, 220, 220),
'ghostwhite': (248, 248, 255),
'gold': (255, 215, 0),
'goldenrod': (218, 165, 32),
'gray': (128, 128, 128),
'green': (0, 128, 0),
'greenyellow': (173, 255, 47),
'grey': (128, 128, 128),
'honeydew': (240, 255, 240),
'hotpink': (255, 105, 180),
'indianred': (205, 92, 92),
'indigo': (75, 0, 130),
'ivory': (255, 255, 240),
'khaki': (240, 230, 140),
'lavender': (230, 230, 250),
'lavenderblush': (255, 240, 245),
'lawngreen': (124, 252, 0),
'lemonchiffon': (255, 250, 205),
'lightblue': (173, 216, 230),
'lightcoral': (240, 128, 128),
'lightcyan': (224, 255, 255),
'lightgoldenrodyellow': (250, 250, 210),
'lightgray': (211, 211, 211),
'lightgreen': (144, 238, 144),
'lightgrey': (211, 211, 211),
'lightpink': (255, 182, 193),
'lightsalmon': (255, 160, 122),
'lightseagreen': (32, 178, 170),
'lightskyblue': (135, 206, 250),
'lightslategray': (119, 136, 153),
'lightslategrey': (119, 136, 153),
'lightsteelblue': (176, 196, 222),
'lightyellow': (255, 255, 224),
'lime': (0, 255, 0),
'limegreen': (50, 205, 50),
'linen': (250, 240, 230),
'magenta': (255, 0, 255),
'maroon': (128, 0, 0),
'mediumaquamarine': (102, 205, 170),
'mediumblue': (0, 0, 205),
'mediumorchid': (186, 85, 211),
'mediumpurple': (147, 112, 219),
'mediumseagreen': (60, 179, 113),
'mediumslateblue': (123, 104, 238),
'mediumspringgreen': (0, 250, 154),
'mediumturquoise': (72, 209, 204),
'mediumvioletred': (199, 21, 133),
'midnightblue': (25, 25, 112),
'mintcream': (245, 255, 250),
'mistyrose': (255, 228, 225),
'moccasin': (255, 228, 181),
'navajowhite': (255, 222, 173),
'navy': (0, 0, 128),
'oldlace': (253, 245, 230),
'olive': (128, 128, 0),
'olivedrab': (107, 142, 35),
'orange': (255, 165, 0),
'orangered': (255, 69, 0),
'orchid': (218, 112, 214),
'palegoldenrod': (238, 232, 170),
'palegreen': (152, 251, 152),
'paleturquoise': (175, 238, 238),
'palevioletred': (219, 112, 147),
'papayawhip': (255, 239, 213),
'peachpuff': (255, 218, 185),
'peru': (205, 133, 63),
'pink': (255, 192, 203),
'plum': (221, 160, 221),
'powderblue': (176, 224, 230),
'purple': (128, 0, 128),
'red': (255, 0, 0),
'rosybrown': (188, 143, 143),
'royalblue': (65, 105, 225),
'saddlebrown': (139, 69, 19),
'salmon': (250, 128, 114),
'sandybrown': (244, 164, 96),
'seagreen': (46, 139, 87),
'seashell': (255, 245, 238),
'sienna': (160, 82, 45),
'silver': (192, 192, 192),
'skyblue': (135, 206, 235),
'slateblue': (106, 90, 205),
'slategray': (112, 128, 144),
'slategrey': (112, 128, 144),
'snow': (255, 250, 250),
'springgreen': (0, 255, 127),
'steelblue': (70, 130, 180),
'tan': (210, 180, 140),
'teal': (0, 128, 128),
'thistle': (216, 191, 216),
'tomato': (255, 99, 71),
'turquoise': (64, 224, 208),
'violet': (238, 130, 238),
'wheat': (245, 222, 179),
'white': (255, 255, 255),
'whitesmoke': (245, 245, 245),
'yellow': (255, 255, 0),
'yellowgreen': (154, 205, 50),
}
COLORS_BY_VALUE = {v: k for k, v in COLORS_BY_NAME.items()}
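# --- Illustrative usage sketch added by the editor; not part of the original module. ---
# It exercises only the public methods defined above, and the expected values are worked
# out from the parsing and formatting rules in this file.
if __name__ == '__main__':  # pragma: no cover
    c = Color('ff0000')
    assert c.as_rgb_tuple() == (255, 0, 0)
    assert c.as_named() == 'red'
    # every channel repeats its hex digit, so the short 3-character form is used
    assert c.as_hex() == '#f00'
    # 120 degrees of hue at full saturation and 50% lightness is pure green
    assert Color('hsl(120, 100%, 50%)').as_rgb_tuple() == (0, 255, 0)
    print(c.as_rgb(), c.as_hsl())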
|
samuelcolvin/pydantic
|
pydantic/color.py
|
Python
|
mit
| 16,607 | 0.001505 |
class Frame:
def __init__(self,width,height,color):
self.width = width
self.height = height
self.data = []
for h in range(height):
row = []
for w in range(width):
row.append(color)
self.data.append(row)
def clear(self,color):
for h in range(self.height):
for w in range(self.width):
self.data[h][w] = color
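# --- Illustrative usage sketch added by the editor; not part of the original file. ---
# The RGB tuples used as colors are an assumption for illustration; the rest of the
# repository may represent pixel colors differently.
if __name__ == '__main__':
    frame = Frame(4, 3, (0, 0, 0))      # 4 pixels wide, 3 pixels high, filled with black
    frame.clear((255, 255, 255))        # overwrite every pixel with white
    print(len(frame.data), len(frame.data[0]), frame.data[0][0])  # 3 4 (255, 255, 255)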
|
keyvank/pyglare
|
pyglare/image/frame.py
|
Python
|
mit
| 343 | 0.061224 |
# The Hazard Library
# Copyright (C) 2012-2017 GEM Foundation
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Module :mod:`openquake.hazardlib.const` defines various constants.
"""
class ConstantContainer(object):
"""
Class that doesn't support instantiation.
>>> ConstantContainer()
Traceback (most recent call last):
...
AssertionError: do not create objects ConstantContainer, \
use class properties instead
"""
def __init__(self):
raise AssertionError('do not create objects %s, '
'use class properties instead'
% type(self).__name__)
class TRT(ConstantContainer):
"""
Container for constants that define some of the common Tectonic Region
Types.
"""
# Constant values correspond to the NRML schema definition.
ACTIVE_SHALLOW_CRUST = 'Active Shallow Crust'
STABLE_CONTINENTAL = 'Stable Shallow Crust'
SUBDUCTION_INTERFACE = 'Subduction Interface'
SUBDUCTION_INTRASLAB = 'Subduction IntraSlab'
VOLCANIC = 'Volcanic'
GEOTHERMAL = 'Geothermal'
INDUCED = 'Induced'
class IMC(ConstantContainer):
"""
The intensity measure component is the component of interest
of ground shaking for an
:mod:`intensity measure <openquake.hazardlib.imt>`.
"""
#: The horizontal component.
HORIZONTAL = 'Horizontal'
#: The median horizontal component.
MEDIAN_HORIZONTAL = 'Median horizontal'
#: Usually defined as the geometric average of the maximum
#: of the two horizontal components (which may not occur
#: at the same time).
AVERAGE_HORIZONTAL = 'Average horizontal'
#: An orientation-independent alternative to :attr:`AVERAGE_HORIZONTAL`.
#: Defined at Boore et al. (2006, Bull. Seism. Soc. Am. 96, 1502-1511)
#: and is used for all the NGA GMPEs.
GMRotI50 = 'Average Horizontal (GMRotI50)'
#: The geometric mean of the records rotated into the most adverse
#: direction for the structure.
GMRotD100 = "Average Horizontal (GMRotD100)"
#: An orientation-independent alternative to :attr:`AVERAGE_HORIZONTAL`.
#: Defined at Boore et al. (2006, Bull. Seism. Soc. Am. 96, 1502-1511)
#: and is used for all the NGA GMPEs.
RotD50 = 'Average Horizontal (RotD50)'
#:
RotD100 = 'Horizontal Maximum Direction (RotD100)'
#: A randomly chosen horizontal component.
RANDOM_HORIZONTAL = 'Random horizontal'
#: The largest value obtained from two perpendicular horizontal
#: components.
GREATER_OF_TWO_HORIZONTAL = 'Greater of two horizontal'
#: The vertical component.
VERTICAL = 'Vertical'
#: "Vectorial addition: a_V = sqrt(max|a_1(t)|^2 + max|a_2(t)|^2)).
#: This means that the maximum ground amplitudes occur simultaneously on
#: the two horizontal components; this is a conservative assumption."
#: p. 53 of Douglas (2003, Earth-Sci. Rev. 61, 43-104)
VECTORIAL = 'Square root of sum of squares of peak horizontals'
#: "the peak square root of the sum of squares of two orthogonal
#: horizontal components in the time domain"
#: p. 880 of Kanno et al. (2006, Bull. Seism. Soc. Am. 96, 879-897)
PEAK_SRSS_HORIZONTAL = 'Peak square root of sum of squares of horizontals'
class StdDev(ConstantContainer):
"""
GSIM standard deviation represents ground shaking variability at a site.
"""
#: Standard deviation representing ground shaking variability
#: within different events.
INTER_EVENT = 'Inter event'
#: Standard deviation representing ground shaking variability
#: within a single event.
INTRA_EVENT = 'Intra event'
#: Total standard deviation, defined as the square root of the sum
#: of inter- and intra-event squared standard deviations, represents
#: the total ground shaking variability, and is the only one that
#: is used for calculating a probability of intensity exceedance
#: (see
#: :meth:`openquake.hazardlib.gsim.base.GroundShakingIntensityModel.get_poes`).
TOTAL = 'Total'
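# --- Illustrative sketch added by the editor; not part of the original module. ---
# It restates the relation described for StdDev.TOTAL: the total standard deviation is the
# square root of the sum of the squared inter- and intra-event terms. The numeric values
# below are arbitrary and purely for illustration.
if __name__ == '__main__':  # pragma: no cover
    import math
    sigma_inter, sigma_intra = 0.3, 0.4
    sigma_total = math.sqrt(sigma_inter ** 2 + sigma_intra ** 2)
    print(StdDev.TOTAL, sigma_total)  # -> Total 0.5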
|
gem/oq-hazardlib
|
openquake/hazardlib/const.py
|
Python
|
agpl-3.0
| 4,670 | 0.000428 |
from sympy import symbols, diff, N, Matrix
import numpy as np
from task4 import get_euler_dt
X1, X2, X3 = symbols('X1 X2 X3')
def get_vorticity_tensor(eq1, eq2, eq3):
vkl = get_euler_dt(eq1, eq2, eq3)
wkl = 0.5*(vkl - np.transpose(vkl))
return N(Matrix(wkl), 2)
def get_vorticity_components(eq1, eq2, eq3):
wkl = get_vorticity_tensor(eq1, eq2, eq3) # Tuple, indexes from 0 to 8
w1 = wkl[7] - wkl[5]
w2 = wkl[6] - wkl[2]
w3 = wkl[3] - wkl[1]
return [w1, w2, w3]
#from testdata import eq1, eq2, eq3
#print(get_vorticity_tensor(eq1, eq2, eq3))
#print(get_vorticity_components(eq1, eq2, eq3))
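# --- Editor's note, not part of the original file. ---
# Single-index access on the 3x3 sympy Matrix returned above is row-major, so wkl[7] is
# the entry in row 2, column 1 and wkl[5] the entry in row 1, column 2. Because the tensor
# is antisymmetric (wkl = 0.5*(vkl - vkl^T)), each component equals twice the corresponding
# off-diagonal entry, e.g. w1 == 2*wkl[7].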
|
toomastahves/math-pg
|
pkmkt2_code/task6.py
|
Python
|
unlicense
| 625 | 0.0096 |