'''
NetNS, network namespaces support
=================================
Pyroute2 provides basic network namespaces support. The core
class is `NetNS`.
Please be aware that, in order to run system calls, the library
uses the `ctypes` module. It can fail on platforms where SELinux
is enforced. If the Python interpreter dumps core while loading this
module, one can check the SELinux state with the `getenforce`
command.
By default, NetNS creates the requested netns if it doesn't exist,
or uses the existing one. To control this behaviour, one can use
flags as for the `open(2)` system call::
# create a new netns or fail, if it already exists
netns = NetNS('test', flags=os.O_CREAT | os.O_EXCL)
# create a new netns or use existing one
netns = NetNS('test', flags=os.O_CREAT)
# the same as above, the default behaviour
netns = NetNS('test')
NetNS supports the standard IPRoute API, so it can be used instead of
IPRoute, e.g., in IPDB::
# start the main network settings database:
ipdb_main = IPDB()
# start the same for a netns:
ipdb_test = IPDB(nl=NetNS('test'))
# create VETH
ipdb_main.create(ifname='v0p0', kind='veth', peer='v0p1').commit()
# move peer VETH into the netns
with ipdb_main.interfaces.v0p1 as veth:
veth.net_ns_fd = 'test'
# please keep in mind that moving an interface to another netns
# clears all the settings on a VETH interface pair, so one should
# run the netns assignment as a separate operation only
# assign addresses
# please notice that `v0p1` is already in the `test` netns,
# so it should be accessed via `ipdb_test`
with ipdb_main.interfaces.v0p0 as veth:
veth.add_ip('172.16.200.1/24')
veth.up()
with ipdb_test.interfaces.v0p1 as veth:
veth.add_ip('172.16.200.2/24')
veth.up()
Please also review the test code under `tests/test_netns.py` for
more examples.
To remove a network namespace, one can use either of the two
approaches below::
# The approach 1)
#
from pyroute2 import NetNS
netns = NetNS('test')
netns.close()
netns.remove()
# The approach 2)
#
from pyroute2.netns import remove
remove('test')
When using NetNS, one should stop it first with `close()`, and only
after that run `remove()`.
classes and functions
---------------------
'''
import os
import errno
import atexit
import select
import struct
import threading
import traceback
from socket import SOL_SOCKET
from socket import SO_RCVBUF
from pyroute2.config import MpPipe
from pyroute2.config import MpProcess
from pyroute2.iproute import IPRoute
from pyroute2.netlink.nlsocket import NetlinkMixin
from pyroute2.netlink.rtnl import IPRSocketMixin
from pyroute2.iproute import IPRouteMixin
from pyroute2.netns import setns
from pyroute2.netns import remove
def NetNServer(netns, rcvch, cmdch, flags=os.O_CREAT):
'''
The netns server is supposed to be started automatically by NetNS.
It has two communication channels: one simplex channel, `rcvch`, to
forward incoming netlink packets, and another synchronous duplex
channel, `cmdch`, to receive commands and send back responses.
Channels should support the standard socket API, should be compatible
with poll/select and should be able to transparently pickle objects.
NetNS uses `multiprocessing.Pipe` for this purpose, but it can be
any other implementation with a compatible API.
The first parameter, `netns`, is a netns name. Depending on the
`flags`, the netns can be created automatically. The `flags` semantics
are exactly the same as for the `open(2)` system call.
...
The server workflow is simple. The startup sequence::
1. Create or open a netns.
2. Start `IPRoute` instance. It will be used only on the low level,
the `IPRoute` will not parse any packet.
3. Start poll/select loop on `cmdch` and `IPRoute`.
On startup, the server sends a status packet via `cmdch`. It can be
`None` if all is OK, or some exception.
Further data handling, depending on the channel, server side::
1. `IPRoute`: read an incoming netlink packet and send it unmodified
to the peer via `rcvch`. The peer, polling `rcvch`, can handle
the packet on its side.
2. `cmdch`: read tuple (cmd, argv, kwarg). If the `cmd` starts with
"send", then take `argv[0]` as a packet buffer, treat it as one
netlink packet and substitute PID field (offset 12, uint32) with
its own. Strictly speaking, it is not mandatory for modern netlink
implementations, but it is required by the protocol standard.
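For example, the PID substitution described in item 2 boils down to
the following sketch (illustrative only; it mirrors what the server
code below does, with `data` standing for a raw netlink packet)::
import os
import struct
# replace the 32-bit PID field at offset 12 with our own pid
data = data[:12] + struct.pack('I', os.getpid()) + data[16:]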
'''
try:
nsfd = setns(netns, flags)
except OSError as e:
cmdch.send(e)
return e.errno
except Exception as e:
cmdch.send(OSError(errno.ECOMM, str(e), netns))
return 255
#
try:
ipr = IPRoute()
rcvch_lock = ipr._sproxy.lock
ipr._s_channel = rcvch
poll = select.poll()
poll.register(ipr, select.POLLIN | select.POLLPRI)
poll.register(cmdch, select.POLLIN | select.POLLPRI)
except Exception as e:
cmdch.send(e)
return 255
# all is OK so far
cmdch.send(None)
# 8<-------------------------------------------------------------
while True:
events = poll.poll()
for (fd, event) in events:
if fd == ipr.fileno():
bufsize = ipr.getsockopt(SOL_SOCKET, SO_RCVBUF) // 2
with rcvch_lock:
rcvch.send(ipr.recv(bufsize))
elif fd == cmdch.fileno():
try:
cmdline = cmdch.recv()
if cmdline is None:
poll.unregister(ipr)
poll.unregister(cmdch)
ipr.close()
os.close(nsfd)
return
(cmd, argv, kwarg) = cmdline
if cmd[:4] == 'send':
# Achtung
#
# It's a hack, but we just have to do it: one
# must use actual pid in netlink messages
#
# FIXME: there can be several messages in one
# call buffer; but right now we can ignore it
msg = argv[0][:12]
msg += struct.pack("I", os.getpid())
msg += argv[0][16:]
argv = list(argv)
argv[0] = msg
cmdch.send(getattr(ipr, cmd)(*argv, **kwarg))
except Exception as e:
e.tb = traceback.format_exc()
cmdch.send(e)
class NetNSProxy(object):
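# Client-side proxy object: it spawns NetNServer in a separate process
# inside the target namespace and forwards socket-like calls to it.
# Raw netlink data comes back via `rcvch`, while commands and their
# responses travel over the duplex `cmdch` channel.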
netns = 'default'
flags = os.O_CREAT
def __init__(self, *argv, **kwarg):
self.cmdlock = threading.Lock()
self.rcvch, rcvch = MpPipe()
self.cmdch, cmdch = MpPipe()
self.server = MpProcess(target=NetNServer,
args=(self.netns, rcvch, cmdch, self.flags))
self.server.start()
error = self.cmdch.recv()
if error is not None:
self.server.join()
raise error
else:
atexit.register(self.close)
def recv(self, bufsize, flags=0):
return self.rcvch.recv()
def close(self):
self.cmdch.send(None)
self.server.join()
def proxy(self, cmd, *argv, **kwarg):
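# serialize the call over `cmdch` under a lock; exceptions raised on
# the server side are re-raised here in the calling process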
with self.cmdlock:
self.cmdch.send((cmd, argv, kwarg))
response = self.cmdch.recv()
if isinstance(response, Exception):
raise response
return response
def fileno(self):
return self.rcvch.fileno()
def bind(self, *argv, **kwarg):
if 'async' in kwarg:
kwarg['async'] = False
return self.proxy('bind', *argv, **kwarg)
def send(self, *argv, **kwarg):
return self.proxy('send', *argv, **kwarg)
def sendto(self, *argv, **kwarg):
return self.proxy('sendto', *argv, **kwarg)
def getsockopt(self, *argv, **kwarg):
return self.proxy('getsockopt', *argv, **kwarg)
def setsockopt(self, *argv, **kwarg):
return self.proxy('setsockopt', *argv, **kwarg)
class NetNSocket(NetlinkMixin, NetNSProxy):
def bind(self, *argv, **kwarg):
return NetNSProxy.bind(self, *argv, **kwarg)
class NetNSIPR(IPRSocketMixin, NetNSocket):
pass
class NetNS(IPRouteMixin, NetNSIPR):
'''
NetNS is the IPRoute API with network namespace support.
**Why not IPRoute?**
Running netlink commands in one network namespace while the caller
lives in another requires an architecture that differs too much from
a simple netlink socket.
NetNS starts a proxy process in a network namespace and uses
`multiprocessing` communication channels between the main and the proxy
processes to route all `recv()` and `sendto()` requests/responses.
**Any specific API calls?**
Nope. `NetNS` supports everything that `IPRoute` does, in the same
way. It provides a full `socket`-compatible API and can be used in
poll/select as well.
The only difference is the `close()` call. In the case of `NetNS` it
is **mandatory** to close the socket before exit.
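A minimal usage sketch (the netns name and the calls below are just
illustrative)::
from pyroute2 import NetNS
ns = NetNS('test')
print(ns.get_links())  # any IPRoute call works the same way
ns.close()             # mandatory before exit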
**NetNS and IPDB**
It is possible to run IPDB with NetNS::
from pyroute2 import NetNS
from pyroute2 import IPDB
ip = IPDB(nl=NetNS('somenetns'))
...
ip.release()
Do not forget to call `release()` when the work is done. It will shut
down the `NetNS` instance as well.
'''
def __init__(self, netns, flags=os.O_CREAT):
self.netns = netns
self.flags = flags
super(NetNS, self).__init__()
# disconnect proxy services
self.sendto = self._sendto
self.recv = self._recv
self._sproxy = None
self._rproxy = None
def remove(self):
'''
Try to remove this network namespace from the system.
This call can be run only after `NetNS.close()`, otherwise
it will fail.
'''
remove(self.netns)
# --- source: alexliyu/CDMSYSTEM -- pyroute2/netns/nslink.py (Python, MIT license) ---
#!/usr/bin/env python3
# A simple script that connects to a server and displays block headers
import time
import asyncio
from electrum.network import Network
from electrum.util import print_msg, json_encode, create_and_start_event_loop, log_exceptions
# start network
loop, stopping_fut, loop_thread = create_and_start_event_loop()
network = Network()
network.start()
# wait until connected
while not network.is_connected():
time.sleep(1)
print_msg("waiting for network to get connected...")
header_queue = asyncio.Queue()
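# the subscription below delivers each new header notification into this queue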
@log_exceptions
async def f():
try:
await network.interface.session.subscribe('blockchain.headers.subscribe', [], header_queue)
# 3. wait for results
while network.is_connected():
header = await header_queue.get()
print_msg(json_encode(header))
finally:
stopping_fut.set_result(1)
# 2. send the subscription
asyncio.run_coroutine_threadsafe(f(), loop)
# --- source: neocogent/electrum -- electrum/scripts/block_headers.py (Python, MIT license) ---
# imports from python libraries
import os
import hashlib
import datetime
import json
from itertools import chain # Using from_iterable()
# imports from installed packages
from django.contrib.auth.models import User
from django.db import models
from django_mongokit import connection
from django_mongokit import get_database
from django_mongokit.document import DjangoDocument
from mongokit import IS
from mongokit import OR
from mongokit import INDEX_ASCENDING, INDEX_DESCENDING
try:
from bson import ObjectId
except ImportError: # old pymongo
from pymongo.objectid import ObjectId
# imports from application folders/files
from gnowsys_ndf.settings import RCS_REPO_DIR
from gnowsys_ndf.settings import RCS_REPO_DIR_HASH_LEVEL
from gnowsys_ndf.settings import MARKUP_LANGUAGE
from gnowsys_ndf.settings import MARKDOWN_EXTENSIONS
from gnowsys_ndf.settings import GSTUDIO_GROUP_AGENCY_TYPES, GSTUDIO_AUTHOR_AGENCY_TYPES
from gnowsys_ndf.settings import META_TYPE
from gnowsys_ndf.ndf.rcslib import RCS
from django.dispatch import receiver
from registration.signals import user_registered
NODE_TYPE_CHOICES = (
('Nodes'),
('Attribute Types'),
('Attributes'),
('Relation Types'),
('Relations'),
('GSystem Types'),
('GSystems'),
('Node Specification'),
('Attribute Specification'),
('Relation Specification'),
('Intersection'),
('Complement'),
('Union'),
('Process Types'),
('Process')
)
TYPES_OF_GROUP = (
('ANONYMOUS'),
('PUBLIC'),
('PRIVATE')
)
EDIT_POLICY = (
('NON_EDITABLE'),
('EDITABLE_MODERATED'),
('EDITABLE_NON_MODERATED')
)
SUBSCRIPTION_POLICY = (
('OPEN'),
('BY_REQUEST'),
('BY_INVITATION'),
)
EXISTANCE_POLICY = (
('ANNOUNCED'),
('NOT_ANNOUNCED')
)
LIST_MEMBER_POLICY = (
('DISCLOSED_TO_MEM'),
('NOT_DISCLOSED_TO_MEM')
)
ENCRYPTION_POLICY = (
('ENCRYPTED'),
('NOT_ENCRYPTED')
)
DATA_TYPE_CHOICES = (
"None",
"bool",
"basestring",
"unicode",
"int",
"float",
"long",
"datetime.datetime",
"list",
"dict",
"ObjectId",
"IS()"
)
my_doc_requirement = u'storing_orignal_doc'
reduced_doc_requirement = u'storing_reduced_doc'
to_reduce_doc_requirement = u'storing_to_be_reduced_doc'
indexed_word_list_requirement = u'storing_indexed_words'
# CUSTOM DATA-TYPE DEFINITIONS
STATUS_CHOICES_TU = IS(u'DRAFT', u'HIDDEN', u'PUBLISHED', u'DELETED')
STATUS_CHOICES = tuple(str(qtc) for qtc in STATUS_CHOICES_TU)
QUIZ_TYPE_CHOICES_TU = IS(u'Short-Response', u'Single-Choice', u'Multiple-Choice')
QUIZ_TYPE_CHOICES = tuple(str(qtc) for qtc in QUIZ_TYPE_CHOICES_TU)
# FRAME CLASS DEFINITIONS
@receiver(user_registered)
def user_registered_handler(sender, user, request, **kwargs):
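# django-registration signal handler: records the new user's details
# (node_type, userid, agency_type, group_affiliation) in a node_holder
# document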
tmp_hold = node_collection.collection.node_holder()
dict_to_hold = {}
dict_to_hold['node_type'] = 'Author'
dict_to_hold['userid'] = user.id
agency_type = request.POST.get("agency_type", "")
if agency_type:
dict_to_hold['agency_type'] = agency_type
else:
# Set default value for agency_type as "Other"
dict_to_hold['agency_type'] = "Other"
dict_to_hold['group_affiliation'] = request.POST.get("group_affiliation", "")
tmp_hold.details_to_hold = dict_to_hold
tmp_hold.save()
return
@connection.register
class Node(DjangoDocument):
'''Everything is a Node. Other classes should inherit this Node class.
According to the specification of GNOWSYS, all nodes, including
types, metatypes and members of types, edges of nodes, should all
be Nodes.
Member of this class must belong to one of the NODE_TYPE_CHOICES.
Some in-built Edge names (Relation types) are defined in this
class: type_of, member_of, prior_node, post_node, collection_set,
group_set.
type_of is used to express generalization of Node. And member_of
to express its type. This type_of should not be confused with
_type. The latter expresses the Python classes defined in this
program that the object inherits. The former (type_of) is about
the data the application represents.
_type is useful in segregating the nodes from the mongodb
collection, where all nodes are stored.
prior_node is to express that the current node depends in some way
on another node/s. post_node is seldom used. Currently we use it
to define sub-Group, and to set replies to a post in the Forum App.
Nodes are published in one group or another, or in more than one
group. The groups in which a node is published are expressed in
group_set.
'''
objects = models.Manager()
collection_name = 'Nodes'
structure = {
'_type': unicode, # check required: required field, Possible
# values are to be taken only from the list
# NODE_TYPE_CHOICES
'name': unicode,
'altnames': unicode,
'plural': unicode,
'prior_node': [ObjectId],
'post_node': [ObjectId],
'language': unicode,
'type_of': [ObjectId], # check required: only ObjectIDs of GSystemType
'member_of': [ObjectId], # check required: only ObjectIDs of
# GSystemType for GSystems, or only
# ObjectIDs of MetaTypes for
# GSystemTypes
'access_policy': unicode, # check required: only possible
# values are Public or Private. Why
# is this unicode?
'created_at': datetime.datetime,
'created_by': int, # test required: only ids of Users
'last_update': datetime.datetime,
'modified_by': int, # test required: only ids of Users
'contributors': [int], # test required: set of all ids of
# Users of created_by and modified_by
# fields
'location': [dict], # check required: this dict should be a
# valid GeoJason format
'content': unicode,
'content_org': unicode,
'group_set': [ObjectId], # check required: should not be
# empty. For type nodes it should be
# set to a Factory Group called
# Administration
'collection_set': [ObjectId], # check required: to exclude
# parent nodes as children, use
# MPTT logic
'property_order': [], # Determines the order & grouping in
# which attribute(s)/relation(s) are
# displayed in the form
'start_publication': datetime.datetime,
'tags': [unicode],
'featured': bool,
'url': unicode,
'comment_enabled': bool,
'login_required': bool,
# 'password': basestring,
'status': STATUS_CHOICES_TU,
'rating':[{'score':int,
'user_id':int,
'ip_address':basestring}]
}
required_fields = ['name', '_type'] # 'group_set' to be included
# here after the default
# 'Administration' group is
# ready.
default_values = {'created_at': datetime.datetime.utcnow, 'status': u'DRAFT'}
use_dot_notation = True
########## Setter(@x.setter) & Getter(@property) ##########
@property
def user_details_dict(self):
"""Retrieves names of created-by & modified-by users from the given
node, and appends those to 'user_details' dict-variable
"""
user_details = {}
if self.created_by:
user_details['created_by'] = User.objects.get(pk=self.created_by).username
contributor_names = []
for each_pk in self.contributors:
contributor_names.append(User.objects.get(pk=each_pk).username)
# user_details['modified_by'] = contributor_names
user_details['contributors'] = contributor_names
if self.modified_by:
user_details['modified_by'] = User.objects.get(pk=self.modified_by).username
return user_details
@property
def member_of_names_list(self):
"""Returns a list having names of each member (GSystemType, i.e Page,
File, etc.), built from 'member_of' field (list of ObjectIds)
"""
member_of_names = []
if self.member_of:
for each_member_id in self.member_of:
if type(each_member_id) == ObjectId:
_id = each_member_id
else:
_id = each_member_id['$oid']
if _id:
mem = node_collection.one({'_id': ObjectId(_id)})
if mem:
member_of_names.append(mem.name)
else:
if "gsystem_type" in self:
for each_member_id in self.gsystem_type:
if type(each_member_id) == ObjectId:
_id = each_member_id
else:
_id = each_member_id['$oid']
if _id:
mem = node_collection.one({'_id': ObjectId(_id)})
if mem:
member_of_names.append(mem.name)
return member_of_names
@property
def prior_node_dict(self):
"""Returns a dictionary consisting of key-value pair as
ObjectId-Document pair respectively for prior_node objects of
the given node.
"""
obj_dict = {}
i = 0
for each_id in self.prior_node:
i = i + 1
if each_id != self._id:
node_collection_object = node_collection.one({"_id": ObjectId(each_id)})
dict_key = i
dict_value = node_collection_object
obj_dict[dict_key] = dict_value
return obj_dict
@property
def collection_dict(self):
"""Returns a dictionary consisting of key-value pair as
ObjectId-Document pair respectively for collection_set objects
of the given node.
"""
obj_dict = {}
i = 0;
for each_id in self.collection_set:
i = i + 1
if each_id != self._id:
node_collection_object = node_collection.one({"_id": ObjectId(each_id)})
dict_key = i
dict_value = node_collection_object
obj_dict[dict_key] = dict_value
return obj_dict
@property
def html_content(self):
"""Returns the content in proper html-format.
"""
if MARKUP_LANGUAGE == 'markdown':
return markdown(self.content, MARKDOWN_EXTENSIONS)
elif MARKUP_LANGUAGE == 'textile':
return textile(self.content)
elif MARKUP_LANGUAGE == 'restructuredtext':
return restructuredtext(self.content)
return self.content
@property
def current_version(self):
history_manager = HistoryManager()
return history_manager.get_current_version(self)
@property
def version_dict(self):
"""Returns a dictionary containing list of revision numbers of the
given node.
Example:
{
"1": "1.1",
"2": "1.2",
"3": "1.3",
}
"""
history_manager = HistoryManager()
return history_manager.get_version_dict(self)
########## Built-in Functions (Overridden) ##########
def __unicode__(self):
return self._id
def identity(self):
return self.__unicode__()
def save(self, *args, **kwargs):
if "is_changed" in kwargs:
if not kwargs["is_changed"]:
#print "\n ", self.name, "(", self._id, ") -- Nothing has changed !\n\n"
return
is_new = False
if not "_id" in self:
is_new = True # It's a new document, hence yet no ID!"
# On save, set "created_at" to current date
self.created_at = datetime.datetime.today()
self.last_update = datetime.datetime.today()
# Check the fields which are not present in the class
# structure, whether do they exists in their GSystemType's
# "attribute_type_set"; If exists, add them to the document
# Otherwise, throw an error -- " Illegal access: Invalid field
# found!!! "
for key, value in self.iteritems():
if key == '_id':
continue
if not (key in self.structure):
field_found = False
for gst_id in self.member_of:
attribute_set_list = node_collection.one({'_id': gst_id}).attribute_type_set
for attribute in attribute_set_list:
if key == attribute['name']:
field_found = True
# TODO: Check whether type of "value"
# matches with that of
# "attribute['data_type']" Don't continue
# searching from list of remaining
# attributes
break
if field_found:
# Don't continue searching from list of
# remaining gsystem-types
break
if not field_found:
print "\n Invalid field(", key, ") found!!!\n"
# Throw an error: " Illegal access: Invalid field
# found!!! "
super(Node, self).save(*args, **kwargs)
# This is the save method of the Node class. It is still not known on
# which objects this save method is applicable. We still do not know
# whether it is called only for the classes which extend the Node
# class, or for every class. There is a very high probability that it
# is called only for classes which extend the Node class. The classes
# we have, i.e. the MyReduce() and ToReduce() classes, do not extend
# the Node class, hence calling the save method on those objects
# should not create a recursive call.
# If it is a new document, then make a new object of the ToReduce
# class and assign the id of this document to that object; else check
# whether there is already an object of ToReduce() with the id of
# this object -- if there is, pass, else add that object. The above
# algorithm has not been applied as-is. Instead, the ToReduce()
# collection is searched for the ID of this document: if the id is
# not present, it is added; if it is present, it is not added again.
old_doc = node_collection.collection.ToReduceDocs.find_one({'required_for':to_reduce_doc_requirement,'doc_id':self._id})
#print "~~~~~~~~~~~~~~~~~~~~It is not present in the ToReduce() class collection.Message Coming from save() method ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~",self._id
if not old_doc:
z = node_collection.collection.ToReduceDocs()
z.doc_id = self._id
z.required_for = to_reduce_doc_requirement
z.save()
#If you create/edit anything then this code shall add it in the URL
history_manager = HistoryManager()
rcs_obj = RCS()
if is_new:
# Create history-version-file
try:
if history_manager.create_or_replace_json_file(self):
fp = history_manager.get_file_path(self)
user = User.objects.get(pk=self.created_by).username
message = "This document (" + self.name + ") is created by " + user + " on " + self.created_at.strftime("%d %B %Y")
rcs_obj.checkin(fp, 1, message.encode('utf-8'), "-i")
except Exception as err:
print "\n DocumentError: This document (", self._id, ":", self.name, ") can't be created!!!\n"
node_collection.collection.remove({'_id': self._id})
raise RuntimeError(err)
else:
# Update history-version-file
fp = history_manager.get_file_path(self)
try:
rcs_obj.checkout(fp)
except Exception as err:
try:
if history_manager.create_or_replace_json_file(self):
fp = history_manager.get_file_path(self)
user = User.objects.get(pk=self.created_by).username
message = "This document (" + self.name + ") is re-created by " + user + " on " + self.created_at.strftime("%d %B %Y")
rcs_obj.checkin(fp, 1, message.encode('utf-8'), "-i")
except Exception as err:
print "\n DocumentError: This document (", self._id, ":", self.name, ") can't be re-created!!!\n"
node_collection.collection.remove({'_id': self._id})
raise RuntimeError(err)
try:
if history_manager.create_or_replace_json_file(self):
user = User.objects.get(pk=self.modified_by).username
message = "This document (" + self.name + ") is lastly updated by " + user + " status:" + self.status + " on " + self.last_update.strftime("%d %B %Y")
rcs_obj.checkin(fp, 1, message.encode('utf-8'))
except Exception as err:
print "\n DocumentError: This document (", self._id, ":", self.name, ") can't be updated!!!\n"
raise RuntimeError(err)
# User-Defined Functions
def get_possible_attributes(self, gsystem_type_id_or_list):
"""Returns user-defined attribute(s) of given node which belongs to
either given single/list of GType(s).
Keyword arguments: gsystem_type_id_or_list -- Single/List of
ObjectId(s) of GSystemTypes' to which the given node (self)
belongs
If node (self) has '_id' -- Node is created; indicating
possible attributes needs to be searched under GAttribute
collection & return value of those attributes (previously
existing) as part of the list along with attribute-data_type
Else -- Node needs to be created; indicating possible
attributes needs to be searched under AttributeType collection
& return default value 'None' of those attributes as part of
the list along with attribute-data_type
Returns: Dictionary that holds the following details: Key -- Name
of the attribute; Value -- which in turn is a dictionary that
holds keys and values as shown below:
{ 'attribute-type-name': { 'altnames': Value of AttributeType
node's altnames field, 'data_type': Value of AttributeType
node's data_type field, 'object_value': Value of GAttribute
node's object_value field } }
"""
gsystem_type_list = []
possible_attributes = {}
# Converts to list, if passed parameter is only single ObjectId
if not isinstance(gsystem_type_id_or_list, list):
gsystem_type_list = [gsystem_type_id_or_list]
else:
gsystem_type_list = gsystem_type_id_or_list
# Code for finding out attributes associated with each gsystem_type_id in the list
for gsystem_type_id in gsystem_type_list:
# Converts string representation of ObjectId to its corresponding ObjectId type, if found
if not isinstance(gsystem_type_id, ObjectId):
if ObjectId.is_valid(gsystem_type_id):
gsystem_type_id = ObjectId(gsystem_type_id)
else:
error_message = "\n ObjectIdError: Invalid ObjectId (" + gsystem_type_id + ") found while finding attributes !!!\n"
raise Exception(error_message)
# Case [A]: While editing GSystem
# Checking in Gattribute collection - to collect user-defined attributes' values, if already set!
if "_id" in self:
# If - node has key '_id'
attributes = triple_collection.find({'_type': "GAttribute", 'subject': self._id})
for attr_obj in attributes:
# attr_obj is of type - GAttribute [subject (node._id), attribute_type (AttributeType), object_value (value of attribute)]
# Must convert attr_obj.attribute_type [dictionary] to node_collection(attr_obj.attribute_type) [document-object]
AttributeType.append_attribute(node_collection.collection.AttributeType(attr_obj.attribute_type), possible_attributes, attr_obj.object_value)
# Case [B]: While creating GSystem / if new attributes get added
# Again checking in AttributeType collection - because to collect newly added user-defined attributes, if any!
attributes = node_collection.find({'_type': 'AttributeType', 'subject_type': gsystem_type_id})
for attr_type in attributes:
# Here attr_type is of type -- AttributeType
AttributeType.append_attribute(attr_type, possible_attributes)
# type_of check for current GSystemType to which the node belongs to
gsystem_type_node = node_collection.one({'_id': gsystem_type_id}, {'name': 1, 'type_of': 1})
if gsystem_type_node.type_of:
attributes = node_collection.find({'_type': 'AttributeType', 'subject_type': {'$in': gsystem_type_node.type_of}})
for attr_type in attributes:
# Here attr_type is of type -- AttributeType
AttributeType.append_attribute(attr_type, possible_attributes)
return possible_attributes
def get_possible_relations(self, gsystem_type_id_or_list):
"""Returns relation(s) of given node which belongs to either given
single/list of GType(s).
Keyword arguments: gsystem_type_id_or_list -- Single/List of
ObjectId(s) of GTypes' to which the given node (self) belongs
If node (self) has '_id' -- Node is created; indicating
possible relations need to be searched under GRelation
collection & return value of those relations (previously
existing) as part of the dict along with relation-type details
('object_type' and 'inverse_name')
Else -- Node needs to be created; indicating possible
relations need to be searched under RelationType collection &
return default value 'None' for those relations as part of the
dict along with relation-type details ('object_type' and
'inverse_name')
Returns: Dictionary that holds details as follows:- Key --
Name of the relation Value -- It's again a dictionary that
holds key and values as shown below:
{ // If inverse_relation - False 'relation-type-name': {
'altnames': Value of RelationType node's altnames field [0th
index-element], 'subject_or_object_type': Value of
RelationType node's object_type field, 'inverse_name': Value
of RelationType node's inverse_name field,
'subject_or_right_subject_list': List of Value(s) of GRelation
node's right_subject field }
// If inverse_relation - True 'relation-type-name': {
'altnames': Value of RelationType node's altnames field [1st
index-element], 'subject_or_object_type': Value of
RelationType node's subject_type field, 'inverse_name':
Value of RelationType node's name field,
'subject_or_right_subject_list': List of Value(s) of
GRelation node's subject field } }
"""
gsystem_type_list = []
possible_relations = {}
# Converts to list, if passed parameter is only single ObjectId
if not isinstance(gsystem_type_id_or_list, list):
gsystem_type_list = [gsystem_type_id_or_list]
else:
gsystem_type_list = gsystem_type_id_or_list
# Code for finding out relations associated with each gsystem_type_id in the list
for gsystem_type_id in gsystem_type_list:
# Converts string representation of ObjectId to its corresponding ObjectId type, if found
if not isinstance(gsystem_type_id, ObjectId):
if ObjectId.is_valid(gsystem_type_id):
gsystem_type_id = ObjectId(gsystem_type_id)
else:
error_message = "\n ObjectIdError: Invalid ObjectId (" + gsystem_type_id + ") found while finding relations !!!\n"
raise Exception(error_message)
# Relation
inverse_relation = False
# Case - While editing GSystem Checking in GRelation
# collection - to collect relations' values, if already
# set!
if "_id" in self:
# If - node has key '_id'
relations = triple_collection.find({'_type': "GRelation", 'subject': self._id, 'status': u"PUBLISHED"})
for rel_obj in relations:
# rel_obj is of type - GRelation
# [subject(node._id), relation_type(RelationType),
# right_subject(value of related object)] Must
# convert rel_obj.relation_type [dictionary] to
# collection.Node(rel_obj.relation_type)
# [document-object]
RelationType.append_relation(
node_collection.collection.RelationType(rel_obj.relation_type),
possible_relations, inverse_relation, rel_obj.right_subject
)
# Case - While creating GSystem / if new relations get
# added Checking in RelationType collection - because to
# collect newly added user-defined relations, if any!
relations = node_collection.find({'_type': 'RelationType', 'subject_type': gsystem_type_id})
for rel_type in relations:
# Here rel_type is of type -- RelationType
RelationType.append_relation(rel_type, possible_relations, inverse_relation)
# type_of check for current GSystemType to which the node
# belongs to
gsystem_type_node = node_collection.one({'_id': gsystem_type_id}, {'name': 1, 'type_of': 1})
if gsystem_type_node.type_of:
relations = node_collection.find({'_type': 'RelationType', 'subject_type': {'$in': gsystem_type_node.type_of}})
for rel_type in relations:
# Here rel_type is of type -- RelationType
RelationType.append_relation(rel_type, possible_relations, inverse_relation)
# Inverse-Relation
inverse_relation = True
# Case - While editing GSystem Checking in GRelation
# collection - to collect inverse-relations' values, if
# already set!
if "_id" in self:
# If - node has key '_id'
relations = triple_collection.find({'_type': "GRelation", 'right_subject': self._id, 'status': u"PUBLISHED"})
for rel_obj in relations:
# rel_obj is of type - GRelation
# [subject(node._id), relation_type(RelationType),
# right_subject(value of related object)] Must
# convert rel_obj.relation_type [dictionary] to
# collection.Node(rel_obj.relation_type)
# [document-object]
if META_TYPE[4] in rel_obj.relation_type.member_of_names_list:
# We are not handling inverse relation processing for
# Triadic relationship(s)
continue
RelationType.append_relation(
node_collection.collection.RelationType(rel_obj.relation_type),
possible_relations, inverse_relation, rel_obj.subject
)
# Case - While creating GSystem / if new relations get
# added Checking in RelationType collection - because to
# collect newly added user-defined relations, if any!
relations = node_collection.find({'_type': 'RelationType', 'object_type': gsystem_type_id})
for rel_type in relations:
# Here rel_type is of type -- RelationType
RelationType.append_relation(rel_type, possible_relations, inverse_relation)
# type_of check for current GSystemType to which the node
# belongs to
gsystem_type_node = node_collection.one({'_id': gsystem_type_id}, {'name': 1, 'type_of': 1})
if gsystem_type_node.type_of:
relations = node_collection.find({'_type': 'RelationType', 'object_type': {'$in': gsystem_type_node.type_of}})
for rel_type in relations:
# Here rel_type is of type -- RelationType
RelationType.append_relation(rel_type, possible_relations, inverse_relation)
return possible_relations
def get_neighbourhood(self, member_of):
"""Attaches attributes and relations of the node to itself;
i.e. keys' types to its structure and keys' values to itself
"""
attributes = self.get_possible_attributes(member_of)
for key, value in attributes.iteritems():
self.structure[key] = value['data_type']
self[key] = value['object_value']
relations = self.get_possible_relations(member_of)
for key, value in relations.iteritems():
self.structure[key] = value['subject_or_object_type']
self[key] = value['subject_or_right_subject_list']
@connection.register
class AttributeType(Node):
'''To define reusable properties that can be set as possible
attributes to a GSystemType. A set of possible properties defines
a GSystemType.
'''
structure = {
'data_type': basestring, # check required: only of the DATA_TYPE_CHOICES
'complex_data_type': [unicode], # can be a list or a dictionary
'subject_type': [ObjectId], # check required: only one of Type
# Nodes. GSystems cannot be set as
# subject_types
'applicable_node_type': [basestring], # can be one or more
# than one of
# NODE_TYPE_CHOICES
'verbose_name': basestring,
'null': bool,
'blank': bool,
'help_text': unicode,
'max_digits': int, # applicable if the datatype is a number
'decimal_places': int, # applicable if the datatype is a float
'auto_now': bool,
'auto_now_add': bool,
'upload_to': unicode,
'path': unicode,
'verify_exist': bool,
'min_length': int,
'required': bool,
'label': unicode,
'unique': bool,
'validators': list,
'default': unicode,
'editable': bool
}
required_fields = ['data_type', 'subject_type']
use_dot_notation = True
########## User-Defined Functions ##########
@staticmethod
def append_attribute(attr_id_or_node, attr_dict, attr_value=None, inner_attr_dict=None):
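# Builds up `attr_dict` (or `inner_attr_dict` for complex "dict"
# data-types) with one entry per attribute-type, keyed by the
# attribute-type's name and holding its altnames, _id, evaluated
# data_type and object_value.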
if isinstance(attr_id_or_node, unicode):
# Convert unicode representation of ObjectId into it's
# corresponding ObjectId type Then fetch
# attribute-type-node from AttributeType collection of
# respective ObjectId
if ObjectId.is_valid(attr_id_or_node):
attr_id_or_node = node_collection.one({'_type': 'AttributeType', '_id': ObjectId(attr_id_or_node)})
else:
print "\n Invalid ObjectId: ", attr_id_or_node, " is not a valid ObjectId!!!\n"
# Throw indicating the same
if not attr_id_or_node.complex_data_type:
# Code for simple data-type Simple data-types: int, float,
# ObjectId, list, dict, basestring, unicode
if inner_attr_dict is not None:
# If inner_attr_dict exists, it means the node should be
# added to this inner_attr_dict and not to attr_dict
if not (attr_id_or_node.name in inner_attr_dict):
# If the inner_attr_dict[attr_id_or_node.name] key
# doesn't exist, only then add it!
if attr_value is None:
inner_attr_dict[attr_id_or_node.name] = {
'altnames': attr_id_or_node.altnames, '_id': attr_id_or_node._id,
'data_type': eval(attr_id_or_node.data_type),
'object_value': attr_value
}
else:
inner_attr_dict[attr_id_or_node.name] = {
'altnames': attr_id_or_node.altnames, '_id': attr_id_or_node._id,
'data_type': eval(attr_id_or_node.data_type),
'object_value': attr_value[attr_id_or_node.name]
}
if attr_id_or_node.name in attr_dict:
# If this attribute-node exists in outer
# attr_dict, then remove it
del attr_dict[attr_id_or_node.name]
else:
# If inner_attr_dict is None
if not (attr_id_or_node.name in attr_dict):
# If the attr_dict[attr_id_or_node.name] key doesn't
# exist, only then add it!
attr_dict[attr_id_or_node.name] = {
'altnames': attr_id_or_node.altnames, '_id': attr_id_or_node._id,
'data_type': eval(attr_id_or_node.data_type),
'object_value': attr_value
}
else:
# Code for complex data-type
# Complex data-types: [...], {...}
if attr_id_or_node.data_type == "dict":
if not (attr_id_or_node.name in attr_dict):
inner_attr_dict = {}
for c_attr_id in attr_id_or_node.complex_data_type:
# NOTE: Here c_attr_id is in unicode format
# Hence, this function first converts attr_id
# to ObjectId format if unicode found
AttributeType.append_attribute(c_attr_id, attr_dict, attr_value, inner_attr_dict)
attr_dict[attr_id_or_node.name] = inner_attr_dict
else:
for remove_attr_name in attr_dict[attr_id_or_node.name].iterkeys():
if remove_attr_name in attr_dict:
# If this attribute-node exists in outer
# attr_dict, then remove it
del attr_dict[remove_attr_name]
elif attr_id_or_node.data_type == "list":
if len(attr_id_or_node.complex_data_type) == 1:
# Represents list of simple data-types
# Ex: [int], [ObjectId], etc.
dt = unicode("[" + attr_id_or_node.complex_data_type[0] + "]")
if not (attr_id_or_node.name in attr_dict):
# If the attr_dict[attr_id_or_node.name] key
# doesn't exist, only then add it!
attr_dict[attr_id_or_node.name] = {
'altnames': attr_id_or_node.altnames, '_id': attr_id_or_node._id,
'data_type': eval(dt),
'object_value': attr_value
}
else:
# Represents list of complex data-types Ex:
# [{...}]
for c_attr_id in attr_id_or_node.complex_data_type:
if not ObjectId.is_valid(c_attr_id):
# If basic data-type values are found,
# pass the iteration
continue
# If unicode representation of ObjectId is
# found
AttributeType.append_attribute(c_attr_id, attr_dict, attr_value)
elif attr_id_or_node.data_type == "IS()":
# Below code does little formatting, for example:
# data_type: "IS()" complex_value: [u"ab", u"cd"] dt:
# "IS(u'ab', u'cd')"
dt = "IS("
for v in attr_id_or_node.complex_data_type:
dt = dt + "u'" + v + "'" + ", "
dt = dt[:(dt.rfind(", "))] + ")"
if not (attr_id_or_node.name in attr_dict):
# If the attr_dict[attr_id_or_node.name] key doesn't
# exist, only then add it!
attr_dict[attr_id_or_node.name] = {
'altnames': attr_id_or_node.altnames, '_id': attr_id_or_node._id,
'data_type': eval(dt),
'object_value': attr_value
}
@connection.register
class RelationType(Node):
structure = {
'inverse_name': unicode,
'subject_type': [ObjectId], # ObjectId's of Any Class
'object_type': [OR(ObjectId, list)], # ObjectId's of Any Class
'subject_cardinality': int,
'object_cardinality': int,
'subject_applicable_nodetype': basestring, # NODE_TYPE_CHOICES [default (GST)]
'object_applicable_nodetype': basestring,
'slug': basestring,
'is_symmetric': bool,
'is_reflexive': bool,
'is_transitive': bool
}
required_fields = ['inverse_name', 'subject_type', 'object_type']
use_dot_notation = True
# User-Defined Functions ##########
@staticmethod
def append_relation(
rel_type_node, rel_dict, inverse_relation, left_or_right_subject=None
):
"""Appends details of a relation in format described below.
Keyword arguments:
rel_type_node -- Document of RelationType node
rel_dict -- Dictionary to which relation-details are appended
inverse_relation -- Boolean variable that indicates whether
appending a relation or an inverse-relation
left_or_right_subject -- Actual value of related-subjects
(only if provided, otherwise by default it's None)
Returns: Dictionary that holds details as follows: Key -- Name
of the relation Value -- It's again a dictionary that holds
key and values as shown below: { // If inverse_relation -
False 'relation-type-name': { 'altnames': Value of
RelationType node's altnames field [0th index-element],
'subject_or_object_type': Value of RelationType node's
object_type field, 'inverse_name': Value of RelationType
node's inverse_name field, 'subject_or_right_subject_list':
List of Value(s) of GRelation node's right_subject field }
// If inverse_relation - True 'relation-type-name': {
'altnames': Value of RelationType node's altnames field [1st
index-element], 'subject_or_object_type': Value of
RelationType node's subject_type field, 'inverse_name':
Value of RelationType node's name field,
'subject_or_right_subject_list': List of Value(s) of
GRelation node's subject field } }
"""
left_or_right_subject_node = None
if left_or_right_subject:
if META_TYPE[3] in rel_type_node.member_of_names_list:
# If Binary relationship found
left_or_right_subject_node = node_collection.one({
'_id': left_or_right_subject
})
else:
left_or_right_subject_node = []
for each in left_or_right_subject:
each_node = node_collection.one({
'_id': each
})
left_or_right_subject_node.append(each_node)
if not left_or_right_subject_node:
error_message = "\n AppendRelationError: Right subject with " \
+ "this ObjectId(" + str(left_or_right_subject) + ") " \
+ "doesn't exists !!!"
raise Exception(error_message)
rel_name = ""
opp_rel_name = ""
alt_names = ""
subject_or_object_type = None
if inverse_relation:
# inverse_relation = True
# Means looking from object type
# relation-type's name & inverse-name will be swapped
rel_name = rel_type_node.inverse_name
opp_rel_name = rel_type_node.name
if rel_type_node.altnames:
if ";" in rel_type_node.altnames:
alt_names = rel_type_node.altnames.split(";")[1]
else:
alt_names = u""
subject_or_object_type = rel_type_node.subject_type
else:
# inverse_relation = False
# Means looking from subject type
# relation-type's name & inverse-name will be as it is
rel_name = rel_type_node.name
opp_rel_name = rel_type_node.inverse_name
if rel_type_node.altnames:
if ";" in rel_type_node.altnames:
alt_names = rel_type_node.altnames.split(";")[0]
else:
alt_names = u""
subject_or_object_type = rel_type_node.object_type
if not (rel_name in rel_dict):
subject_or_right_subject_list = [left_or_right_subject_node] if left_or_right_subject_node else []
rel_dict[rel_name] = {
'altnames': alt_names,
'subject_or_object_type': subject_or_object_type,
'inverse_name': opp_rel_name,
'subject_or_right_subject_list': subject_or_right_subject_list
}
else:
subject_or_right_subject_list = rel_dict[rel_name]["subject_or_right_subject_list"] if rel_dict[rel_name]["subject_or_right_subject_list"] else []
if left_or_right_subject_node:
if not (left_or_right_subject_node in subject_or_right_subject_list):
subject_or_right_subject_list.append(left_or_right_subject_node)
rel_dict[rel_name]["subject_or_right_subject_list"] = subject_or_right_subject_list
rel_dict[rel_name]["_id"] = rel_type_node._id
return rel_dict
@connection.register
class MetaType(Node):
"""MetaType class: Its members are any of GSystemType, AttributeType,
RelationType, ProcessType.
It is used to express the NodeTypes that are part of an
Application developed using GNOWSYS-Studio. E.g., a GSystemType
'Page' or 'File' becomes an application by expressing it as a member
of a MetaType, 'GAPP'.
"""
structure = {
'description': basestring, # Description (name)
'attribute_type_set': [AttributeType], # Embed list of Attribute Type Class as Documents
'relation_type_set': [RelationType], # Holds list of Relation Types
'parent': ObjectId # Foreign key to self
}
use_dot_notation = True
class ProcessType(Node):
"""A kind of nodetype for defining processes or events or temporal
objects involving change.
"""
structure = {
'changing_attributetype_set': [AttributeType], # List of Attribute Types
'changing_relationtype_set': [RelationType] # List of Relation Types
}
use_dot_notation = True
# user should have a list of groups attributeType added should
# automatically be added to the attribute_type_set of GSystemType
@connection.register
class GSystemType(Node):
"""Class to generalize GSystems
"""
structure = {
'meta_type_set': [MetaType], # List of Metatypes
'attribute_type_set': [AttributeType], # Embed list of Attribute Type Class as Documents
'relation_type_set': [RelationType], # Holds list of Relation Types
'process_type_set': [ProcessType], # List of Process Types
'property_order': [] # List of user-defined attributes in template-view order
}
use_dot_notation = True
use_autorefs = True # To support Embedding of Documents
@connection.register
class GSystem(Node):
"""GSystemType instance
"""
use_schemaless = True
structure = {
'attribute_set': [dict], # ObjectIds of GAttributes
'relation_set': [dict], # ObjectIds of GRelations
'module_set': [dict], # Holds the ObjectId & SnapshotID (version_number) of collection elements
# along with their sub-collection elemnts too
'author_set': [int], # List of Authors
'annotations': [dict], # List of json files for annotations on the page
'license': basestring # contains license/s in string format
}
use_dot_notation = True
@connection.register
class File(GSystem):
"""File class to hold any resource
"""
structure = {
'mime_type': basestring, # Holds the type of file
'fs_file_ids': [ObjectId], # Holds the List of ids of file stored in gridfs
'file_size': {
'size': float,
'unit': unicode
} # dict used to hold the file size value and its unit in terms of KB, MB, GB
}
gridfs = {
'containers': ['files']
}
use_dot_notation = True
@connection.register
class Group(GSystem):
"""Group class to create collection (group) of members
"""
structure = {
'group_type': basestring, # Types of groups - Anonymous, public or private
'edit_policy': basestring, # Editing policy of the group - non editable,editable moderated or editable non-moderated
'subscription_policy': basestring, # Subscription policy to this group - open, by invitation, by request
'visibility_policy': basestring, # Existence of the group - announced or not announced
'disclosure_policy': basestring, # Members of this group - disclosed or not
'encryption_policy': basestring, # Encryption - yes or no
'agency_type': basestring, # A choice field such as Partner, Govt. Agency, NGO etc.
'group_admin': [int], # ObjectId of Author class
'moderation_level': int # range from 0 till any integer level
}
use_dot_notation = True
default_values = {'moderation_level': -1}
validators = {
'group_type': lambda x: x in TYPES_OF_GROUP,
'edit_policy': lambda x: x in EDIT_POLICY,
'subscription_policy': lambda x: x in SUBSCRIPTION_POLICY,
'visibility_policy': lambda x: x in EXISTANCE_POLICY,
'disclosure_policy': lambda x: x in LIST_MEMBER_POLICY,
'encryption_policy': lambda x: x in ENCRYPTION_POLICY,
'agency_type': lambda x: x in GSTUDIO_GROUP_AGENCY_TYPES,
# 'name': lambda x: x not in \
# [ group_obj['name'] for group_obj in \
# node_collection.find({'_type': 'Group'}, {'name': 1, '_id': 0})]
}
def is_gstaff(self, user):
"""
Checks whether given user belongs to GStaff.
GStaff includes only the following users of a group:
1) Super-user (Django's superuser)
2) Creator of the group (created_by field)
3) Admin-user of the group (group_admin field)
Other members (author_set field) don't belong to GStaff.
Arguments:
self -- Node of the currently selected group
user -- User object taken from request object
Returns:
True -- If user is one of them, from the above specified list of categories.
False -- If the above criteria are not met (the user doesn't belong to any of the categories mentioned above).
"""
if (user.is_superuser) or (user.id == self.created_by) or (user.id in self.group_admin):
return True
else:
return False
@connection.register
class Author(Group):
"""Author class to store django user instances
"""
structure = {
'email': unicode,
'password': unicode,
'visited_location': [],
'preferred_languages': dict, # preferred languages for users like preferred lang. , fall back lang. etc.
'group_affiliation': basestring
}
use_dot_notation = True
validators = {
'agency_type': lambda x: x in GSTUDIO_AUTHOR_AGENCY_TYPES # agency_type inherited from Group class
}
required_fields = ['name', 'password']
def __init__(self, *args, **kwargs):
super(Author, self).__init__(*args, **kwargs)
def __eq__(self, other_user):
# found that otherwise millisecond differences in created_at is compared
try:
other_id = other_user['_id']
except (AttributeError, TypeError):
return False
return self['_id'] == other_id
@property
def id(self):
return self.name
def password_crypt(self, password):
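# reverse and upper-case the password, append its length as a salt,
# and return the SHA1 hex digest as a unicode string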
password_salt = str(len(password))
crypt = hashlib.sha1(password[::-1].upper() + password_salt).hexdigest()
PASSWORD = unicode(crypt, 'utf-8')
return PASSWORD
def is_anonymous(self):
return False
def is_authenticated(self):
return True
# HELPER -- CLASS DEFINITIONS
class HistoryManager():
"""Handles history management for documents of a collection
using Revision Control System (RCS).
"""
objects = models.Manager()
__RCS_REPO_DIR = RCS_REPO_DIR
def __init__(self):
pass
def check_dir_path(self, dir_path):
"""Checks whether path exists; and if not it creates that path.
Arguments:
(1) dir_path -- a string value representing an absolute path
Returns: Nothing
"""
dir_exists = os.path.isdir(dir_path)
if not dir_exists:
os.makedirs(dir_path)
def get_current_version(self, document_object):
"""Returns the current version/revision number of the given document instance.
"""
fp = self.get_file_path(document_object)
rcs = RCS()
return rcs.head(fp)
def get_version_dict(self, document_object):
"""Returns a dictionary containing list of revision numbers.
Example:
{
"1": "1.1",
"2": "1.2",
"3": "1.3",
}
"""
fp = self.get_file_path(document_object)
rcs = RCS()
# current_rev = rcs.head(fp) # Say, 1.4
total_no_of_rev = int(rcs.info(fp)["total revisions"]) # Say, 4
version_dict = {}
for i, j in zip(range(total_no_of_rev), reversed(range(total_no_of_rev))):
version_dict[(j + 1)] = rcs.calculateVersionNumber(fp, (i))
return version_dict
def get_file_path(self, document_object):
"""Returns absolute filesystem path for a json-file.
This path is combination of :-
(a) collection_directory_path: path to the collection-directory
to which the given instance belongs
(b) hashed_directory_structure: path built from object id based
on the set hashed-directory-level
(c) file_name: '.json' extension concatenated with object id of
the given instance
Arguments:
(1) document_object -- an instance of a collection
Returns: a string representing json-file's path
"""
file_name = (document_object._id.__str__() + '.json')
collection_dir = \
(os.path.join(self.__RCS_REPO_DIR, \
document_object.collection_name))
# Example:
# if -- file_name := "523f59685a409213818e3ec6.json"
# then -- collection_hash_dirs := "6/c/3/8/
# -- from last (2^0)pos/(2^1)pos/(2^2)pos/(2^3)pos/../(2^n)pos"
# here n := hash_level_num
collection_hash_dirs = ""
for pos in range(0, RCS_REPO_DIR_HASH_LEVEL):
collection_hash_dirs += \
(document_object._id.__str__()[-2**pos] + "/")
file_path = \
os.path.join(collection_dir, \
(collection_hash_dirs + file_name))
return file_path
def create_rcs_repo_collections(self, *versioning_collections):
"""Creates Revision Control System (RCS) repository.
After creating rcs-repo, it also creates sub-directories
for each collection inside it.
Arguments:
(1) versioning_collections -- a list representing collection-names
Returns: Nothing
"""
try:
self.check_dir_path(self.__RCS_REPO_DIR)
except OSError as ose:
print("\n\n RCS repository not created!!!\n {0}: {1}\n"\
.format(ose.errno, ose.strerror))
else:
print("\n\n RCS repository created @ following path:\n {0}\n"\
.format(self.__RCS_REPO_DIR))
# for collection in versioning_collections:
# rcs_repo_collection = os.path.join(self.__RCS_REPO_DIR, \
# collection)
# try:
# os.makedirs(rcs_repo_collection)
# except OSError as ose:
# print(" {0} collection-directory under RCS repository "\
# "not created!!!\n Error #{1}: {2}\n"\
# .format(collection, ose.errno, ose.strerror))
# else:
# print(" {0} collection-directory under RCS repository "\
# "created @ following path:\n {1}\n"\
# .format(collection, rcs_repo_collection))
def create_or_replace_json_file(self, document_object=None):
"""Creates/Overwrites a json-file for passed document object in
its respective hashed-directory structure.
Arguments:
(1) document_object -- an instance of document of a collection
Returns: A boolean value indicating whether created successfully
(a) True - if created
(b) False - Otherwise
"""
collection_tuple = (MetaType, GSystemType, GSystem, AttributeType, GAttribute, RelationType, GRelation)
file_res = False # True, if no error/exception occurred
if document_object is not None and \
isinstance(document_object, collection_tuple):
file_path = self.get_file_path(document_object)
json_data = document_object.to_json_type()
#------------------------------------------------------------------
# Creating/Overwriting data into json-file and rcs-file
#------------------------------------------------------------------
# file_mode as w:-
# Opens a file for writing only.
# Overwrites the file if the file exists.
# If the file does not exist, creates a new file for writing.
file_mode = 'w'
rcs_file = None
try:
self.check_dir_path(os.path.dirname(file_path))
rcs_file = open(file_path, file_mode)
except OSError as ose:
print("\n\n Json-File not created: Hashed directory "\
"structure doesn't exists!!!")
print("\n {0}: {1}\n".format(ose.errno, ose.strerror))
except IOError as ioe:
print(" " + str(ioe))
print("\n\n Please refer following command from "\
"\"Get Started\" file:\n"\
"\tpython manage.py initrcsrepo\n")
except Exception as e:
print(" Unexpected error : " + str(e))
else:
rcs_file.write(json.dumps(json_data,
sort_keys=True,
indent=4,
separators=(',', ': '),
cls=NodeJSONEncoder
)
)
# TODO: Commit modifications done to the file into
# it's rcs-version-file
file_res = True
finally:
if rcs_file is not None:
rcs_file.close()
else:
# TODO: Throw/raise error having following message!
# if document_object is None or
# !isinstance(document_object, collection_tuple)
msg = " Following instance is either invalid or " \
+ "not matching given instances-type list " + str(collection_tuple) + ":-" \
+ "\n\tObjectId: " + document_object._id.__str__() \
+ "\n\t Type: " + document_object._type \
+ "\n\t Name: " + document_object.name
raise RuntimeError(msg)
return file_res
def get_version_document(self, document_object, version_no=""):
"""Returns an object representing mongodb document instance of a given version number.
"""
if version_no == "":
version_no = self.get_current_version(document_object)
fp = self.get_file_path(document_object)
rcs = RCS()
rcs.checkout((fp, version_no))
json_data = ""
with open(fp, 'r') as version_file:
json_data = version_file.read()
# assigning None value to keys which are not present in json_data compared to the Node class keys
null = 0
import json
json_dict = json.loads(json_data)
json_node_keys = document_object.keys()
json_dict_keys = json_dict.keys()
diff_keys = list(set(json_node_keys)-set(json_dict_keys))
if diff_keys:
for each in diff_keys:
json_dict[each]=None
json_data = json.dumps(json_dict)
# Converts the json-formatted data into python-specific format
doc_obj = node_collection.from_json(json_data)
rcs.checkin(fp)
# The code below temporarily resolves the problem of '$oid'. This
# problem occurs when we convert mongodb's document into
# json-format using mongokit's to_json_type() function - It
# converts ObjectId() type into corresponding format
# "{u'$oid': u'24-digit-hexstring'}" But actual problem comes
# into picture when we have a field whose data-type is "list
# of ObjectIds" In case of '_id' field (automatically created
# by mongodb), mongokit handles this conversion and does so
# But not in case of "list of ObjectIds", it still remains in
# above given format and causes problem
for k, v in doc_obj.iteritems():
oid_list_str = ""
oid_ObjectId_list = []
if v and type(v) == list:
oid_list_str = v.__str__()
try:
if '$oid' in oid_list_str: #v.__str__():
for oid_dict in v:
oid_ObjectId = ObjectId(oid_dict['$oid'])
oid_ObjectId_list.append(oid_ObjectId)
doc_obj[k] = oid_ObjectId_list
except Exception as e:
print "\n Exception for document's ("+doc_obj.name+") key ("+k+") -- ", str(e), "\n"
return doc_obj
class NodeJSONEncoder(json.JSONEncoder):
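# custom JSON encoder: serializes ObjectId values as plain strings and
# datetime values as "dd/mm/YYYY HH:MM:SS" formatted strings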
def default(self, o):
if isinstance(o, ObjectId):
return str(o)
if isinstance(o, datetime.datetime):
return o.strftime("%d/%m/%Y %H:%M:%S")
return json.JSONEncoder.default(self, o)
# Benchmarking Class Definition
@connection.register
class Benchmark(DjangoDocument):
objects = models.Manager()
collection_name = 'Benchmarks'
structure = {
'_type':unicode,
'name': unicode,
'time_taken':unicode,
'parameters':unicode,
'size_of_parameters':unicode,
'function_output_length':unicode,
'calling_url':unicode,
'last_update': datetime.datetime
}
required_fields = ['name']
use_dot_notation = True
def __unicode__(self):
return self._id
def identity(self):
return self.__unicode__()
# TRIPLE CLASS DEFINITIONS
@connection.register
class Triple(DjangoDocument):
objects = models.Manager()
collection_name = 'Triples'
structure = {
'_type': unicode,
'name': unicode,
'subject_scope': basestring,
'subject': ObjectId, # ObjectId's of GSystem Class
'lang': basestring, # Put validation for standard language codes
'status': STATUS_CHOICES_TU
}
required_fields = ['name', 'subject']
use_dot_notation = True
use_autorefs = True
########## Built-in Functions (Overridden) ##########
def __unicode__(self):
return self._id
def identity(self):
return self.__unicode__()
def save(self, *args, **kwargs):
is_new = False
if "_id" not in self:
            is_new = True  # It's a new document, hence no ID yet!
"""
Check for correct GSystemType match in AttributeType and GAttribute, similarly for RelationType and GRelation
"""
subject_system_flag = False
subject_id = self.subject
subject_document = node_collection.one({"_id": self.subject})
subject_name = subject_document.name
subject_type_list = []
subject_member_of_list = []
name_value = u""
if self._type == "GAttribute":
attribute_type_name = self.attribute_type['name']
attribute_object_value = unicode(self.object_value)
self.name = "%(subject_name)s -- %(attribute_type_name)s -- %(attribute_object_value)s" % locals()
name_value = self.name
subject_type_list = self.attribute_type['subject_type']
subject_member_of_list = subject_document.member_of
intersection = set(subject_member_of_list) & set(subject_type_list)
if intersection:
subject_system_flag = True
else:
                # If no intersection is found with the member_of fields' ObjectIds,
                # then check the type_of field of each member_of node
for gst_id in subject_member_of_list:
gst_node = node_collection.one({'_id': gst_id}, {'type_of': 1})
if set(gst_node.type_of) & set(subject_type_list):
subject_system_flag = True
break
elif self._type == "GRelation":
subject_type_list = self.relation_type['subject_type']
object_type_list = self.relation_type['object_type']
left_subject_member_of_list = subject_document.member_of
relation_type_name = self.relation_type['name']
if META_TYPE[4] in self.relation_type.member_of_names_list:
# print META_TYPE[3], self.relation_type.member_of_names_list,"!!!!!!!!!!!!!!!!!!!!!"
# Relationship Other than Binary one found; e.g, Triadic
# Single relation: [ObjectId(), ObjectId(), ...]
# Multi relation: [[ObjectId(), ObjectId(), ...], [ObjectId(), ObjectId(), ...], ...]
right_subject_member_of_list = []
right_subject_member_of_list_append = right_subject_member_of_list.append
right_subject_name_list = []
right_subject_name_list_append = right_subject_name_list.append
print self.right_subject,"%%%%%%%%%%%%%",type(self.right_subject)
for each in self.right_subject:
# Here each is an ObjectId
right_subject_document = node_collection.one({
"_id": each
}, {
"name": 1, "member_of": 1
})
right_subject_member_of_list_append(right_subject_document.member_of)
right_subject_name_list_append(right_subject_document.name)
right_subject_name_list_str = " >> ".join(right_subject_name_list)
self.name = "%(subject_name)s -- %(relation_type_name)s -- %(right_subject_name_list_str)s" % locals()
                # Required because set-based comparison doesn't work when the
                # list elements are themselves lists.
                # Hence, flatten the nested lists into single flat lists of
                # ObjectIds before comparing.
object_type_list = list(chain.from_iterable(object_type_list))
right_subject_member_of_list = list(chain.from_iterable(right_subject_member_of_list))
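                # e.g. (illustrative): [[oid_a, oid_b], [oid_c]] --> [oid_a, oid_b, oid_c]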
else:
#META_TYPE[3] in self.relation_type.member_of_names_list:
# If Binary relationship found
# Single relation: ObjectId()
# Multi relation: [ObjectId(), ObjectId(), ...]
right_subject_document = node_collection.one({'_id': self.right_subject})
right_subject_member_of_list = right_subject_document.member_of
right_subject_name = right_subject_document.name
self.name = "%(subject_name)s -- %(relation_type_name)s -- %(right_subject_name)s" % locals()
name_value = self.name
left_intersection = set(subject_type_list) & set(left_subject_member_of_list)
right_intersection = set(object_type_list) & set(right_subject_member_of_list)
if left_intersection and right_intersection:
subject_system_flag = True
else:
left_subject_system_flag = False
if left_intersection:
left_subject_system_flag = True
else:
for gst_id in left_subject_member_of_list:
gst_node = node_collection.one({'_id': gst_id}, {'type_of': 1})
if set(gst_node.type_of) & set(subject_type_list):
left_subject_system_flag = True
break
right_subject_system_flag = False
if right_intersection:
right_subject_system_flag = True
else:
for gst_id in right_subject_member_of_list:
gst_node = node_collection.one({'_id': gst_id}, {'type_of': 1})
if set(gst_node.type_of) & set(object_type_list):
right_subject_system_flag = True
break
if left_subject_system_flag and right_subject_system_flag:
subject_system_flag = True
        if self._type == "GRelation" and subject_system_flag == False:
            # print "The 2 lists do not have any common element"
            raise Exception("\n Cannot create the GRelation ("+name_value+") as the subject/object that you have mentioned is not a member of a GSystemType for which this RelationType is defined!!!\n")
        if self._type == "GAttribute" and subject_system_flag == False:
            # print "\n The 2 lists do not have any common element\n"
            error_message = "\n "+name_value+ " -- subject_type_list ("+str(subject_type_list)+") -- subject_member_of_list ("+str(subject_member_of_list)+") \n"
            raise Exception(error_message + "Cannot create the GAttribute ("+name_value+") as the subject that you have mentioned is not a member of a GSystemType for which this AttributeType is defined")
        # Check data_type for the GAttribute case: the object_value must have
        # the same type as specified in the AttributeType.
        """
        if self._type == "GAttribute":
            data_type_in_attribute_type = self.attribute_type['data_type']
            data_type_of_object_value = type(self.object_value)
            print "Attribute:: " + str(data_type_in_attribute_type)
            print "Value:: " + str(data_type_of_object_value)
            if data_type_in_attribute_type != data_type_of_object_value:
                raise Exception("The DataType of the value you have entered for this attribute is not correct. Please enter a value with type ---> " + str(data_type_in_attribute_type))
        """
        # end of data_type_check
super(Triple, self).save(*args, **kwargs)
history_manager = HistoryManager()
rcs_obj = RCS()
if is_new:
# Create history-version-file
if history_manager.create_or_replace_json_file(self):
fp = history_manager.get_file_path(self)
message = "This document (" + self.name + ") is created on " + datetime.datetime.now().strftime("%d %B %Y")
rcs_obj.checkin(fp, 1, message.encode('utf-8'), "-i")
else:
# Update history-version-file
fp = history_manager.get_file_path(self)
try:
rcs_obj.checkout(fp)
except Exception as err:
try:
if history_manager.create_or_replace_json_file(self):
fp = history_manager.get_file_path(self)
message = "This document (" + self.name + ") is re-created on " + datetime.datetime.now().strftime("%d %B %Y")
rcs_obj.checkin(fp, 1, message.encode('utf-8'), "-i")
except Exception as err:
print "\n DocumentError: This document (", self._id, ":", self.name, ") can't be re-created!!!\n"
node_collection.collection.remove({'_id': self._id})
raise RuntimeError(err)
try:
if history_manager.create_or_replace_json_file(self):
                    message = "This document (" + self.name + ") was last updated on " + datetime.datetime.now().strftime("%d %B %Y")
rcs_obj.checkin(fp, 1, message.encode('utf-8'))
except Exception as err:
print "\n DocumentError: This document (", self._id, ":", self.name, ") can't be updated!!!\n"
raise RuntimeError(err)
@connection.register
class GAttribute(Triple):
structure = {
'attribute_type_scope': basestring,
'attribute_type': AttributeType, # DBRef of AttributeType Class
'object_value_scope': basestring,
'object_value': None # value -- it's data-type, is determined by attribute_type field
}
required_fields = ['attribute_type', 'object_value']
use_dot_notation = True
use_autorefs = True # To support Embedding of Documents
@connection.register
class GRelation(Triple):
structure = {
'relation_type_scope': basestring,
'relation_type': RelationType, # DBRef of RelationType Class
'right_subject_scope': basestring,
# ObjectId's of GSystems Class / List of list of ObjectId's of GSystem Class
'right_subject': OR(ObjectId, list)
}
required_fields = ['relation_type', 'right_subject']
use_dot_notation = True
use_autorefs = True # To support Embedding of Documents
####################################### Added on 19th June 2014 for SEARCH ##############################
@connection.register
class ReducedDocs(DjangoDocument):
structure={
'_type': unicode,
'content':dict, #This contains the content in the dictionary format
        'orignal_id':ObjectId, #The ObjectId of the original document
'required_for':unicode,
'is_indexed':bool, #This will be true if the map reduced document has been indexed. If it is not then it will be false
}
use_dot_notation = True
@connection.register
class ToReduceDocs(DjangoDocument):
structure={
'_type': unicode,
'doc_id':ObjectId,
'required_for':unicode,
}
use_dot_notation = True
@connection.register
class IndexedWordList(DjangoDocument):
structure={
'_type': unicode,
'word_start_id':float,
'words':dict,
'required_for':unicode,
}
use_dot_notation = True
    # word_start_id: 0 --- a, 1 --- b, 2 --- c, ..., 25 --- z, 26 --- misc.
# This is a temporary holder, where you can hold any node temporarily and later permanently save it in the database
@connection.register
class node_holder(DjangoDocument):
objects = models.Manager()
structure={
'_type': unicode,
'details_to_hold':dict
}
required_fields = ['details_to_hold']
use_dot_notation = True
"""
@connection.register
class allLinks(DjangoDocument):
structure = {
'member_of':ObjectId,
'link':unicode,
'required_for':unicode,
}
# required_fields = ['member_of', 'link']
use_dot_notation = True
"""
# DATABASE Variables
db = get_database()
node_collection = db[Node.collection_name].Node
triple_collection = db[Triple.collection_name].Triple
gridfs_collection = db["fs.files"]
| supriyasawant/gstudio | gnowsys-ndf/gnowsys_ndf/ndf/models.py | Python | agpl-3.0 | 72,774 | 0.007379 |
# Copyright 2012 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# @author: Ryota MIBU
import contextlib
import random
from neutron.common import constants as q_const
from neutron.openstack.common import uuidutils
from neutron.plugins.nec.common import exceptions as nexc
from neutron.plugins.nec.db import api as ndb
from neutron.plugins.nec.db import models as nmodels # noqa
from neutron.tests.unit.nec import test_nec_plugin
class NECPluginV2DBTestBase(test_nec_plugin.NecPluginV2TestCase):
    """Class consisting of NECPluginV2 DB unit tests."""
def setUp(self):
"""Setup for tests."""
super(NECPluginV2DBTestBase, self).setUp()
self.session = self.context.session
def get_ofc_item_random_params(self):
"""create random parameters for ofc_item test."""
ofc_id = uuidutils.generate_uuid()
neutron_id = uuidutils.generate_uuid()
none = uuidutils.generate_uuid()
return ofc_id, neutron_id, none
@contextlib.contextmanager
def portinfo_random_params(self):
with self.port() as port:
params = {'port_id': port['port']['id'],
'datapath_id': hex(random.randint(0, 0xffffffff)),
'port_no': random.randint(1, 100),
'vlan_id': random.randint(q_const.MIN_VLAN_TAG,
q_const.MAX_VLAN_TAG),
'mac': ':'.join(["%02x" % random.randint(0, 0xff)
for x in range(6)])
}
yield params
class NECPluginV2DBOfcMappingTest(NECPluginV2DBTestBase):
def test_add_ofc_item(self):
"""test add OFC item."""
o, q, n = self.get_ofc_item_random_params()
tenant = ndb.add_ofc_item(self.session, 'ofc_tenant', q, o)
self.assertEqual(tenant.ofc_id, o)
self.assertEqual(tenant.neutron_id, q)
def test_add_ofc_item_duplicate_entry(self):
o, q, n = self.get_ofc_item_random_params()
ndb.add_ofc_item(self.session, 'ofc_tenant', q, o)
self.assertRaises(nexc.NECDBException,
ndb.add_ofc_item,
self.session, 'ofc_tenant', q, o)
def test_get_ofc_item(self):
o, q, n = self.get_ofc_item_random_params()
ndb.add_ofc_item(self.session, 'ofc_tenant', q, o)
tenant = ndb.get_ofc_item(self.session, 'ofc_tenant', q)
self.assertEqual(tenant.ofc_id, o)
self.assertEqual(tenant.neutron_id, q)
def test_get_ofc_item_for_nonexisting_entry(self):
self.assertIsNone(
ndb.get_ofc_item(self.session, 'ofc_tenant', 'non-exist-id'))
def test_get_ofc_id(self):
o, q, n = self.get_ofc_item_random_params()
ndb.add_ofc_item(self.session, 'ofc_tenant', q, o)
tenant_id = ndb.get_ofc_id(self.session, 'ofc_tenant', q)
self.assertEqual(tenant_id, o)
def test_get_ofc_id_for_nonexisting_entry(self):
self.assertRaises(nexc.OFCMappingNotFound,
ndb.get_ofc_id,
self.session, 'ofc_tenant', 'non-exist-id')
def test_exists_ofc_item(self):
o, q, n = self.get_ofc_item_random_params()
self.assertFalse(ndb.exists_ofc_item(self.session, 'ofc_tenant', q))
ndb.add_ofc_item(self.session, 'ofc_tenant', q, o)
self.assertTrue(ndb.exists_ofc_item(self.session, 'ofc_tenant', q))
ndb.del_ofc_item(self.session, 'ofc_tenant', q)
self.assertFalse(ndb.exists_ofc_item(self.session, 'ofc_tenant', q))
def test_find_ofc_item(self):
o, q, n = self.get_ofc_item_random_params()
ndb.add_ofc_item(self.session, 'ofc_tenant', q, o)
tenant = ndb.find_ofc_item(self.session, 'ofc_tenant', o)
self.assertEqual(tenant.ofc_id, o)
self.assertEqual(tenant.neutron_id, q)
def test_find_ofc_item_for_nonexisting_entry(self):
self.assertIsNone(
ndb.find_ofc_item(self.session, 'ofc_tenant', 'non-existi-id'))
def test_del_ofc_item(self):
o, q, n = self.get_ofc_item_random_params()
ndb.add_ofc_item(self.session, 'ofc_tenant', q, o)
self.assertTrue(ndb.del_ofc_item(self.session, 'ofc_tenant', q))
self.assertIsNone(ndb.get_ofc_item(self.session, 'ofc_tenant', q))
self.assertIsNone(ndb.find_ofc_item(self.session, 'ofc_tenant', o))
def test_del_ofc_item_for_nonexisting_entry(self):
self.assertFalse(
ndb.del_ofc_item(self.session, 'ofc_tenant', 'non-existi-id'))
class NECPluginV2DBPortInfoTest(NECPluginV2DBTestBase):
def _compare_portinfo(self, portinfo, expected):
self.assertEqual(portinfo.id, expected['port_id'])
self.assertEqual(portinfo.datapath_id, expected['datapath_id'])
self.assertEqual(portinfo.port_no, expected['port_no'])
self.assertEqual(portinfo.vlan_id, expected['vlan_id'])
self.assertEqual(portinfo.mac, expected['mac'])
def _add_portinfo(self, session, params):
return ndb.add_portinfo(session, params['port_id'],
params['datapath_id'], params['port_no'],
params['vlan_id'], params['mac'])
def testd_add_portinfo(self):
"""test add portinfo."""
with self.portinfo_random_params() as params:
portinfo = self._add_portinfo(self.session, params)
self._compare_portinfo(portinfo, params)
exception_raised = False
try:
self._add_portinfo(self.session, params)
except nexc.NECDBException:
exception_raised = True
self.assertTrue(exception_raised)
def teste_get_portinfo(self):
"""test get portinfo."""
with self.portinfo_random_params() as params:
self._add_portinfo(self.session, params)
portinfo = ndb.get_portinfo(self.session, params['port_id'])
self._compare_portinfo(portinfo, params)
nonexist_id = uuidutils.generate_uuid()
portinfo_none = ndb.get_portinfo(self.session, nonexist_id)
self.assertIsNone(portinfo_none)
def testf_del_portinfo(self):
"""test delete portinfo."""
with self.portinfo_random_params() as params:
self._add_portinfo(self.session, params)
portinfo = ndb.get_portinfo(self.session, params['port_id'])
self.assertEqual(portinfo.id, params['port_id'])
ndb.del_portinfo(self.session, params['port_id'])
portinfo_none = ndb.get_portinfo(self.session, params['port_id'])
self.assertIsNone(portinfo_none)
| virtualopensystems/neutron | neutron/tests/unit/nec/test_db.py | Python | apache-2.0 | 7,264 | 0 |
# -*- coding: utf-8 -*-
r"""
.. _SoftiMAX:
SoftiMAX at MAX IV
------------------
The images below are produced by scripts in
``\examples\withRaycing\14_SoftiMAX``.
The beamline will have two branches:
- STXM (Scanning Transmission X-ray Microscopy) and
- CXI (Coherent X-ray Imaging),
see the scheme provided by K. Thånell.
.. imagezoom:: _images/softiMAX_layout.*
STXM branch
~~~~~~~~~~~
.. rubric:: Rays vs. hybrid
The propagation through the first optical elements – from undulator to front
end (FE) slit, to M1, to M2 and to plane grating (PG) – is done with rays:
+------------+------------+------------+------------+
| FE | M1 | M2 | PG |
+============+============+============+============+
| |st_rFE| | |st_rM1| | |st_rM2| | |st_rPG| |
+------------+------------+------------+------------+
.. |st_rFE| imagezoom:: _images/stxm-2D-1-rays-0emit-0enSpread-monoE-00-FE.*
.. |st_rM1| imagezoom:: _images/stxm-2D-1-rays-0emit-0enSpread-monoE-01-M1local.*
.. |st_rM2| imagezoom:: _images/stxm-2D-1-rays-0emit-0enSpread-monoE-02-M2local.*
.. |st_rPG| imagezoom:: _images/stxm-2D-1-rays-0emit-0enSpread-monoE-02a-PGlocal.*
:loc: upper-right-corner
Starting from PG – to M3, to exit slit, to Fresnel zone plate (FZP) and to
variously positioned sample screen – the propagation is done by rays or waves,
as compared below. Despite the M3 footprint looks not perfect (not black at
periphery), the field at normal surfaces (exit slit, FZP (not shown) and sample
screen) is of perfect quality. At the best focus, rays and waves result in a
similar image. Notice a micron-sized depth of focus.
+-----------+---------------------+---------------------+
| | rays | wave |
+===========+=====================+=====================+
| M3 | |st_rM3| | |st_hM3| |
+-----------+---------------------+---------------------+
| exit slit | |st_rES| | |st_hES| |
+-----------+---------------------+---------------------+
| sample | |st_rS| | |st_hS| |
+-----------+---------------------+---------------------+
.. |st_rM3| imagezoom:: _images/stxm-2D-1-rays-0emit-0enSpread-monoE-03-M3local.*
.. |st_hM3| imagezoom:: _images/stxm-2D-2-hybr-0emit-0enSpread-monoE-03-M3local.*
:loc: upper-right-corner
.. |st_rES| imagezoom:: _images/stxm-2D-1-rays-0emit-0enSpread-monoE-04-ExitSlit.*
.. |st_hES| imagezoom:: _images/stxm-2D-2-hybr-0emit-0enSpread-monoE-04-ExitSlit.*
:loc: upper-right-corner
.. |st_rS| animation:: _images/stxm-2D-1-rays-0emit-0enSpread-monoE-06i-ExpFocus-Is
.. |st_hS| imagezoom:: _images/stxm-2D-2-hybr-0emit-0enSpread-monoE-06i-ExpFocus-Is
:loc: upper-right-corner
.. rubric:: Influence of emittance
Non-zero emittance radiation is treated in xrt by incoherent addition of single
electron intensities. The single electron (filament) fields are considered as
fully coherent and result from filament trajectories (one per repeat)
that attain positional and angular shifts within the given emittance
distribution. The following images are calculated for the exit slit and the
focus screen for zero and non-zero emittance
(for MAX IV 3 GeV ring: ε\ :sub:`x`\ =263 pm·rad,
β\ :sub:`x`\ =9 m, ε\ :sub:`z`\ =8 pm·rad, β\ :sub:`z`\ =2 m). At the real
emittance, the horizontal focal size increases by ~75%. A finite energy band,
as determined by vertical size of the exit slit, results in somewhat bigger
broadening due to a chromatic dependence of the focal length.
+-----------+---------------------+---------------------+---------------------+
| | 0 emittance | real emittance | |refeb| |
+===========+=====================+=====================+=====================+
| exit slit | |st_hESb| | |st_hES2| | |st_hES3| |
+-----------+---------------------+---------------------+---------------------+
| sample | |st_hSb| | |st_hS2| | |st_hS3| |
+-----------+---------------------+---------------------+---------------------+
.. |refeb| replace:: real emittance, finite energy band
.. |st_hESb| imagezoom:: _images/stxm-2D-2-hybr-0emit-0enSpread-monoE-04-ExitSlit.*
.. |st_hES2| imagezoom:: _images/stxm-2D-2-hybr-non0e-0enSpread-monoE-04-ExitSlit.*
.. |st_hS2| animation:: _images/stxm-2D-2-hybr-non0e-0enSpread-monoE-06i-ExpFocus-Is
.. |st_hES3| imagezoom:: _images/stxm-2D-2-hybr-non0e-0enSpread-wideE-04-ExitSlit.*
:loc: upper-right-corner
.. |st_hSb| imagezoom:: _images/stxm-2D-2-hybr-0emit-0enSpread-monoE-06i-ExpFocus-Is
.. |st_hS3| animation:: _images/stxm-2D-2-hybr-non0e-0enSpread-wideE-06i-ExpFocus-Is
:loc: upper-right-corner
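
A minimal numpy sketch of this incoherent summation (an illustration assuming
the single-electron filament fields ``filament_fields`` are available on a
common screen grid; this is not the code of the example scripts)::

    import numpy as np

    E = np.stack(filament_fields)              # (repeats, ny, nx) complex field samples
    incoherent = (np.abs(E)**2).sum(axis=0)    # incoherent sum of single-electron intensities
    coherent = np.abs(E.sum(axis=0))**2        # fully coherent sum, for comparison
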
.. rubric:: Correction of emittance effects
The increased focal size can be amended by closing the exit slit. With flux
loss of about 2/3, the focal size is almost restored.
+-----------+--------------------+--------------------+
| | 80 µm exit slit | 20 µm exit slit |
+===========+====================+====================+
| exit slit | |st_hES2b| | |st_hES4| |
+-----------+--------------------+--------------------+
| sample | |st_hS2b| | |st_hS4| |
+-----------+--------------------+--------------------+
.. |st_hES2b| imagezoom:: _images/stxm-2D-2-hybr-non0e-0enSpread-monoE-04-ExitSlit.*
.. |st_hES4| imagezoom:: _images/stxm-2D-2-hybr-non0e-0enSpread-monoE-025H-04-ExitSlit.*
:loc: upper-right-corner
.. |st_hS2b| animation:: _images/stxm-2D-2-hybr-non0e-0enSpread-monoE-06i-ExpFocus-Is
.. |st_hS4| animation:: _images/stxm-2D-2-hybr-non0e-0enSpread-monoE-025H-06i-ExpFocus-Is
:loc: upper-right-corner
.. rubric:: Coherence signatures
The beam improvement can also be viewed via the coherence properties by the
four available methods (see :ref:`coh_signs`). As the horizontal exit slit
becomes smaller, one can observe the increase of the coherent fraction ζ and
the increase of the primary (coherent) mode weight. The width of degree of
coherence (DoC) relative to the width of the intensity distribution determines
the coherent beam fraction. Both widths vary with varying screen position
around the focal point such that their ratio is not invariant, so that the
coherent fraction also varies, which is counter-intuitive. An important
advantage of the eigen-mode or PCA methods is a simple definition of the
coherent fraction as the eigenvalue of the zeroth mode (component); this
eigenvalue appears to be invariant around the focal point, see below. Note that
the methods 2 and 3 give equal results. The method 4 that gives the degree of
transverse coherence (DoTC) is also invariant around the focal point, see DoTC
values on the pictures of Principal Components.
+-----------+--------------------------+--------------------------+
| | 80 µm exit slit | 20 µm exit slit |
+===========+==========================+==========================+
| method 1 | |st_hS80m1| | |st_hS20m1| |
+-----------+--------------------------+--------------------------+
| method 2 | |st_hS80m3| | |st_hS20m3| |
+-----------+--------------------------+--------------------------+
| method 3, | |st_hS80m4| | |st_hS20m4| |
| method 4b | | |
+-----------+--------------------------+--------------------------+
.. |st_hS80m1| animation:: _images/stxm-IDOC-2D-2-hybr-non0e-0enSpread-monoE
.. |st_hS20m1| animation:: _images/stxm-IDOC-2D-2-hybr-non0e-0enSpread-monoE-025H
:loc: upper-right-corner
.. |st_hS80m3| animation:: _images/stxm-Modes-2D-2-hybr-non0e-0enSpread-monoE
.. |st_hS20m3| animation:: _images/stxm-Modes-2D-2-hybr-non0e-0enSpread-monoE-025H
:loc: upper-right-corner
.. |st_hS80m4| animation:: _images/stxm-PCA-2D-2-hybr-non0e-0enSpread-monoE
.. |st_hS20m4| animation:: _images/stxm-PCA-2D-2-hybr-non0e-0enSpread-monoE-025H
:loc: upper-right-corner
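
A minimal sketch of the eigen-mode analysis mentioned above (methods 2 and 3),
assuming the filament fields ``filament_fields`` are flattened over the screen
pixels; the coherent fraction is then the relative weight of the zeroth mode
(this is an illustration, not the xrt implementation)::

    import numpy as np

    E = np.stack([e.ravel() for e in filament_fields])   # (repeats, npixels)
    J = np.dot(E.conj().T, E) / len(E)                    # mutual intensity J(x1, x2)
    w = np.linalg.eigvalsh(J)[::-1]                       # mode weights, descending
    coherent_fraction = w[0] / w.sum()                    # weight of the zeroth (coherent) mode
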
CXI branch
~~~~~~~~~~
.. rubric:: 2D vs 1D
Although the sample screen images are of good quality (the dark field is almost
black), the mirror footprints may be noisy and not well convergent in the
periphery. Compare the M3 footprint with that in the previous section (STXM
branch) where the difference is in the mirror area and thus in the sample
density. The 10\ :sup:`6` wave samples used here (i.e. 10\ :sup:`12` possible
paths) are not enough for the slightly enlarged area in the present example. The
propagation is therefore performed in separated horizontal and vertical
directions, which dramatically improves the quality of the footprints.
Disadvantages of the cuts are losses in visual representation and incorrect
evaluation of the flux.
+------+----------------------+-----------------------+-----------------------+
| | 2D | 1D horizontal cut | 1D vertical cut |
+======+======================+=======================+=======================+
| |M3| | |cxiM32D| | |cxiM31Dh| | |cxiM31Dv| |
+------+----------------------+-----------------------+-----------------------+
| |SS| | |cxiS2D| | |cxiS1Dh| | |cxiS1Dv| |
+------+----------------------+-----------------------+-----------------------+
.. |M3| replace:: M3 footprint
.. |SS| replace:: sample screen
.. |cxiM32D| imagezoom:: _images/cxi_2D-2-hybr-0emit-0enSpread-monoE-03-M3local.*
.. |cxiM31Dh| imagezoom:: _images/cxi_1D-2-hybr-1e6hor-0emit-0enSpread-monoE-03-M3local.*
.. |cxiM31Dv| imagezoom:: _images/cxi_1D-2-hybr-1e6ver-0emit-0enSpread-monoE-03-M3local.*
:loc: upper-right-corner
.. |cxiS2D| animation:: _images/cxi_S2D
.. |cxiS1Dh| animation:: _images/cxi_S1Dh
.. |cxiS1Dv| animation:: _images/cxi_S1Dv
:loc: upper-right-corner
.. _wavefronts:
.. rubric:: Flat screen vs normal-to-k screen (wave front)
The following images demonstrate the correctness of the directional
Kirchhoff-like integral (see :ref:`seq_prop`). Five diffraction integrals are
calculated on flat screens around the focus position: for two polarizations and
for three directional components. The latter ones define the wave fronts at
every flat screen position; these wave fronts are further used as new curved
screens. The calculated diffraction fields on these curved screens have narrow
phase distributions, as shown by the color histograms, which is indeed expected
for a wave front by its definition. In contrast, the *flat* screens at the same
positions have rapid phase variation over several Fresnel zones.
.. note::
In the process of wave propagation, wave fronts -- surfaces of
constant phase -- are not used in any way. We therefore call it “wave
propagation”, not “wave *front* propagation” as frequently called by
others. The wave fronts in this example were calculated to solely
demonstrate the correctness of the local propagation directions after
having calculated the diffracted field.
+------------------------------+------------------------------+
| flat screen | curved screen (wave front) |
+==============================+==============================+
| |cxiFlat| | |cxiFront| |
+------------------------------+------------------------------+
.. |cxiFlat| animation:: _images/cxi-S1DhFlat
.. |cxiFront| animation:: _images/cxi-S1DhFront
:loc: upper-right-corner
The curvature of the calculated wave fronts varies across the focus position.
The wave fronts become more flat as one approaches the focus, see the figure
below. This is in contrast to *ray* propagation, where the angular ray
distribution is invariant at any position between two optical elements.
.. imagezoom:: _images/cxi_waveFronts.*
.. rubric:: Rays, waves and hybrid
The following images are horizontal cuts at the footprints and sample screens
calculated by
- rays,
- rays + waves hybrid (rays up to PG and wave from PG) and
- purely by waves.
+-----------------+-------------------+-------------------+-------------------+
| | rays | hybrid | waves |
+=================+===================+===================+===================+
| front end slit | |cxi-hFE| | same as rays | |cxi-wFE| |
+-----------------+-------------------+-------------------+-------------------+
| footprint on M1 | |cxi-hM1| | same as rays | |cxi-wM1| |
+-----------------+-------------------+-------------------+-------------------+
| footprint on M2 | |cxi-hM2| | same as rays | |cxi-wM2| |
+-----------------+-------------------+-------------------+-------------------+
| footprint on PG | |cxi-hPG| | same as rays | |cxi-wPG| |
+-----------------+-------------------+-------------------+-------------------+
| footprint on M3 | |cxi-rM3| | |cxi-hM3| | |cxi-wM3| |
+-----------------+-------------------+-------------------+-------------------+
| exit slit | |cxi-rES| | |cxi-hES| | |cxi-wES| |
+-----------------+-------------------+-------------------+-------------------+
| footprint on M4 | |cxi-rM4| | |cxi-hM4| | |cxi-wM4| |
+-----------------+-------------------+-------------------+-------------------+
| footprint on M5 | |cxi-rM5| | |cxi-hM5| | |cxi-wM5| |
+-----------------+-------------------+-------------------+-------------------+
| sample screen | |cxi-rS| | |cxi-hS| | |cxi-wS| |
+-----------------+-------------------+-------------------+-------------------+
.. |cxi-hFE| imagezoom:: _images/cxi_1D-1-rays-hor-0emit-0enSpread-monoE-00-FE.*
.. |cxi-wFE| imagezoom:: _images/cxi_1D-3-wave-hor-0emit-0enSpread-monoE-00-FE.*
:loc: upper-right-corner
.. |cxi-hM1| imagezoom:: _images/cxi_1D-1-rays-hor-0emit-0enSpread-monoE-01-M1local.*
.. |cxi-wM1| imagezoom:: _images/cxi_1D-3-wave-hor-0emit-0enSpread-monoE-01-M1local.*
:loc: upper-right-corner
.. |cxi-hM2| imagezoom:: _images/cxi_1D-1-rays-hor-0emit-0enSpread-monoE-02-M2local.*
.. |cxi-wM2| imagezoom:: _images/cxi_1D-3-wave-hor-0emit-0enSpread-monoE-02-M2local.*
:loc: upper-right-corner
.. |cxi-hPG| imagezoom:: _images/cxi_1D-1-rays-hor-0emit-0enSpread-monoE-02-PGlocal.*
.. |cxi-wPG| imagezoom:: _images/cxi_1D-3-wave-hor-0emit-0enSpread-monoE-02-PGlocal.*
:loc: upper-right-corner
.. |cxi-rM3| imagezoom:: _images/cxi_1D-1-rays-hor-0emit-0enSpread-monoE-03-M3local.*
.. |cxi-hM3| imagezoom:: _images/cxi_1D-2-hybr-hor-0emit-0enSpread-monoE-03-M3local.*
.. |cxi-wM3| imagezoom:: _images/cxi_1D-3-wave-hor-0emit-0enSpread-monoE-03-M3local.*
:loc: upper-right-corner
.. |cxi-rES| imagezoom:: _images/cxi_1D-1-rays-hor-0emit-0enSpread-monoE-04-ExitSlit.*
.. |cxi-hES| imagezoom:: _images/cxi_1D-2-hybr-hor-0emit-0enSpread-monoE-04-ExitSlit.*
.. |cxi-wES| imagezoom:: _images/cxi_1D-3-wave-hor-0emit-0enSpread-monoE-04-ExitSlit.*
:loc: upper-right-corner
.. |cxi-rM4| imagezoom:: _images/cxi_1D-1-rays-hor-0emit-0enSpread-monoE-05-M4local.*
.. |cxi-hM4| imagezoom:: _images/cxi_1D-2-hybr-hor-0emit-0enSpread-monoE-05-M4local.*
.. |cxi-wM4| imagezoom:: _images/cxi_1D-3-wave-hor-0emit-0enSpread-monoE-05-M4local.*
:loc: upper-right-corner
.. |cxi-rM5| imagezoom:: _images/cxi_1D-1-rays-hor-0emit-0enSpread-monoE-06-M5local.*
.. |cxi-hM5| imagezoom:: _images/cxi_1D-2-hybr-hor-0emit-0enSpread-monoE-06-M5local.*
.. |cxi-wM5| imagezoom:: _images/cxi_1D-3-wave-hor-0emit-0enSpread-monoE-06-M5local.*
:loc: upper-right-corner
.. |cxi-rS| animation:: _images/cxi-rS
.. |cxi-hS| animation:: _images/cxi-hS
.. |cxi-wS| animation:: _images/cxi-wS
:loc: upper-right-corner
.. rubric:: Coherence signatures
This section demonstrates the methods 1 and 3 from :ref:`coh_signs`. Notice
again the difficulty in determining the width of DoC owing to its complex shape
(at real emittance) or the restricted field of view (the 0 emittance case). In
contrast, the eigen mode analysis yields an almost invariant well defined
coherent fraction.
+-----------+--------------------------+--------------------------+
| | 0 emittance | real emittance |
+===========+==========================+==========================+
| method 1 | |cxi-coh1-0emit| | |cxi-coh1-non0e| |
+-----------+--------------------------+--------------------------+
| method 3 | |cxi-coh3-0emit| | |cxi-coh3-non0e| |
+-----------+--------------------------+--------------------------+
.. |cxi-coh1-0emit| animation:: _images/cxi-coh1-0emit
.. |cxi-coh1-non0e| animation:: _images/cxi-coh1-non0e
.. |cxi-coh3-0emit| animation:: _images/cxi-coh3-0emit
.. |cxi-coh3-non0e| animation:: _images/cxi-coh3-non0e
:loc: upper-right-corner
"""
pass
| kklmn/xrt | examples/withRaycing/14_SoftiMAX/__init__.py | Python | mit | 16,930 | 0.002662 |
# -*- coding: utf-8 -*-
import requests
from datetime import datetime
from fplassist.models import Team_Info, Player_Info, Player_Basic_Stats, Player_Detailed_Stats, FPL_Config
def get_data(api_url):
api_response = requests.get(api_url)
try:
api_response.raise_for_status()
api_data = api_response.json()
except:
print("Error: There was an error while requesting the http-api. \
errorcode: %s" % (str(api_response.status_code)))
return False
if api_data:
return api_data
else:
return False
def update_database():
print("\n############----------> FPL Helper Script <----------############\n")
print("Initiating script...\n")
print("Connecting to database...")
print("Identifying next game week...")
event_data = get_data("https://fantasy.premierleague.com/drf/events")
    _next_gw = [gw for gw in event_data if gw["is_next"] == True]
    next_gameweek = _next_gw[0]["id"] if _next_gw else None
if next_gameweek == None:
print("There was a problem identifying next game week!")
return False
print("Collecting team information...")
team_data = get_data("https://fantasy.premierleague.com/drf/teams")
print("Collecting fixture information...")
fixture_data = get_data("https://fantasy.premierleague.com/drf/fixtures")
for team in team_data:
difficulty = 0
for gameweek in range(next_gameweek, next_gameweek+5):
fixtures = [fixture for fixture in fixture_data if fixture["event"] == gameweek]
for fixture in fixtures:
if fixture["team_h"] == team["id"]:
difficulty += (fixture["team_h_difficulty"] - fixture["team_a_difficulty"])
elif fixture["team_a"] == team["id"]:
difficulty += (fixture["team_a_difficulty"] - fixture["team_h_difficulty"])
t_diff = difficulty/5.0
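        # Bucket the 5-gameweek average differential (negative = easier run of
        # fixtures for this team) into a 0-4 difficulty band.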
if t_diff <= -4.0:
f_difficulty = 0
elif t_diff < -1.0:
f_difficulty = 1
elif t_diff < 0.0:
f_difficulty = 2
elif t_diff < 2.0:
f_difficulty = 3
        elif t_diff <= 4.0:
            f_difficulty = 4
        else:
            # Defensive fallback (kept equal to the hardest band) so that
            # f_difficulty is always defined even if t_diff exceeds +4.0.
            f_difficulty = 4
try:
team_entry = Team_Info.objects.get(team_id=team["id"])
team_entry.fixture_difficulty = f_difficulty
except Team_Info.DoesNotExist:
team_entry = Team_Info(team_id=team["id"], team_name=team["name"],
short_name=team["short_name"],
fixture_difficulty=f_difficulty)
team_entry.save()
print("Team and Fixture Difficulty information stored successfully!")
print("Collecting player information...")
player_types = get_data("https://fantasy.premierleague.com/drf/element-types")
print("Collecting player base stats...")
players = get_data("https://fantasy.premierleague.com/drf/elements")
for player in players:
print(player["web_name"])
player_cost = "%.1f" % (int(player["now_cost"])/10.0)
position_long = [pos for pos in player_types if pos["id"] == player["element_type"]][0]["singular_name"]
position_short = [pos for pos in player_types if pos["id"] == player["element_type"]][0]["singular_name_short"]
if not player["news"]:
p_news = "Match Fit!"
else:
p_news = player["news"]
player_deep_cumul = {'influence':0, 'creativity':0, 'threat':0, 'ict_index':0,
'open_play_crosses':0, 'big_chances_created':0, 'clearances_blocks_interceptions':0, 'recoveries':0,
'key_passes':0, 'tackles':0, 'winning_goals':0, 'attempted_passes':0, 'completed_passes':0,
'penalties_conceded':0, 'big_chances_missed':0, 'tackled':0, 'offside':0,
'target_missed':0, 'fouls':0, 'dribbles':0}
player_deep = get_data("https://fantasy.premierleague.com/drf/element-summary/%d" % (player["id"]))["history"]
_points_history = []
_ict_history = []
for deep_stat in player_deep:
_points_history.append(deep_stat["total_points"])
_ict_history.append(deep_stat["ict_index"])
for deep_attr in player_deep_cumul:
player_deep_cumul[deep_attr] += float(deep_stat[deep_attr])
try:
player_info = Player_Info.objects.get(player_id=player["id"])
player_info.team_id, player_info.availability, player_info.news = player["team"], player["status"], p_news
except Player_Info.DoesNotExist:
player_info = Player_Info(player_id=player["id"], player_name=player["web_name"],
pos_short=position_short, pos_long=position_long,
team_id=player["team"], availability=player["status"],
news=p_news, player_photo=player["photo"].split(".")[0])
player_info.save()
try:
player_base_stats = Player_Basic_Stats.objects.get(player_id=player["id"])
player_base_stats.points = player["total_points"]
player_base_stats.minutes = player["minutes"]
player_base_stats.cost = player_cost
player_base_stats.tsb = player["selected_by_percent"]
player_base_stats.ppg = player["points_per_game"]
player_base_stats.goals = player["goals_scored"]
player_base_stats.assists = player["assists"]
player_base_stats.cleansheet = player["clean_sheets"]
player_base_stats.saves = player["saves"]
player_base_stats.bps = player["bps"]
player_base_stats.transfer_in = player["transfers_in_event"]
player_base_stats.transfer_out = player["transfers_out_event"]
player_base_stats.form = player["form"]
except Player_Basic_Stats.DoesNotExist:
player_base_stats = Player_Basic_Stats(player_id=player["id"], points=player["total_points"],
minutes=player["minutes"], cost=player_cost,
tsb=player["selected_by_percent"],
ppg=player["points_per_game"], goals=player["goals_scored"],
assists=player["assists"], cleansheet=player["clean_sheets"],
saves=player["saves"], bps=player["bps"],
transfer_in=player["transfers_in_event"],
transfer_out=player["transfers_out_event"],
form=player["form"])
player_base_stats.save()
try:
player_detailed = Player_Detailed_Stats.objects.get(player_id=player["id"])
player_detailed.ict_index = player_deep_cumul["ict_index"]
player_detailed.open_play_crosses = player_deep_cumul["open_play_crosses"]
player_detailed.big_chances_created = player_deep_cumul["big_chances_created"]
player_detailed.clearances_blocks_interceptions = player_deep_cumul["clearances_blocks_interceptions"]
player_detailed.recoveries = player_deep_cumul["recoveries"]
player_detailed.key_passes = player_deep_cumul["key_passes"]
player_detailed.tackles = player_deep_cumul["tackles"]
player_detailed.winning_goals = player_deep_cumul["winning_goals"]
player_detailed.attempted_passes = player_deep_cumul["attempted_passes"]
player_detailed.completed_passes = player_deep_cumul["completed_passes"]
player_detailed.penalties_conceded = player_deep_cumul["penalties_conceded"]
player_detailed.big_chances_missed = player_deep_cumul["big_chances_missed"]
player_detailed.tackled = player_deep_cumul["tackled"]
player_detailed.offside = player_deep_cumul["offside"]
player_detailed.target_missed = player_deep_cumul["target_missed"]
player_detailed.fouls = player_deep_cumul["fouls"]
player_detailed.dribbles = player_deep_cumul["dribbles"]
player_detailed.points_history = _points_history
player_detailed.ict_history = _ict_history
except Player_Detailed_Stats.DoesNotExist:
player_detailed = Player_Detailed_Stats(player_id=player["id"], ict_index=player_deep_cumul["ict_index"],
open_play_crosses=player_deep_cumul["open_play_crosses"],
big_chances_created=player_deep_cumul["big_chances_created"],
clearances_blocks_interceptions=player_deep_cumul["clearances_blocks_interceptions"],
recoveries=player_deep_cumul["recoveries"],
key_passes=player_deep_cumul["key_passes"],
tackles=player_deep_cumul["tackles"],
winning_goals=player_deep_cumul["winning_goals"],
attempted_passes=player_deep_cumul["attempted_passes"],
completed_passes=player_deep_cumul["completed_passes"],
penalties_conceded=player_deep_cumul["penalties_conceded"],
big_chances_missed=player_deep_cumul["big_chances_missed"],
tackled=player_deep_cumul["tackled"],
offside=player_deep_cumul["offside"],
target_missed=player_deep_cumul["target_missed"],
fouls=player_deep_cumul["fouls"],
dribbles=player_deep_cumul["dribbles"],
points_history = _points_history,
ict_history = _ict_history)
player_detailed.save()
p = FPL_Config.objects.get(id=1)
p.bg_active = False
p.last_updated = datetime.now()
p.save()
    return
| mdameenh/elysia | fplassist/update_database.py | Python | bsd-3-clause | 10,609 | 0.010651 |
# -*- coding: utf-8 -*-
#
# RADICAL-Pilot documentation build configuration file, created by
# sphinx-quickstart on Mon Dec 3 21:55:42 2012.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import glob
import imp
import sys
import os
import radical.utils as ru
import pprint
import subprocess as sp
script_dir = os.path.dirname(os.path.realpath(__file__))
################################################################################
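# Derive the docs tag from the current git branch: the substring
# 'readthedocs.tutorial' maps to 'tutorial' and 'readthedocs' to 'release';
# any other branch name is used as-is (see also the detached-HEAD fallback below).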
cmd = "git branch | grep '*' | cut -f 2 -d \ " \
+ " | sed -e 's/readthedocs.tutorial/tutorial/g' " \
+ " | sed -e 's/readthedocs/release/g'"
mytag = sp.Popen(cmd, shell=True, stdout=sp.PIPE).stdout.read().strip()
if 'detached' in mytag :
cmd = "git branch | grep '*' | cut -f 2 -d '/' | cut -f 1 -d ')'" \
+ " | sed -e 's/readthedocs.tutorial/tutorial/g' " \
+ " | sed -e 's/readthedocs/release/g'"
mytag = sp.Popen(cmd, shell=True, stdout=sp.PIPE).stdout.read().strip()
tags.add (mytag)
################################################################################
##
print "* Generating resource configuration docs: resources.rst"
print "* using tag: %s" % mytag
try:
os.remove("{0}/resources.rst".format(script_dir))
except OSError:
pass
with open("{0}/resources.rst".format(script_dir), "w") as resources_rst:
resources_rst.write("""
.. _chapter_resources:
List of Pre-Configured Resources
================================
""")
configs = os.listdir("{0}/../../src/radical/pilot/configs/".format(script_dir))
for config in configs:
if config.endswith(".json") is False:
continue # skip all non-python files
if config.startswith("aliases") is True:
continue # skip alias files
print " * %s" % config
try:
json_data = ru.read_json_str("../../src/radical/pilot/configs/%s" % config)
except Exception, ex:
print " * JSON PARSING ERROR: %s" % str(ex)
continue
resources_rst.write("{0}\n".format(config[:-5].upper()))
resources_rst.write("{0}\n\n".format("="*len(config[:-5])))
for host_key, resource_config in json_data.iteritems():
resource_key = "%s.%s" % (config[:-5], host_key)
print " * %s" % resource_key
try:
default_queue = resource_config["default_queue"]
except Exception, ex:
default_queue = None
try:
working_dir = resource_config["default_remote_workdir"]
except Exception, ex:
working_dir = "$HOME"
try:
python_interpreter = resource_config["python_interpreter"]
except Exception, ex:
python_interpreter = None
try:
access_schemas = resource_config["schemas"]
except Exception, ex:
access_schemas = ['n/a']
resources_rst.write("{0}\n".format(host_key.upper()))
resources_rst.write("{0}\n\n".format("*"*len(host_key)))
resources_rst.write("{0}\n\n".format(resource_config["description"]))
resources_rst.write("* **Resource label** : ``{0}``\n".format(resource_key))
resources_rst.write("* **Raw config** : :download:`{0} <../../src/radical/pilot/configs/{0}>`\n".format(config))
if resource_config["notes"] != "None":
resources_rst.write("* **Note** : {0}\n".format(resource_config["notes"]))
resources_rst.write("* **Default values** for ComputePilotDescription attributes:\n\n")
resources_rst.write(" * ``queue : {0}``\n".format(default_queue))
resources_rst.write(" * ``sandbox : {0}``\n".format(working_dir))
resources_rst.write(" * ``access_schema : {0}``\n\n".format(access_schemas[0]))
resources_rst.write("* **Available schemas** : ``{0}``\n".format(', '.join(access_schemas)))
resources_rst.write("\n")
##
################################################################################
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('../../src/'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.intersphinx',
'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinx.ext.pngmath',
'sphinx.ext.mathjax',
'sphinx.ext.ifconfig',
'sphinx.ext.viewcode',
'sphinx.ext.extlinks']
[extensions]
todo_include_todos=True
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
rst_epilog = """
"""
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'RADICAL-Pilot'
copyright = u'2014, The RADICAL Group at Rutgers University'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
try:
import radical.pilot
version = radical.pilot.version
release = radical.pilot.version
except Exception as e:
print 'Could not determine version: %s' % e
version = "UNKNOWN"
release = "UNKNOWN"
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ["_themes"]
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
extlinks = {'issue': ('https://github.com/radical-cybertools/radical.pilot/issues/%s',
'issue ')}
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#html_theme = 'default'
html_theme = "armstrong"
html_theme_path = ["_themes", ]
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
html_theme_options = {
"collapsiblesidebar" : "true",
}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = 'images/logo.jpg'
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'radical.pilot.doc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'RADICAL-Pilot.tex', u'RADICAL-Pilot Documentation',
u'The RADICAL Group at Rutgers University', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = 'images/logo.jpg'
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
latex_show_pagerefs = True
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'radical.pilot', u'RADICAL-Pilot Documentation',
[u'The RADICAL Group at Rutgers University'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'RADICAL-Pilot', u'RADICAL-Pilot Documentation',
u'The RADICAL Group at Rutgers University', 'RADICAL-Pilot', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'http://docs.python.org/': None}
autodoc_member_order = 'bysource'
autodoc_default_flags = ['members'] #, 'undoc-members', 'show-inheritance']
| JensTimmerman/radical.pilot | docs/source/conf.py | Python | mit | 12,807 | 0.006637 |
import _plotly_utils.basevalidators
class CliponaxisValidator(_plotly_utils.basevalidators.BooleanValidator):
def __init__(self, plotly_name="cliponaxis", parent_name="funnel", **kwargs):
super(CliponaxisValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "plot"),
role=kwargs.pop("role", "info"),
**kwargs
)
| plotly/python-api | packages/python/plotly/plotly/validators/funnel/_cliponaxis.py | Python | mit | 450 | 0.002222 |
## This file is part of Scapy
## See http://www.secdev.org/projects/scapy for more information
## Copyright (C) Philippe Biondi <phil@secdev.org>
## Copyright (C) 6WIND <olivier.matz@6wind.com>
## This program is published under a GPLv2 license
"""
SCTP (Stream Control Transmission Protocol).
"""
import struct
from scapy.packet import *
from scapy.fields import *
from scapy.layers.inet import IP
from scapy.layers.inet6 import IP6Field
from scapy.layers.inet6 import IPv6
IPPROTO_SCTP=132
# crc32-c (Castagnoli) (crc32c_poly=0x1EDC6F41)
crc32c_table = [
0x00000000, 0xF26B8303, 0xE13B70F7, 0x1350F3F4,
0xC79A971F, 0x35F1141C, 0x26A1E7E8, 0xD4CA64EB,
0x8AD958CF, 0x78B2DBCC, 0x6BE22838, 0x9989AB3B,
0x4D43CFD0, 0xBF284CD3, 0xAC78BF27, 0x5E133C24,
0x105EC76F, 0xE235446C, 0xF165B798, 0x030E349B,
0xD7C45070, 0x25AFD373, 0x36FF2087, 0xC494A384,
0x9A879FA0, 0x68EC1CA3, 0x7BBCEF57, 0x89D76C54,
0x5D1D08BF, 0xAF768BBC, 0xBC267848, 0x4E4DFB4B,
0x20BD8EDE, 0xD2D60DDD, 0xC186FE29, 0x33ED7D2A,
0xE72719C1, 0x154C9AC2, 0x061C6936, 0xF477EA35,
0xAA64D611, 0x580F5512, 0x4B5FA6E6, 0xB93425E5,
0x6DFE410E, 0x9F95C20D, 0x8CC531F9, 0x7EAEB2FA,
0x30E349B1, 0xC288CAB2, 0xD1D83946, 0x23B3BA45,
0xF779DEAE, 0x05125DAD, 0x1642AE59, 0xE4292D5A,
0xBA3A117E, 0x4851927D, 0x5B016189, 0xA96AE28A,
0x7DA08661, 0x8FCB0562, 0x9C9BF696, 0x6EF07595,
0x417B1DBC, 0xB3109EBF, 0xA0406D4B, 0x522BEE48,
0x86E18AA3, 0x748A09A0, 0x67DAFA54, 0x95B17957,
0xCBA24573, 0x39C9C670, 0x2A993584, 0xD8F2B687,
0x0C38D26C, 0xFE53516F, 0xED03A29B, 0x1F682198,
0x5125DAD3, 0xA34E59D0, 0xB01EAA24, 0x42752927,
0x96BF4DCC, 0x64D4CECF, 0x77843D3B, 0x85EFBE38,
0xDBFC821C, 0x2997011F, 0x3AC7F2EB, 0xC8AC71E8,
0x1C661503, 0xEE0D9600, 0xFD5D65F4, 0x0F36E6F7,
0x61C69362, 0x93AD1061, 0x80FDE395, 0x72966096,
0xA65C047D, 0x5437877E, 0x4767748A, 0xB50CF789,
0xEB1FCBAD, 0x197448AE, 0x0A24BB5A, 0xF84F3859,
0x2C855CB2, 0xDEEEDFB1, 0xCDBE2C45, 0x3FD5AF46,
0x7198540D, 0x83F3D70E, 0x90A324FA, 0x62C8A7F9,
0xB602C312, 0x44694011, 0x5739B3E5, 0xA55230E6,
0xFB410CC2, 0x092A8FC1, 0x1A7A7C35, 0xE811FF36,
0x3CDB9BDD, 0xCEB018DE, 0xDDE0EB2A, 0x2F8B6829,
0x82F63B78, 0x709DB87B, 0x63CD4B8F, 0x91A6C88C,
0x456CAC67, 0xB7072F64, 0xA457DC90, 0x563C5F93,
0x082F63B7, 0xFA44E0B4, 0xE9141340, 0x1B7F9043,
0xCFB5F4A8, 0x3DDE77AB, 0x2E8E845F, 0xDCE5075C,
0x92A8FC17, 0x60C37F14, 0x73938CE0, 0x81F80FE3,
0x55326B08, 0xA759E80B, 0xB4091BFF, 0x466298FC,
0x1871A4D8, 0xEA1A27DB, 0xF94AD42F, 0x0B21572C,
0xDFEB33C7, 0x2D80B0C4, 0x3ED04330, 0xCCBBC033,
0xA24BB5A6, 0x502036A5, 0x4370C551, 0xB11B4652,
0x65D122B9, 0x97BAA1BA, 0x84EA524E, 0x7681D14D,
0x2892ED69, 0xDAF96E6A, 0xC9A99D9E, 0x3BC21E9D,
0xEF087A76, 0x1D63F975, 0x0E330A81, 0xFC588982,
0xB21572C9, 0x407EF1CA, 0x532E023E, 0xA145813D,
0x758FE5D6, 0x87E466D5, 0x94B49521, 0x66DF1622,
0x38CC2A06, 0xCAA7A905, 0xD9F75AF1, 0x2B9CD9F2,
0xFF56BD19, 0x0D3D3E1A, 0x1E6DCDEE, 0xEC064EED,
0xC38D26C4, 0x31E6A5C7, 0x22B65633, 0xD0DDD530,
0x0417B1DB, 0xF67C32D8, 0xE52CC12C, 0x1747422F,
0x49547E0B, 0xBB3FFD08, 0xA86F0EFC, 0x5A048DFF,
0x8ECEE914, 0x7CA56A17, 0x6FF599E3, 0x9D9E1AE0,
0xD3D3E1AB, 0x21B862A8, 0x32E8915C, 0xC083125F,
0x144976B4, 0xE622F5B7, 0xF5720643, 0x07198540,
0x590AB964, 0xAB613A67, 0xB831C993, 0x4A5A4A90,
0x9E902E7B, 0x6CFBAD78, 0x7FAB5E8C, 0x8DC0DD8F,
0xE330A81A, 0x115B2B19, 0x020BD8ED, 0xF0605BEE,
0x24AA3F05, 0xD6C1BC06, 0xC5914FF2, 0x37FACCF1,
0x69E9F0D5, 0x9B8273D6, 0x88D28022, 0x7AB90321,
0xAE7367CA, 0x5C18E4C9, 0x4F48173D, 0xBD23943E,
0xF36E6F75, 0x0105EC76, 0x12551F82, 0xE03E9C81,
0x34F4F86A, 0xC69F7B69, 0xD5CF889D, 0x27A40B9E,
0x79B737BA, 0x8BDCB4B9, 0x988C474D, 0x6AE7C44E,
0xBE2DA0A5, 0x4C4623A6, 0x5F16D052, 0xAD7D5351,
]
def crc32c(buf):
crc = 0xffffffff
for c in buf:
crc = (crc>>8) ^ crc32c_table[(crc^(ord(c))) & 0xFF]
crc = (~crc) & 0xffffffff
# reverse endianness
return struct.unpack(">I",struct.pack("<I", crc))[0]
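# Note: SCTP.post_build() below calls crc32c() on the fully assembled packet
# and then overwrites bytes 8..11 (the chksum field) with the result.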
# old checksum (RFC2960)
"""
BASE = 65521 # largest prime smaller than 65536
def update_adler32(adler, buf):
s1 = adler & 0xffff
s2 = (adler >> 16) & 0xffff
print s1,s2
for c in buf:
print ord(c)
s1 = (s1 + ord(c)) % BASE
s2 = (s2 + s1) % BASE
print s1,s2
return (s2 << 16) + s1
def sctp_checksum(buf):
return update_adler32(1, buf)
"""
sctpchunktypescls = {
0 : "SCTPChunkData",
1 : "SCTPChunkInit",
2 : "SCTPChunkInitAck",
3 : "SCTPChunkSACK",
4 : "SCTPChunkHeartbeatReq",
5 : "SCTPChunkHeartbeatAck",
6 : "SCTPChunkAbort",
7 : "SCTPChunkShutdown",
8 : "SCTPChunkShutdownAck",
9 : "SCTPChunkError",
10 : "SCTPChunkCookieEcho",
11 : "SCTPChunkCookieAck",
14 : "SCTPChunkShutdownComplete",
}
sctpchunktypes = {
0 : "data",
1 : "init",
2 : "init-ack",
3 : "sack",
4 : "heartbeat-req",
5 : "heartbeat-ack",
6 : "abort",
7 : "shutdown",
8 : "shutdown-ack",
9 : "error",
10 : "cookie-echo",
11 : "cookie-ack",
14 : "shutdown-complete",
}
sctpchunkparamtypescls = {
1 : "SCTPChunkParamHearbeatInfo",
5 : "SCTPChunkParamIPv4Addr",
6 : "SCTPChunkParamIPv6Addr",
7 : "SCTPChunkParamStateCookie",
8 : "SCTPChunkParamUnrocognizedParam",
9 : "SCTPChunkParamCookiePreservative",
11 : "SCTPChunkParamHostname",
12 : "SCTPChunkParamSupportedAddrTypes",
32768 : "SCTPChunkParamECNCapable",
49152 : "SCTPChunkParamFwdTSN",
49158 : "SCTPChunkParamAdaptationLayer",
}
sctpchunkparamtypes = {
1 : "heartbeat-info",
5 : "IPv4",
6 : "IPv6",
7 : "state-cookie",
8 : "unrecognized-param",
9 : "cookie-preservative",
11 : "hostname",
12 : "addrtypes",
32768 : "ecn-capable",
49152 : "fwd-tsn-supported",
49158 : "adaptation-layer",
}
############## SCTP header
# Dummy class to guess payload type (variable parameters)
class _SCTPChunkGuessPayload:
def default_payload_class(self,p):
if len(p) < 4:
return conf.padding_layer
else:
t = ord(p[0])
return globals().get(sctpchunktypescls.get(t, "Raw"), conf.raw_layer)
class SCTP(_SCTPChunkGuessPayload, Packet):
fields_desc = [ ShortField("sport", None),
ShortField("dport", None),
XIntField("tag", None),
XIntField("chksum", None), ]
def answers(self, other):
if not isinstance(other, SCTP):
return 0
if conf.checkIPsrc:
if not ((self.sport == other.dport) and
(self.dport == other.sport)):
return 0
return 1
def post_build(self, p, pay):
p += pay
if self.chksum is None:
crc = crc32c(str(p))
p = p[:8]+struct.pack(">I", crc)+p[12:]
return p
############## SCTP Chunk variable params
class ChunkParamField(PacketListField):
def __init__(self, name, default, count_from=None, length_from=None):
PacketListField.__init__(self, name, default, conf.raw_layer, count_from=count_from, length_from=length_from)
def m2i(self, p, m):
cls = conf.raw_layer
if len(m) >= 4:
t = ord(m[0]) * 256 + ord(m[1])
cls = globals().get(sctpchunkparamtypescls.get(t, "Raw"), conf.raw_layer)
return cls(m)
# dummy class to avoid Raw() after Chunk params
class _SCTPChunkParam:
def extract_padding(self, s):
return "",s[:]
class SCTPChunkParamHearbeatInfo(_SCTPChunkParam, Packet):
fields_desc = [ ShortEnumField("type", 1, sctpchunkparamtypes),
FieldLenField("len", None, length_of="data",
adjust = lambda pkt,x:x+4),
PadField(StrLenField("data", "",
length_from=lambda pkt: pkt.len-4),
4, padwith="\x00"),]
class SCTPChunkParamIPv4Addr(_SCTPChunkParam, Packet):
fields_desc = [ ShortEnumField("type", 5, sctpchunkparamtypes),
ShortField("len", 8),
IPField("addr","127.0.0.1"), ]
class SCTPChunkParamIPv6Addr(_SCTPChunkParam, Packet):
fields_desc = [ ShortEnumField("type", 6, sctpchunkparamtypes),
ShortField("len", 20),
IP6Field("addr","::1"), ]
class SCTPChunkParamStateCookie(_SCTPChunkParam, Packet):
fields_desc = [ ShortEnumField("type", 7, sctpchunkparamtypes),
FieldLenField("len", None, length_of="cookie",
adjust = lambda pkt,x:x+4),
PadField(StrLenField("cookie", "",
length_from=lambda pkt: pkt.len-4),
4, padwith="\x00"),]
class SCTPChunkParamUnrocognizedParam(_SCTPChunkParam, Packet):
fields_desc = [ ShortEnumField("type", 8, sctpchunkparamtypes),
FieldLenField("len", None, length_of="param",
adjust = lambda pkt,x:x+4),
PadField(StrLenField("param", "",
length_from=lambda pkt: pkt.len-4),
4, padwith="\x00"),]
class SCTPChunkParamCookiePreservative(_SCTPChunkParam, Packet):
fields_desc = [ ShortEnumField("type", 9, sctpchunkparamtypes),
ShortField("len", 8),
XIntField("sug_cookie_inc", None), ]
class SCTPChunkParamHostname(_SCTPChunkParam, Packet):
fields_desc = [ ShortEnumField("type", 11, sctpchunkparamtypes),
FieldLenField("len", None, length_of="hostname",
adjust = lambda pkt,x:x+4),
PadField(StrLenField("hostname", "",
length_from=lambda pkt: pkt.len-4),
4, padwith="\x00"), ]
class SCTPChunkParamSupportedAddrTypes(_SCTPChunkParam, Packet):
fields_desc = [ ShortEnumField("type", 12, sctpchunkparamtypes),
FieldLenField("len", None, length_of="addr_type_list",
adjust = lambda pkt,x:x+4),
PadField(FieldListField("addr_type_list", [ "IPv4" ],
ShortEnumField("addr_type", 5, sctpchunkparamtypes),
length_from=lambda pkt: pkt.len-4),
4, padwith="\x00"), ]
class SCTPChunkParamECNCapable(_SCTPChunkParam, Packet):
fields_desc = [ ShortEnumField("type", 32768, sctpchunkparamtypes),
ShortField("len", 4), ]
class SCTPChunkParamFwdTSN(_SCTPChunkParam, Packet):
fields_desc = [ ShortEnumField("type", 49152, sctpchunkparamtypes),
ShortField("len", 4), ]
class SCTPChunkParamAdaptationLayer(_SCTPChunkParam, Packet):
fields_desc = [ ShortEnumField("type", 49158, sctpchunkparamtypes),
ShortField("len", 8),
XIntField("indication", None), ]
############## SCTP Chunks
class SCTPChunkData(_SCTPChunkGuessPayload, Packet):
fields_desc = [ ByteEnumField("type", 0, sctpchunktypes),
BitField("reserved", None, 4),
BitField("delay_sack", 0, 1),
BitField("unordered", 0, 1),
BitField("beginning", 0, 1),
BitField("ending", 0, 1),
FieldLenField("len", None, length_of="data", adjust = lambda pkt,x:x+16),
XIntField("tsn", None),
XShortField("stream_id", None),
XShortField("stream_seq", None),
XIntField("proto_id", None),
PadField(StrLenField("data", None, length_from=lambda pkt: pkt.len-16),
4, padwith="\x00"),
]
class SCTPChunkInit(_SCTPChunkGuessPayload, Packet):
fields_desc = [ ByteEnumField("type", 1, sctpchunktypes),
XByteField("flags", None),
FieldLenField("len", None, length_of="params", adjust = lambda pkt,x:x+20),
XIntField("init_tag", None),
IntField("a_rwnd", None),
ShortField("n_out_streams", None),
ShortField("n_in_streams", None),
XIntField("init_tsn", None),
ChunkParamField("params", None, length_from=lambda pkt:pkt.len-20),
]
class SCTPChunkInitAck(_SCTPChunkGuessPayload, Packet):
fields_desc = [ ByteEnumField("type", 2, sctpchunktypes),
XByteField("flags", None),
FieldLenField("len", None, length_of="params", adjust = lambda pkt,x:x+20),
XIntField("init_tag", None),
IntField("a_rwnd", None),
ShortField("n_out_streams", None),
ShortField("n_in_streams", None),
XIntField("init_tsn", None),
ChunkParamField("params", None, length_from=lambda pkt:pkt.len-20),
]
class GapAckField(Field):
def __init__(self, name, default):
Field.__init__(self, name, default, "4s")
def i2m(self, pkt, x):
if x is None:
return "\0\0\0\0"
sta, end = map(int, x.split(":"))
args = tuple([">HH", sta, end])
return struct.pack(*args)
def m2i(self, pkt, x):
return "%d:%d"%(struct.unpack(">HH", x))
def any2i(self, pkt, x):
if type(x) is tuple and len(x) == 2:
return "%d:%d"%(x)
return x
class SCTPChunkSACK(_SCTPChunkGuessPayload, Packet):
fields_desc = [ ByteEnumField("type", 3, sctpchunktypes),
XByteField("flags", None),
ShortField("len", None),
XIntField("cumul_tsn_ack", None),
IntField("a_rwnd", None),
FieldLenField("n_gap_ack", None, count_of="gap_ack_list"),
FieldLenField("n_dup_tsn", None, count_of="dup_tsn_list"),
FieldListField("gap_ack_list", [ ], GapAckField("gap_ack", None), count_from=lambda pkt:pkt.n_gap_ack),
FieldListField("dup_tsn_list", [ ], XIntField("dup_tsn", None), count_from=lambda pkt:pkt.n_dup_tsn),
]
def post_build(self, p, pay):
if self.len is None:
p = p[:2] + struct.pack(">H", len(p)) + p[4:]
return p+pay
class SCTPChunkHeartbeatReq(_SCTPChunkGuessPayload, Packet):
fields_desc = [ ByteEnumField("type", 4, sctpchunktypes),
XByteField("flags", None),
FieldLenField("len", None, length_of="params", adjust = lambda pkt,x:x+4),
ChunkParamField("params", None, length_from=lambda pkt:pkt.len-4),
]
class SCTPChunkHeartbeatAck(_SCTPChunkGuessPayload, Packet):
fields_desc = [ ByteEnumField("type", 5, sctpchunktypes),
XByteField("flags", None),
FieldLenField("len", None, length_of="params", adjust = lambda pkt,x:x+4),
ChunkParamField("params", None, length_from=lambda pkt:pkt.len-4),
]
class SCTPChunkAbort(_SCTPChunkGuessPayload, Packet):
fields_desc = [ ByteEnumField("type", 6, sctpchunktypes),
BitField("reserved", None, 7),
BitField("TCB", 0, 1),
FieldLenField("len", None, length_of="error_causes", adjust = lambda pkt,x:x+4),
PadField(StrLenField("error_causes", "", length_from=lambda pkt: pkt.len-4),
4, padwith="\x00"),
]
class SCTPChunkShutdown(_SCTPChunkGuessPayload, Packet):
fields_desc = [ ByteEnumField("type", 7, sctpchunktypes),
XByteField("flags", None),
ShortField("len", 8),
XIntField("cumul_tsn_ack", None),
]
class SCTPChunkShutdownAck(_SCTPChunkGuessPayload, Packet):
fields_desc = [ ByteEnumField("type", 8, sctpchunktypes),
XByteField("flags", None),
ShortField("len", 4),
]
class SCTPChunkError(_SCTPChunkGuessPayload, Packet):
fields_desc = [ ByteEnumField("type", 9, sctpchunktypes),
XByteField("flags", None),
FieldLenField("len", None, length_of="error_causes", adjust = lambda pkt,x:x+4),
PadField(StrLenField("error_causes", "", length_from=lambda pkt: pkt.len-4),
4, padwith="\x00"),
]
class SCTPChunkCookieEcho(_SCTPChunkGuessPayload, Packet):
fields_desc = [ ByteEnumField("type", 10, sctpchunktypes),
XByteField("flags", None),
FieldLenField("len", None, length_of="cookie", adjust = lambda pkt,x:x+4),
PadField(StrLenField("cookie", "", length_from=lambda pkt: pkt.len-4),
4, padwith="\x00"),
]
class SCTPChunkCookieAck(_SCTPChunkGuessPayload, Packet):
fields_desc = [ ByteEnumField("type", 11, sctpchunktypes),
XByteField("flags", None),
ShortField("len", 4),
]
class SCTPChunkShutdownComplete(_SCTPChunkGuessPayload, Packet):
    fields_desc = [ ByteEnumField("type", 14, sctpchunktypes),
BitField("reserved", None, 7),
BitField("TCB", 0, 1),
ShortField("len", 4),
]
bind_layers( IP, SCTP, proto=IPPROTO_SCTP)
bind_layers( IPv6, SCTP, nh=IPPROTO_SCTP)
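# Illustrative usage sketch (not part of the original module): building an SCTP
# INIT packet from the layers defined above. Serializing the packet triggers
# SCTP.post_build(), which fills the checksum field with crc32c() over the whole
# SCTP datagram. The addresses, ports and parameter values below are arbitrary
# placeholders, not protocol defaults.
if __name__ == "__main__":
    pkt = (IP(dst="192.0.2.1") / SCTP(sport=5000, dport=5000) /
           SCTPChunkInit(params=[SCTPChunkParamIPv4Addr(addr="192.0.2.2")]))
    raw = str(pkt)   # build: post_build() inserts the CRC32C checksum here
    pkt.show2()      # dissect the built bytes back into layers for inspection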
| mytliulei/Scapy | scapy/layers/sctp.py | Python | apache-2.0 | 17,954 | 0.011474 |
"""Module to debug python programs"""
import sys
import traceback
def getAllStacks():
code = []
for threadId, stack in sys._current_frames().iteritems():
code.append("\n# ThreadID: %s" % threadId)
for filename, lineno, name, line in traceback.extract_stack(stack):
code.append('File: "%s", line %d, in %s' % (filename,
lineno, name))
if line:
code.append(" %s" % (line.strip()))
return code
def strStacks():
out = "\n*** STACKTRACE - START ***\n"
out += "\n".join(getAllStacks())
out += "\n*** STACKTRACE - END ***\n"
return out
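# Illustrative usage sketch (not part of the original module): dump the stack of
# every running thread, e.g. to diagnose a hang. Writing the report to stderr is
# an arbitrary choice made only for this example.
if __name__ == '__main__':
    sys.stderr.write(strStacks())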
| netixx/python-tools | tools/debugger.py | Python | apache-2.0 | 674 | 0.004451 |
from panda3d.core import TrueClock
from direct.directnotify.DirectNotifyGlobal import directNotify
from direct.showbase.PythonUtil import (
    Default, StdoutCapture, _installProfileCustomFuncs, _removeProfileCustomFuncs,
    _getProfileResultFileInfo, _setProfileResultsFileInfo)
import __builtin__
import profile
import pstats
class PercentStats(pstats.Stats):
# prints more useful output when sampled durations are shorter than a millisecond
# lots of this is copied from Python's pstats.py
def setTotalTime(self, tt):
# use this to set 'total time' to base time percentages on
# allows profiles to show timing based on percentages of duration of another profile
self._totalTime = tt
def add(self, *args, **kArgs):
pstats.Stats.add(self, *args, **kArgs)
# DCR -- don't need to record filenames
self.files = []
def print_stats(self, *amount):
for filename in self.files:
print filename
if self.files: print
indent = ' ' * 8
for func in self.top_level:
            print indent, pstats.func_get_function_name(func)
print indent, self.total_calls, "function calls",
if self.total_calls != self.prim_calls:
print "(%d primitive calls)" % self.prim_calls,
# DCR
#print "in %.3f CPU seconds" % self.total_tt
print "in %s CPU milliseconds" % (self.total_tt * 1000.)
if self._totalTime != self.total_tt:
print indent, 'percentages are of %s CPU milliseconds' % (self._totalTime * 1000)
print
width, list = self.get_print_list(amount)
if list:
self.print_title()
for func in list:
self.print_line(func)
print
# DCR
#print
return self
def f8(self, x):
if self._totalTime == 0.:
# profiling was too quick for clock resolution...
return ' Inf%'
return "%7.2f%%" % ((x*100.) / self._totalTime)
@staticmethod
def func_std_string(func_name): # match what old profile produced
return "%s:%d(%s)" % func_name
def print_line(self, func):
cc, nc, tt, ct, callers = self.stats[func]
c = str(nc)
# DCR
f8 = self.f8
if nc != cc:
c = c + '/' + str(cc)
print c.rjust(9),
print f8(tt),
if nc == 0:
print ' '*8,
else:
print f8(tt/nc),
print f8(ct),
if cc == 0:
print ' '*8,
else:
print f8(ct/cc),
# DCR
#print func_std_string(func)
print PercentStats.func_std_string(func)
class ProfileSession:
# class that encapsulates a profile of a single callable using Python's standard
# 'profile' module
#
# defers formatting of profile results until they are requested
#
# implementation sidesteps memory leak in Python profile module,
# and redirects file output to RAM file for efficiency
TrueClock = TrueClock.getGlobalPtr()
notify = directNotify.newCategory("ProfileSession")
def __init__(self, name, func=None, logAfterProfile=False):
self._func = func
self._name = name
self._logAfterProfile = logAfterProfile
self._filenameBase = 'profileData-%s-%s' % (self._name, id(self))
self._refCount = 0
# if true, accumulate profile results every time we run
# if false, throw out old results every time we run
self._aggregate = False
self._lines = 500
self._sorts = ['cumulative', 'time', 'calls']
self._callInfo = True
self._totalTime = None
self._reset()
self.acquire()
def getReference(self):
# call this when you want to store a new reference to this session that will
# manage its acquire/release reference count independently of an existing reference
self.acquire()
return self
def acquire(self):
self._refCount += 1
def release(self):
self._refCount -= 1
if not self._refCount:
self._destroy()
def _destroy(self):
del self._func
del self._name
del self._filenameBase
del self._filenameCounter
del self._filenames
del self._duration
del self._filename2ramFile
del self._resultCache
del self._successfulProfiles
def _reset(self):
self._filenameCounter = 0
self._filenames = []
# index of next file to be added to stats object
self._statFileCounter = 0
self._successfulProfiles = 0
self._duration = None
self._filename2ramFile = {}
self._stats = None
self._resultCache = {}
def _getNextFilename(self):
filename = '%s-%s' % (self._filenameBase, self._filenameCounter)
self._filenameCounter += 1
return filename
def run(self):
# make sure this instance doesn't get destroyed inside self._func
self.acquire()
if not self._aggregate:
self._reset()
# if we're already profiling, just run the func and don't profile
if 'globalProfileSessionFunc' in __builtin__.__dict__:
self.notify.warning('could not profile %s' % self._func)
result = self._func()
if self._duration is None:
self._duration = 0.
else:
# put the function in the global namespace so that profile can find it
assert hasattr(self._func, '__call__')
__builtin__.globalProfileSessionFunc = self._func
__builtin__.globalProfileSessionResult = [None]
# set up the RAM file
self._filenames.append(self._getNextFilename())
filename = self._filenames[-1]
_installProfileCustomFuncs(filename)
# do the profiling
Profile = profile.Profile
statement = 'globalProfileSessionResult[0]=globalProfileSessionFunc()'
sort = -1
retVal = None
# this is based on profile.run, the code is replicated here to allow us to
# eliminate a memory leak
prof = Profile()
try:
prof = prof.run(statement)
except SystemExit:
pass
# this has to be run immediately after profiling for the timings to be accurate
# tell the Profile object to generate output to the RAM file
prof.dump_stats(filename)
# eliminate the memory leak
del prof.dispatcher
# store the RAM file for later
profData = _getProfileResultFileInfo(filename)
self._filename2ramFile[filename] = profData
# calculate the duration (this is dependent on the internal Python profile data format.
# see profile.py and pstats.py, this was copied from pstats.Stats.strip_dirs)
maxTime = 0.
for cc, nc, tt, ct, callers in profData[1].itervalues():
if ct > maxTime:
maxTime = ct
self._duration = maxTime
# clean up the RAM file support
_removeProfileCustomFuncs(filename)
# clean up the globals
result = globalProfileSessionResult[0]
del __builtin__.__dict__['globalProfileSessionFunc']
del __builtin__.__dict__['globalProfileSessionResult']
self._successfulProfiles += 1
if self._logAfterProfile:
self.notify.info(self.getResults())
self.release()
return result
def getDuration(self):
return self._duration
def profileSucceeded(self):
return self._successfulProfiles > 0
def _restoreRamFile(self, filename):
# set up the RAM file
_installProfileCustomFuncs(filename)
# install the stored RAM file from self.run()
_setProfileResultsFileInfo(filename, self._filename2ramFile[filename])
def _discardRamFile(self, filename):
# take down the RAM file
_removeProfileCustomFuncs(filename)
# and discard it
del self._filename2ramFile[filename]
def setName(self, name):
self._name = name
def getName(self):
return self._name
def setFunc(self, func):
self._func = func
def getFunc(self):
return self._func
def setAggregate(self, aggregate):
self._aggregate = aggregate
def getAggregate(self):
return self._aggregate
def setLogAfterProfile(self, logAfterProfile):
self._logAfterProfile = logAfterProfile
def getLogAfterProfile(self):
return self._logAfterProfile
def setLines(self, lines):
self._lines = lines
def getLines(self):
return self._lines
def setSorts(self, sorts):
self._sorts = sorts
def getSorts(self):
return self._sorts
    def setShowCallInfo(self, showCallInfo):
        self._callInfo = showCallInfo
    def getShowCallInfo(self):
        return self._callInfo
def setTotalTime(self, totalTime=None):
self._totalTime = totalTime
def resetTotalTime(self):
self._totalTime = None
def getTotalTime(self):
return self._totalTime
def aggregate(self, other):
# pull in stats from another ProfileSession
other._compileStats()
self._compileStats()
self._stats.add(other._stats)
def _compileStats(self):
# make sure our stats object exists and is up-to-date
statsChanged = (self._statFileCounter < len(self._filenames))
if self._stats is None:
for filename in self._filenames:
self._restoreRamFile(filename)
self._stats = PercentStats(*self._filenames)
self._statFileCounter = len(self._filenames)
for filename in self._filenames:
self._discardRamFile(filename)
else:
while self._statFileCounter < len(self._filenames):
filename = self._filenames[self._statFileCounter]
self._restoreRamFile(filename)
self._stats.add(filename)
self._discardRamFile(filename)
if statsChanged:
self._stats.strip_dirs()
# throw out any cached result strings
self._resultCache = {}
return statsChanged
def getResults(self,
lines=Default,
sorts=Default,
callInfo=Default,
totalTime=Default):
if not self.profileSucceeded():
output = '%s: profiler already running, could not profile' % self._name
else:
if lines is Default:
lines = self._lines
if sorts is Default:
sorts = self._sorts
if callInfo is Default:
callInfo = self._callInfo
if totalTime is Default:
totalTime = self._totalTime
self._compileStats()
if totalTime is None:
totalTime = self._stats.total_tt
# make sure the arguments will hash efficiently if callers provide different types
lines = int(lines)
sorts = list(sorts)
callInfo = bool(callInfo)
totalTime = float(totalTime)
k = str((lines, sorts, callInfo, totalTime))
if k in self._resultCache:
# we've already created this output string, get it from the cache
output = self._resultCache[k]
else:
# now get human-readable output from the profile stats
# capture print output to a string
sc = StdoutCapture()
# print the info to stdout
s = self._stats
# make sure our percentages are relative to the correct total time
s.setTotalTime(totalTime)
for sort in sorts:
s.sort_stats(sort)
s.print_stats(lines)
if callInfo:
s.print_callees(lines)
s.print_callers(lines)
# make a copy of the print output
output = sc.getString()
# restore stdout to what it was before
sc.destroy()
# cache this result
self._resultCache[k] = output
return output
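# Illustrative usage sketch (not part of the original module): profiling a small
# callable and printing the percentage-based report. The session name, workload
# and output settings below are arbitrary demonstration values.
if __name__ == '__main__':
    def _demoWork():
        return sum(i * i for i in xrange(100000))
    session = ProfileSession('demo', func=_demoWork)
    session.run()     # runs _demoWork() under the profiler, output kept in RAM
    print session.getResults(lines=20, sorts=['cumulative'], callInfo=False)
    session.release() # drop our reference so the session cleans up its state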
| mgracer48/panda3d | direct/src/showbase/ProfileSession.py | Python | bsd-3-clause | 12,549 | 0.002789 |
'''
Mac OS X file chooser
---------------------
'''
from plyer.facades import FileChooser
from pyobjus import autoclass, objc_arr, objc_str
from pyobjus.dylib_manager import load_framework, INCLUDE
load_framework(INCLUDE.AppKit)
NSURL = autoclass('NSURL')
NSOpenPanel = autoclass('NSOpenPanel')
NSSavePanel = autoclass('NSSavePanel')
NSOKButton = 1
class MacFileChooser(object):
'''A native implementation of file chooser dialogs using Apple's API
through pyobjus.
    Features not yet implemented:
    * filters (partial support: wildcards are converted to extensions when
        possible. Pass the Mac-specific "use_extensions" option if you can
        provide Mac OS X-compatible extensions yourself, to avoid the
        automatic conversion)
    * multiple (not supported for the save dialog; available in the open dialog)
* icon
* preview
'''
mode = "open"
path = None
multiple = False
filters = []
preview = False
title = None
icon = None
show_hidden = False
use_extensions = False
def __init__(self, **kwargs):
# Simulate Kivy's behavior
for i in kwargs:
setattr(self, i, kwargs[i])
def run(self):
panel = None
if self.mode in ("open", "dir"):
panel = NSOpenPanel.openPanel()
else:
panel = NSSavePanel.savePanel()
panel.setCanCreateDirectories_(True)
panel.setCanChooseDirectories_(self.mode == "dir")
panel.setCanChooseFiles_(self.mode != "dir")
panel.setShowsHiddenFiles_(self.show_hidden)
if self.title:
panel.setTitle_(objc_str(self.title))
if self.mode != "save" and self.multiple:
panel.setAllowsMultipleSelection_(True)
        # Unlike the other platforms, Mac OS X does not support wildcards.
        # This tries to convert wildcards to "extensions" when possible,
        # and sets the panel to also allow other file types, just to be safe.
if len(self.filters) > 0:
filthies = []
for f in self.filters:
if type(f) == str:
if not self.use_extensions:
if f.strip().endswith("*"):
continue
pystr = f.strip().split("*")[-1].split(".")[-1]
filthies.append(objc_str(pystr))
else:
for _ in f[1:]:
if not self.use_extensions:
if f.strip().endswith("*"):
continue
pystr = f.strip().split("*")[-1].split(".")[-1]
filthies.append(objc_str(pystr))
ftypes_arr = objc_arr(filthies)
panel.setAllowedFileTypes_(ftypes_arr)
panel.setAllowsOtherFileTypes_(not self.use_extensions)
if self.path:
url = NSURL.fileURLWithPath_(self.path)
panel.setDirectoryURL_(url)
if panel.runModal():
if self.mode == "save" or not self.multiple:
return [panel.filename().UTF8String()]
else:
return [i.UTF8String() for i in panel.filenames()]
return None
class MacOSXFileChooser(FileChooser):
    '''FileChooser implementation for Mac OS X, using pyobjus.
'''
def _file_selection_dialog(self, **kwargs):
return MacFileChooser(**kwargs).run()
def instance():
return MacOSXFileChooser()
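# Illustrative usage sketch (not part of the original module): opening a native
# "open file" dialog restricted to plain-text files. The title and filter values
# below are arbitrary demonstration choices.
if __name__ == '__main__':
    chooser = MacFileChooser(mode="open", title="Pick a text file",
                             filters=["*.txt"], multiple=True)
    print(chooser.run())  # list of selected paths, or None if cancelled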
| KeyWeeUsr/plyer | plyer/platforms/macosx/filechooser.py | Python | mit | 3,427 | 0 |
#!/usr/bin/python
################################################################################
##3456789 123456789 123456789 123456789 123456789 123456789 123456789 123456789
## 10 20 30 40 50 60 70 80
##
## Info:
##    Example of how to use libnotify correctly while also complying with
## the new jaunty notification spec (read: visual guidelines)
##
## Run:
## chmod +x summary-body.py
## ./summary-body.py
##
## Copyright 2009 Canonical Ltd.
##
## Author:
## Mirco "MacSlow" Mueller <mirco.mueller@canonical.com>
##
## This program is free software: you can redistribute it and/or modify it
## under the terms of the GNU General Public License version 3, as published
## by the Free Software Foundation.
##
## This program is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranties of
## MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR
## PURPOSE. See the GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License along
## with this program. If not, see <http://www.gnu.org/licenses/>.
##
################################################################################
import sys
import pynotify
# even in Python this is globally nasty :), do something nicer in your own code
capabilities = {'actions': False,
'body': False,
'body-hyperlinks': False,
'body-images': False,
'body-markup': False,
'icon-multi': False,
'icon-static': False,
'sound': False,
'image/svg+xml': False,
'x-canonical-private-synchronous': False,
'x-canonical-append': False,
'x-canonical-private-icon-only': False,
'x-canonical-truncation': False}
def initCaps ():
caps = pynotify.get_server_caps ()
if caps is None:
print "Failed to receive server caps."
sys.exit (1)
for cap in caps:
capabilities[cap] = True
def printCaps ():
info = pynotify.get_server_info ()
print "Name: " + info["name"]
print "Vendor: " + info["vendor"]
print "Version: " + info["version"]
print "Spec. Version: " + info["spec-version"]
caps = pynotify.get_server_caps ()
if caps is None:
print "Failed to receive server caps."
sys.exit (1)
print "Supported capabilities/hints:"
if capabilities['actions']:
print "\tactions"
if capabilities['body']:
print "\tbody"
if capabilities['body-hyperlinks']:
print "\tbody-hyperlinks"
if capabilities['body-images']:
print "\tbody-images"
if capabilities['body-markup']:
print "\tbody-markup"
if capabilities['icon-multi']:
print "\ticon-multi"
if capabilities['icon-static']:
print "\ticon-static"
if capabilities['sound']:
print "\tsound"
if capabilities['image/svg+xml']:
print "\timage/svg+xml"
if capabilities['x-canonical-private-synchronous']:
print "\tx-canonical-private-synchronous"
if capabilities['x-canonical-append']:
print "\tx-canonical-append"
if capabilities['x-canonical-private-icon-only']:
print "\tx-canonical-private-icon-only"
if capabilities['x-canonical-truncation']:
print "\tx-canonical-truncation"
print "Notes:"
if info["name"] == "notify-osd":
print "\tx- and y-coordinates hints are ignored"
print "\texpire-timeout is ignored"
print "\tbody-markup is accepted but filtered"
else:
print "\tnone"
if __name__ == '__main__':
if not pynotify.init ("summary-body"):
sys.exit (1)
# call this so we can savely use capabilities dictionary later
initCaps ()
# show what's supported
printCaps ()
# try the summary-body case
n = pynotify.Notification ("Totem",
"This is a superfluous notification")
n.show ()
| dkasak/notify-osd-customizable | examples/summary-body.py | Python | gpl-3.0 | 3,917 | 0.032678 |
# OpenSCAD gui init module
#
# Gathering all the information to start FreeCAD
# This is the second one of three init scripts, the third one
# runs when the gui is up
#***************************************************************************
#* (c) Juergen Riegel (juergen.riegel@web.de) 2002 *
#* *
#* This file is part of the FreeCAD CAx development system. *
#* *
#* This program is free software; you can redistribute it and/or modify *
#* it under the terms of the GNU Lesser General Public License (LGPL) *
#* as published by the Free Software Foundation; either version 2 of *
#* the License, or (at your option) any later version. *
#* for detail see the LICENCE text file. *
#* *
#* FreeCAD is distributed in the hope that it will be useful, *
#* but WITHOUT ANY WARRANTY; without even the implied warranty of *
#* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
#* GNU Lesser General Public License for more details. *
#* *
#* You should have received a copy of the GNU Library General Public *
#* License along with FreeCAD; if not, write to the Free Software *
#* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
#* USA *
#* *
#* Juergen Riegel 2002 *
#***************************************************************************/
import FreeCAD
class OpenSCADWorkbench ( Workbench ):
"OpenSCAD workbench object"
def __init__(self):
self.__class__.Icon = FreeCAD.getResourceDir() + "Mod/OpenSCAD/Resources/icons/OpenSCADWorkbench.svg"
self.__class__.MenuText = "OpenSCAD"
self.__class__.ToolTip = (
"OpenSCAD is an application for creating solid 3D CAD.\n"
"FreeCAD utizes OpenSCAD's capability as a script-only based modeller that uses its own description language\n"
"Note: the Mesh workbench heavily uses the boolean operations of this workbench because they are quite robust"
)
def Initialize(self):
def QT_TRANSLATE_NOOP(scope, text):
return text
import OpenSCAD_rc,OpenSCADCommands
commands = ['OpenSCAD_ReplaceObject','OpenSCAD_RemoveSubtree',
'OpenSCAD_RefineShapeFeature','OpenSCAD_MirrorMeshFeature',
'OpenSCAD_ScaleMeshFeature','OpenSCAD_ResizeMeshFeature','OpenSCAD_IncreaseToleranceFeature',
'OpenSCAD_Edgestofaces', 'OpenSCAD_ExpandPlacements','OpenSCAD_ExplodeGroup']
toolbarcommands = ['OpenSCAD_ReplaceObject','OpenSCAD_RemoveSubtree',
'OpenSCAD_ExplodeGroup','OpenSCAD_RefineShapeFeature',
'OpenSCAD_IncreaseToleranceFeature']
import PartGui
parttoolbarcommands = ['Part_CheckGeometry','Part_Primitives',
'Part_Builder','Part_Cut','Part_Fuse','Part_Common',
'Part_Extrude','Part_Revolve']
import FreeCAD
param = FreeCAD.ParamGet("User parameter:BaseApp/Preferences/Mod/OpenSCAD")
openscadfilename = param.GetString('openscadexecutable')
if not openscadfilename:
import OpenSCADUtils
openscadfilename = OpenSCADUtils.searchforopenscadexe()
if openscadfilename: #automatic search was succsessful
FreeCAD.addImportType("OpenSCAD Format (*.scad)","importCSG")
param.SetString('openscadexecutable',openscadfilename) #save the result
if openscadfilename:
commands.extend(['OpenSCAD_AddOpenSCADElement', 'OpenSCAD_MeshBoolean',
'OpenSCAD_Hull','OpenSCAD_Minkowski'])
toolbarcommands.extend(['OpenSCAD_AddOpenSCADElement', 'OpenSCAD_MeshBoolean',
'OpenSCAD_Hull','OpenSCAD_Minkowski'])
else:
FreeCAD.Console.PrintWarning('OpenSCAD executable not found\n')
self.appendToolbar(QT_TRANSLATE_NOOP('Workbench','OpenSCADTools'),toolbarcommands)
self.appendMenu('OpenSCAD',commands)
        self.appendToolbar(QT_TRANSLATE_NOOP('Workbench','OpenSCAD Part tools'),parttoolbarcommands)
#self.appendMenu('OpenSCAD',["AddOpenSCADElement"])
###self.appendCommandbar("&Generic Tools",["ColorCodeShape"])
FreeCADGui.addIconPath(":/icons")
FreeCADGui.addLanguagePath(":/translations")
FreeCADGui.addPreferencePage(":/ui/openscadprefs-base.ui","OpenSCAD")
def GetClassName(self):
return "Gui::PythonWorkbench"
Gui.addWorkbench(OpenSCADWorkbench())
# Not all of the GUI tests will require an OpenSCAD binary (CSG import and export don't)
FreeCAD.__unit_test__ += ["TestOpenSCADGui"]
| sanguinariojoe/FreeCAD | src/Mod/OpenSCAD/InitGui.py | Python | lgpl-2.1 | 5,293 | 0.013603 |
__all__ = [
'diff', 'dirs', 'flushfile', 'id',
'stats', 'sync'
]
| bkahlert/seqan-research | raw/workshop11/workshop2011-data-20110925/trunk/misc/seqan_instrumentation/py2exe/dist/classes/__init__.py | Python | mit | 81 | 0 |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A concrete prensor tree.
PrensorValue represents a tree where all the nodes are represented as ndarrays,
instead of tensors.
prensor = ...
assert isinstance(prensor, struct2tensor.Prensor)
with tf.Session() as sess:
prensor_value = sess.run(prensor)
assert isinstance(prensor_value, struct2tensor.PrensorValue)
"""
import collections
from typing import FrozenSet, Iterator, Mapping, Optional, Sequence, Union
import numpy as np
from struct2tensor import path
from struct2tensor import prensor
import tensorflow as tf
from tensorflow.python.client import session as session_lib # pylint: disable=g-direct-tensorflow-import
class RootNodeValue(object):
"""The value of the root."""
__slots__ = ["_size"]
def __init__(self, size: np.int64):
"""Creates a root node.
Args:
size: how many root objects there are.
"""
self._size = size
@property
def size(self):
return self._size
@property
def is_repeated(self):
return True
def schema_string(self):
return "repeated"
def data_string(self):
return "size: {}".format(self._size)
def __str__(self):
return "RootNode"
class ChildNodeValue(object):
"""The value of an intermediate node."""
__slots__ = ["_parent_index", "_is_repeated"]
def __init__(self, parent_index: np.ndarray, is_repeated: bool):
"""Creates a child node.
Args:
parent_index: a 1-D int64 ndarray where parent_index[i] represents the
parent index of the ith child.
is_repeated: a bool indicating if there can be more than one child per
parent.
"""
self._parent_index = parent_index
self._is_repeated = is_repeated
@property
def size(self):
"""Returns the size, as if this was the root prensor.
Returns:
A 1-D ndarray of size 1.
"""
return tf.shape(self.parent_index, out_type=tf.int64)
@property
def parent_index(self):
return self._parent_index
@property
def is_repeated(self):
return self._is_repeated
def schema_string(self) -> str:
return "repeated" if self.is_repeated else "optional"
def data_string(self):
return "parent_index: {}".format(self._parent_index)
def __str__(self):
return "ChildNode {} {}".format(self.schema_string(), self.data_string())
class LeafNodeValue(object):
"""The value of a leaf node."""
__slots__ = ["_parent_index", "_values", "_is_repeated"]
def __init__(self, parent_index: np.ndarray, values: np.ndarray,
is_repeated: bool):
"""Creates a leaf node.
Args:
parent_index: a 1-D int64 ndarray where parent_index[i] represents the
parent index of values[i]
values: a 1-D ndarray of equal length to parent_index.
is_repeated: a bool indicating if there can be more than one child per
parent.
"""
self._parent_index = parent_index
self._values = values
self._is_repeated = is_repeated
@property
def parent_index(self):
return self._parent_index
@property
def is_repeated(self):
return self._is_repeated
@property
def values(self):
return self._values
def data_string(self):
return "parent_index: {} values: {}".format(self._parent_index,
self._values)
def schema_string(self) -> str:
return u"{} {}".format("repeated" if self.is_repeated else "optional",
str(self.values.dtype))
def __str__(self):
return "{} {}".format("repeated" if self.is_repeated else "optional",
str(self.values.dtype))
NodeValue = Union[RootNodeValue, ChildNodeValue, LeafNodeValue] # pylint: disable=invalid-name
class PrensorValue(object):
"""A tree of NodeValue objects."""
__slots__ = ["_node", "_children"]
def __init__(self, node: NodeValue,
children: "collections.OrderedDict[path.Step, PrensorValue]"):
"""Construct a PrensorValue.
Do not call directly, instead call materialize(...) below.
Args:
node: the NodeValue of the root.
children: a map from edge to subtree.
"""
self._node = node
self._children = children
# TODO(martinz): This could be Value.
@property
def node(self) -> NodeValue:
"""The node of the root of the subtree."""
return self._node
def get_child(self, field_name: path.Step) -> Optional["PrensorValue"]:
"""Gets the child at field_name."""
return self._children.get(field_name)
def is_leaf(self) -> bool:
"""True iff the node value is a LeafNodeValue."""
return isinstance(self._node, LeafNodeValue)
def get_child_or_error(self, field_name: path.Step) -> "PrensorValue":
"""Gets the child at field_name."""
result = self._children.get(field_name)
if result is not None:
return result
raise ValueError("Field not found: {}".format(str(field_name)))
def get_descendant(self, p: path.Path) -> Optional["PrensorValue"]:
"""Finds the descendant at the path."""
result = self
for field_name in p.field_list:
result = result.get_child(field_name)
if result is None:
return None
return result
def get_descendant_or_error(self, p: path.Path) -> "PrensorValue":
"""Finds the descendant at the path."""
result = self.get_descendant(p)
if result is None:
raise ValueError("Missing path: {}".format(str(p)))
return result
def get_children(self) -> Mapping[path.Step, "PrensorValue"]:
"""A map from field name to subtree."""
return self._children
def get_descendants(self) -> Mapping[path.Path, "PrensorValue"]:
"""A map from paths to all subtrees."""
result = {path.Path([]): self}
for k, v in self._children.items():
subtree_descendants = v.get_descendants()
for k2, v2 in subtree_descendants.items():
result[path.Path([k]).concat(k2)] = v2
return result
def field_names(self) -> FrozenSet[path.Step]:
"""Returns the field names of the children."""
return frozenset(self._children.keys())
def _string_helper(self, field_name: str) -> Sequence[str]:
"""Helper for __str__ that outputs a list of lines."""
result = [
"{} {} {}".format(self.node.schema_string(), str(field_name),
self.node.data_string())
]
for k, v in self._children.items():
recursive = v._string_helper(k) # pylint: disable=protected-access
result.extend([" {}".format(x) for x in recursive])
return result
def _schema_string_helper(self, field_name: str) -> Sequence[str]:
"""Helper for __str__ that outputs a list of lines."""
result = [u"{} {}".format(self.node.schema_string(), str(field_name))]
for k, v in self._children.items():
recursive = v._string_helper(k) # pylint: disable=protected-access
result.extend([u" {}".format(x) for x in recursive])
return result
def schema_string(self):
"""Returns a string representing the schema of the Prensor."""
return u"\n".join(self._schema_string_helper(""))
def __str__(self):
"""Returns a string representing the schema of the Prensor."""
return "\n".join(self._string_helper(""))
def _prensor_value_from_type_spec_and_component_values(
prensor_type_spec: prensor._PrensorTypeSpec,
component_values: Iterator[Union[int, np.ndarray]]) -> PrensorValue:
"""Creates a PrensorValue from a _PrensorTypeSpec and components."""
# pylint: disable=protected-access
if prensor_type_spec._node_type == prensor_type_spec._NodeType.ROOT:
node = RootNodeValue(next(component_values))
elif prensor_type_spec._node_type == prensor_type_spec._NodeType.CHILD:
node = ChildNodeValue(next(component_values),
prensor_type_spec._is_repeated)
else:
parent_index = next(component_values)
values = next(component_values)
node = LeafNodeValue(parent_index, values, prensor_type_spec._is_repeated)
step_to_child = collections.OrderedDict()
for step, child_spec in prensor_type_spec._children_specs:
step_to_child[step] = _prensor_value_from_type_spec_and_component_values(
child_spec, component_values)
return PrensorValue(node, step_to_child)
def _prensor_value_fetch(prensor_tree: prensor.Prensor):
"""Fetch function for PrensorValue. See the document in session_lib."""
# pylint: disable=protected-access
type_spec = prensor_tree._type_spec
components = type_spec._to_components(prensor_tree)
def _construct_prensor_value(component_values):
return _prensor_value_from_type_spec_and_component_values(
type_spec, iter(component_values))
return components, _construct_prensor_value
session_lib.register_session_run_conversion_functions(
prensor.Prensor,
_prensor_value_fetch,
feed_function=None,
feed_function_for_partial_run=None)
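# Illustrative sketch (not part of the original module): manually assembling a
# tiny PrensorValue -- two root records with a repeated int64 leaf field "foo" --
# and printing its schema and leaf data. The field name and values are arbitrary.
def _demo_prensor_value():
  leaf = LeafNodeValue(
      parent_index=np.array([0, 0, 1], dtype=np.int64),
      values=np.array([7, 8, 9], dtype=np.int64),
      is_repeated=True)
  children = collections.OrderedDict(
      [("foo", PrensorValue(leaf, collections.OrderedDict()))])
  tree = PrensorValue(RootNodeValue(np.int64(2)), children)
  print(tree.schema_string())
  print(tree.get_descendant_or_error(path.Path(["foo"])).node.data_string())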
| google/struct2tensor | struct2tensor/prensor_value.py | Python | apache-2.0 | 9,379 | 0.00853 |
'''
Copyleft Oct 24, 2015 Arya Iranmehr, PhD Student, Bafna's Lab, UC San Diego, Email: airanmehr@gmail.com
'''
import numpy as np
import pandas as pd
import pylab as plt
import matplotlib as mpl
from matplotlib.cm import *
import os,sys;home=os.path.expanduser('~') +'/'
mpl.rc('font', **{'family': 'serif', 'serif': ['Computer Modern'], 'size':20}) ;
mpl.rc('text', usetex=True)
x=np.arange(0,1,1e-5)[1:-1]
s=0.01
def sig(z): return 1./(1+np.exp(-z))
fig=plt.figure(figsize=(30,10), dpi=100)
fig.hold(True)
y_appr=np.log(x)- np.log(1-x)
y=np.log(x)- (1+s)*np.log(1-x)
x.shape
df=pd.DataFrame([x,y,y_appr],index=['x','y','z']).T
plt.subplot(1,2,1)
plt.plot(y_appr,x, color='red',linewidth=2, label='$\sigma(st/2-c)$')
plt.plot(y,x, color='blue',linewidth=2,label='$x_t$')
plt.xlim([-5,5]);plt.legend(loc='upper left')
plt.ylabel('$x_t$')
plt.xlabel('$st/2-c$')
plt.grid()
plt.subplot(1,2,2)
print (y_appr-y)
plt.plot(y,sig(y)-x, linewidth=2,label='Error');plt.legend(loc='upper left')
plt.ylabel('$|x_t-\sigma(st/2-c)|$')
plt.xlabel('$st/2-c$')
plt.xlim([-5,10])
plt.grid()
plt.suptitle('Approximation vs Exact Value of $x_t$ for $s=${}'.format(s))
# plt.savefig(home+'out/vineet/plots/apprx.png')
plt.show()
| airanmehr/bio | Scripts/TimeSeriesPaper/Plot/DirectvsRNN.py | Python | mit | 1,198 | 0.040902 |
from __future__ import print_function
###################################################################
# Copyright 2013-2017 All Rights Reserved
# Authors: The Paradrop Team
###################################################################
import sys
from paradrop.base import settings
from paradrop.lib.utils.pd_storage import PDStorage
from .chute import Chute
class ChuteStorage(PDStorage):
"""
ChuteStorage class.
This class holds onto the list of Chutes on this AP.
It implements the PDStorage class which allows us to save the chuteList to disk transparently
"""
# Class variable of chute list so all instances see the same thing
chuteList = dict()
def __init__(self, filename=None, save_timer=settings.FC_CHUTESTORAGE_SAVE_TIMER):
if(not filename):
filename = settings.FC_CHUTESTORAGE_FILE
PDStorage.__init__(self, filename, save_timer)
# Has it been loaded?
if(len(ChuteStorage.chuteList) == 0):
self.loadFromDisk()
def setAttr(self, attr):
"""Save our attr however we want (as class variable for all to see)"""
ChuteStorage.chuteList = attr
def getAttr(self):
"""Get our attr (as class variable for all to see)"""
return ChuteStorage.chuteList
def getChuteList(self):
"""Return a list of the names of the chutes we know of."""
return ChuteStorage.chuteList.values()
def getChute(self, name):
"""Returns a reference to a chute we have in our cache, or None."""
return ChuteStorage.chuteList.get(name, None)
def deleteChute(self, ch):
"""Deletes a chute from the chute storage. Can be sent the chute object, or the chute name."""
if (isinstance(ch, Chute)):
del ChuteStorage.chuteList[ch.name]
else:
del ChuteStorage.chuteList[ch]
self.saveToDisk()
def saveChute(self, ch):
"""
Saves the chute provided in our internal chuteList.
Also since we just received a new chute to hold onto we should save our ChuteList to disk.
"""
# check if there is a version of the chute already
oldch = ChuteStorage.chuteList.get(ch.name, None)
if(oldch != None):
# we should merge these chutes so we don't lose any data
oldch.__dict__.update(ch.__dict__)
# TODO: do we need to deal with cache separate? Old code we did
else:
ChuteStorage.chuteList[ch.name] = ch
self.saveToDisk()
def clearChuteStorage(self):
ChuteStorage.chuteList.clear()
self.saveToDisk()
#
# Functions we override to implement PDStorage Properly
#
def attrSaveable(self):
"""Returns True if we should save the ChuteList, otherwise False."""
return (type(ChuteStorage.chuteList) == dict)
@classmethod
def get_chute(cls, name):
return cls.chuteList[name]
if(__name__ == '__main__'): # pragma: no cover
def usage():
print('Usage: $0 -ls : print chute storage details')
exit(0)
try:
if(sys.argv[1] != '-ls'):
usage()
except Exception as e:
print(e)
usage()
cs = ChuteStorage()
chutes = cs.getChuteList()
for ch in chutes:
print(ch)
| ParadropLabs/Paradrop | paradrop/daemon/paradrop/core/chute/chute_storage.py | Python | apache-2.0 | 3,342 | 0.001795 |
# Copyright 2014 Kylincloud
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from django.core.urlresolvers import reverse
from django.template import defaultfilters as filters
from django.utils.translation import ugettext_lazy as _
from django.utils.translation import ungettext_lazy
from horizon import exceptions
from horizon import tables
from horizon.utils import filters as utils_filters
from openstack_dashboard import api
LOG = logging.getLogger(__name__)
class DeleteDHCPAgent(tables.DeleteAction):
@staticmethod
def action_present(count):
return ungettext_lazy(
u"Delete DHCP Agent",
u"Delete DHCP Agents",
count
)
@staticmethod
def action_past(count):
return ungettext_lazy(
u"Deleted DHCP Agent",
u"Deleted DHCP Agents",
count
)
policy_rules = (("network", "delete_agent"),)
def delete(self, request, obj_id):
network_id = self.table.kwargs['network_id']
network_info = api.neutron.network_get(request, network_id)
try:
api.neutron.remove_network_from_dhcp_agent(request, obj_id,
network_id)
api.nova.systemlogs_create(request,
network_info.name,
record_action.DELETEAGENT)
except Exception as e:
msg = _('Failed to delete agent: %s') % e
LOG.info(msg)
api.nova.systemlogs_create(request, network_info.name,
record_action.DELETEAGENT, result=False, detail=msg)
redirect = reverse('horizon:admin:networks:detail',
args=[network_id])
exceptions.handle(request, msg, redirect=redirect)
class AddDHCPAgent(tables.LinkAction):
name = "add"
verbose_name = _("Add DHCP Agent")
url = "horizon:admin:networks:adddhcpagent"
classes = ("ajax-modal",)
icon = "plus"
policy_rules = (("network", "update_agent"),)
def get_link_url(self, datum=None):
network_id = self.table.kwargs['network_id']
return reverse(self.url, args=(network_id,))
def get_agent_status(agent):
if agent.admin_state_up:
return _('Enabled')
return _('Disabled')
def get_agent_state(agent):
if agent.alive:
return _('Up')
return _('Down')
class DHCPAgentsFilterAction(tables.FilterAction):
name = "agents"
class DHCPAgentsTable(tables.DataTable):
id = tables.Column('id', verbose_name=_('ID'), hidden=True)
host = tables.Column('host', verbose_name=_('Host'))
status = tables.Column(get_agent_status, verbose_name=_('Status'))
state = tables.Column(get_agent_state, verbose_name=_('Admin State'))
heartbeat_timestamp = tables.Column('heartbeat_timestamp',
verbose_name=_('Updated At'),
filters=(utils_filters.parse_isotime,
filters.timesince))
class Meta(object):
name = "agents"
verbose_name = _("DHCP Agents")
table_actions = (AddDHCPAgent, DeleteDHCPAgent,
DHCPAgentsFilterAction,)
row_actions = (DeleteDHCPAgent,)
hidden_title = False
| xuweiliang/Codelibrary | openstack_dashboard/dashboards/admin/networks/agents/tables.py | Python | apache-2.0 | 3,868 | 0.001551 |
#!/usr/bin/env python
# encoding: utf-8
"""
Detuned waveguide bank.
"""
from pyo import *
import random
s = Server(sr=44100, nchnls=2, buffersize=512, duplex=0).boot()
src = SfPlayer("../snds/ounkmaster.aif", loop=True, mul=.1)
lf = Sine(freq=[random.uniform(.005, .015) for i in range(8)],
mul=[.02,.04,.06,.08,.1,.12,.14,.16],
add=[50,100,150,200,250,300,350,400])
lf2 = Sine(.005, mul=.2, add=.7)
det_wg = AllpassWG(src, freq=lf, feed=.999, detune=lf2, mul=.25).out()
s.gui(locals())
| xyproto/gosignal | pyo/examples/effects/03_detuned_waveguides.py | Python | gpl-3.0 | 513 | 0.02729 |
from __future__ import unicode_literals, division, absolute_import
from urlparse import urlparse
import logging
from requests import RequestException
from flexget import plugin
from flexget.event import event
from flexget.entry import Entry
log = logging.getLogger('sonarr')
class Sonarr(object):
schema = {
'type': 'object',
'properties': {
'base_url': {'type': 'string'},
'port': {'type': 'number', 'default': 80},
'api_key': {'type': 'string'},
'include_ended': {'type': 'boolean', 'default': True},
'only_monitored': {'type': 'boolean', 'default': True},
'include_data': {'type': 'boolean', 'default': False}
},
'required': ['api_key', 'base_url'],
'additionalProperties': False
}
def on_task_input(self, task, config):
"""
This plugin returns ALL of the shows monitored by Sonarr.
Return ended shows by default and does not return unmonitored
show by default.
Syntax:
sonarr:
base_url=<value>
port=<value>
api_key=<value>
include_ended=<yes|no>
only_monitored=<yes|no>
include_data=<yes|no>
Options base_url and api_key are required.
        Use with an input plugin like discover and/or configure_series.
Example:
download-tv-task:
configure_series:
settings:
quality:
- 720p
from:
sonarr:
base_url: http://localhost
port: 8989
api_key: MYAPIKEY1123
discover:
what:
- emit_series: yes
from:
torrentz: any
download:
/download/tv
Note that when using the configure_series plugin with Sonarr
you are basically synced to it, so removing a show in Sonarr will
        remove it in flexget as well, which could be positive or negative,
depending on your usage.
"""
parsedurl = urlparse(config.get('base_url'))
url = '%s://%s:%s%s/api/series' % (parsedurl.scheme, parsedurl.netloc, config.get('port'), parsedurl.path)
headers = {'X-Api-Key': config['api_key']}
try:
json = task.requests.get(url, headers=headers).json()
except RequestException as e:
raise plugin.PluginError('Unable to connect to Sonarr at %s://%s:%s%s. Error: %s'
% (parsedurl.scheme, parsedurl.netloc, config.get('port'),
parsedurl.path, e))
entries = []
# Dictionary based on Sonarr's quality list.
qualities = {0: '',
1: 'sdtv',
2: 'dvdrip',
3: '1080p webdl',
4: '720p hdtv',
5: '720p webdl',
6: '720p bluray',
7: '1080p bluray',
8: '480p webdl',
9: '1080p hdtv',
10: '1080p bluray'}
# Retrieves Sonarr's profile list if include_data is set to true
if config.get('include_data'):
url2 = '%s://%s:%s%s/api/profile' % (parsedurl.scheme, parsedurl.netloc, config.get('port'), parsedurl.path)
try:
profiles_json = task.requests.get(url2, headers=headers).json()
except RequestException as e:
raise plugin.PluginError('Unable to connect to Sonarr at %s://%s:%s%s. Error: %s'
% (parsedurl.scheme, parsedurl.netloc, config.get('port'),
parsedurl.path, e))
for show in json:
fg_quality = '' # Initializes the quality parameter
entry = None
if show['monitored'] or not config.get('only_monitored'): # Checks if to retrieve just monitored shows
if config.get('include_ended') or show['status'] != 'ended': # Checks if to retrieve ended shows
if config.get('include_data'): # Check if to retrieve quality & path
for profile in profiles_json:
if profile['id'] == show['profileId']: # Get show's profile data from all possible profiles
current_profile = profile
fg_quality = qualities[current_profile['cutoff']['id']] # Sets profile cutoff quality as show's quality
entry = Entry(title=show['title'],
url='',
series_name=show['title'],
tvdb_id=show.get('tvdbId'),
tvrage_id=show.get('tvRageId'),
# configure_series plugin requires that all settings will have the configure_series prefix
configure_series_quality=fg_quality)
if entry.isvalid():
entries.append(entry)
else:
log.error('Invalid entry created? %s' % entry)
# Test mode logging
if entry and task.options.test:
log.info("Test mode. Entry includes:")
log.info(" Title: %s" % entry["title"])
log.info(" URL: %s" % entry["url"])
log.info(" Show name: %s" % entry["series_name"])
log.info(" TVDB ID: %s" % entry["tvdb_id"])
log.info(" TVRAGE ID: %s" % entry["tvrage_id"])
log.info(" Quality: %s" % entry["configure_series_quality"])
# continue
return entries
@event('plugin.register')
def register_plugin():
plugin.register(Sonarr, 'sonarr', api_ver=2)
| xfouloux/Flexget | flexget/plugins/input/sonarr.py | Python | mit | 5,907 | 0.00237 |
from __future__ import print_function
import numpy
import time
import traceback
import colorsys
import random
class EffectLayer(object):
"""Abstract base class for one layer of an LED light effect. Layers operate on a shared framebuffer,
adding their own contribution to the buffer and possibly blending or overlaying with data from
prior layers.
The 'frame' passed to each render() function is an array of LEDs. Each LED is a 3-element list
with the red, green, and blue components each as floating point values with a normalized
brightness range of [0, 1]. If a component is beyond this range, it will be clamped during
conversion to the hardware color format.
"""
transitionFadeTime = 1.0
maximum_errors = 5
def render(self, params, frame):
raise NotImplementedError("Implement render() in your EffectLayer subclass")
def safely_render(self, params, frame):
if not hasattr(self, 'error_count'):
self.error_count = 0
try:
if self.error_count < EffectLayer.maximum_errors:
self.render(params, frame)
except Exception as err:
error_log = open('error.log','a')
error_log.write(time.asctime(time.gmtime()) + " UTC" + " : ")
traceback.print_exc(file=error_log)
print("ERROR:", err, "in", self)
self.error_count += 1
if self.error_count >= EffectLayer.maximum_errors:
print("Disabling", self, "for throwing too many errors")
class HeadsetResponsiveEffectLayer(EffectLayer):
"""A layer effect that responds to the MindWave headset in some way.
Two major differences from EffectLayer:
    1) Constructor expects four parameters:
-- respond_to: the name of a field in EEGInfo (threads.HeadsetThread.EEGInfo).
Currently this means either 'attention' or 'meditation'
-- smooth_response_over_n_secs: to avoid rapid fluctuations from headset
noise, averages the response metric over this many seconds
-- minimum_response_level: if the response level is below this, the layer isn't rendered
-- inverse: If this is true, the layer will respond to (1-response_level)
instead of response_level
2) Subclasses now only implement the render_responsive() function, which
is the same as EffectLayer's render() function but has one extra
parameter, response_level, which is the current EEG value of the indicated
field (assumed to be on a 0-1 scale, or None if no value has been read yet).
"""
def __init__(self, respond_to, smooth_response_over_n_secs=0, minimum_response_level=None, inverse=False):
# Name of the eeg field to influence this effect
if respond_to not in ('attention', 'meditation'):
raise Exception('respond_to was "%s" -- should be "attention" or "meditation"'
% respond_to)
self.respond_to = respond_to
self.smooth_response_over_n_secs = smooth_response_over_n_secs
self.measurements = []
self.timestamps = []
self.last_eeg = None
self.last_response_level = None
self.minimum_response_level = minimum_response_level
# We want to smoothly transition between values instead of jumping
# (as the headset typically gives one reading per second)
self.fading_to = None
self.inverse = inverse
def start_fade(self, new_level):
if not self.last_response_level:
self.last_response_level = new_level
else:
self.fading_to = new_level
def end_fade(self):
self.last_response_level = self.fading_to
self.fading_to = None
def calculate_response_level(self, params, use_eeg2=False):
now = time.time()
response_level = None
# Update our measurements, if we have a new one
eeg = params.eeg2 if use_eeg2 else params.eeg1
if eeg and eeg != self.last_eeg and eeg.on:
if self.fading_to:
self.end_fade()
# Prepend newest measurement and timestamp
self.measurements[:0] = [getattr(eeg, self.respond_to)]
self.timestamps[:0] = [now]
self.last_eeg = eeg
# Compute the parameter to send to our rendering function
N = len(self.measurements)
idx = 0
while idx < N:
dt = self.timestamps[0] - self.timestamps[idx]
if dt >= self.smooth_response_over_n_secs:
self.measurements = self.measurements[:(idx + 1)]
self.timestamps = self.timestamps[:(idx + 1)]
break
idx += 1
self.start_fade(sum(self.measurements) * 1.0 / len(self.measurements))
response_level = self.last_response_level
elif self.fading_to:
# We assume one reading per second, so a one-second fade
fade_progress = now - self.timestamps[0]
if fade_progress >= 1:
self.end_fade()
response_level = self.last_response_level
else:
response_level = (
fade_progress * self.fading_to +
(1 - fade_progress) * self.last_response_level)
if response_level and self.inverse:
response_level = 1 - response_level
return response_level
def render(self, params, frame):
response_level = self.calculate_response_level(params)
if self.minimum_response_level == None or response_level >= self.minimum_response_level:
self.render_responsive(params, frame, response_level)
def render_responsive(self, params, frame, response_level):
raise NotImplementedError(
"Implement render_responsive() in your HeadsetResponsiveEffectLayer subclass")
########################################################
# Simple EffectLayer implementations and examples
########################################################
class ColorLayer(EffectLayer):
"""Simplest layer, draws a static RGB color"""
def __init__(self, color):
self.color = color
def render(self, params, frame):
frame[:] += self.color
class RGBLayer(EffectLayer):
"""Simplest layer, draws a static RGB color cube."""
def render(self, params, frame):
length = len(frame)
step_size = 1.0 / length
hue = 0.0
for pixel in xrange(0, length):
frame[pixel] = colorsys.hsv_to_rgb(hue, 1, 1)
hue += step_size
class MultiplierLayer(EffectLayer):
""" Renders two layers in temporary frames, then adds the product of those frames
to the frame passed into its render method
"""
def __init__(self, layer1, layer2):
self.layer1 = layer1
self.layer2 = layer2
def render(self, params, frame):
temp1 = numpy.zeros(frame.shape)
temp2 = numpy.zeros(frame.shape)
self.layer1.render(params, temp1)
self.layer2.render(params, temp2)
numpy.multiply(temp1, temp2, temp1)
numpy.add(frame, temp1, frame)
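# A minimal composition sketch (hypothetical layer stack; `params` and `frame`
# come from the surrounding rendering loop and are not constructed here):
#
#     purple = ColorLayer(numpy.array([0.6, 0.0, 0.8]))
#     sparkle = SnowstormLayer()
#     combined = MultiplierLayer(purple, sparkle)
#     combined.render(params, frame)  # frame += purple * noise, per pixel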
class BlinkyLayer(EffectLayer):
"""Test our timing accuracy: Just blink everything on and off every other frame."""
on = False
def render(self, params, frame):
self.on = not self.on
frame[:] += self.on
class ColorBlinkyLayer(EffectLayer):
on = False
def render(self, params, frame):
self.on = not self.on
color = numpy.array(colorsys.hsv_to_rgb(random.random(),1,1))
if self.on:
frame[:] += color
class SnowstormLayer(EffectLayer):
transitionFadeTime = 1.0
def render(self, params, frame):
numpy.add(frame, numpy.random.rand(params.num_pixels, 1), frame)
class TechnicolorSnowstormLayer(EffectLayer):
transitionFadeTime = 1.5
def render(self, params, frame):
numpy.add(frame, numpy.random.rand(params.num_pixels, 3), frame)
class WhiteOutLayer(EffectLayer):
""" Sets everything to white """
transitionFadeTime = 0.5
def render(self, params, frame):
frame += numpy.ones(frame.shape)
class GammaLayer(EffectLayer):
"""Apply a gamma correction to the brightness, to adjust for the eye's nonlinear sensitivity."""
def __init__(self, gamma):
# Build a lookup table
self.lutX = numpy.arange(0, 1, 0.01)
self.lutY = numpy.power(self.lutX, gamma)
def render(self, params, frame):
frame[:] = numpy.interp(frame.reshape(-1), self.lutX, self.lutY).reshape(frame.shape)
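# Worked example for the lookup table above: with gamma=2.2, an input
# brightness of 0.5 is remapped to roughly 0.5 ** 2.2 ~= 0.22, i.e. mid-level
# values are dimmed to compensate for the eye's nonlinear response.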
######################################################################
# Simple HeadsetResponsiveEffectLayer implementations and examples
######################################################################
class ResponsiveGreenHighRedLow(HeadsetResponsiveEffectLayer):
"""Colors everything green if the response metric is high, red if low.
Interpolates in between.
"""
def __init__(self, respond_to='attention', smooth_response_over_n_secs=3):
super(ResponsiveGreenHighRedLow,self).__init__(
respond_to, smooth_response_over_n_secs=smooth_response_over_n_secs)
def render_responsive(self, params, frame, response_level):
if response_level is None:
# No signal (blue)
frame[:,2] += 1
else:
frame[:,0] += 1 - response_level
frame[:,1] += response_level
class BrainStaticLayer(HeadsetResponsiveEffectLayer):
def __init__(self, minFactor = 0.3, respond_to='meditation', smooth_response_over_n_secs=0):
super(BrainStaticLayer,self).__init__(respond_to, smooth_response_over_n_secs)
self.minFactor = minFactor
def render_responsive(self, params, frame, response_level):
r = 1-response_level if response_level else 1
numpy.multiply(frame, 1-numpy.random.rand(params.num_pixels, 1)*r*self.minFactor, frame)
| chillpop/RELAX-HARDER | effects/base.py | Python | mit | 10,042 | 0.004979 |
#!/usr/bin/python3
#
# Copyright (c) 2020 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
import os
import sys
import time
import subprocess
import mmap
# Log reader for the trace output buffer on a ADSP device.
#
# When run with no arguments, it will detect the device, dump the
# contents of the trace buffer and continue to poll for more output.
# The "--no-history" argument can be passed to suppress emission of the
# history, and emit only new output. This can be useful for test
# integration where the user does not want to see any previous runs in
# the log output.
#
# The trace buffer is inside a shared memory region exposed by the
# audio PCI device as a BAR at index 4. The hardware provides 4 128k
# "windows" starting at 512kb in the BAR which the DSP firmware can
# map to 4k-aligned locations within its own address space. By
# protocol convention log output is an 8k region at window index 3.
#
# The 8k window is treated as an array of 64-byte "slots", each of
# which is prefixed by a magic number, which should be 0x55aa for log
# data, followed by a 16 bit "ID" number, followed by a null-terminated
# string in the final 60 bytes (or 60 non-null bytes of log data).
# The DSP firmware will write sequential IDs into the buffer starting
# from an ID of zero in the first slot, and wrapping at the end.
MAP_SIZE = 8192
SLOT_SIZE = 64
NUM_SLOTS = int(MAP_SIZE / SLOT_SIZE)
SLOT_MAGIC = 0x55aa
WIN_OFFSET = 0x80000
WIN_IDX = 3
WIN_SIZE = 0x20000
LOG_OFFSET = WIN_OFFSET + WIN_IDX * WIN_SIZE
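# Illustrative only: a slot header can equivalently be decoded with the struct
# module (not imported above), assuming the same little-endian layout that
# read_slot() below implements by hand:
#
#     import struct
#     magic, sid = struct.unpack_from("<HH", mem, off)        # bytes 0-3
#     text = bytes(mem[off + 4:off + 64]).split(b"\0")[0]     # up to 60 bytes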
# List of known ADSP devices by their PCI IDs
DEVICES = ["8086:5a98"]
mem = None
for dev in DEVICES:
# Find me a way to do this detection as cleanly in python as shell, I
# dare you.
barfile = subprocess.Popen(["sh", "-c",
"echo -n "
"$(dirname "
f" $(fgrep PCI_ID={dev.upper()} "
" /sys/bus/pci/devices/*/uevent))"
"/resource4"],
stdout=subprocess.PIPE).stdout.read()
if not os.path.exists(barfile):
continue
if not os.access(barfile, os.R_OK):
sys.stderr.write(f"ERROR: Cannot open {barfile} for reading.")
sys.exit(1)
fd = open(barfile)
mem = mmap.mmap(fd.fileno(), MAP_SIZE, offset=LOG_OFFSET,
prot=mmap.PROT_READ)
if mem is None:
sys.stderr.write("ERROR: No ADSP device found.")
sys.exit(1)
# Returns a tuple of (id, msg) if the slot is valid, or (-1, "") if
# the slot does not contain firmware trace data
def read_slot(slot, mem):
off = slot * SLOT_SIZE
magic = (mem[off + 1] << 8) | mem[off]
sid = (mem[off + 3] << 8) | mem[off + 2]
if magic != SLOT_MAGIC:
return (-1, "")
# This dance because indexing large variable-length slices of
# the mmap() array seems to produce garbage....
msgbytes = []
for i in range(4, SLOT_SIZE):
b = mem[off+i]
if b == 0:
break
msgbytes.append(b)
msg = bytearray(len(msgbytes))
for i, elem in enumerate(msgbytes):
msg[i] = elem
return (sid, msg.decode(encoding="utf-8", errors="ignore"))
def read_hist(start_slot):
id0, msg = read_slot(start_slot, mem)
# An invalid slot zero means no data has ever been placed in the
# trace buffer, which is likely a system reset condition. Back
# off for one second, because continuing to read the buffer has
# been observed to hang the flash process (which I think can only
# be a hardware bug).
if start_slot == 0 and id0 < 0:
sys.stdout.write("===\n=== [ADSP Device Reset]\n===\n")
sys.stdout.flush()
time.sleep(1)
return (0, 0, "")
# Start at zero and read forward to get the last data in the
# buffer. We are always guaranteed that slot zero will contain
# valid data if any slot contains valid data.
last_id = id0
final_slot = start_slot
for i in range(start_slot + 1, NUM_SLOTS):
id, s = read_slot(i, mem)
if id != ((last_id + 1) & 0xffff):
break
msg += s
final_slot = i
last_id = id
final_id = last_id
# Now read backwards from the end to get the prefix blocks from
# the last wraparound
last_id = id0
for i in range(NUM_SLOTS - 1, final_slot, -1):
id, s = read_slot(i, mem)
if id < 0:
break
# Race protection: the other side might have clobbered the
# data after we read the ID, make sure it hasn't changed.
id_check = read_slot(i, mem)[0]
if id_check != id:
break
if ((id + 1) & 0xffff) == last_id:
msg = s + msg
last_id = id
# If we were unable to read forward from slot zero, but could read
# backward, then this is a wrapped buffer being currently updated
# into slot zero. See comment below.
if final_slot == start_slot and last_id != id0:
return None
return ((final_slot + 1) % NUM_SLOTS, (final_id + 1) & 0xffff, msg)
# Returns a tuple containing the next slot to expect data in, the ID
# that slot should hold, and the full string history of trace data
# from the buffer. Start with slot zero (which is always part of the
# current string if there is any data at all) and scan forward and
# back to find the maximum extent.
def trace_history():
# This loop is a race protection for the situation where the
# buffer has wrapped and new data is currently being placed into
# slot zero. In those circumstances, slot zero will have a valid
# magic number but its sequence ID will not correlate with the
# previous and next slots.
ret = None
while ret is None:
ret = read_hist(0)
if ret is None:
ret = read_hist(1)
return ret
# Loop, reading the next slot if it has new data. Otherwise check the
# full buffer and see if history is discontiguous (i.e. what is in the
# buffer should be a proper suffix of what we have stored). If it
# doesn't match, then just print it (it's a reboot or a ring buffer
# overrun). If nothing has changed, then sleep a bit and keep
# polling.
def main():
next_slot, next_id, last_hist = trace_history()
# We only have one command line argument, to suppress the history
# dump at the start (so CI runs don't see e.g. a previous device
# state containing logs from another test, and get confused)
if len(sys.argv) < 2 or sys.argv[1] != "--no-history":
sys.stdout.write(last_hist)
while True:
id, smsg = read_slot(next_slot, mem)
if id == next_id:
next_slot = int((next_slot + 1) % NUM_SLOTS)
next_id = (id + 1) & 0xffff
last_hist += smsg
sys.stdout.write(smsg)
else:
slot2, id2, msg2 = trace_history()
# Device reset:
if slot2 == 0 and id2 == 0 and msg2 == "":
next_id = 1
next_slot = slot2
last_hist = ""
if not last_hist.endswith(msg2):
# On a mismatch, go back and check one last time to
# address the race where a new slot wasn't present
# just JUST THEN but is NOW.
id3, s3 = read_slot(next_slot, mem)
if id3 == next_id:
next_slot = int((next_slot + 1) % NUM_SLOTS)
next_id = (next_id + 1) & 0xffff
last_hist += s3
sys.stdout.write(s3)
continue
# Otherwise it represents discontiguous data, either a
            # reset or an overrun; just dump what we have and
# start over.
next_slot = slot2
last_hist = msg2
sys.stdout.write(msg2)
else:
sys.stdout.flush()
time.sleep(0.10)
if __name__ == "__main__":
main()
| nashif/zephyr | boards/xtensa/intel_adsp_cavs15/tools/adsplog.py | Python | apache-2.0 | 8,018 | 0.000499 |
from django.conf import settings
from django.contrib.auth import logout
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.utils.translation import ugettext_lazy as _
from django.views.generic.base import View, TemplateView
from socialregistration.clients.oauth import OAuthError
from socialregistration.mixins import SocialRegistration
GENERATE_USERNAME = getattr(settings, 'SOCIALREGISTRATION_GENERATE_USERNAME', False)
USERNAME_FUNCTION = getattr(settings, 'SOCIALREGISTRATION_GENERATE_USERNAME_FUNCTION',
'socialregistration.utils.generate_username')
FORM_CLASS = getattr(settings, 'SOCIALREGISTRATION_SETUP_FORM',
'socialregistration.forms.UserForm')
INITAL_DATA_FUNCTION = getattr(settings, 'SOCIALREGISTRATION_INITIAL_DATA_FUNCTION',
None)
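# A hypothetical username generator matching the call signature used by
# Setup.generate_username_and_redirect() below (it is invoked as
# func(user, profile, client)); point SOCIALREGISTRATION_GENERATE_USERNAME_FUNCTION
# at something like:
#
#     def generate_username(user, profile, client):
#         return 'user%s' % profile.pk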
class Setup(SocialRegistration, View):
"""
Setup view to create new Django users from third party APIs.
"""
template_name = 'socialregistration/setup.html'
def get_form(self):
"""
Return the form to be used. The return form is controlled
with ``SOCIALREGISTRATION_SETUP_FORM``.
"""
return self.import_attribute(FORM_CLASS)
def get_username_function(self):
"""
Return a function that can generate a username. The function
is controlled with ``SOCIALREGISTRATION_GENERATE_USERNAME_FUNCTION``.
"""
return self.import_attribute(USERNAME_FUNCTION)
def get_initial_data(self, request, user, profile, client):
"""
Return initial data for the setup form. The function can be
controlled with ``SOCIALREGISTRATION_INITIAL_DATA_FUNCTION``.
:param request: The current request object
:param user: The unsaved user object
:param profile: The unsaved profile object
:param client: The API client
"""
if INITAL_DATA_FUNCTION:
func = self.import_attribute(INITAL_DATA_FUNCTION)
return func(request, user, profile, client)
return {}
def generate_username_and_redirect(self, request, user, profile, client):
"""
Generate a username and then redirect the user to the correct place.
This method is called when ``SOCIALREGISTRATION_GENERATE_USERNAME``
is set.
:param request: The current request object
:param user: The unsaved user object
:param profile: The unsaved profile object
:param client: The API client
"""
func = self.get_username_function()
user.username = func(user, profile, client)
user.set_unusable_password()
user.save()
profile.user = user
profile.save()
user = profile.authenticate()
self.send_connect_signal(request, user, profile, client)
self.login(request, user)
self.send_login_signal(request, user, profile, client)
self.delete_session_data(request)
return HttpResponseRedirect(self.get_next(request))
def get(self, request):
"""
When signing a new user up - either display a setup form, or
generate the username automatically.
"""
try:
user, profile, client = self.get_session_data(request)
except KeyError:
return self.render_to_response(dict(
error=_("Social profile is missing from your session.")))
if GENERATE_USERNAME:
return self.generate_username_and_redirect(request, user, profile, client)
form = self.get_form()(initial=self.get_initial_data(request, user, profile, client))
return self.render_to_response(dict(form=form))
def post(self, request):
"""
Save the user and profile, login and send the right signals.
"""
try:
user, profile, client = self.get_session_data(request)
except KeyError:
return self.render_to_response(dict(
error=_("A social profile is missing from your session.")))
form = self.get_form()(request.POST, request.FILES,
initial=self.get_initial_data(request, user, profile, client))
if not form.is_valid():
return self.render_to_response(dict(form=form))
user, profile = form.save(request, user, profile, client)
user = profile.authenticate()
self.send_connect_signal(request, user, profile, client)
self.login(request, user)
self.send_login_signal(request, user, profile, client)
self.delete_session_data(request)
return HttpResponseRedirect(self.get_next(request))
class Logout(View):
"""
Log the user out of Django. This **does not** log the user out
of third party sites.
"""
def get(self, request):
logout(request)
url = getattr(settings, 'LOGOUT_REDIRECT_URL', '/')
return HttpResponseRedirect(url)
class OAuthRedirect(SocialRegistration, View):
"""
Base class for both OAuth and OAuth2 redirects.
:param client: The API client class that should be used.
:param template_name: The error template.
"""
# The OAuth{1,2} client to be used
client = None
# The template to render in case of errors
template_name = None
def post(self, request):
"""
Create a client, store it in the user's session and redirect the user
to the API provider to authorize our app and permissions.
"""
request.session['next'] = self.get_next(request)
client = self.get_client()()
request.session[self.get_client().get_session_key()] = client
try:
return HttpResponseRedirect(client.get_redirect_url())
except OAuthError, error:
return self.render_to_response({'error': error})
class OAuthCallback(SocialRegistration, View):
"""
Base class for OAuth and OAuth2 callback views.
:param client: The API client class that should be used.
:param template_name: The error template.
"""
# The OAuth{1,2} client to be used
client = None
# The template to render in case of errors
template_name = None
def get_redirect(self):
"""
Return a URL that will set up the correct models if the
OAuth flow succeeded. Subclasses **must** override this
method.
"""
raise NotImplementedError
def get(self, request):
"""
Called after the user is redirected back to our application.
Tries to:
- Complete the OAuth / OAuth2 flow
- Redirect the user to another view that deals with login, connecting
or user creation.
"""
try:
client = request.session[self.get_client().get_session_key()]
client.complete(dict(request.GET.items()))
request.session[self.get_client().get_session_key()] = client
return HttpResponseRedirect(self.get_redirect())
except KeyError:
return self.render_to_response({'error': "Session expired."})
except OAuthError, error:
return self.render_to_response({'error': error})
class SetupCallback(SocialRegistration, TemplateView):
"""
Base class for OAuth and OAuth2 login / connects / registration.
"""
template_name = 'socialregistration/setup.error.html'
def get(self, request):
"""
Called after authorization was granted and the OAuth flow
successfully completed.
Tries to:
- Connect the remote account if the user is logged in already
- Log the user in if a local profile of the remote account
exists already
- Create a user and profile object if none of the above succeed
and redirect the user further to either capture some data via
form or generate a username automatically
"""
try:
client = request.session[self.get_client().get_session_key()]
except KeyError:
return self.render_to_response({'error': "Session expired."})
# Get the lookup dictionary to find the user's profile
lookup_kwargs = self.get_lookup_kwargs(request, client)
# Logged in user (re-)connecting an account
if request.user.is_authenticated():
try:
profile = self.get_profile(**lookup_kwargs)
# Make sure that there is only *one* account per profile.
if not profile.user == request.user:
self.delete_session_data(request)
return self.render_to_response({
'error': _('This profile is already connected to another user account.')
})
except self.get_model().DoesNotExist:
profile, created = self.get_or_create_profile(request.user,
save=True, **lookup_kwargs)
self.send_connect_signal(request, request.user, profile, client)
return self.redirect(request)
# Logged out user - let's see if we've got the identity saved already.
# If so - just log the user in. If not, create profile and redirect
# to the setup view
user = self.authenticate(**lookup_kwargs)
# No user existing - create a new one and redirect to the final setup view
if user is None:
user = self.create_user()
profile = self.create_profile(user, **lookup_kwargs)
self.store_user(request, user)
self.store_profile(request, profile)
self.store_client(request, client)
return HttpResponseRedirect(reverse('socialregistration:setup'))
# Inactive user - displaying / redirect to the appropriate place.
if not user.is_active:
return self.inactive_response()
# Active user with existing profile: login, send signal and redirect
self.login(request, user)
profile = self.get_profile(user=user, **lookup_kwargs)
self.send_login_signal(request, user, profile, client)
return self.redirect(request)
| vinco/django-socialregistration | socialregistration/views.py | Python | mit | 10,506 | 0.006663 |
from django.contrib import admin
from locker.models import Gif
class GifAdmin(admin.ModelAdmin):
pass
admin.site.register(Gif, GifAdmin)
| tiradoe/Giflocker | giflocker/locker/admin.py | Python | lgpl-3.0 | 143 | 0.013986 |
# -*- coding: utf-8 -*-
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
from __future__ import unicode_literals
import frappe
import unittest
from erpnext.education.api import get_grade
# test_records = frappe.get_test_records('Assessment Result')
class TestAssessmentResult(unittest.TestCase):
def test_grade(self):
grade = get_grade("_Test Grading Scale", 80)
self.assertEquals("A", grade)
grade = get_grade("_Test Grading Scale", 70)
self.assertEquals("B", grade)
| indictranstech/erpnext | erpnext/education/doctype/assessment_result/test_assessment_result.py | Python | agpl-3.0 | 521 | 0.017274 |
# http://stats.stackexchange.com/questions/28904/how-to-cluster-lda-lsi-topics-generated-by-gensim
# coding:utf-8
import cPickle as pickle
import glob
import logging
import os
import scipy
import scipy.sparse
import string
import sys
import time
from collections import defaultdict
import gensim.matutils
import gensim.utils
import numpy
from gensim import corpora, models, similarities
from nltk.corpus import stopwords
from nltk.stem.snowball import SnowballStemmer
stemmer = SnowballStemmer("english")
data_dir = os.path.join(os.getcwd(), 'data/')
output_dir = os.path.join(os.getcwd(), 'output/')
work_dir = os.path.join(os.getcwd(), 'model', os.path.basename(__file__).rstrip('.py'))
if not os.path.exists(work_dir):
os.mkdir(work_dir)
os.chdir(work_dir)
logger = logging.getLogger('text_similar')
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.INFO)
# convert to unicode
def to_unicode(arg_text):
result = arg_text.lower()
if not isinstance(result, unicode):
result = result.decode('utf-8', 'ignore')
result = ' '.join(
["".join([character for character in unicode(word) if character not in string.punctuation]) for word in
result.split(' ') if not any([word.startswith('http:'), word.startswith('https:'),
word.startswith('mailto:'), word.endswith('.com'),
word.endswith('.org')])])
return result
def to_unicode_unrolled(arg_text):
t = arg_text.lower()
result = []
if not isinstance(t, unicode):
t = t.decode('utf-8', 'ignore')
for word in t.split(' '):
b0 = word.startswith(u'http:')
b6 = word.startswith(u'<http:')
b1 = word.startswith(u'https:')
b2 = word.startswith(u'mailto:')
b3 = word.endswith(u'.com')
b4 = word.endswith(u'.org')
b5 = any([b0, b1, b2, b3, b4, b6])
if not b5:
word = ' '.join(
["".join([character for character in unicode(word) if character not in string.punctuation])])
result.append(word)
return " ".join(result)
def remove_stopwords_and_stem(arg_text):
result = [stemmer.stem(item) for item in arg_text if item not in stopwords.words('english')]
return result
class TextSimilar(gensim.utils.SaveLoad):
def __init__(self):
self.conf = {}
self.dictionary = None
self.docs = None
self.fname = None
self.lda = None
self.lda_similarity_index = None
self.lda_tfidf = None
self.lda_tfidf_similarity_index = None
self.logent = None
self.logent_similarity_index = None
self.lsi = None
self.lsi_similarity_index = None
self.method = None
self.para = None
self.similar_index = None
self.tfidf = None
def _preprocess(self):
# todo write a more pythonic version of this function and use it
docs = [to_unicode_unrolled(open(f, 'r').read().strip()).split() for f in glob.glob(self.fname)]
logger.debug('ingested files into big array with length %d' % len(docs))
docs = [remove_stopwords_and_stem(item) for item in docs]
logger.debug('removed stopwords and stemmed')
pickle.dump(docs, open(self.conf['fname_docs'], 'wb'))
logger.debug('pickle dump to %s done' % self.conf['fname_docs'])
dictionary = corpora.Dictionary(docs)
dictionary.save(self.conf['fname_dict'])
logger.debug('dictionary save to %s done' % self.conf['fname_dict'])
corpus = [dictionary.doc2bow(doc) for doc in docs]
corpora.MmCorpus.serialize(self.conf['fname_corpus'], corpus)
logger.debug('corpus serialize to %s done' % self.conf['fname_corpus'])
return docs, dictionary, corpus
def _generate_conf(self):
fname = self.fname[self.fname.rfind('/') + 1:]
self.conf['fname_docs'] = '%s.docs' % fname
self.conf['fname_dict'] = '%s.dict' % fname
self.conf['fname_corpus'] = '%s.mm' % fname
def train(self, arg_fname, is_pre=True, method='lsi', **params):
self.fname = arg_fname
self.method = method
self._generate_conf()
if is_pre:
self.docs, self.dictionary, corpus = self._preprocess()
else:
self.docs = pickle.load(open(self.conf['fname_docs']))
self.dictionary = corpora.Dictionary.load(self.conf['fname_dict'])
corpus = corpora.MmCorpus(self.conf['fname_corpus'])
if params is None:
params = {}
logger.info("training TF-IDF model")
self.tfidf = models.TfidfModel(corpus, id2word=self.dictionary)
corpus_tfidf = self.tfidf[corpus]
if method == 'lsi':
logger.info("training LSI model")
self.lsi = models.LsiModel(corpus_tfidf, id2word=self.dictionary, **params)
self.lsi.print_topics(-1)
self.lsi_similarity_index = similarities.MatrixSimilarity(self.lsi[corpus_tfidf])
self.para = self.lsi[corpus_tfidf]
elif method == 'lda_tfidf':
logger.info("training LDA model")
# try 6 workers here instead of original 8
self.lda_tfidf = models.LdaMulticore(corpus_tfidf, id2word=self.dictionary, workers=6, **params)
self.lda_tfidf.print_topics(-1)
            self.lda_tfidf_similarity_index = similarities.MatrixSimilarity(self.lda_tfidf[corpus_tfidf])
            self.para = self.lda_tfidf[corpus_tfidf]
elif method == 'lda':
logger.info("training LDA model")
# try 6 workers here instead of original 8
self.lda = models.LdaMulticore(corpus, id2word=self.dictionary, workers=6, **params)
self.lda.print_topics(-1)
self.lda_similarity_index = similarities.MatrixSimilarity(self.lda[corpus])
self.para = self.lda[corpus]
elif method == 'logentropy':
logger.info("training a log-entropy model")
self.logent = models.LogEntropyModel(corpus, id2word=self.dictionary)
self.logent_similarity_index = similarities.MatrixSimilarity(self.logent[corpus])
self.para = self.logent[corpus]
else:
msg = "unknown semantic method %s" % method
logger.error(msg)
raise NotImplementedError(msg)
def doc2vec(self, doc):
bow = self.dictionary.doc2bow(to_unicode(doc).split())
if self.method == 'lsi':
return self.lsi[self.tfidf[bow]]
elif self.method == 'lda':
return self.lda[bow]
elif self.method == 'lda_tfidf':
            return self.lda_tfidf[self.tfidf[bow]]
elif self.method == 'logentropy':
return self.logent[bow]
def find_similar(self, doc, n=10):
vec = self.doc2vec(doc)
sims = self.similar_index[vec]
sims = sorted(enumerate(sims), key=lambda item: -item[1])
for elem in sims[:n]:
idx, value = elem
print (' '.join(self.docs[idx]), value)
def get_vectors(self):
return self._get_vector(self.para)
@staticmethod
def _get_vector(corpus):
def get_max_id():
maxid = -1
for document in corpus:
maxid = max(maxid, max(
[-1] + [fieldid for fieldid, _ in document])) # [-1] to avoid exceptions from max(empty)
return maxid
num_features = 1 + get_max_id()
index = numpy.empty(shape=(len(corpus), num_features), dtype=numpy.float32)
for docno, vector in enumerate(corpus):
if docno % 1000 == 0:
logger.info("PROGRESS: at document #%i/%i" % (docno, len(corpus)))
if isinstance(vector, numpy.ndarray):
pass
elif scipy.sparse.issparse(vector):
vector = vector.toarray().flatten()
else:
vector = gensim.matutils.unitvec(gensim.matutils.sparse2full(vector, num_features))
index[docno] = vector
return index
def cluster(vectors, ts, k=30, arg_method=None):
from sklearn.cluster import k_means
x = numpy.array(vectors)
cluster_center, result, inertia = k_means(x.astype(numpy.float), n_clusters=k, init="k-means++")
x__y_dic = defaultdict(set)
for i, pred_y in enumerate(result):
        x__y_dic[pred_y].add(' '.join(ts.docs[i]))
logger.info ('len(x__y_dic): %d' % len(x__y_dic))
output_file_name = arg_method + '-cluster.txt'
with open(output_dir + output_file_name, 'w') as fo:
for y in x__y_dic:
            fo.write(str(y) + '\n')
fo.write('{word}\n'.format(word='\n'.join(list(x__y_dic[y])[:100])))
def main(arg_is_train=True):
# todo make the data directory an input parameter
# file_name = data_dir + '/files.tar'
file_name = data_dir + '/*'
# todo make this an input parameter
topics_count = 100
# todo make this an input parameter
methods = ['lda', 'lda_tfidf', 'lsi'] # leaving out logentropy due to memory issues
for method in methods:
text_similar = TextSimilar()
if arg_is_train:
text_similar.train(file_name, method=method, num_topics=topics_count, is_pre=True, iterations=100)
text_similar.save(method)
else:
text_similar = TextSimilar().load(method)
index = text_similar.get_vectors()
cluster(index, text_similar, k=topics_count, arg_method=method)
if __name__ == '__main__':
is_train = True if len(sys.argv) > 1 else False
start_time = time.time()
main(is_train)
finish_time = time.time()
elapsed_hours, elapsed_remainder = divmod(finish_time - start_time, 3600)
elapsed_minutes, elapsed_seconds = divmod(elapsed_remainder, 60)
logging.info(
"Elapsed time: {:0>2}:{:0>2}:{:05.2f}".format(int(elapsed_hours), int(elapsed_minutes), elapsed_seconds))
| mikedelong/aarhus | demos/clusters_from_topics.py | Python | apache-2.0 | 9,957 | 0.002913 |
"""
pygments.styles.abap
~~~~~~~~~~~~~~~~~~~~
ABAP workbench like style.
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.style import Style
from pygments.token import Keyword, Name, Comment, String, Error, \
Number, Operator
class AbapStyle(Style):
default_style = ""
styles = {
Comment: 'italic #888',
Comment.Special: '#888',
Keyword: '#00f',
Operator.Word: '#00f',
Name: '#000',
Number: '#3af',
String: '#5a2',
Error: '#F00',
}
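# A minimal usage sketch (assumes pygments is installed; the lexer alias and
# sample snippet are illustrative only):
if __name__ == '__main__':
    from pygments import highlight
    from pygments.formatters import HtmlFormatter
    from pygments.lexers import get_lexer_by_name

    print(highlight("WRITE 'hello world'.", get_lexer_by_name('abap'),
                    HtmlFormatter(style=AbapStyle)))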
| sonntagsgesicht/regtest | .aux/venv/lib/python3.9/site-packages/pygments/styles/abap.py | Python | apache-2.0 | 727 | 0 |
import matplotlib as mpl
#mpl.use('Agg')
import matplotlib.pyplot as plt
import numpy as np
import os
from scipy.constants import c
import h5py
import sys
import warnings
warnings.simplefilter("ignore", UserWarning)
font = {'size': 18}
mpl.rc('font', **font)
def w2dbm(W, floor=-100):
"""This function converts a power given in W to a power given in dBm.
Inputs::
W(float): power in units of W
Returns::
Power in units of dBm(float)
"""
if type(W) != np.ndarray:
if W > 0:
return 10. * np.log10(W) + 30
elif W == 0:
return floor
else:
print(W)
raise(ZeroDivisionError)
a = 10. * (np.ma.log10(W)).filled(floor/10-3) + 30
return a
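# Quick sanity check of the conversion: 1 mW is 0 dBm and 1 W is 30 dBm, so
# w2dbm(1e-3) == 0.0 and w2dbm(1.0) == 30.0; a scalar 0 W returns the floor
# value (-100 by default) instead of raising.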
class Plotter_saver(object):
def __init__(self, plots, filesaves, fv, t):
if plots and filesaves:
self.exporter = self.plotter_saver_both
elif plots and not(filesaves):
self.exporter = self.plotter_only
elif not(plots) and filesaves:
self.exporter = self.saver_only
else:
sys.exit("You are not exporting anything,\
wasted calculation")
#t = t[np.newaxis,3,:]
self.fv, self.t,self.lv = [self.reshape_x_axis(x) for x in (fv,t, 1e-3*c/fv)]
return None
def reshape_x_axis(self, x):
return np.reshape(x, int(x.shape[0]*x.shape[1]))
def initiate_reshape(self, u, U,nm):
u, U = (np.reshape(i, [nm,int(u.shape[0]*u.shape[1])]) for i in (u, U))
return u, U
def plotter_saver_both(self, index, int_fwm, sim_wind, u, U, P0_p, P0_s,
f_p, f_s, ro, mode_names, pump_wave='',
filename=None, title=None, im=0, plots=True):
u,U = self.initiate_reshape(u,U,int_fwm.nm)
self.plotter(index, int_fwm, sim_wind, u, U, P0_p, P0_s,
f_p, f_s, ro, mode_names, pump_wave,
filename, title, im, plots)
self.saver(index, int_fwm, sim_wind, u, U, P0_p, P0_s, f_p, f_s,
ro, mode_names, pump_wave, filename, title,
im, plots)
return None
def plotter_only(self, index, int_fwm, sim_wind, u, U, P0_p, P0_s,
f_p, f_s, ro, mode_names, pump_wave='',
filename=None, title=None, im=0, plots=True):
u,U = self.initiate_reshape(u,U,int_fwm.nm)
self.plotter(index, int_fwm, sim_wind, u, U, P0_p, P0_s,
f_p, f_s, ro, mode_names, pump_wave,
filename, title, im, plots)
return None
def saver_only(self, index, int_fwm, sim_wind, u, U, P0_p, P0_s,
f_p, f_s, ro, mode_names, pump_wave='',
filename=None, title=None, im=0, plots=True):
u,U = self.initiate_reshape(u,U,int_fwm.nm)
self.saver(index, int_fwm, sim_wind, u, U, P0_p, P0_s, f_p, f_s,
ro, mode_names, pump_wave, filename, title,
im, plots)
return None
def plotter(self, index, int_fwm, sim_wind, u, U, P0_p, P0_s,
f_p, f_s, ro, mode_names, pump_wave='',
filename=None, title=None, im=0, plots=True):
"""Plots many modes"""
x, y = 1e-3*c/self.fv, w2dbm(np.abs(U)**2)
xlim, ylim = [800, 1400], [-80, 100]
xlabel, ylabel = r'$\lambda (nm)$', r'$Spectrum (a.u.)$'
filesave = 'output'+pump_wave+'/output' + \
str(index) + '/figures/wavelength/'+filename
plot_multiple_modes(int_fwm.nm, x, y, mode_names,
ylim, xlim, xlabel, ylabel, title, filesave, im)
# Frequency
x, y = self.fv, w2dbm(sim_wind.dt[0]**2*np.abs(U)**2)# - np.max(w2dbm(sim_wind.dt[0]**2*np.abs(U)**2))
xlim, ylim = [np.min(x), np.max(x)], [np.min(y) + 0.1*np.min(y), 1]
xlim, ylim = [np.min(x), np.max(x)], [-50,100]
xlabel, ylabel = r'$f (THz)$', r'$Spectrum (a.u.)$'
filesave = 'output'+pump_wave+'/output' + \
str(index) + '/figures/frequency/'+filename
plot_multiple_modes(int_fwm.nm, x, y, mode_names,
ylim, xlim, xlabel, ylabel, title, filesave, im)
# Time
x, y = self.t, np.abs(u)**2
xlim, ylim = [np.min(x), np.max(x)], [6.8, 7.8]
xlabel, ylabel = r'$time (ps)$', r'$Spectrum (W)$'
filesave = 'output'+pump_wave+'/output' + \
str(index) + '/figures/time/'+filename
plot_multiple_modes(int_fwm.nm, x, y, mode_names,
ylim, xlim, xlabel, ylabel, title, filesave, im)
return None
def saver(self, index, int_fwm, sim_wind, u, U, P0_p, P0_s, f_p, f_s
, ro, mode_names, pump_wave='', filename=None, title=None,
im=0, plots=True):
"""Dump to HDF5 for postproc"""
if filename[:4] != 'port':
layer = filename[-1]+'/'+filename[:-1]
else:
layer = filename
        if layer[0] == '0':
extra_data = np.array([int_fwm.z, int_fwm.nm,P0_p, P0_s, f_p, f_s, ro])
save_variables('data_large', layer, filepath='output'+pump_wave+'/output'+str(index)+'/data/', U=U, t=self.t,
fv=self.fv, extra_data = extra_data)
else:
save_variables('data_large', layer, filepath='output'+pump_wave+'/output'+str(index)+'/data/', U=U)
return None
def plot_multiple_modes(nm, x, y, mode_names, ylim, xlim, xlabel, ylabel, title, filesave=None, im=None):
"""
    Dynamically plots whatever is asked of it for multiple modes at a given point.
"""
fig = plt.figure(figsize=(20.0, 10.0))
plt.subplots_adjust(hspace=0.1)
for i, v in enumerate(range(nm)):
v = v+1
ax1 = plt.subplot(nm, 1, v)
plt.plot(x, y[i, :], '-', label=mode_names[i])
ax1.legend(loc=2)
ax1.set_ylim(ylim)
ax1.set_xlim(xlim)
if i != nm - 1:
ax1.get_xaxis().set_visible(False)
ax = fig.add_subplot(111, frameon=False)
ax.axes.get_xaxis().set_ticks([])
ax.axes.get_yaxis().set_ticks([])
ax.set_title(title)
plt.grid(True)
ax.yaxis.set_label_coords(-0.05, 0.5)
ax.xaxis.set_label_coords(0.5, -0.05)
ax.set_xlabel(xlabel)
ax.set_ylabel(ylabel)
if type(im) != int:
newax = fig.add_axes([0.8, 0.8, 0.2, 0.2], anchor='NE')
newax.imshow(im)
newax.axis('off')
if filesave == None:
plt.show()
else:
        plt.savefig(filesave, bbox_inches='tight')
plt.close(fig)
return None
def animator_pdf_maker(rounds, pump_index):
"""
    Creates the animations and PDFs of the FOPO at its different parts
    using convert from ImageMagick. Also removes the PNGs, so be careful.
"""
print("making pdf's and animations.")
    space = ('wavelength', 'frequency', 'time')
for sp in space:
file_loc = 'output/output'+str(pump_index)+'/figures/'+sp+'/'
strings_large = ['convert '+file_loc+'00.png ']
for i in range(4):
strings_large.append('convert ')
for ro in range(rounds):
for i in range(4):
strings_large[i+1] += file_loc+str(ro)+str(i+1)+'.png '
for w in range(1, 4):
if i == 5:
break
strings_large[0] += file_loc+str(ro)+str(w)+'.png '
for i in range(4):
os.system(strings_large[i]+file_loc+str(i)+'.pdf')
file_loca = file_loc+'portA/'
file_locb = file_loc+'portB/'
string_porta = 'convert '
string_portb = 'convert '
for i in range(rounds):
string_porta += file_loca + str(i) + '.png '
string_portb += file_locb + str(i) + '.png '
string_porta += file_loca+'porta.pdf '
string_portb += file_locb+'portb.pdf '
os.system(string_porta)
os.system(string_portb)
for i in range(4):
os.system(
'convert -delay 30 '+file_loc+str(i)+'.pdf '+file_loc+str(i)+'.mp4')
os.system('convert -delay 30 ' + file_loca +
'porta.pdf ' + file_loca+'porta.mp4 ')
os.system('convert -delay 30 ' + file_locb +
'portb.pdf ' + file_locb+'portb.mp4 ')
for i in (file_loc, file_loca, file_locb):
print('rm ' + i + '*.png')
os.system('rm ' + i + '*.png')
os.system('sleep 5')
return None
def read_variables(filename, layer, filepath=''):
with h5py.File(filepath+str(filename)+'.hdf5', 'r') as f:
D = {}
for i in f.get(layer).keys():
try:
D[str(i)] = f.get(layer + '/' + str(i)).value
except AttributeError:
pass
return D
def save_variables(filename, layers, filepath='', **variables):
with h5py.File(filepath + filename + '.hdf5', 'a') as f:
for i in (variables):
f.create_dataset(layers+'/'+str(i), data=variables[i])
return None
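# A minimal round-trip sketch for the two helpers above (hypothetical layer
# name and payload; the HDF5 file is created under `filepath`):
#
#     save_variables('data_large', 'demo/0', filepath='/tmp/', U=np.zeros(8))
#     D = read_variables('data_large', 'demo/0', filepath='/tmp/')
#     # D['U'] now holds the stored array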
def consolidate(max_rounds, int_fwm,master_index, index, filename = 'data_large'):
"""
Loads the HDF5 data and consolidates them for storage size
reduction after the oscillations are done.
"""
layer_0 = '0/0'
filepath = 'output{}/output{}/data/'.format(master_index, index)
file_read = filepath + filename
file_save = filepath + filename+'_conc'
# Input data, small, no need to cons
D = read_variables(file_read, '0/0')
save_variables(file_save, 'input', **D)
if max_rounds ==0:
max_rounds +=1
U_cons = np.zeros([4,max_rounds, 7*int_fwm.nt], dtype = np.complex128)
# Reading of all the oscillating spectra and sending them to a 3D array
unfortmated_string = '{}/{}/U'
with h5py.File(file_read+'.hdf5', 'r') as f:
for pop in range(1,5):
for r in range(max_rounds):
U_cons[pop - 1,r,:] = f.get(unfortmated_string.format(pop,r)).value
save_variables(file_save, 'results', U = U_cons)
os.system('mv '+file_save+'.hdf5 '+file_read+'.hdf5')
return None | ibegleris/Single-mode-FOPO | src/data_plotters_animators.py | Python | bsd-3-clause | 10,230 | 0.006549 |
# -*- coding: utf-8 -*-
# Scrapy settings for spider project
#
# For simplicity, this file contains only the most important settings by
# default. All the other settings are documented here:
#
# http://doc.scrapy.org/en/latest/topics/settings.html
#
BOT_NAME = 'spider'
SPIDER_MODULES = ['spider.spiders']
NEWSPIDER_MODULE = 'spider.spiders'
# Crawl responsibly by identifying yourself (and your website) on the user-agent
#USER_AGENT = 'spider (+http://www.yourdomain.com)'
| sanxiatianma/spider | src/spider/settings.py | Python | apache-2.0 | 483 | 0.004141 |
"""Copyright 2016 Google Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Function to provide a logger that writes to stdout, memory, and files.
Logger objects are used to control how and when messages are logged. This
function will perform some general housekeeping and organization. It will also
ensure that existing loggers don't get extra handlers added to them if this
code is called multiple times.
"""
import logging
import os
from StringIO import StringIO
import sys
import time
def GetLogger(name, logdir=None, loglevel='info', stdout=False):
"""Return a new logger, or reference to an existing logger.
Args:
name: string, name of logger.
logdir: string, path to a directly to place log files.
loglevel: string, debug level of logger.
stdout: boolean, True = send messages to stdout and logfile.
False = only send messages to log file.
Returns:
initialized logger.
Since Python loggers are a singleton, logging.getLogger() will always return
a reference to the current logger with identical names. This function uses
3 handlers, so if handlers == 0 the logger requires proper configuration
of handlers and log files.
"""
logger = logging.getLogger(name)
if not logger.handlers:
datetime_str = time.strftime('%Y%B%d_%H%M%S', time.localtime())
log_filename = '%s%s%s' % (name, datetime_str, '.log')
if not logdir:
logdir = '/tmp/logfiles'
if not os.path.isdir(logdir):
try:
os.makedirs(logdir)
except IOError:
print 'Error creating log directory!'
sys.exit(1)
logfile = os.path.join(logdir, log_filename)
strlog = StringIO()
c = logging.StreamHandler()
s = logging.StreamHandler(strlog)
h = logging.FileHandler(logfile)
hf = logging.Formatter('%(asctime)s, %(name)s %(levelname)s: %(message)s')
cf = logging.Formatter('%(name)s %(levelname)s: %(message)s')
sf = logging.Formatter('%(name)s %(levelname)s: %(message)s')
logger.addHandler(h)
logger.addHandler(s)
h.setFormatter(hf)
s.setFormatter(sf)
if stdout:
logger.addHandler(c)
c.setFormatter(cf)
levels = {'debug': logging.DEBUG,
'info': logging.INFO,
'warning': logging.WARNING,
'error': logging.ERROR,
'critical': logging.CRITICAL,
}
logger.setLevel(levels.get(loglevel, logging.INFO))
logger.debug(
'Invocation started. Logger %s\nLogger Name: %s\nLog Mode: %s',
logfile, name, loglevel)
else:
logger.debug('Logger %s is already initialized', name)
return logger
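# A minimal usage sketch (hypothetical logger name and directory):
if __name__ == '__main__':
  demo_logger = GetLogger('logocert_demo', logdir='/tmp/logfiles',
                          loglevel='debug', stdout=True)
  demo_logger.info('Logger smoke test message.')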
| google/cloudprint_logocert | _log.py | Python | apache-2.0 | 3,118 | 0.003849 |
# -*- coding: utf-8 -*-
from contextlib import contextmanager
import pytest
from django.contrib.auth.models import AnonymousUser
from django.test.client import RequestFactory
from shuup.apps.provides import override_provides
from shuup.utils.excs import Problem
from shuup.xtheme import XTHEME_GLOBAL_VIEW_NAME
from shuup.xtheme.layout import Layout
from shuup.xtheme.models import SavedViewConfig, SavedViewConfigStatus
from shuup.xtheme.plugins.consts import FALLBACK_LANGUAGE_CODE
from shuup.xtheme.testing import override_current_theme_class
from shuup.xtheme.views.editor import EditorView, ROW_CELL_LIMIT
from shuup_tests.utils import printable_gibberish
from shuup_tests.utils.faux_users import SuperUser
from shuup_tests.utils.forms import get_form_data
from shuup_tests.xtheme.utils import FauxTheme, plugin_override
@contextmanager
def initialize_editor_view(view_name, placeholder_name, request=None):
if request is None:
request = RequestFactory().get("/")
request.user = SuperUser()
if hasattr(request.GET, "_mutable"):
request.GET._mutable = True # Ahem
request.GET.update({
"theme": FauxTheme.identifier,
"view": view_name,
"ph": placeholder_name
})
with plugin_override():
with override_provides("xtheme", ["shuup_tests.xtheme.utils:FauxTheme"]):
with override_current_theme_class(FauxTheme):
yield EditorView(request=request, args=(), kwargs={})
def get_test_layout_and_svc():
svc = SavedViewConfig(
theme_identifier=FauxTheme.identifier,
view_name=printable_gibberish(),
status=SavedViewConfigStatus.CURRENT_DRAFT
)
layout = Layout(FauxTheme, "ph")
layout.add_plugin("text", {"text": "hello"})
svc.set_layout_data(layout.placeholder_name, layout)
svc.save()
return layout, svc
def test_anon_cant_edit(rf):
request = rf.get("/")
request.user = AnonymousUser()
with pytest.raises(Problem):
EditorView.as_view()(request)
def test_unknown_theme_fails(rf):
request = rf.get("/", {"theme": printable_gibberish()})
request.user = SuperUser()
with pytest.raises(Problem):
EditorView.as_view()(request)
@pytest.mark.django_db
def test_editor_view_functions():
layout, svc = get_test_layout_and_svc()
with initialize_editor_view(svc.view_name, layout.placeholder_name) as view_obj:
assert isinstance(view_obj, EditorView)
view_obj.request.GET.update({"x": 0, "y": 0})
view_obj.dispatch(view_obj.request)
assert view_obj.current_cell
assert view_obj.current_cell.serialize() == layout.get_cell(0, 0).serialize()
# Go through the motions of adding and removing stuff programmatically
view_obj.dispatch_change_plugin(plugin="text") # Well it was text to begin with, but...
assert len(view_obj.layout.rows[0]) == 1
view_obj.dispatch_add_cell(y=-1)
assert len(view_obj.layout.rows[0]) == 1
view_obj.dispatch_add_cell(y=0)
assert len(view_obj.layout.rows[0]) == 2
view_obj.dispatch_add_row()
assert len(view_obj.layout) == 2
assert len(view_obj.layout.rows[1]) == 1
view_obj.dispatch_add_cell(y=1)
assert len(view_obj.layout.rows[1]) == 2
view_obj.dispatch_del_cell(x=1, y=1)
assert len(view_obj.layout.rows[1]) == 1
view_obj.dispatch_del_row(y=1)
assert len(view_obj.layout) == 1
@pytest.mark.django_db
def test_editor_save(rf):
layout, svc = get_test_layout_and_svc()
with initialize_editor_view(svc.view_name, layout.placeholder_name) as view_obj:
view_obj.request.GET.update({"x": 0, "y": 0})
view_obj.dispatch(view_obj.request)
assert view_obj.current_cell
assert view_obj.form
assert "general" in view_obj.form.forms
assert "plugin" in view_obj.form.forms
form_data = get_form_data(view_obj.form, prepared=True)
new_text = printable_gibberish()
form_data["plugin-text_%s" % FALLBACK_LANGUAGE_CODE] = new_text
form_data["save"] = "1"
request = rf.post("/pepe/", data=form_data) # sort of rare pepe
request.GET = dict(request.GET, x=0, y=0)
with initialize_editor_view(svc.view_name, layout.placeholder_name, request) as view_obj:
view_obj.dispatch(request)
assert view_obj.form
assert not view_obj.form.errors
assert view_obj.current_cell.config["text"] == {FALLBACK_LANGUAGE_CODE: new_text}
@pytest.mark.django_db
def test_editor_view_commands():
with initialize_editor_view(printable_gibberish(), printable_gibberish()) as view_obj:
view_obj.request.method = "POST"
view_obj.request.POST = {"command": "add_row"}
view_obj._populate_vars() # don't tell anyone we're calling a private method here
assert len(view_obj.layout) == 0
view_obj.dispatch(view_obj.request)
assert len(view_obj.layout) == 1
@pytest.mark.django_db
def test_editor_view_unknown_command():
with initialize_editor_view(printable_gibberish(), printable_gibberish()) as view_obj:
view_obj.request.method = "POST"
view_obj.request.POST = {"command": printable_gibberish()}
with pytest.raises(Problem):
view_obj.dispatch(view_obj.request)
@pytest.mark.django_db
def test_editor_cell_limits():
layout, svc = get_test_layout_and_svc()
with initialize_editor_view(svc.view_name, layout.placeholder_name) as view_obj:
view_obj.request.GET.update({"x": 0, "y": 0})
view_obj.dispatch(view_obj.request)
for i in range(1, ROW_CELL_LIMIT):
view_obj.dispatch_add_cell(y=0)
assert len(view_obj.layout.rows[0]) == ROW_CELL_LIMIT
with pytest.raises(ValueError):
view_obj.dispatch_add_cell(y=0)
@pytest.mark.django_db
def test_get_global_placeholder():
request = RequestFactory().get("/")
layout, svc = get_test_layout_and_svc()
with initialize_editor_view(svc.view_name, layout.placeholder_name, request=request) as view_obj:
view_name_1 = view_obj.dispatch(view_obj.request).context_data["view"].view_config.view_name
view_obj.request.GET.update({"x": 0, "y": 0, "global_type": True})
view_name_2 = view_obj.dispatch(view_obj.request).context_data["view"].view_config.view_name
assert view_name_1 != view_name_2
assert view_name_2 == XTHEME_GLOBAL_VIEW_NAME
| hrayr-artunyan/shuup | shuup_tests/xtheme/test_editor_view.py | Python | agpl-3.0 | 6,457 | 0.002168 |
# -*- coding: utf-8 -*-
# Copyright (C) 2011 Canonical
#
# Authors:
# Didier Roche <didrocks@ubuntu.com>
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; version 3.
#
# This program is distributed in the hope that it will be useful, but WITHOUTa
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import os
try:
from configparser import NoSectionError, NoOptionError, RawConfigParser
except ImportError:
# Python 2
from ConfigParser import NoSectionError, NoOptionError, RawConfigParser
from xdg import BaseDirectory as xdg
ONECONF_OVERRIDE_FILE = "/tmp/oneconf.override"
ONECONF_DATADIR = '/usr/share/oneconf/data'
ONECONF_CACHE_DIR = os.path.join(xdg.xdg_cache_home, "oneconf")
PACKAGE_LIST_PREFIX = "package_list"
OTHER_HOST_FILENAME = "other_hosts"
PENDING_UPLOAD_FILENAME = "pending_upload"
HOST_DATA_FILENAME = "host"
LOGO_PREFIX = "logo"
LAST_SYNC_DATE_FILENAME = "last_sync"
_datadir = os.path.join(os.path.dirname(os.path.dirname(__file__)), "data")
# In both Python 2 and 3, _datadir will be a relative path, however, in Python
# 3 it will start with "./" while in Python 2 it will start with just the file
# name. Normalize this, since the path string is used in the logo_checksum
# calculation.
if not os.path.isabs(_datadir) and not _datadir.startswith('./'):
_datadir = os.path.join(os.curdir, _datadir)
if not os.path.exists(_datadir):
# take the paths file if loaded from networksync module
#
# 2014-03-17 barry: It's probably not a good idea to use __file__, since
# the behavior of that has changed between Python 3.3 and 3.4. Prior to
# 3.4, __file__ was a relative path, but in 3.4 it became absolute (which
# it always should have been). Because the file's *path* is the input to
# the logo checksum (as opposed to the file's contents, because...?) this
# value actually matters.
#
# However, making the FAKE_WALLPAPER path below absolute breaks the
# package's build because inside a chroot, the absolute path of __file__
# is unpredictable. LP: #1269898.
#
# The solution then is to make the FAKE_WALLPAPER path relative to the
# current working directory, via os.path.relpath(). So first, we ensure
# it's absolute (for older Pythons) and then relpath it. *That's* the
# path that will be the input to the SHA224 checksum.
parent = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
_datadir = os.path.join(parent, "data")
if not os.path.exists(_datadir):
_datadir = ONECONF_DATADIR
LOGO_BASE_FILENAME = os.path.join(_datadir, 'images', 'computer.png')
WEBCATALOG_SILO_DIR = "/tmp"
FAKE_WALLPAPER = None # Fake wallpaper for tests
FAKE_WALLPAPER_MTIME = None # Fake wallpaper for tests
config = RawConfigParser()
try:
config.read(ONECONF_OVERRIDE_FILE)
ONECONF_CACHE_DIR = config.get('TestSuite', 'ONECONF_CACHE_DIR')
WEBCATALOG_SILO_DIR = config.get('TestSuite', 'WEBCATALOG_SILO_DIR')
FAKE_WALLPAPER = os.path.relpath(os.path.abspath(os.path.join(
os.path.dirname(_datadir), config.get('TestSuite', 'FAKE_WALLPAPER'))))
try:
FAKE_WALLPAPER_MTIME = config.get('TestSuite', 'FAKE_WALLPAPER_MTIME')
except NoOptionError:
FAKE_WALLPAPER_MTIME = None
except NoSectionError:
pass
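# For reference, a hypothetical override file matching the options read above
# would look like:
#
#     [TestSuite]
#     ONECONF_CACHE_DIR = /tmp/oneconf-test-cache
#     WEBCATALOG_SILO_DIR = /tmp/oneconf-test-silo
#     FAKE_WALLPAPER = tests/data/wallpaper.jpg
#     FAKE_WALLPAPER_MTIME = 1394000000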
WEBCATALOG_SILO_SOURCE = os.path.join(WEBCATALOG_SILO_DIR, "source")
WEBCATALOG_SILO_RESULT = os.path.join(WEBCATALOG_SILO_DIR, "result")
| yasoob/PythonRSSReader | venv/lib/python2.7/dist-packages/oneconf/paths.py | Python | mit | 3,861 | 0.000518 |
"""SCons.Tool.tar
Tool-specific initialization for tar.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# Copyright (c) 2001 - 2015 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/tar.py rel_2.4.0:3365:9259ea1c13d7 2015/09/21 14:03:43 bdbaddog"
import SCons.Action
import SCons.Builder
import SCons.Defaults
import SCons.Node.FS
import SCons.Util
tars = ['tar', 'gtar']
TarAction = SCons.Action.Action('$TARCOM', '$TARCOMSTR')
TarBuilder = SCons.Builder.Builder(action = TarAction,
source_factory = SCons.Node.FS.Entry,
source_scanner = SCons.Defaults.DirScanner,
suffix = '$TARSUFFIX',
multi = 1)
def generate(env):
"""Add Builders and construction variables for tar to an Environment."""
try:
bld = env['BUILDERS']['Tar']
except KeyError:
bld = TarBuilder
env['BUILDERS']['Tar'] = bld
env['TAR'] = env.Detect(tars) or 'gtar'
env['TARFLAGS'] = SCons.Util.CLVar('-c')
env['TARCOM'] = '$TAR $TARFLAGS -f $TARGET $SOURCES'
env['TARSUFFIX'] = '.tar'
def exists(env):
return env.Detect(tars)
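# A minimal SConstruct sketch using this tool (targets and sources are
# hypothetical; the second call shows overriding TARFLAGS/TARSUFFIX for a
# gzipped archive):
#
#     env = Environment(tools=['tar'])
#     env.Tar('dist/project.tar', ['src', 'README'])
#     env.Tar('dist/project.tar.gz', ['src'], TARFLAGS='-c -z', TARSUFFIX='.tar.gz')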
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| Uli1/mapnik | scons/scons-local-2.4.0/SCons/Tool/tar.py | Python | lgpl-2.1 | 2,503 | 0.006392 |
from Task import Task
import os.path
import yaml
import botocore
import name_constructor
import time
import sys
class DynamoTable(Task):
"""Create or remove a table by yaml definition file"""
known_params = {
'name': 'name of the table to be created or removed',
'source': 'full name of the file with the definition (see demo/sample_reservation.yml)',
'state': 'table can be in two states: present (it is the default state) or absent'
}
required_params = ( 'name', 'source' )
required_configs = ('user', 'branch')
task_name = 'dynamo-table'
def __str__(self):
if self.name:
return self.name
else:
return "Create a table '%s' from '%s'" % (self.params['name'], os.path.abspath(self.params['source']))
def run(self, clients, cache):
client = clients.get('dynamodb')
table_name = name_constructor.table_name(self.params['name'], self.config)
if 'state' in self.params and self.params['state'] == 'absent':
return self.make_table_absent(client, table_name)
else:
return self.make_table_present(client, table_name)
def make_table_absent(self, client, table_name):
try:
table_def = client.describe_table(TableName=table_name)['Table']
except botocore.exceptions.ClientError as e:
return (True, '')
self.wait_for_table(client, table_name)
client.delete_table(TableName=table_name)
return (True, self.CHANGED)
def make_table_present(self, client, table_name):
try:
new_def = yaml.load(open(self.params['source']).read())
except Exception as e:
return (False, str(e))
try:
table_def = client.describe_table(TableName=table_name)['Table']
except botocore.exceptions.ClientError as e:
return self.create(client, table_name, new_def)
request = self.build_update_request(table_def, new_def)
if not request:
return (True, '')
self.process_update_request(client, request, table_name)
return (True, self.CHANGED)
def process_update_request(self, client, request, table_name):
if 'GlobalSecondaryIndexUpdates' in request:
for index_request in request['GlobalSecondaryIndexUpdates']:
new_request = { 'TableName': table_name, 'AttributeDefinitions': request['AttributeDefinitions'], 'GlobalSecondaryIndexUpdates': [index_request]}
self.modify_table(client, new_request, table_name)
if 'ProvisionedThroughput' in request:
new_request = { 'TableName': table_name, 'ProvisionedThroughput': request['ProvisionedThroughput'] }
self.modify_table(client, new_request, table_name)
def wait_for_table(self, client, table_name):
def rotate(t):
# animation = ('|', '\\', '-', '/')
animation = (':.. ', '.:. ', '..: ', '.:. ')
sys.stdout.write('\b'*len(animation)+animation[t % len(animation)])
sys.stdout.flush()
retry = 600
sys.stdout.write('\r')
while True:
table_def = client.describe_table(TableName=table_name)['Table']
busy_reason = self.table_busy_reason(table_def)
if busy_reason == '':
break
retry -= 1
if retry < 1:
raise Exception("%s too long." % busy_reason)
rotate(retry)
time.sleep(1)
def modify_table(self, client, request, table_name):
self.wait_for_table(client, table_name)
client.update_table(**request)
def table_busy_reason(self, table_def):
if table_def['TableStatus'] != 'ACTIVE':
return 'Table is in state %s' % table_def['TableStatus']
if 'GlobalSecondaryIndexes' in table_def:
for index in table_def['GlobalSecondaryIndexes']:
if index['IndexStatus'] != 'ACTIVE':
return 'Index %s is in state %s' % (index['IndexName'], index['IndexStatus'])
return ''
def build_update_request(self, table_def, new_def):
request = {}
old_indexes = self.get_indexes_by_name(self.construct_secondary_indexes(table_def['GlobalSecondaryIndexes']))
new_indexes = self.get_indexes_by_name(self.construct_secondary_indexes(new_def['GlobalSecondaryIndexes']))
updates = []
for index_name in old_indexes:
if index_name not in new_indexes:
updates.append({ 'Delete': { 'IndexName': index_name }})
for (index_name, index) in new_indexes.iteritems():
if index_name in old_indexes:
if index != old_indexes[index_name]:
updates.append({ 'Delete': { 'IndexName': index_name }})
updates.append({ 'Create': index})
else:
updates.append({ 'Create': index})
if updates:
request['GlobalSecondaryIndexUpdates'] = updates
request['AttributeDefinitions'] = new_def['AttributeDefinitions']
old_provisioning = self.construct_provisioned_throughput(table_def['ProvisionedThroughput'])
new_provisioning = self.construct_provisioned_throughput(new_def['ProvisionedThroughput'])
if old_provisioning != new_provisioning:
request['ProvisionedThroughput'] = new_provisioning
return request
def get_indexes_by_name(self, indexes):
out = {}
for index in indexes:
out[index['IndexName']] = index
return out
def construct_provisioned_throughput(self, idef):
return {
'ReadCapacityUnits': idef['ReadCapacityUnits'],
'WriteCapacityUnits': idef['WriteCapacityUnits']
}
def construct_secondary_indexes(self, idefs):
outs = []
for idef in idefs:
out = {
'IndexName': idef['IndexName'],
'KeySchema': idef['KeySchema'],
'Projection': idef['Projection']
}
if 'ProvisionedThroughput' in idef:
out['ProvisionedThroughput'] = self.construct_provisioned_throughput(idef['ProvisionedThroughput'])
outs.append(out)
return outs
def create(self, client, table_name, new_def):
params = {
'AttributeDefinitions': new_def['AttributeDefinitions'],
'TableName': table_name,
'KeySchema': new_def['KeySchema'] if 'KeySchema' in new_def else [],
'ProvisionedThroughput': self.construct_provisioned_throughput(new_def['ProvisionedThroughput'])
}
if 'LocalSecondaryIndexes' in new_def:
params['LocalSecondaryIndexes'] = self.construct_secondary_indexes(new_def['LocalSecondaryIndexes'])
if 'GlobalSecondaryIndexes' in new_def:
params['GlobalSecondaryIndexes'] = self.construct_secondary_indexes(new_def['GlobalSecondaryIndexes'])
if 'StreamSpecification' in new_def:
params['StreamSpecification'] = new_def['StreamSpecification']
try:
client.create_table(**params)
except botocore.exceptions.ClientError as e:
return (False, str(e))
return (True, self.CREATED)
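# --- Illustrative sketch, not part of the original task ---
# build_update_request() above diffs the live table definition against the
# desired one and collects GSI Delete/Create updates plus a throughput change;
# process_update_request() then applies them one index change at a time.
# The hypothetical definitions below would yield a Delete for 'old-index',
# a Create for 'new-index', and a new ProvisionedThroughput block.
def _example_update_request_inputs():
  table_def = {
    'AttributeDefinitions': [{'AttributeName': 'pk', 'AttributeType': 'S'}],
    'ProvisionedThroughput': {'ReadCapacityUnits': 5, 'WriteCapacityUnits': 5},
    'GlobalSecondaryIndexes': [{
      'IndexName': 'old-index',
      'KeySchema': [{'AttributeName': 'pk', 'KeyType': 'HASH'}],
      'Projection': {'ProjectionType': 'ALL'}
    }]
  }
  new_def = {
    'AttributeDefinitions': [{'AttributeName': 'pk', 'AttributeType': 'S'}],
    'ProvisionedThroughput': {'ReadCapacityUnits': 10, 'WriteCapacityUnits': 5},
    'GlobalSecondaryIndexes': [{
      'IndexName': 'new-index',
      'KeySchema': [{'AttributeName': 'pk', 'KeyType': 'HASH'}],
      'Projection': {'ProjectionType': 'ALL'}
    }]
  }
  return table_def, new_def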
| Travelport-Czech/apila | tasks/DynamoTable.py | Python | mit | 6,621 | 0.014197 |
from nose.tools import eq_
from douglas.tests import PluginTest
from douglas.plugins import yeararchives
class Test_yeararchives(PluginTest):
def setUp(self):
PluginTest.setUp(self, yeararchives)
def tearDown(self):
PluginTest.tearDown(self)
def test_parse_path_info(self):
testdata = [
('', None),
('/', None),
('/2003', ('2003', None)),
('/2003/', ('2003', None)),
('/2003/index', ('2003', None)),
('/2003/index.theme', ('2003', 'theme')),
]
for data, expected in testdata:
eq_(yeararchives.parse_path_info(data), expected)
| willkg/douglas | douglas/tests/test_yeararchives.py | Python | mit | 668 | 0 |
from datetime import datetime
from xmodule.modulestore import Location, namedtuple_to_son
from xmodule.modulestore.exceptions import ItemNotFoundError
from xmodule.modulestore.inheritance import own_metadata
from xmodule.exceptions import InvalidVersionError
from xmodule.modulestore.mongo.base import MongoModuleStore
from pytz import UTC
DRAFT = 'draft'
# Things w/ these categories should never be marked as version='draft'
DIRECT_ONLY_CATEGORIES = ['course', 'chapter', 'sequential', 'about', 'static_tab', 'course_info']
def as_draft(location):
"""
Returns the Location that is the draft for `location`
"""
return Location(location).replace(revision=DRAFT)
def as_published(location):
"""
Returns the Location that is the published version for `location`
"""
return Location(location).replace(revision=None)
def wrap_draft(item):
"""
Sets `item.is_draft` to `True` if the item is a
draft, and `False` otherwise. Sets the item's location to the
non-draft location in either case
"""
setattr(item, 'is_draft', item.location.revision == DRAFT)
item.location = item.location.replace(revision=None)
return item
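def _example_location_revisions(loc):
    """Illustrative helper, not part of the original module. Assuming `loc`
    is any valid Location, this shows the draft/published round trip used
    throughout DraftModuleStore below.
    """
    draft_loc = as_draft(loc)                 # same location, revision == DRAFT
    published_loc = as_published(draft_loc)   # revision stripped back to None
    return draft_loc, published_loc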
class DraftModuleStore(MongoModuleStore):
"""
This mixin modifies a modulestore to give it draft semantics.
That is, edits made to units are stored to locations that have the revision DRAFT,
and when reads are made, they first read with revision DRAFT, and then fall back
to the baseline revision only if DRAFT doesn't exist.
This module also includes functionality to promote DRAFT modules (and optionally
their children) to published modules.
"""
def get_item(self, location, depth=0):
"""
Returns an XModuleDescriptor instance for the item at location.
If location.revision is None, returns the item with the most
recent revision
If any segment of the location is None except revision, raises
xmodule.modulestore.exceptions.InsufficientSpecificationError
If no object is found at that location, raises
xmodule.modulestore.exceptions.ItemNotFoundError
location: Something that can be passed to Location
depth (int): An argument that some module stores may use to prefetch
descendents of the queried modules for more efficient results later
in the request. The depth is counted in the number of calls to
get_children() to cache. None indicates to cache all descendents
"""
try:
return wrap_draft(super(DraftModuleStore, self).get_item(as_draft(location), depth=depth))
except ItemNotFoundError:
return wrap_draft(super(DraftModuleStore, self).get_item(location, depth=depth))
def get_instance(self, course_id, location, depth=0):
"""
Get an instance of this location, with policy for course_id applied.
TODO (vshnayder): this may want to live outside the modulestore eventually
"""
try:
return wrap_draft(super(DraftModuleStore, self).get_instance(course_id, as_draft(location), depth=depth))
except ItemNotFoundError:
return wrap_draft(super(DraftModuleStore, self).get_instance(course_id, location, depth=depth))
def get_items(self, location, course_id=None, depth=0):
"""
Returns a list of XModuleDescriptor instances for the items
that match location. Any element of location that is None is treated
as a wildcard that matches any value
location: Something that can be passed to Location
depth: An argument that some module stores may use to prefetch
descendents of the queried modules for more efficient results later
in the request. The depth is counted in the number of calls to
get_children() to cache. None indicates to cache all descendents
"""
draft_loc = as_draft(location)
draft_items = super(DraftModuleStore, self).get_items(draft_loc, course_id=course_id, depth=depth)
items = super(DraftModuleStore, self).get_items(location, course_id=course_id, depth=depth)
draft_locs_found = set(item.location.replace(revision=None) for item in draft_items)
non_draft_items = [
item
for item in items
if (item.location.revision != DRAFT
and item.location.replace(revision=None) not in draft_locs_found)
]
return [wrap_draft(item) for item in draft_items + non_draft_items]
def clone_item(self, source, location):
"""
Clone a new item that is a copy of the item at the location `source`
and writes it to `location`
"""
if Location(location).category in DIRECT_ONLY_CATEGORIES:
raise InvalidVersionError(location)
return wrap_draft(super(DraftModuleStore, self).clone_item(source, as_draft(location)))
def update_item(self, location, data, allow_not_found=False):
"""
Set the data in the item specified by the location to
data
location: Something that can be passed to Location
data: A nested dictionary of problem data
"""
draft_loc = as_draft(location)
try:
draft_item = self.get_item(location)
if not getattr(draft_item, 'is_draft', False):
self.clone_item(location, draft_loc)
except ItemNotFoundError, e:
if not allow_not_found:
raise e
return super(DraftModuleStore, self).update_item(draft_loc, data)
def update_children(self, location, children):
"""
Set the children for the item specified by the location to
children
location: Something that can be passed to Location
children: A list of child item identifiers
"""
draft_loc = as_draft(location)
draft_item = self.get_item(location)
if not getattr(draft_item, 'is_draft', False):
self.clone_item(location, draft_loc)
return super(DraftModuleStore, self).update_children(draft_loc, children)
def update_metadata(self, location, metadata):
"""
Set the metadata for the item specified by the location to
metadata
location: Something that can be passed to Location
metadata: A nested dictionary of module metadata
"""
draft_loc = as_draft(location)
draft_item = self.get_item(location)
if not getattr(draft_item, 'is_draft', False):
self.clone_item(location, draft_loc)
if 'is_draft' in metadata:
del metadata['is_draft']
return super(DraftModuleStore, self).update_metadata(draft_loc, metadata)
def delete_item(self, location, delete_all_versions=False):
"""
Delete an item from this modulestore
location: Something that can be passed to Location
"""
super(DraftModuleStore, self).delete_item(as_draft(location))
if delete_all_versions:
super(DraftModuleStore, self).delete_item(as_published(location))
return
def get_parent_locations(self, location, course_id):
'''Find all locations that are the parents of this location. Needed
for path_to_location().
returns an iterable of things that can be passed to Location.
'''
return super(DraftModuleStore, self).get_parent_locations(location, course_id)
def publish(self, location, published_by_id):
"""
Save a current draft to the underlying modulestore
"""
draft = self.get_item(location)
draft.cms.published_date = datetime.now(UTC)
draft.cms.published_by = published_by_id
super(DraftModuleStore, self).update_item(location, draft._model_data._kvs._data)
super(DraftModuleStore, self).update_children(location, draft._model_data._kvs._children)
super(DraftModuleStore, self).update_metadata(location, own_metadata(draft))
self.delete_item(location)
def unpublish(self, location):
"""
Turn the published version into a draft, removing the published version
"""
if Location(location).category in DIRECT_ONLY_CATEGORIES:
raise InvalidVersionError(location)
super(DraftModuleStore, self).clone_item(location, as_draft(location))
super(DraftModuleStore, self).delete_item(location)
def _query_children_for_cache_children(self, items):
# first get non-draft in a round-trip
queried_children = []
to_process_non_drafts = super(DraftModuleStore, self)._query_children_for_cache_children(items)
to_process_dict = {}
for non_draft in to_process_non_drafts:
to_process_dict[Location(non_draft["_id"])] = non_draft
# now query all draft content in another round-trip
query = {
'_id': {'$in': [namedtuple_to_son(as_draft(Location(item))) for item in items]}
}
to_process_drafts = list(self.collection.find(query))
# now we have to go through all drafts and replace the non-draft
        # with the draft. This is because the semantics of the DraftStore are to
# always return the draft - if available
for draft in to_process_drafts:
draft_loc = Location(draft["_id"])
draft_as_non_draft_loc = draft_loc.replace(revision=None)
# does non-draft exist in the collection
# if so, replace it
if draft_as_non_draft_loc in to_process_dict:
to_process_dict[draft_as_non_draft_loc] = draft
# convert the dict - which is used for look ups - back into a list
for key, value in to_process_dict.iteritems():
queried_children.append(value)
return queried_children
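def _example_publish_flow(store, location, user_id):
    """Illustrative only, not part of the original module: a typical draft
    lifecycle against a DraftModuleStore instance (the arguments are assumed
    to be a valid store, location and user id).
    """
    # edits land on the DRAFT revision
    store.update_metadata(location, {'display_name': 'Edited title'})
    # publishing copies the draft over the published version and drops the draft
    store.publish(location, user_id)
    # unpublishing turns the published version back into a draft
    store.unpublish(location)
    return store.get_item(location)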
| IITBinterns13/edx-platform-dev | common/lib/xmodule/xmodule/modulestore/mongo/draft.py | Python | agpl-3.0 | 9,889 | 0.002225 |
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
import random
import bpy
from bpy.props import BoolProperty, IntProperty, StringProperty
from sverchok.node_tree import SverchCustomTreeNode
from sverchok.data_structure import updateNode, changable_sockets
import numpy as np
from numpy import random as np_random, ndarray, array
class ListShuffleNode(bpy.types.Node, SverchCustomTreeNode):
'''
Triggers: Randomize list order
    Tooltip: Randomly change the order of the elements in a list
'''
bl_idname = 'ListShuffleNode'
bl_label = 'List Shuffle'
bl_icon = 'OUTLINER_OB_EMPTY'
sv_icon = 'SV_LIST_SHUFFLE'
level: IntProperty(name='level_to_Shuffle', default=2, min=1, update=updateNode)
seed: IntProperty(name='Seed', default=0, update=updateNode)
typ: StringProperty(name='typ', default='')
newsock: BoolProperty(name='newsock', default=False)
def draw_buttons(self, context, layout):
layout.prop(self, 'level', text="level")
if 'seed' not in self.inputs:
layout.prop(self, 'seed', text="Seed")
def sv_init(self, context):
self.inputs.new('SvStringsSocket', "data")
self.inputs.new('SvStringsSocket', "seed").prop_name = 'seed'
self.outputs.new('SvStringsSocket', 'data')
def sv_update(self):
if 'data' in self.inputs and self.inputs['data'].links:
inputsocketname = 'data'
outputsocketname = ['data']
changable_sockets(self, inputsocketname, outputsocketname)
def process(self):
if self.outputs[0].is_linked and self.inputs[0].is_linked:
seed = self.inputs['seed'].sv_get(deepcopy=False)[0][0]
random.seed(seed)
np_random.seed(seed)
data = self.inputs['data'].sv_get(deepcopy=False)
output = self.shuffle(data, self.level)
self.outputs['data'].sv_set(output)
def shuffle(self, data, level):
level -= 1
if level:
if level == 1 and isinstance(data, ndarray):
out = np.array(data)
for row in out:
np_random.shuffle(row)
return out
out = []
for l in data:
out.append(self.shuffle(l, level))
return out
elif isinstance(data, list):
l = data.copy()
random.shuffle(l)
return l
elif isinstance(data, tuple):
data = list(data)
random.shuffle(data)
return tuple(data)
elif isinstance(data, ndarray):
out = array(data)
np_random.shuffle(out)
return out
def register():
bpy.utils.register_class(ListShuffleNode)
def unregister():
bpy.utils.unregister_class(ListShuffleNode)
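# Illustrative sketch, not part of the node: `level` selects the nesting depth
# that gets shuffled. A plain-Python analogue of the behaviour on hypothetical
# data (kept outside Blender, so it never touches bpy):
def _example_shuffle_levels():
    data = [[1, 2, 3], [4, 5, 6]]
    rng = random.Random(42)
    level1 = list(data)
    rng.shuffle(level1)              # level=1: reorder the sub-lists themselves
    level2 = []
    for row in data:                 # level=2: reorder items inside each sub-list
        row = list(row)
        rng.shuffle(row)
        level2.append(row)
    return level1, level2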
| nortikin/sverchok | nodes/list_struct/shuffle.py | Python | gpl-3.0 | 3,555 | 0.001125 |
"""
core/api/serializers.py is the module for core model api data serializers
"""
#import core django module
from django.contrib.auth.models import User, Permission
#import external modules
from rest_framework import serializers
#import project modules
from core.models import (Product, ProductCategory, UnitOfMeasurement, UOMCategory, CompanyCategory, Company,
Currency, Rate, Contact, Address, EmployeeCategory, Employee, ProductPresentation,
ModeOfAdministration, ProductItem, ProductFormulation)
class UserSerializer(serializers.ModelSerializer):
"""
REST API serializer for User model
"""
class Meta:
model = User
class BaseModelSerializer(serializers.ModelSerializer):
"""
Base Model Serializer for models
"""
created_by = UserSerializer(required=False, read_only=True)
modified_by = UserSerializer(required=False, read_only=True)
class ProductCategorySerializer(BaseModelSerializer):
"""
REST API Serializer for ProductCategory model
"""
class Meta:
model = ProductCategory
class ProductSerializer(BaseModelSerializer):
"""
REST API Serializer for Product models
"""
class Meta:
model = Product
class UOMCategorySerializer(BaseModelSerializer):
"""
REST API Serializer for UOMCategory model
"""
class Meta:
model = UOMCategory
class UnitOfMeasurementSerializer(BaseModelSerializer):
"""
REST API Serializer for UnitOfMeasurement model
"""
class Meta:
model = UnitOfMeasurement
class CompanyCategorySerializer(BaseModelSerializer):
"""
REST API serializer for CompanyCategory model
"""
class Meta:
model = CompanyCategory
class CompanySerializer(BaseModelSerializer):
"""
REST API serializer for Company model
"""
class Meta:
model = Company
class CurrencySerializer(BaseModelSerializer):
"""
REST API serializer for Currency model
"""
class Meta:
model = Currency
fields = ('code', 'name', 'symbol', 'symbol_position', 'rates',)
class RateSerializer(BaseModelSerializer):
"""
REST API serializer for Rate model
"""
class Meta:
model = Rate
class ContactSerializer(BaseModelSerializer):
"""
REST API serializer for Contact model
"""
class Meta:
model = Contact
class AddressSerializer(BaseModelSerializer):
"""
REST API serializer for Address model
"""
class Meta:
model = Address
class EmployeeCategorySerializer(BaseModelSerializer):
"""
REST API serializer for EmployeeCategory
"""
class Meta:
model = EmployeeCategory
class EmployeeSerializer(BaseModelSerializer):
"""
REST API serializer for Employee
"""
class Meta:
model = Employee
class PermissionSerializer(BaseModelSerializer):
"""
REST API serializer for Permission model
"""
class Meta:
model = Permission
class ProductPresentationSerializer(BaseModelSerializer):
"""
REST API serializer for ProductPresentation model
"""
class Meta:
model = ProductPresentation
class ModeOfAdministrationSerializer(BaseModelSerializer):
"""
REST API serializer for ModeOfAdministration model
"""
class Meta:
model = ModeOfAdministration
class ProductItemSerializer(BaseModelSerializer):
"""
REST API serializer for ProductItem model
"""
class Meta:
model = ProductItem
class ProductFormulationSerializer(BaseModelSerializer):
"""
REST API serializer for ProductFormulation model, it can be Lyophilized, Liquid or Not Applicable
"""
class Meta:
model = ProductFormulation
| eHealthAfrica/LMIS | LMIS/core/api/serializers.py | Python | gpl-2.0 | 3,856 | 0.001815 |
import http.client
from github3 import GitHubError
import github3.exceptions
from cumulusci.core.exceptions import GithubApiNotFoundError
from cumulusci.core.utils import process_bool_arg
from cumulusci.tasks.github.base import BaseGithubTask
class MergeBranch(BaseGithubTask):
task_docs = """
Merges the most recent commit on the current branch into other branches depending on the value of source_branch.
If source_branch is a branch that does not start with the specified branch_prefix, then the commit will be
    merged to all branches that begin with branch_prefix and are not themselves child branches (i.e. branches whose names do not contain '__').
If source_branch begins with branch_prefix, then the commit is merged to all child branches of source_branch.
"""
task_options = {
"commit": {
"description": "The commit to merge into feature branches. Defaults to the current head commit."
},
"source_branch": {
"description": "The source branch to merge from. Defaults to project__git__default_branch."
},
"branch_prefix": {
"description": "A list of prefixes of branches that should receive the merge. Defaults to project__git__prefix_feature"
},
"update_future_releases": {
"description": "If source_branch is a release branch, then merge all future release branches that exist. Defaults to False."
},
}
def _init_options(self, kwargs):
super()._init_options(kwargs)
if "commit" not in self.options:
self.options["commit"] = self.project_config.repo_commit
if "branch_prefix" not in self.options:
self.options[
"branch_prefix"
] = self.project_config.project__git__prefix_feature
if "source_branch" not in self.options:
self.options[
"source_branch"
] = self.project_config.project__git__default_branch
self.options["update_future_releases"] = process_bool_arg(
self.options.get("update_future_releases") or False
)
def _init_task(self):
super()._init_task()
self.repo = self.get_repo()
def _run_task(self):
self._validate_source_branch(self.options["source_branch"])
branches_to_merge = self._get_branches_to_merge()
for branch in branches_to_merge:
self._merge(
branch.name,
self.options["source_branch"],
self.options["commit"],
)
def _validate_source_branch(self, source_branch):
"""Validates that the source branch exists in the repository"""
try:
self.repo.branch(source_branch)
except github3.exceptions.NotFoundError:
message = f"Branch {source_branch} not found"
raise GithubApiNotFoundError(message)
def _get_existing_prs(self, source_branch, branch_prefix):
"""Returns the existing pull requests from the source branch
to other branches that are candidates for merging."""
existing_prs = []
for pr in self.repo.pull_requests(state="open"):
if pr.base.ref.startswith(branch_prefix) and pr.head.ref == source_branch:
existing_prs.append(pr.base.ref)
return existing_prs
def _get_branches_to_merge(self):
"""
If source_branch is the default branch (or a branch that doesn't start with a prefix), we
gather all branches with branch_prefix that are not child branches.
        NOTE: We only include the _next_ closest release branch when automerging from main.
        A change on main may conflict with the current contents of the lowest release branch.
In this case, we would like for that conflict to only need to be resolved once
(not once for each release branch).
If source_branch starts with branch prefix, we gather
all branches with branch_prefix that are direct descendents of source_branch.
If update_future_releases is True, and source_branch is a release branch
then we also collect all future release branches.
"""
repo_branches = list(self.repo.branches())
next_release = self._get_next_release(repo_branches)
update_future_releases = self._update_future_releases(next_release)
child_branches = []
main_descendents = []
release_branches = []
for branch in repo_branches:
# check for adding future release branches
if update_future_releases and self._is_future_release_branch(
branch.name, next_release
):
release_branches.append(branch)
continue
            # check if we are looking at the source_branch
if branch.name == self.options["source_branch"]:
self.logger.debug(f"Skipping branch {branch.name}: is source branch")
continue
# check for branch prefix match
elif not branch.name.startswith(self.options["branch_prefix"]):
self.logger.debug(
f"Skipping branch {branch.name}: does not match prefix '{self.options['branch_prefix']}'"
)
continue
# check if source_branch doesn't have prefix and is not a child (e.g. main)
elif (
not self.options["source_branch"].startswith(
self.options["branch_prefix"]
)
and "__" not in branch.name
):
# only merge to the lowest numbered release branch
# when merging from a branch without a prefix (e.g. main)
if self._is_future_release_branch(branch.name, next_release):
continue
main_descendents.append(branch)
# else, we have a branch that starts with branch_prefix
            # check if this branch is a direct descendent
elif self._is_source_branch_direct_descendent(branch.name):
child_branches.append(branch)
# else not a direct descendent
else:
self.logger.debug(
f"Skipping branch {branch.name}: is not a direct descendent of {self.options['source_branch']}"
)
to_merge = []
if child_branches:
self.logger.debug(
f"Found child branches to update: {[branch.name for branch in child_branches]}"
)
to_merge = child_branches
elif self.options["source_branch"].startswith(self.options["branch_prefix"]):
self.logger.debug(
f"No children found for branch {self.options['source_branch']}"
)
if release_branches:
self.logger.debug(
f"Found future release branches to update: {[branch.name for branch in release_branches]}"
)
to_merge = to_merge + release_branches
if main_descendents:
self.logger.debug(
f"Found descendents of {self.options['source_branch']} to update: {[branch.name for branch in main_descendents]}"
)
to_merge = to_merge + main_descendents
return to_merge
def _get_next_release(self, repo_branches):
"""Returns the integer that corresponds to the lowest release number found on all release branches.
        NOTE: We assume that once a release branch is merged it will be deleted.
"""
release_nums = [
int(branch.name.split("/")[1])
for branch in repo_branches
if self._is_release_branch(branch.name)
]
next_release = sorted(release_nums)[0] if release_nums else None
return next_release
def _update_future_releases(self, next_release):
"""Determines whether or not to update future releases.
Returns True if all of the below checks are True. False otherwise.
Checks:
        (1) Did we receive the 'update_future_releases' flag?
(2) Is the source_branch a release branch?
(3) Is it the lowest numbered release branch that exists?
NOTE: This functionality assumes that the lowest numbered release branch in the repo is
the next closest release. Put another way, once a release branch is merged we assume that it is immediately deleted.
"""
update_future_releases = False
if (
self.options["update_future_releases"]
and self._is_release_branch(self.options["source_branch"])
and next_release == int(self.options["source_branch"].split("/")[1])
):
update_future_releases = True
return update_future_releases
def _is_release_branch(self, branch_name):
"""A release branch begins with the given prefix"""
prefix = self.options["branch_prefix"]
if not branch_name.startswith(prefix):
return False
parts = branch_name[len(prefix) :].split("__")
return len(parts) == 1 and parts[0].isdigit()
def _merge(self, branch_name, source, commit):
"""Attempt to merge a commit from source to branch with branch_name"""
compare = self.repo.compare_commits(branch_name, commit)
if not compare or not compare.files:
self.logger.info(f"Skipping branch {branch_name}: no file diffs found")
return
try:
self.repo.merge(branch_name, commit)
self.logger.info(
f"Merged {compare.behind_by} commits into branch: {branch_name}"
)
except GitHubError as e:
if e.code != http.client.CONFLICT:
raise
if branch_name in self._get_existing_prs(
self.options["source_branch"], self.options["branch_prefix"]
):
self.logger.info(
f"Merge conflict on branch {branch_name}: merge PR already exists"
)
return
try:
pull = self.repo.create_pull(
title=f"Merge {source} into {branch_name}",
base=branch_name,
head=source,
body="This pull request was automatically generated because "
"an automated merge hit a merge conflict",
)
self.logger.info(
f"Merge conflict on branch {branch_name}: created pull request #{pull.number}"
)
except github3.exceptions.UnprocessableEntity as e:
self.logger.error(
f"Error creating merge conflict pull request to merge {source} into {branch_name}:\n{e.response.text}"
)
def _is_source_branch_direct_descendent(self, branch_name):
"""Returns True if branch is a direct descendent of the source branch"""
source_dunder_count = self.options["source_branch"].count("__")
return (
branch_name.startswith(f"{self.options['source_branch']}__")
and branch_name.count("__") == source_dunder_count + 1
)
def _is_future_release_branch(self, branch_name, next_release):
return (
self._is_release_branch(branch_name)
and branch_name != self.options["source_branch"]
and self._get_release_num(branch_name) > next_release
)
def _get_release_num(self, release_branch_name):
"""Given a release branch, returns an integer that
corresponds to the release number for that branch"""
return int(release_branch_name.split(self.options["branch_prefix"])[1])
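# Illustrative sketch, not part of the task: with a hypothetical branch_prefix
# of "feature/", "feature/230" is a release branch (prefix + digits only),
# "feature/230__api" is its direct descendent (exactly one extra "__" segment),
# and "feature/230__api__perf" is a grandchild, so it is not merged directly.
def _example_direct_descendent(source_branch, branch_name):
    """Mirrors _is_source_branch_direct_descendent() for illustration only."""
    return (
        branch_name.startswith(f"{source_branch}__")
        and branch_name.count("__") == source_branch.count("__") + 1
    )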
| SalesforceFoundation/CumulusCI | cumulusci/tasks/github/merge.py | Python | bsd-3-clause | 11,772 | 0.003058 |
# This file is part of FNPDjango, licensed under GNU Affero GPLv3 or later.
# Copyright © Fundacja Nowoczesna Polska. See README.md for more information.
#
from django.conf import settings
from django.test import TestCase
class UtilsSettingsTestCase(TestCase):
def test_lazy_ugettext_lazy(self):
self.assertEqual(str(settings.TEST_LAZY_UGETTEXT_LAZY),
"Lazy setting.")
| fnp/fnpdjango | tests/tests/test_utils_settings.py | Python | agpl-3.0 | 398 | 0.005038 |
#!/usr/bin/env python3
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Executable sample for creating a Azure AD Context Feed.
Creating other feeds requires changing this sample code.
"""
import argparse
import json
from typing import Any, Mapping
from google.auth.transport import requests
from common import chronicle_auth
from common import regions
CHRONICLE_API_BASE_URL = "https://backstory.googleapis.com"
def create_azure_ad_context_feed(http_session: requests.AuthorizedSession,
tokenendpoint: str, clientid: str,
clientsecret: str, retrievedevices: bool,
retrievegroups: bool) -> Mapping[str, Any]:
"""Creates a new Azure AD Context feed.
Args:
http_session: Authorized session for HTTP requests.
    tokenendpoint: A string which represents the endpoint to connect to.
    clientid: A string which represents the ID of the credential to use.
    clientsecret: A string which represents the secret of the credential to use.
retrievedevices: A boolean to indicate whether to retrieve devices or not.
retrievegroups: A boolean to indicate whether to retrieve groups or not.
Returns:
New Azure AD Feed.
Raises:
requests.exceptions.HTTPError: HTTP request resulted in an error
(response.status_code >= 400).
"""
url = f"{CHRONICLE_API_BASE_URL}/v1/feeds/"
body = {
"details": {
"feedSourceType": "API",
"logType": "AZURE_AD_CONTEXT",
"azureAdContextSettings": {
"authentication": {
"tokenEndpoint": tokenendpoint,
"clientId": clientid,
"clientSecret": clientsecret
},
"retrieveDevices": retrievedevices,
"retrieveGroups": retrievegroups
}
}
}
response = http_session.request("POST", url, json=body)
# Expected server response:
# {
# "name": "feeds/e0eb5fb0-8fbd-4f0f-b063-710943ad7812",
# "details": {
# "logType": "AZURE_AD_CONTEXT",
# "feedSourceType": "API",
# "azureAdContextSettings": {
# "authentication": {
# "tokenEndpoint": "tokenendpoint.example.com",
# "clientId": "clientid_example",
# "clientSecret": "clientsecret_example"
# },
# "retrieveDevices": true
# }
# },
# "feedState": "PENDING_ENABLEMENT"
# }
if response.status_code >= 400:
print(response.text)
response.raise_for_status()
return response.json()
if __name__ == "__main__":
parser = argparse.ArgumentParser()
chronicle_auth.add_argument_credentials_file(parser)
regions.add_argument_region(parser)
parser.add_argument(
"-te",
"--tokenendpoint",
type=str,
required=True,
help="token endpoint")
parser.add_argument(
"-ci",
"--clientid",
type=str,
required=True,
help="client id")
parser.add_argument(
"-cs",
"--clientsecret",
type=str,
required=True,
help="client secret")
parser.add_argument(
"-rd",
"--retrievedevices",
type=bool,
required=True,
help="retrieve devices")
parser.add_argument(
"-rg",
"--retrievegroups",
type=str,
required=True,
help="retrieve groups")
args = parser.parse_args()
CHRONICLE_API_BASE_URL = regions.url(CHRONICLE_API_BASE_URL, args.region)
session = chronicle_auth.initialize_http_session(args.credentials_file)
new_feed = create_azure_ad_context_feed(session, args.tokenendpoint,
args.clientid, args.clientsecret,
args.retrievedevices,
args.retrievegroups)
print(json.dumps(new_feed, indent=2))
| chronicle/api-samples-python | feeds/create_azure_ad_context_feed.py | Python | apache-2.0 | 4,356 | 0.008494 |
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Initial migration
Revision ID: 464e951dc3b8
Revises: None
Create Date: 2014-08-05 17:41:34.470183
"""
# revision identifiers, used by Alembic.
revision = '464e951dc3b8'
down_revision = None
from alembic import op # noqa: E402
import sqlalchemy as sa # noqa: E402
def upgrade():
op.create_table(
'states',
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('state', sa.BigInteger(), nullable=False),
sa.Column('s_metadata', sa.Text(), nullable=True),
sa.PrimaryKeyConstraint('name'))
op.create_table(
'modules_state',
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('state', sa.Boolean(), nullable=False),
sa.PrimaryKeyConstraint('name'))
| openstack/cloudkitty | cloudkitty/db/sqlalchemy/alembic/versions/464e951dc3b8_initial_migration.py | Python | apache-2.0 | 1,314 | 0 |
import pytest
from pontoon.test import factories
@pytest.fixture
def admin():
"""Admin - a superuser"""
return factories.UserFactory.create(username="admin", is_superuser=True,)
@pytest.fixture
def client_superuser(client, admin):
"""Provides a client with a logged in superuser. """
client.force_login(admin)
return client
@pytest.fixture
def user_a():
return factories.UserFactory(username="user_a")
@pytest.fixture
def user_b():
return factories.UserFactory(username="user_b")
@pytest.fixture
def user_c():
return factories.UserFactory(username="user_c")
@pytest.fixture
def member(client, user_a):
"""Provides a `LoggedInMember` with the attributes `user` and `client`
the `client` is authenticated
"""
class LoggedInMember(object):
def __init__(self, user, client):
client.force_login(user)
self.client = client
self.user = user
return LoggedInMember(user_a, client)
@pytest.fixture
def locale_a():
return factories.LocaleFactory(code="kg", name="Klingon",)
@pytest.fixture
def google_translate_locale(locale_a):
"""Set the Google Cloud Translation API locale code for locale_a"""
locale_a.google_translate_code = "bg"
locale_a.save()
return locale_a
@pytest.fixture
def ms_locale(locale_a):
"""Set the Microsoft API locale code for locale_a"""
locale_a.ms_translator_code = "gb"
locale_a.save()
return locale_a
@pytest.fixture
def locale_b():
return factories.LocaleFactory(code="gs", name="Geonosian",)
@pytest.fixture
def project_a():
return factories.ProjectFactory(
slug="project_a", name="Project A", repositories=[],
)
@pytest.fixture
def project_b():
return factories.ProjectFactory(slug="project_b", name="Project B")
@pytest.fixture
def system_project_a():
return factories.ProjectFactory(
slug="system_project_a",
name="System Project A",
repositories=[],
system_project=True,
)
@pytest.fixture
def resource_a(project_a):
return factories.ResourceFactory(
project=project_a, path="resource_a.po", format="po"
)
@pytest.fixture
def resource_b(project_b):
return factories.ResourceFactory(
project=project_b, path="resource_b.po", format="po"
)
@pytest.fixture
def entity_a(resource_a):
return factories.EntityFactory(resource=resource_a, string="entity a")
@pytest.fixture
def entity_b(resource_b):
return factories.EntityFactory(resource=resource_b, string="entity b")
@pytest.fixture
def project_locale_a(project_a, locale_a):
return factories.ProjectLocaleFactory(project=project_a, locale=locale_a,)
@pytest.fixture
def translation_a(locale_a, project_locale_a, entity_a, user_a):
"""Return a translation.
Note that we require the `project_locale_a` fixture because a
valid ProjectLocale is needed in order to query Translations.
"""
translation_a = factories.TranslationFactory(
entity=entity_a,
locale=locale_a,
user=user_a,
string="Translation for entity_a",
)
translation_a.locale.refresh_from_db()
translation_a.entity.resource.project.refresh_from_db()
return translation_a
@pytest.fixture
def tag_a(resource_a, project_a, locale_a):
# Tags require a TranslatedResource to work.
factories.TranslatedResourceFactory.create(resource=resource_a, locale=locale_a)
tag = factories.TagFactory.create(slug="tag", name="Tag", project=project_a,)
tag.resources.add(resource_a)
return tag
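# Illustrative sketch, not part of the original fixtures: a test simply names
# the fixtures it needs as arguments. The URL below is an assumption, so the
# helper is prefixed with "_" and never collected by pytest.
def _example_member_fixture_usage(member, project_a):
    response = member.client.get("/projects/%s/" % project_a.slug)
    assert response.status_code in (200, 302)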
| jotes/pontoon | pontoon/test/fixtures/base.py | Python | bsd-3-clause | 3,576 | 0.000559 |
import copy
from ereuse_devicehub.resources.device.schema import Device
from ereuse_devicehub.resources.device.settings import DeviceSubSettings
class Mobile(Device):
imei = {
'type': 'string',
'unique': True
}
meid = {
'type': 'string',
'unique': True
}
type = {
'type': 'string',
'allowed': {'Smartphone', 'Tablet'},
'required': True
}
manufacturer = copy.copy(Device.manufacturer)
manufacturer['required'] = True
serialNumber = copy.copy(Device.serialNumber)
serialNumber['required'] = True
model = copy.copy(Device.model)
model['required'] = True
class MobileSettings(DeviceSubSettings):
_schema = Mobile
| eReuse/DeviceHub | ereuse_devicehub/resources/device/mobile/settings.py | Python | agpl-3.0 | 722 | 0 |
# Copyright (c) 2011 X.commerce, a business unit of eBay Inc.
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Implementation of Discovery backend."""
import collections
import copy
import datetime
import functools
import sys
import threading
import time
import uuid
from oslo.config import cfg
from oslo.db import exception as db_exc
from oslo.db.sqlalchemy import session as db_session
from oslo.db.sqlalchemy import utils as sqlalchemyutils
from oslo.utils import excutils
from oslo.utils import timeutils
import six
# from sqlalchemy import and_
from sqlalchemy import Boolean
from sqlalchemy.exc import NoSuchTableError
from sqlalchemy import Integer
from sqlalchemy import MetaData
# from sqlalchemy import or_
from sqlalchemy.orm import contains_eager
from sqlalchemy.orm import joinedload
from sqlalchemy.orm import joinedload_all
from sqlalchemy.orm import noload
from sqlalchemy.orm import undefer
from sqlalchemy.schema import Table
from sqlalchemy import sql
from sqlalchemy.sql.expression import asc
from sqlalchemy.sql.expression import desc
from sqlalchemy.sql import false
from sqlalchemy.sql import func
from sqlalchemy.sql import null
from sqlalchemy.sql import true
from sqlalchemy import String
from nova import block_device
from nova.compute import task_states
from nova.compute import vm_states
import nova.context
# from nova.db.sqlalchemy import models
from nova import exception
from nova.i18n import _, _LI
from nova.openstack.common import log as logging
from nova.openstack.common import uuidutils
try:
from nova import quota
except:
pass
# RIAK
from nova.db.discovery.query import or_
from nova.db.discovery.query import and_
import itertools
import traceback
import uuid
import pprint
import riak
import inspect
from inspect import getmembers
from sqlalchemy.util._collections import KeyedTuple
import netaddr
from sqlalchemy.sql.expression import BinaryExpression
from sqlalchemy.orm.evaluator import EvaluatorCompiler
from sqlalchemy.orm.collections import InstrumentedList
from nova.db.discovery import models
import pytz
from desimplifier import ObjectDesimplifier
from utils import find_table_name
from query import RiakModelQuery
db_opts = [
cfg.StrOpt('osapi_compute_unique_server_name_scope',
default='',
help='When set, compute API will consider duplicate hostnames '
'invalid within the specified scope, regardless of case. '
'Should be empty, "project" or "global".'),
]
CONF = cfg.CONF
CONF.register_opts(db_opts)
CONF.import_opt('compute_topic', 'nova.compute.rpcapi')
LOG = logging.getLogger(__name__)
_ENGINE_FACADE = None
_LOCK = threading.Lock()
def _create_facade_lazily():
global _LOCK, _ENGINE_FACADE
if _ENGINE_FACADE is None:
with _LOCK:
if _ENGINE_FACADE is None:
_ENGINE_FACADE = db_session.EngineFacade.from_config(CONF)
return _ENGINE_FACADE
def get_engine(use_slave=False):
facade = _create_facade_lazily()
return facade.get_engine(use_slave=use_slave)
# def get_session(use_slave=False, **kwargs):
# facade = _create_facade_lazily()
# return facade.get_session(use_slave=use_slave, **kwargs)
class ControlledExecution():
def __enter__(self):
pass
def __exit__(self, type, value, traceback):
pass
class FakeSession():
def add(self, *objs):
for obj in objs:
obj.save()
def query(self, *entities, **kwargs):
return RiakModelQuery(*entities, **kwargs)
def begin(self, *args, **kwargs):
return ControlledExecution()
def flush(self, *args, **kwargs):
pass
def get_session(use_slave=False, **kwargs):
# facade = _create_facade_lazily(use_slave)
# return facade.get_session(**kwargs)
return FakeSession()
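# Illustrative sketch, not part of the original module: the discovery backend
# swaps SQLAlchemy sessions for FakeSession, so the usual session idioms keep
# working while queries are routed through RiakModelQuery.
def _example_fake_session_usage():
    """Illustrative only; assumes the Riak backend is reachable."""
    session = get_session()
    with session.begin():
        services = session.query(models.Service).all()
    return services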
_SHADOW_TABLE_PREFIX = 'shadow_'
_DEFAULT_QUOTA_NAME = 'default'
PER_PROJECT_QUOTAS = ['fixed_ips', 'floating_ips', 'networks']
def get_backend():
"""The backend is this module itself."""
return sys.modules[__name__]
def require_admin_context(f):
"""Decorator to require admin request context.
The first argument to the wrapped function must be the context.
"""
@functools.wraps(f)
def wrapper(*args, **kwargs):
nova.context.require_admin_context(args[0])
return f(*args, **kwargs)
return wrapper
def require_context(f):
"""Decorator to require *any* user or admin context.
This does no authorization for user or project access matching, see
:py:func:`nova.context.authorize_project_context` and
:py:func:`nova.context.authorize_user_context`.
The first argument to the wrapped function must be the context.
"""
@functools.wraps(f)
def wrapper(*args, **kwargs):
nova.context.require_context(args[0])
return f(*args, **kwargs)
return wrapper
def require_instance_exists_using_uuid(f):
"""Decorator to require the specified instance to exist.
Requires the wrapped function to use context and instance_uuid as
their first two arguments.
"""
@functools.wraps(f)
def wrapper(context, instance_uuid, *args, **kwargs):
instance_get_by_uuid(context, instance_uuid)
return f(context, instance_uuid, *args, **kwargs)
return wrapper
def require_aggregate_exists(f):
"""Decorator to require the specified aggregate to exist.
Requires the wrapped function to use context and aggregate_id as
their first two arguments.
"""
@functools.wraps(f)
def wrapper(context, aggregate_id, *args, **kwargs):
aggregate_get(context, aggregate_id)
return f(context, aggregate_id, *args, **kwargs)
return wrapper
def _retry_on_deadlock(f):
"""Decorator to retry a DB API call if Deadlock was received."""
@functools.wraps(f)
def wrapped(*args, **kwargs):
while True:
try:
return f(*args, **kwargs)
except db_exc.DBDeadlock:
LOG.warn(_("Deadlock detected when running "
"'%(func_name)s': Retrying..."),
dict(func_name=f.__name__))
# Retry!
time.sleep(0.5)
continue
functools.update_wrapper(wrapped, f)
return wrapped
# TODO: model_query has been modified to build a RiakModelQuery instead of a SQLAlchemy query
def model_query(context, *args, **kwargs):
# base_model = kwargs["base_model"]
# models = args
return RiakModelQuery(*args, **kwargs)
def exact_filter(query, model, filters, legal_keys):
"""Applies exact match filtering to a query.
Returns the updated query. Modifies filters argument to remove
filters consumed.
:param query: query to apply filters to
:param model: model object the query applies to, for IN-style
filtering
:param filters: dictionary of filters; values that are lists,
tuples, sets, or frozensets cause an 'IN' test to
be performed, while exact matching ('==' operator)
is used for other values
:param legal_keys: list of keys to apply exact filtering to
"""
filter_dict = {}
# Walk through all the keys
for key in legal_keys:
# Skip ones we're not filtering on
if key not in filters:
continue
# OK, filtering on this key; what value do we search for?
value = filters.pop(key)
if key in ('metadata', 'system_metadata'):
column_attr = getattr(model, key)
if isinstance(value, list):
for item in value:
for k, v in item.iteritems():
query = query.filter(column_attr.any(key=k))
query = query.filter(column_attr.any(value=v))
else:
for k, v in value.iteritems():
query = query.filter(column_attr.any(key=k))
query = query.filter(column_attr.any(value=v))
elif isinstance(value, (list, tuple, set, frozenset)):
# Looking for values in a list; apply to query directly
column_attr = getattr(model, key)
query = query.filter(column_attr.in_(value))
else:
# OK, simple exact match; save for later
filter_dict[key] = value
# Apply simple exact matches
if filter_dict:
query = query.filter_by(**filter_dict)
return query
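# Illustrative sketch, not part of the original API: exact_filter() consumes
# the keys it recognises, so whatever remains in `filters` can be handled by
# other (e.g. regex) filtering afterwards. Hypothetical usage:
def _example_exact_filter(context, filters):
    query = model_query(context, models.Instance)
    query = exact_filter(query, models.Instance, filters,
                         ['project_id', 'vm_state'])
    return query, filters  # `filters` now only holds the unconsumed keys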
def convert_objects_related_datetimes(values, *datetime_keys):
for key in datetime_keys:
if key in values and values[key]:
if isinstance(values[key], six.string_types):
values[key] = timeutils.parse_strtime(values[key])
# NOTE(danms): Strip UTC timezones from datetimes, since they're
# stored that way in the database
values[key] = values[key].replace(tzinfo=None)
return values
def _sync_instances(context, project_id, user_id, session):
return dict(zip(('instances', 'cores', 'ram'),
_instance_data_get_for_user(
context, project_id, user_id, session)))
def _sync_floating_ips(context, project_id, user_id, session):
return dict(floating_ips=_floating_ip_count_by_project(
context, project_id, session))
def _sync_fixed_ips(context, project_id, user_id, session):
return dict(fixed_ips=_fixed_ip_count_by_project(
context, project_id, session))
def _sync_security_groups(context, project_id, user_id, session):
return dict(security_groups=_security_group_count_by_project_and_user(
context, project_id, user_id, session))
def _sync_server_groups(context, project_id, user_id, session):
return dict(server_groups=_instance_group_count_by_project_and_user(
context, project_id, user_id, session))
QUOTA_SYNC_FUNCTIONS = {
'_sync_instances': _sync_instances,
'_sync_floating_ips': _sync_floating_ips,
'_sync_fixed_ips': _sync_fixed_ips,
'_sync_security_groups': _sync_security_groups,
'_sync_server_groups': _sync_server_groups,
}
###################
def constraint(**conditions):
return Constraint(conditions)
def equal_any(*values):
return EqualityCondition(values)
def not_equal(*values):
return InequalityCondition(values)
class Constraint(object):
def __init__(self, conditions):
self.conditions = conditions
def apply(self, model, query):
for key, condition in self.conditions.iteritems():
for clause in condition.clauses(getattr(model, key)):
query = query.filter(clause)
return query
class EqualityCondition(object):
def __init__(self, values):
self.values = values
def clauses(self, field):
        # the method signature requires us to return an iterable, even though
        # for the OR operator this will actually be a single clause
return [or_(*[field == value for value in self.values])]
class InequalityCondition(object):
def __init__(self, values):
self.values = values
def clauses(self, field):
return [field != value for value in self.values]
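# Illustrative sketch, not part of the original module: a Constraint bundles
# per-column conditions which Constraint.apply() turns into query filters.
# The host/task_state values below are hypothetical.
def _example_constraint_usage(context):
    cons = constraint(host=equal_any('node1', 'node2'),
                      task_state=not_equal('deleting'))
    query = model_query(context, models.Instance)
    return cons.apply(models.Instance, query)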
###################
@require_admin_context
def service_destroy(context, service_id):
session = get_session()
with session.begin():
count = model_query(context, models.Service, session=session).\
filter_by(id=service_id).\
soft_delete(synchronize_session=False)
if count == 0:
raise exception.ServiceNotFound(service_id=service_id)
model_query(context, models.ComputeNode, session=session).\
filter_by(service_id=service_id).\
soft_delete(synchronize_session=False)
def _service_get(context, service_id, with_compute_node=True, session=None,
use_slave=False):
query = model_query(context, models.Service, session=session,
use_slave=use_slave).\
filter_by(id=service_id)
if with_compute_node:
query = query.options(joinedload('compute_node'))
result = query.first()
if not result:
raise exception.ServiceNotFound(service_id=service_id)
return result
@require_admin_context
def service_get(context, service_id, with_compute_node=False,
use_slave=False):
return _service_get(context, service_id,
with_compute_node=with_compute_node,
use_slave=use_slave)
@require_admin_context
def service_get_all(context, disabled=None):
query = model_query(context, models.Service)
    # TODO: commented out the following as it was a source of problems with
    # the RIAK implementation.
# if disabled is not None:
# query = query.filter_by(disabled=disabled)
return query.all()
@require_admin_context
def service_get_all_by_topic(context, topic):
return model_query(context, models.Service, read_deleted="no").\
filter_by(disabled=False).\
filter_by(topic=topic).\
all()
@require_admin_context
def service_get_by_host_and_topic(context, host, topic):
return model_query(context, models.Service, read_deleted="no").\
filter_by(disabled=False).\
filter_by(host=host).\
filter_by(topic=topic).\
first()
@require_admin_context
def service_get_all_by_host(context, host):
return model_query(context, models.Service, read_deleted="no").\
filter_by(host=host).\
all()
@require_admin_context
def service_get_by_compute_host(context, host, use_slave=False):
result = model_query(context, models.Service, read_deleted="no",
use_slave=use_slave).\
options(joinedload('compute_node')).\
filter_by(host=host).\
first()
if not result:
raise exception.ComputeHostNotFound(host=host)
return result
@require_admin_context
def service_get_by_args(context, host, binary):
result = model_query(context, models.Service).\
filter_by(host=host).\
filter_by(binary=binary).\
first()
if not result:
raise exception.HostBinaryNotFound(host=host, binary=binary)
return result
@require_admin_context
def service_create(context, values):
service_ref = models.Service()
service_ref.update(values, do_save=False)
if not CONF.enable_new_services:
service_ref.disabled = True
service_binary = model_query(context, models.Service).\
filter_by(host=values.get('host')).\
filter_by(binary=values.get('binary')).\
all()
if service_binary is None:
service_topic = model_query(context, models.Service).\
filter_by(host=values.get('host')).\
filter_by(topic=values.get('topic')).\
all()
if service_topic is None:
service_ref.save()
else:
raise exception.ServiceTopicExists(host=values.get('host'),
topic=values.get('topic'))
else:
raise exception.ServiceBinaryExists(host=values.get('host'),
binary=values.get('binary'))
# if not CONF.enable_new_services:
# service_ref.disabled = True
# try:
# service_ref.save()
# except db_exc.DBDuplicateEntry as e:
# if 'binary' in e.columns:
# raise exception.ServiceBinaryExists(host=values.get('host'),
# binary=values.get('binary'))
# raise exception.ServiceTopicExists(host=values.get('host'),
# topic=values.get('topic'))
return service_ref
@require_admin_context
@_retry_on_deadlock
def service_update(context, service_id, values):
session = get_session()
with session.begin():
service_ref = _service_get(context, service_id,
with_compute_node=False, session=session)
service_ref.update(values)
return service_ref
###################
def compute_node_get(context, compute_id):
return _compute_node_get(context, compute_id)
def _compute_node_get(context, compute_id, session=None):
result = model_query(context, models.ComputeNode, session=session).\
filter_by(id=compute_id).\
options(joinedload('service')).\
first()
if not result:
raise exception.ComputeHostNotFound(host=compute_id)
return result
@require_admin_context
def compute_node_get_by_service_id(context, service_id):
result = model_query(context, models.ComputeNode, read_deleted='no').\
filter_by(service_id=service_id).\
first()
if not result:
raise exception.ServiceNotFound(service_id=service_id)
return result
@require_admin_context
def compute_node_get_all(context, no_date_fields):
    # NOTE(msdubov): Using lower-level 'select' queries and joining the tables
    #                manually here allows us to gain a 3x speed-up and 5x less
    #                network load / memory usage compared to the sqla ORM.
# engine = get_engine()
# # Retrieve ComputeNode, Service
# compute_node = models.ComputeNode.__table__
# service = models.Service.__table__
# with engine.begin() as conn:
# redundant_columns = set(['deleted_at', 'created_at', 'updated_at',
# 'deleted']) if no_date_fields else set([])
# def filter_columns(table):
# return [c for c in table.c if c.name not in redundant_columns]
# compute_node_query = sql.select(filter_columns(compute_node)).\
# where(compute_node.c.deleted == 0).\
# order_by(compute_node.c.service_id)
# compute_node_rows = conn.execute(compute_node_query).fetchall()
# service_query = sql.select(filter_columns(service)).\
# where((service.c.deleted == 0) &
# (service.c.binary == 'nova-compute')).\
# order_by(service.c.id)
# service_rows = conn.execute(service_query).fetchall()
# # Join ComputeNode & Service manually.
# services = {}
# for proxy in service_rows:
# services[proxy['id']] = dict(proxy.items())
# compute_nodes = []
# for proxy in compute_node_rows:
# node = dict(proxy.items())
# node['service'] = services.get(proxy['service_id'])
# compute_nodes.append(node)
from nova.db.discovery.simplifier import ObjectSimplifier
from nova.db.discovery.desimplifier import ObjectDesimplifier
query = RiakModelQuery(models.ComputeNode)
compute_nodes = query.all()
def novabase_to_dict(ref):
request_uuid = uuid.uuid1()
object_simplifier = ObjectSimplifier(request_uuid=request_uuid)
object_desimplifier = ObjectDesimplifier(request_uuid=request_uuid)
simplified_object = object_simplifier.simplify(ref)
simplified_object.pop("metadata_novabase_classname")
desimplified_object = object_desimplifier.desimplify(simplified_object)
return desimplified_object
result = []
for each in compute_nodes:
compute_node = novabase_to_dict(each)
compute_node["service"] = novabase_to_dict(compute_node["service"])
compute_node["service"].pop("compute_node")
result += [compute_node]
return result
@require_admin_context
def compute_node_search_by_hypervisor(context, hypervisor_match):
field = models.ComputeNode.hypervisor_hostname
return model_query(context, models.ComputeNode).\
options(joinedload('service')).\
filter(field.like('%%%s%%' % hypervisor_match)).\
all()
@require_admin_context
def compute_node_create(context, values):
"""Creates a new ComputeNode and populates the capacity fields
with the most recent data.
"""
datetime_keys = ('created_at', 'deleted_at', 'updated_at')
convert_objects_related_datetimes(values, *datetime_keys)
compute_node_ref = models.ComputeNode()
compute_node_ref.update(values)
compute_node_ref.save()
return compute_node_ref
@require_admin_context
@_retry_on_deadlock
def compute_node_update(context, compute_id, values):
"""Updates the ComputeNode record with the most recent data."""
session = get_session()
with session.begin():
compute_ref = _compute_node_get(context, compute_id, session=session)
        # Always update this, even if there are going to be no other
# changes in data. This ensures that we invalidate the
# scheduler cache of compute node data in case of races.
values['updated_at'] = timeutils.utcnow()
datetime_keys = ('created_at', 'deleted_at', 'updated_at')
convert_objects_related_datetimes(values, *datetime_keys)
compute_ref.update(values)
return compute_ref
@require_admin_context
def compute_node_delete(context, compute_id):
"""Delete a ComputeNode record."""
session = get_session()
with session.begin():
result = model_query(context, models.ComputeNode, session=session).\
filter_by(id=compute_id).\
soft_delete(synchronize_session=False)
if not result:
raise exception.ComputeHostNotFound(host=compute_id)
def compute_node_statistics(context):
"""Compute statistics over all compute nodes."""
result = model_query(context,
func.count(models.ComputeNode.id),
func.sum(models.ComputeNode.vcpus),
func.sum(models.ComputeNode.memory_mb),
func.sum(models.ComputeNode.local_gb),
func.sum(models.ComputeNode.vcpus_used),
func.sum(models.ComputeNode.memory_mb_used),
func.sum(models.ComputeNode.local_gb_used),
func.sum(models.ComputeNode.free_ram_mb),
func.sum(models.ComputeNode.free_disk_gb),
func.sum(models.ComputeNode.current_workload),
func.sum(models.ComputeNode.running_vms),
func.sum(models.ComputeNode.disk_available_least),
base_model=models.ComputeNode,
read_deleted="no").\
filter(models.Service.disabled == false()).\
filter(
models.Service.id ==
models.ComputeNode.service_id).\
first()
# Build a dict of the info--making no assumptions about result
fields = ('count', 'vcpus', 'memory_mb', 'local_gb', 'vcpus_used',
'memory_mb_used', 'local_gb_used', 'free_ram_mb', 'free_disk_gb',
'current_workload', 'running_vms', 'disk_available_least')
return dict((field, int(result[idx] or 0))
for idx, field in enumerate(fields))
###################
@require_admin_context
def certificate_create(context, values):
certificate_ref = models.Certificate()
for (key, value) in values.iteritems():
certificate_ref[key] = value
certificate_ref.save()
return certificate_ref
@require_admin_context
def certificate_get_all_by_project(context, project_id):
return model_query(context, models.Certificate, read_deleted="no").\
filter_by(project_id=project_id).\
all()
@require_admin_context
def certificate_get_all_by_user(context, user_id):
return model_query(context, models.Certificate, read_deleted="no").\
filter_by(user_id=user_id).\
all()
@require_admin_context
def certificate_get_all_by_user_and_project(context, user_id, project_id):
return model_query(context, models.Certificate, read_deleted="no").\
filter_by(user_id=user_id).\
filter_by(project_id=project_id).\
all()
###################
@require_context
def floating_ip_get(context, id):
try:
result = model_query(context, models.FloatingIp, project_only=True).\
filter_by(id=id).\
options(joinedload_all('fixed_ip.instance')).\
first()
if not result:
raise exception.FloatingIpNotFound(id=id)
except db_exc.DBError:
msg = _("Invalid floating ip id %s in request") % id
LOG.warn(msg)
raise exception.InvalidID(id=id)
return result
@require_context
def floating_ip_get_pools(context):
pools = []
for result in model_query(context, models.FloatingIp.pool,
base_model=models.FloatingIp).distinct():
pools.append({'name': result[0]})
return pools
@require_context
def floating_ip_allocate_address(context, project_id, pool,
auto_assigned=False):
nova.context.authorize_project_context(context, project_id)
session = get_session()
with session.begin():
floating_ip_ref = model_query(context, models.FloatingIp,
session=session, read_deleted="no").\
filter_by(fixed_ip_id=None).\
filter_by(project_id=None).\
filter_by(pool=pool).\
with_lockmode('update').\
first()
# NOTE(vish): if with_lockmode isn't supported, as in sqlite,
# then this has concurrency issues
if not floating_ip_ref:
raise exception.NoMoreFloatingIps()
floating_ip_ref['project_id'] = project_id
floating_ip_ref['auto_assigned'] = auto_assigned
session.add(floating_ip_ref)
return floating_ip_ref['address']
@require_context
def floating_ip_bulk_create(context, ips):
session = get_session()
result = []
with session.begin():
for ip in ips:
model = models.FloatingIp()
model.update(ip)
result.append(model)
try:
# NOTE(boris-42): To get existing address we have to do each
# time session.flush()..
session.add(model)
session.flush()
except db_exc.DBDuplicateEntry:
raise exception.FloatingIpExists(address=ip['address'])
return result
def _ip_range_splitter(ips, block_size=256):
"""Yields blocks of IPs no more than block_size elements long."""
out = []
count = 0
for ip in ips:
out.append(ip['address'])
count += 1
if count > block_size - 1:
yield out
out = []
count = 0
if out:
yield out
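# Example (illustrative; the address strings are just placeholder data):
# with the default block_size of 256, a list of 300 address dicts is
# yielded as two blocks:
#
#     ips = [{'address': '10.0.0.%d' % i} for i in range(300)]
#     blocks = list(_ip_range_splitter(ips))
#     # len(blocks) == 2; len(blocks[0]) == 256; len(blocks[1]) == 44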
@require_context
def floating_ip_bulk_destroy(context, ips):
session = get_session()
with session.begin():
project_id_to_quota_count = collections.defaultdict(int)
for ip_block in _ip_range_splitter(ips):
# Find any floating IPs that were not auto_assigned and
# thus need quota released.
query = model_query(context, models.FloatingIp).\
filter(models.FloatingIp.address.in_(ip_block)).\
filter_by(auto_assigned=False)
rows = query.all()
for row in rows:
# The count is negative since we release quota by
# reserving negative quota.
project_id_to_quota_count[row['project_id']] -= 1
# Delete the floating IPs.
model_query(context, models.FloatingIp).\
filter(models.FloatingIp.address.in_(ip_block)).\
soft_delete(synchronize_session='fetch')
# Delete the quotas, if needed.
for project_id, count in project_id_to_quota_count.iteritems():
try:
reservations = quota.QUOTAS.reserve(context,
project_id=project_id,
floating_ips=count)
quota.QUOTAS.commit(context,
reservations,
project_id=project_id)
except Exception:
with excutils.save_and_reraise_exception():
LOG.exception(_("Failed to update usages bulk "
"deallocating floating IP"))
@require_context
def floating_ip_create(context, values):
floating_ip_ref = models.FloatingIp()
floating_ip_ref.update(values)
try:
floating_ip_ref.save()
except db_exc.DBDuplicateEntry:
raise exception.FloatingIpExists(address=values['address'])
return floating_ip_ref
def _floating_ip_count_by_project(context, project_id, session=None):
nova.context.authorize_project_context(context, project_id)
# TODO(tr3buchet): why leave auto_assigned floating IPs out?
return model_query(context, models.FloatingIp, read_deleted="no",
session=session).\
filter_by(project_id=project_id).\
filter_by(auto_assigned=False).\
count()
@require_context
@_retry_on_deadlock
def floating_ip_fixed_ip_associate(context, floating_address,
fixed_address, host):
session = get_session()
with session.begin():
floating_ip_ref = _floating_ip_get_by_address(context,
floating_address,
session=session)
fixed_ip_ref = model_query(context, models.FixedIp, session=session).\
filter_by(address=fixed_address).\
options(joinedload('network')).\
first()
if floating_ip_ref.fixed_ip_id == fixed_ip_ref["id"]:
return None
floating_ip_ref.fixed_ip_id = fixed_ip_ref["id"]
floating_ip_ref.host = host
return fixed_ip_ref
@require_context
@_retry_on_deadlock
def floating_ip_deallocate(context, address):
session = get_session()
with session.begin():
return model_query(context, models.FloatingIp, session=session).\
filter_by(address=address).\
filter(models.FloatingIp.project_id != null()).\
update({'project_id': None,
'host': None,
'auto_assigned': False},
synchronize_session=False)
@require_context
def floating_ip_destroy(context, address):
model_query(context, models.FloatingIp).\
filter_by(address=address).\
delete()
@require_context
def floating_ip_disassociate(context, address):
session = get_session()
with session.begin():
floating_ip_ref = model_query(context,
models.FloatingIp,
session=session).\
filter_by(address=address).\
first()
if not floating_ip_ref:
raise exception.FloatingIpNotFoundForAddress(address=address)
fixed_ip_ref = model_query(context, models.FixedIp, session=session).\
filter_by(id=floating_ip_ref['fixed_ip_id']).\
options(joinedload('network')).\
first()
floating_ip_ref.fixed_ip_id = None
floating_ip_ref.host = None
return fixed_ip_ref
@require_context
def floating_ip_set_auto_assigned(context, address):
model_query(context, models.FloatingIp).\
filter_by(address=address).\
update({'auto_assigned': True})
def _floating_ip_get_all(context, session=None):
return model_query(context, models.FloatingIp, read_deleted="no",
session=session)
@require_admin_context
def floating_ip_get_all(context):
floating_ip_refs = _floating_ip_get_all(context).all()
if not floating_ip_refs:
raise exception.NoFloatingIpsDefined()
return floating_ip_refs
@require_admin_context
def floating_ip_get_all_by_host(context, host):
floating_ip_refs = _floating_ip_get_all(context).\
filter_by(host=host).\
all()
if not floating_ip_refs:
raise exception.FloatingIpNotFoundForHost(host=host)
return floating_ip_refs
@require_context
def floating_ip_get_all_by_project(context, project_id):
nova.context.authorize_project_context(context, project_id)
# TODO(tr3buchet): why do we not want auto_assigned floating IPs here?
return _floating_ip_get_all(context).\
filter_by(project_id=project_id).\
filter_by(auto_assigned=False).\
options(joinedload_all('fixed_ip.instance')).\
all()
@require_context
def floating_ip_get_by_address(context, address):
return _floating_ip_get_by_address(context, address)
def _floating_ip_get_by_address(context, address, session=None):
# if address string is empty explicitly set it to None
if not address:
address = None
try:
result = model_query(context, models.FloatingIp, session=session).\
filter_by(address=address).\
options(joinedload_all('fixed_ip.instance')).\
first()
if not result:
raise exception.FloatingIpNotFoundForAddress(address=address)
except db_exc.DBError:
msg = _("Invalid floating IP %s in request") % address
LOG.warn(msg)
raise exception.InvalidIpAddressError(msg)
# If the floating IP has a project ID set, check to make sure
# the non-admin user has access.
if result.project_id and nova.context.is_user_context(context):
nova.context.authorize_project_context(context, result.project_id)
return result
@require_context
def floating_ip_get_by_fixed_address(context, fixed_address):
return model_query(context, models.FloatingIp).\
outerjoin(models.FixedIp,
models.FixedIp.id ==
models.FloatingIp.fixed_ip_id).\
filter(models.FixedIp.address == fixed_address).\
all()
@require_context
def floating_ip_get_by_fixed_ip_id(context, fixed_ip_id):
return model_query(context, models.FloatingIp).\
filter_by(fixed_ip_id=fixed_ip_id).\
all()
@require_context
def floating_ip_update(context, address, values):
session = get_session()
with session.begin():
float_ip_ref = _floating_ip_get_by_address(context, address, session)
float_ip_ref.update(values)
try:
float_ip_ref.save(session=session)
except db_exc.DBDuplicateEntry:
raise exception.FloatingIpExists(address=values['address'])
return float_ip_ref
def _dnsdomain_get(context, session, fqdomain):
return model_query(context, models.DNSDomain,
session=session, read_deleted="no").\
filter_by(domain=fqdomain).\
with_lockmode('update').\
first()
@require_context
def dnsdomain_get(context, fqdomain):
session = get_session()
with session.begin():
return _dnsdomain_get(context, session, fqdomain)
def _dnsdomain_get_or_create(context, session, fqdomain):
domain_ref = _dnsdomain_get(context, session, fqdomain)
if not domain_ref:
dns_ref = models.DNSDomain()
dns_ref.update({'domain': fqdomain,
'availability_zone': None,
'project_id': None})
return dns_ref
return domain_ref
@require_admin_context
def dnsdomain_register_for_zone(context, fqdomain, zone):
session = get_session()
with session.begin():
domain_ref = _dnsdomain_get_or_create(context, session, fqdomain)
domain_ref.scope = 'private'
domain_ref.availability_zone = zone
session.add(domain_ref)
@require_admin_context
def dnsdomain_register_for_project(context, fqdomain, project):
session = get_session()
with session.begin():
domain_ref = _dnsdomain_get_or_create(context, session, fqdomain)
domain_ref.scope = 'public'
domain_ref.project_id = project
session.add(domain_ref)
@require_admin_context
def dnsdomain_unregister(context, fqdomain):
model_query(context, models.DNSDomain).\
filter_by(domain=fqdomain).\
delete()
@require_context
def dnsdomain_list(context):
query = model_query(context, models.DNSDomain, read_deleted="no")
return [row.domain for row in query.all()]
def dnsdomain_get_all(context):
return model_query(context, models.DNSDomain, read_deleted="no").all()
###################
@require_admin_context
def fixed_ip_associate(context, address, instance_uuid, network_id=None,
reserved=False):
"""Keyword arguments:
reserved -- should be a boolean value(True or False), exact value will be
used to filter on the fixed ip address
"""
if not uuidutils.is_uuid_like(instance_uuid):
raise exception.InvalidUUID(uuid=instance_uuid)
session = get_session()
with session.begin():
network_or_none = or_(models.FixedIp.network_id == network_id,
models.FixedIp.network_id == null())
fixed_ip_ref = model_query(context, models.FixedIp, session=session,
read_deleted="no").\
filter(network_or_none).\
filter_by(reserved=reserved).\
filter_by(address=address).\
with_lockmode('update').\
first()
# NOTE(vish): if with_lockmode isn't supported, as in sqlite,
# then this has concurrency issues
if fixed_ip_ref is None:
raise exception.FixedIpNotFoundForNetwork(address=address,
network_uuid=network_id)
if fixed_ip_ref.instance_uuid:
raise exception.FixedIpAlreadyInUse(address=address,
instance_uuid=instance_uuid)
if not fixed_ip_ref.network_id:
fixed_ip_ref.network_id = network_id
fixed_ip_ref.instance_uuid = instance_uuid
session.add(fixed_ip_ref)
return fixed_ip_ref
@require_admin_context
def fixed_ip_associate_pool(context, network_id, instance_uuid=None,
host=None):
if instance_uuid and not uuidutils.is_uuid_like(instance_uuid):
raise exception.InvalidUUID(uuid=instance_uuid)
session = get_session()
with session.begin():
network_or_none = or_(models.FixedIp.network_id == network_id,
models.FixedIp.network_id == null())
fixed_ip_ref = model_query(context, models.FixedIp, session=session,
read_deleted="no").\
filter(network_or_none).\
filter_by(reserved=False).\
filter_by(instance_uuid=None).\
filter_by(host=None).\
with_lockmode('update').\
first()
# NOTE(vish): if with_lockmode isn't supported, as in sqlite,
# then this has concurrency issues
if not fixed_ip_ref:
raise exception.NoMoreFixedIps(net=network_id)
if fixed_ip_ref['network_id'] is None:
fixed_ip_ref['network'] = network_id
if instance_uuid:
fixed_ip_ref['instance_uuid'] = instance_uuid
if host:
fixed_ip_ref['host'] = host
session.add(fixed_ip_ref)
return fixed_ip_ref
@require_context
def fixed_ip_create(context, values):
fixed_ip_ref = models.FixedIp()
fixed_ip_ref.update(values)
try:
fixed_ip_ref.save()
except db_exc.DBDuplicateEntry:
raise exception.FixedIpExists(address=values['address'])
return fixed_ip_ref
@require_context
def fixed_ip_bulk_create(context, ips):
session = get_session()
with session.begin():
for ip in ips:
model = models.FixedIp()
model.update(ip)
try:
# NOTE (vsergeyev): To get existing address we have to do each
# time session.flush().
# See related note at line 697.
session.add(model)
session.flush()
except db_exc.DBDuplicateEntry:
raise exception.FixedIpExists(address=ip['address'])
@require_context
def fixed_ip_disassociate(context, address):
session = get_session()
with session.begin():
_fixed_ip_get_by_address(context, address, session=session).\
update({'instance_uuid': None,
'virtual_interface_id': None})
@require_admin_context
def fixed_ip_disassociate_all_by_timeout(context, host, time):
session = get_session()
# NOTE(vish): only update fixed ips that "belong" to this
# host; i.e. the network host or the instance
# host matches. Two queries necessary because
# join with update doesn't work.
with session.begin():
host_filter = or_(and_(models.Instance.host == host,
models.Network.multi_host == true()),
models.Network.host == host)
result = model_query(context, models.FixedIp.id,
base_model=models.FixedIp, read_deleted="no",
session=session).\
filter(models.FixedIp.allocated == false()).\
filter(models.FixedIp.updated_at < time).\
join((models.Network,
models.Network.id == models.FixedIp.network_id)).\
join((models.Instance,
models.Instance.uuid == models.FixedIp.instance_uuid)).\
filter(host_filter).\
all()
fixed_ip_ids = [fip[0] for fip in result]
if not fixed_ip_ids:
return 0
result = model_query(context, models.FixedIp, session=session).\
filter(models.FixedIp.id.in_(fixed_ip_ids)).\
update({'instance_uuid': None,
'leased': False,
'updated_at': timeutils.utcnow()},
synchronize_session='fetch')
return result
@require_context
def fixed_ip_get(context, id, get_network=False):
query = model_query(context, models.FixedIp).filter_by(id=id)
if get_network:
query = query.options(joinedload('network'))
result = query.first()
if not result:
raise exception.FixedIpNotFound(id=id)
# FIXME(sirp): shouldn't we just use project_only here to restrict the
# results?
if (nova.context.is_user_context(context) and
result['instance_uuid'] is not None):
instance = instance_get_by_uuid(context.elevated(read_deleted='yes'),
result['instance_uuid'])
nova.context.authorize_project_context(context, instance.project_id)
return result
@require_admin_context
def fixed_ip_get_all(context):
result = model_query(context, models.FixedIp, read_deleted="yes").all()
if not result:
raise exception.NoFixedIpsDefined()
return result
@require_context
def fixed_ip_get_by_address(context, address, columns_to_join=None):
return _fixed_ip_get_by_address(context, address,
columns_to_join=columns_to_join)
def _fixed_ip_get_by_address(context, address, session=None,
columns_to_join=None):
if session is None:
session = get_session()
if columns_to_join is None:
columns_to_join = []
with session.begin(subtransactions=True):
try:
result = model_query(context, models.FixedIp, session=session)
for column in columns_to_join:
result = result.options(joinedload_all(column))
result = result.filter_by(address=address).first()
if not result:
raise exception.FixedIpNotFoundForAddress(address=address)
except db_exc.DBError:
msg = _("Invalid fixed IP Address %s in request") % address
LOG.warn(msg)
raise exception.FixedIpInvalid(msg)
# NOTE(sirp): shouldn't we just use project_only here to restrict the
# results?
if (nova.context.is_user_context(context) and
result['instance_uuid'] is not None):
instance = _instance_get_by_uuid(
context.elevated(read_deleted='yes'),
result['instance_uuid'],
session
)
nova.context.authorize_project_context(context,
instance.project_id)
return result
@require_admin_context
def fixed_ip_get_by_address_detailed(context, address):
""":returns: a tuple of (models.FixedIp, models.Network, models.Instance)
"""
try:
result = model_query(context, models.FixedIp,
models.Network, models.Instance).\
filter_by(address=address).\
outerjoin((models.Network,
models.Network.id ==
models.FixedIp.network_id)).\
outerjoin((models.Instance,
models.Instance.uuid ==
models.FixedIp.instance_uuid)).\
first()
if not result:
raise exception.FixedIpNotFoundForAddress(address=address)
except db_exc.DBError:
msg = _("Invalid fixed IP Address %s in request") % address
LOG.warn(msg)
raise exception.FixedIpInvalid(msg)
return result
@require_context
def fixed_ip_get_by_floating_address(context, floating_address):
return model_query(context, models.FixedIp).\
outerjoin(models.FloatingIp,
models.FloatingIp.fixed_ip_id ==
models.FixedIp.id).\
filter(models.FloatingIp.address == floating_address).\
first()
# NOTE(tr3buchet) please don't invent an exception here, empty list is fine
@require_context
def fixed_ip_get_by_instance(context, instance_uuid):
if not uuidutils.is_uuid_like(instance_uuid):
raise exception.InvalidUUID(uuid=instance_uuid)
vif_and = and_(models.VirtualInterface.id ==
models.FixedIp.virtual_interface_id,
models.VirtualInterface.deleted == 0)
result = model_query(context, models.FixedIp, read_deleted="no").\
filter_by(instance_uuid=instance_uuid).\
outerjoin(models.VirtualInterface, vif_and).\
options(contains_eager("virtual_interface")).\
options(joinedload('network')).\
options(joinedload('floating_ips')).\
order_by(asc(models.VirtualInterface.created_at),
asc(models.VirtualInterface.id)).\
all()
if not result:
raise exception.FixedIpNotFoundForInstance(instance_uuid=instance_uuid)
    # TODO(Jonathan): quick fix
    LOG.debug("fixed_ip_get_by_instance result: %s", result)
    return [x[0] for x in result]
    # return result
@require_admin_context
def fixed_ip_get_by_host(context, host):
session = get_session()
with session.begin():
instance_uuids = _instance_get_all_uuids_by_host(context, host,
session=session)
if not instance_uuids:
return []
return model_query(context, models.FixedIp, session=session).\
filter(models.FixedIp.instance_uuid.in_(instance_uuids)).\
all()
@require_context
def fixed_ip_get_by_network_host(context, network_id, host):
result = model_query(context, models.FixedIp, read_deleted="no").\
filter_by(network_id=network_id).\
filter_by(host=host).\
first()
if not result:
raise exception.FixedIpNotFoundForNetworkHost(network_id=network_id,
host=host)
return result
@require_context
def fixed_ips_by_virtual_interface(context, vif_id):
result = model_query(context, models.FixedIp, read_deleted="no").\
filter_by(virtual_interface_id=vif_id).\
options(joinedload('network')).\
options(joinedload('floating_ips')).\
all()
return result
@require_context
def fixed_ip_update(context, address, values):
session = get_session()
with session.begin():
        LOG.debug("fixed_ip_update: address=%s values=%s", address, values)
_fixed_ip_get_by_address(context, address, session=session).\
update(values)
def _fixed_ip_count_by_project(context, project_id, session=None):
nova.context.authorize_project_context(context, project_id)
return model_query(context, models.FixedIp.id,
base_model=models.FixedIp, read_deleted="no",
session=session).\
join((models.Instance,
models.Instance.uuid == models.FixedIp.instance_uuid)).\
filter(models.Instance.project_id == project_id).\
count()
###################
@require_context
def virtual_interface_create(context, values):
"""Create a new virtual interface record in the database.
:param values: = dict containing column values
"""
try:
vif_ref = models.VirtualInterface()
vif_ref.update(values)
vif_ref.save()
except db_exc.DBError:
raise exception.VirtualInterfaceCreateException()
return vif_ref
def _virtual_interface_query(context, session=None, use_slave=False):
return model_query(context, models.VirtualInterface, session=session,
read_deleted="no", use_slave=use_slave)
@require_context
def virtual_interface_get(context, vif_id):
"""Gets a virtual interface from the table.
:param vif_id: = id of the virtual interface
"""
vif_ref = _virtual_interface_query(context).\
filter_by(id=vif_id).\
first()
return vif_ref
@require_context
def virtual_interface_get_by_address(context, address):
"""Gets a virtual interface from the table.
:param address: = the address of the interface you're looking to get
"""
try:
vif_ref = _virtual_interface_query(context).\
filter_by(address=address).\
first()
except db_exc.DBError:
msg = _("Invalid virtual interface address %s in request") % address
LOG.warn(msg)
raise exception.InvalidIpAddressError(msg)
return vif_ref
@require_context
def virtual_interface_get_by_uuid(context, vif_uuid):
"""Gets a virtual interface from the table.
:param vif_uuid: the uuid of the interface you're looking to get
"""
vif_ref = _virtual_interface_query(context).\
filter_by(uuid=vif_uuid).\
first()
return vif_ref
@require_context
@require_instance_exists_using_uuid
def virtual_interface_get_by_instance(context, instance_uuid, use_slave=False):
"""Gets all virtual interfaces for instance.
:param instance_uuid: = uuid of the instance to retrieve vifs for
"""
vif_refs = _virtual_interface_query(context, use_slave=use_slave).\
filter_by(instance_uuid=instance_uuid).\
order_by(asc("created_at"), asc("id")).\
all()
return vif_refs
@require_context
def virtual_interface_get_by_instance_and_network(context, instance_uuid,
network_id):
"""Gets virtual interface for instance that's associated with network."""
vif_ref = _virtual_interface_query(context).\
filter_by(instance_uuid=instance_uuid).\
filter_by(network_id=network_id).\
first()
return vif_ref
@require_context
def virtual_interface_delete_by_instance(context, instance_uuid):
"""Delete virtual interface records that are associated
with the instance given by instance_id.
:param instance_uuid: = uuid of instance
"""
_virtual_interface_query(context).\
filter_by(instance_uuid=instance_uuid).\
soft_delete()
@require_context
def virtual_interface_get_all(context):
"""Get all vifs."""
vif_refs = _virtual_interface_query(context).all()
return vif_refs
###################
def _metadata_refs(metadata_dict, meta_class):
metadata_refs = []
if metadata_dict:
for k, v in metadata_dict.iteritems():
metadata_ref = meta_class()
metadata_ref['key'] = k
metadata_ref['value'] = v
metadata_refs.append(metadata_ref)
return metadata_refs
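# Example (illustrative): turning a plain metadata dict into model rows
# suitable for assignment to an instance relationship:
#
#     refs = _metadata_refs({'role': 'web', 'tier': 'frontend'},
#                           models.InstanceMetadata)
#     # refs is a list of two InstanceMetadata objects with 'key' and
#     # 'value' set; an empty or None dict yields [].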
def _validate_unique_server_name(context, session, name):
if not CONF.osapi_compute_unique_server_name_scope:
return
lowername = name.lower()
base_query = model_query(context, models.Instance, session=session,
read_deleted=False).\
filter(func.lower(models.Instance.hostname) == lowername)
if CONF.osapi_compute_unique_server_name_scope == 'project':
instance_with_same_name = base_query.\
filter_by(project_id=context.project_id).\
count()
elif CONF.osapi_compute_unique_server_name_scope == 'global':
instance_with_same_name = base_query.count()
else:
msg = _('Unknown osapi_compute_unique_server_name_scope value: %s'
' Flag must be empty, "global" or'
' "project"') % CONF.osapi_compute_unique_server_name_scope
LOG.warn(msg)
return
if instance_with_same_name > 0:
raise exception.InstanceExists(name=lowername)
def _handle_objects_related_type_conversions(values):
"""Make sure that certain things in values (which may have come from
an objects.instance.Instance object) are in suitable form for the
database.
"""
# NOTE(danms): Make sure IP addresses are passed as strings to
# the database engine
for key in ('access_ip_v4', 'access_ip_v6'):
if key in values and values[key] is not None:
values[key] = str(values[key])
datetime_keys = ('created_at', 'deleted_at', 'updated_at',
'launched_at', 'terminated_at', 'scheduled_at')
convert_objects_related_datetimes(values, *datetime_keys)
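# Example (illustrative; 'some_datetime' is a placeholder): IP address
# objects are stringified and datetime fields normalized in place:
#
#     values = {'access_ip_v4': netaddr.IPAddress('10.0.0.5'),
#               'launched_at': some_datetime}
#     _handle_objects_related_type_conversions(values)
#     # values['access_ip_v4'] == '10.0.0.5'; 'launched_at' is handled
#     # by convert_objects_related_datetimes().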
@require_context
def instance_create(context, values):
"""Create a new Instance record in the database.
context - request context object
values - dict containing column values.
"""
# NOTE(rpodolyaka): create the default security group, if it doesn't exist.
# This must be done in a separate transaction, so that this one is not
# aborted in case a concurrent one succeeds first and the unique constraint
# for security group names is violated by a concurrent INSERT
security_group_ensure_default(context)
values = values.copy()
values['metadata'] = _metadata_refs(
values.get('metadata'), models.InstanceMetadata)
values['system_metadata'] = _metadata_refs(
values.get('system_metadata'), models.InstanceSystemMetadata)
_handle_objects_related_type_conversions(values)
instance_ref = models.Instance()
if not values.get('uuid'):
values['uuid'] = str(uuid.uuid4())
instance_ref['info_cache'] = models.InstanceInfoCache()
info_cache = values.pop('info_cache', None)
if info_cache is not None:
instance_ref['info_cache'].update(info_cache)
security_groups = values.pop('security_groups', [])
instance_ref.update(values, do_save=False)
def _get_sec_group_models(session, security_groups):
models = []
default_group = _security_group_ensure_default(context, session)
if 'default' in security_groups:
models.append(default_group)
# Generate a new list, so we don't modify the original
security_groups = [x for x in security_groups if x != 'default']
if security_groups:
models.extend(_security_group_get_by_names(context,
session, context.project_id, security_groups))
return models
session = get_session()
with session.begin():
if 'hostname' in values:
_validate_unique_server_name(context, session, values['hostname'])
instance_ref.security_groups = _get_sec_group_models(session,
security_groups)
session.add(instance_ref)
# create the instance uuid to ec2_id mapping entry for instance
ec2_instance_create(context, instance_ref['uuid'])
_instance_extra_create(context, {'instance_uuid': instance_ref['uuid']})
return instance_ref
def _instance_data_get_for_user(context, project_id, user_id, session=None):
result = model_query(context,
func.count(models.Instance.id),
func.sum(models.Instance.vcpus),
func.sum(models.Instance.memory_mb),
base_model=models.Instance,
session=session).\
filter_by(project_id=project_id)
if user_id:
result = result.filter_by(user_id=user_id).first()
else:
result = result.first()
# NOTE(vish): convert None to 0
return (result[0] or 0, result[1] or 0, result[2] or 0)
@require_context
@_retry_on_deadlock
def instance_destroy(context, instance_uuid, constraint=None):
session = get_session()
with session.begin():
if uuidutils.is_uuid_like(instance_uuid):
instance_ref = _instance_get_by_uuid(context, instance_uuid,
session=session)
else:
raise exception.InvalidUUID(instance_uuid)
query = model_query(context, models.Instance, session=session).\
filter_by(uuid=instance_uuid)
if constraint is not None:
query = constraint.apply(models.Instance, query)
count = query.soft_delete()
if count == 0:
raise exception.ConstraintNotMet()
model_query(context, models.SecurityGroupInstanceAssociation,
session=session).\
filter_by(instance_uuid=instance_uuid).\
soft_delete()
model_query(context, models.InstanceInfoCache, session=session).\
filter_by(instance_uuid=instance_uuid).\
soft_delete()
model_query(context, models.InstanceMetadata, session=session).\
filter_by(instance_uuid=instance_uuid).\
soft_delete()
model_query(context, models.InstanceFault, session=session).\
filter_by(instance_uuid=instance_uuid).\
soft_delete()
model_query(context, models.InstanceExtra, session=session).\
filter_by(instance_uuid=instance_uuid).\
soft_delete()
return instance_ref
@require_context
def instance_get_by_uuid(context, uuid, columns_to_join=None, use_slave=False):
return _instance_get_by_uuid(context, uuid,
columns_to_join=columns_to_join, use_slave=use_slave)
def _instance_get_by_uuid(context, uuid, session=None,
columns_to_join=None, use_slave=False):
result = _build_instance_get(context, session=session,
columns_to_join=columns_to_join,
use_slave=use_slave).\
filter_by(uuid=uuid).\
first()
if not result:
raise exception.InstanceNotFound(instance_id=uuid)
return result
@require_context
def instance_get(context, instance_id, columns_to_join=None):
try:
result = _build_instance_get(context, columns_to_join=columns_to_join
).filter_by(id=instance_id).first()
if not result:
raise exception.InstanceNotFound(instance_id=instance_id)
return result
except db_exc.DBError:
# NOTE(sdague): catch all in case the db engine chokes on the
# id because it's too long of an int to store.
msg = _("Invalid instance id %s in request") % instance_id
LOG.warn(msg)
raise exception.InvalidID(id=instance_id)
def _build_instance_get(context, session=None,
columns_to_join=None, use_slave=False):
query = model_query(context, models.Instance, session=session,
project_only=True, use_slave=use_slave).\
options(joinedload_all('security_groups.rules')).\
options(joinedload('info_cache'))
if columns_to_join is None:
columns_to_join = ['metadata', 'system_metadata']
for column in columns_to_join:
if column in ['info_cache', 'security_groups']:
# Already always joined above
continue
query = query.options(joinedload(column))
# NOTE(alaski) Stop lazy loading of columns not needed.
for col in ['metadata', 'system_metadata']:
if col not in columns_to_join:
query = query.options(noload(col))
return query
def _instances_fill_metadata(context, instances,
manual_joins=None, use_slave=False):
"""Selectively fill instances with manually-joined metadata. Note that
instance will be converted to a dict.
:param context: security context
:param instances: list of instances to fill
:param manual_joins: list of tables to manually join (can be any
combination of 'metadata' and 'system_metadata' or
None to take the default of both)
"""
uuids = [inst['uuid'] for inst in instances]
if manual_joins is None:
manual_joins = ['metadata', 'system_metadata']
meta = collections.defaultdict(list)
if 'metadata' in manual_joins:
for row in _instance_metadata_get_multi(context, uuids,
use_slave=use_slave):
meta[row['instance_uuid']].append(row)
sys_meta = collections.defaultdict(list)
if 'system_metadata' in manual_joins:
for row in _instance_system_metadata_get_multi(context, uuids):
sys_meta[row['instance_uuid']].append(row)
pcidevs = collections.defaultdict(list)
if 'pci_devices' in manual_joins:
for row in _instance_pcidevs_get_multi(context, uuids):
pcidevs[row['instance_uuid']].append(row)
filled_instances = []
for inst in instances:
inst = dict(inst.iteritems())
# inst['system_metadata'] = sys_meta[inst['uuid']]
inst['metadata'] = meta[inst['uuid']]
if 'pci_devices' in manual_joins:
inst['pci_devices'] = pcidevs[inst['uuid']]
filled_instances.append(inst)
return filled_instances
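# Example (illustrative; 'ctxt' is a placeholder request context): rows
# from a plain Instance query are converted to dicts with their metadata
# lists attached:
#
#     instances = model_query(ctxt, models.Instance).all()
#     insts = _instances_fill_metadata(ctxt, instances,
#                                      manual_joins=['metadata'])
#     # each element is a dict whose 'metadata' key holds the
#     # InstanceMetadata rows for that instance uuid.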
def _manual_join_columns(columns_to_join):
manual_joins = []
for column in ('metadata', 'system_metadata', 'pci_devices'):
if column in columns_to_join:
columns_to_join.remove(column)
manual_joins.append(column)
return manual_joins, columns_to_join
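# Example (illustrative): the metadata-like columns are split out for
# manual joining and the input list is reduced in place:
#
#     manual, cols = _manual_join_columns(['info_cache', 'metadata',
#                                          'pci_devices'])
#     # manual == ['metadata', 'pci_devices']; cols == ['info_cache']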
@require_context
def instance_get_all(context, columns_to_join=None):
if columns_to_join is None:
columns_to_join = ['info_cache', 'security_groups']
manual_joins = ['metadata', 'system_metadata']
else:
manual_joins, columns_to_join = _manual_join_columns(columns_to_join)
query = model_query(context, models.Instance)
for column in columns_to_join:
query = query.options(joinedload(column))
if not context.is_admin:
# If we're not admin context, add appropriate filter..
if context.project_id:
query = query.filter_by(project_id=context.project_id)
else:
query = query.filter_by(user_id=context.user_id)
instances = query.all()
return _instances_fill_metadata(context, instances, manual_joins)
@require_context
def instance_get_all_by_filters(context, filters, sort_key, sort_dir,
limit=None, marker=None, columns_to_join=None,
use_slave=False):
"""Return instances that match all filters. Deleted instances
will be returned by default, unless there's a filter that says
otherwise.
    Depending on the name of a filter, matching for that filter is
    performed using either exact matching or regular expression
    matching. Exact matching is applied for the following filters::
| ['project_id', 'user_id', 'image_ref',
| 'vm_state', 'instance_type_id', 'uuid',
| 'metadata', 'host', 'system_metadata']
    A third type of filter (also using exact matching) filters
    based on instance metadata tags when supplied under a special
    key named 'filter'::
| filters = {
| 'filter': [
| {'name': 'tag-key', 'value': '<metakey>'},
| {'name': 'tag-value', 'value': '<metaval>'},
| {'name': 'tag:<metakey>', 'value': '<metaval>'}
| ]
| }
    Special keys are used to tweak the query further::
| 'changes-since' - only return instances updated after
| 'deleted' - only return (or exclude) deleted instances
| 'soft_deleted' - modify behavior of 'deleted' to either
| include or exclude instances whose
| vm_state is SOFT_DELETED.
"""
# NOTE(mriedem): If the limit is 0 there is no point in even going
# to the database since nothing is going to be returned anyway.
if limit == 0:
return []
sort_fn = {'desc': desc, 'asc': asc}
if CONF.database.slave_connection == '':
use_slave = False
session = get_session(use_slave=use_slave)
if columns_to_join is None:
columns_to_join = ['info_cache', 'security_groups']
manual_joins = ['metadata', 'system_metadata']
else:
manual_joins, columns_to_join = _manual_join_columns(columns_to_join)
query_prefix = session.query(models.Instance)
for column in columns_to_join:
query_prefix = query_prefix.options(joinedload(column))
query_prefix = query_prefix.order_by(sort_fn[sort_dir](
getattr(models.Instance, sort_key)))
# Make a copy of the filters dictionary to use going forward, as we'll
# be modifying it and we shouldn't affect the caller's use of it.
filters = filters.copy()
filters_ = {}
print("[FILT] filters => %s" % (filters))
query_prefix = session.query(models.Instance)
    if 'changes-since' in filters:
        changes_since = timeutils.normalize_time(filters.pop('changes-since'))
query_prefix = query_prefix.\
filter(models.Instance.updated_at >= changes_since)
if 'deleted' in filters:
# Instances can be soft or hard deleted and the query needs to
# include or exclude both
if filters.pop('deleted'):
if filters.pop('soft_deleted', True):
deleted = or_(
models.Instance.deleted == models.Instance.id,
models.Instance.vm_state == vm_states.SOFT_DELETED
)
query_prefix = query_prefix.\
filter(deleted)
else:
query_prefix = query_prefix.\
filter(models.Instance.deleted == models.Instance.id)
else:
query_prefix = query_prefix.\
filter_by(deleted=0)
if not filters.pop('soft_deleted', False):
# It would be better to have vm_state not be nullable
# but until then we test it explicitly as a workaround.
not_soft_deleted = or_(
models.Instance.vm_state != vm_states.SOFT_DELETED,
models.Instance.vm_state == null()
)
query_prefix = query_prefix.filter(not_soft_deleted)
if 'cleaned' in filters:
if filters.pop('cleaned'):
query_prefix = query_prefix.filter(models.Instance.cleaned == 1)
else:
query_prefix = query_prefix.filter(models.Instance.cleaned == 0)
if not context.is_admin:
# If we're not admin context, add appropriate filter..
if context.project_id:
filters['project_id'] = context.project_id
else:
filters['user_id'] = context.user_id
# Filters for exact matches that we can do along with the SQL query...
# For other filters that don't match this, we will do regexp matching
exact_match_filter_names = ['project_id', 'user_id', 'image_ref',
'vm_state', 'instance_type_id', 'uuid',
'metadata', 'host', 'task_state',
'system_metadata']
# Filter the query
query_prefix = exact_filter(query_prefix, models.Instance,
filters, exact_match_filter_names)
query_prefix = regex_filter(query_prefix, models.Instance, filters)
query_prefix = tag_filter(context, query_prefix, models.Instance,
models.InstanceMetadata,
models.InstanceMetadata.instance_uuid,
filters)
# paginate query
# if marker is not None:
# try:
# marker = _instance_get_by_uuid(context, marker, session=session)
# except exception.InstanceNotFound:
# raise exception.MarkerNotFound(marker)
# TODO: following cannot yet work with the RIAK DB implementation!
# query_prefix = sqlalchemyutils.paginate_query(query_prefix,
# models.Instance, limit,
# [sort_key, 'created_at', 'id'],
# marker=marker,
# sort_dir=sort_dir)
# print("filters: %s" % (filters))
# query_prefix = RiakModelQuery(models.Instance).filter_dict(filters_)
# query_prefix = RiakModelQuery(models.Instance)
return _instances_fill_metadata(context, query_prefix.all(), manual_joins)
def tag_filter(context, query, model, model_metadata,
model_uuid, filters):
"""Applies tag filtering to a query.
Returns the updated query. This method alters filters to remove
keys that are tags. This filters on resources by tags - this
method assumes that the caller will take care of access control
:param query: query to apply filters to
:param model: model object the query applies to
:param filters: dictionary of filters
"""
if filters.get('filter') is None:
return query
or_query = None
def _to_list(val):
if isinstance(val, dict):
val = val.values()
if not isinstance(val, (tuple, list, set)):
val = (val,)
return val
for filter_block in filters['filter']:
if not isinstance(filter_block, dict):
continue
filter_name = filter_block.get('name')
if filter_name is None:
continue
tag_name = filter_name[4:]
tag_val = _to_list(filter_block.get('value'))
if filter_name.startswith('tag-'):
if tag_name not in ['key', 'value']:
msg = _("Invalid field name: %s") % tag_name
raise exception.InvalidParameterValue(err=msg)
subq = getattr(model_metadata, tag_name).in_(tag_val)
or_query = subq if or_query is None else or_(or_query, subq)
elif filter_name.startswith('tag:'):
subq = model_query(context, model_uuid,
session=query.session, base_model=model_metadata).\
filter_by(key=tag_name).\
filter(model_metadata.value.in_(tag_val))
query = query.filter(model.uuid.in_(subq))
if or_query is not None:
subq = model_query(context, model_uuid,
session=query.session, base_model=model_metadata).\
filter(or_query)
query = query.filter(model.uuid.in_(subq))
return query
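# Example (illustrative; 'ctxt' and 'query' are placeholders): restricting
# an instance query to instances tagged with a given metadata key/value:
#
#     filters = {'filter': [{'name': 'tag:role', 'value': 'web'}]}
#     query = tag_filter(ctxt, query, models.Instance,
#                        models.InstanceMetadata,
#                        models.InstanceMetadata.instance_uuid, filters)
#     # only instances whose metadata contains role=web remain.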
def regex_filter(query, model, filters):
"""Applies regular expression filtering to a query.
Returns the updated query.
:param query: query to apply filters to
:param model: model object the query applies to
:param filters: dictionary of filters with regex values
"""
regexp_op_map = {
'postgresql': '~',
'mysql': 'REGEXP',
'sqlite': 'REGEXP'
}
db_string = CONF.database.connection.split(':')[0].split('+')[0]
db_regexp_op = regexp_op_map.get(db_string, 'LIKE')
for filter_name in filters.iterkeys():
try:
column_attr = getattr(model, filter_name)
except AttributeError:
continue
if 'property' == type(column_attr).__name__:
continue
if db_regexp_op == 'LIKE':
query = query.filter(column_attr.op(db_regexp_op)(
'%' + str(filters[filter_name]) + '%'))
else:
query = query.filter(column_attr.op(db_regexp_op)(
str(filters[filter_name])))
return query
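# Example (illustrative): with a MySQL connection string, the filter
# {'display_name': '^web-'} is applied as
# "instances.display_name REGEXP '^web-'"; on an unrecognized backend the
# operator falls back to LIKE with surrounding wildcards:
#
#     query = regex_filter(query, models.Instance,
#                          {'display_name': '^web-'})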
def process_sort_params(sort_keys, sort_dirs,
default_keys=['created_at', 'id'],
default_dir='asc'):
"""Process the sort parameters to include default keys.
Creates a list of sort keys and a list of sort directions. Adds the default
keys to the end of the list if they are not already included.
When adding the default keys to the sort keys list, the associated
direction is:
1) The first element in the 'sort_dirs' list (if specified), else
2) 'default_dir' value (Note that 'asc' is the default value since this is
the default in sqlalchemy.utils.paginate_query)
:param sort_keys: List of sort keys to include in the processed list
:param sort_dirs: List of sort directions to include in the processed list
:param default_keys: List of sort keys that need to be included in the
processed list, they are added at the end of the list
if not already specified.
:param default_dir: Sort direction associated with each of the default
keys that are not supplied, used when they are added
to the processed list
:returns: list of sort keys, list of sort directions
:raise exception.InvalidInput: If more sort directions than sort keys
are specified
"""
# Determine direction to use for when adding default keys
if sort_dirs and len(sort_dirs) != 0:
default_dir_value = sort_dirs[0]
else:
default_dir_value = default_dir
# Create list of keys (do not modify the input list)
if sort_keys:
result_keys = list(sort_keys)
else:
result_keys = []
# If a list of directions is not provided, use the default sort direction
# for all provided keys
if sort_dirs:
result_dirs = list(sort_dirs)
else:
result_dirs = [default_dir_value for _sort_key in result_keys]
# Ensure that the key and direction length match
while len(result_dirs) < len(result_keys):
result_dirs.append(default_dir_value)
    # Unless more directions are specified, which is an error
if len(result_dirs) > len(result_keys):
msg = _("Sort direction size exceeds sort key size")
raise exception.InvalidInput(reason=msg)
# Ensure defaults are included
for key in default_keys:
if key not in result_keys:
result_keys.append(key)
result_dirs.append(default_dir_value)
return result_keys, result_dirs
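# Example (illustrative): the default keys are appended with the first
# supplied direction:
#
#     keys, dirs = process_sort_params(['display_name'], ['desc'])
#     # keys == ['display_name', 'created_at', 'id']
#     # dirs == ['desc', 'desc', 'desc']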
@require_context
def instance_get_active_by_window_joined(context, begin, end=None,
project_id=None, host=None,
use_slave=False):
"""Return instances and joins that were active during window."""
session = get_session(use_slave=use_slave)
query = session.query(models.Instance)
query = query.options(joinedload('info_cache')).\
options(joinedload('security_groups')).\
filter(or_(models.Instance.terminated_at == null(),
models.Instance.terminated_at > begin))
if end:
query = query.filter(models.Instance.launched_at < end)
if project_id:
query = query.filter_by(project_id=project_id)
if host:
query = query.filter_by(host=host)
return _instances_fill_metadata(context, query.all())
def _instance_get_all_query(context, project_only=False,
joins=None, use_slave=False):
if joins is None:
joins = ['info_cache', 'security_groups']
query = model_query(context,
models.Instance,
project_only=project_only,
use_slave=use_slave)
for join in joins:
query = query.options(joinedload(join))
return query
@require_admin_context
def instance_get_all_by_host(context, host,
columns_to_join=None,
use_slave=False):
instances = _instance_get_all_query(context,
use_slave=use_slave).filter_by(host=host).all()
print(">> %s" % (instances))
return _instances_fill_metadata(context, instances, ["system_metadata"])
def _instance_get_all_uuids_by_host(context, host, session=None):
"""Return a list of the instance uuids on a given host.
Returns a list of UUIDs, not Instance model objects. This internal version
allows you to specify a session object as a kwarg.
"""
uuids = []
for tuple in model_query(context, models.Instance.uuid, read_deleted="no",
base_model=models.Instance, session=session).\
filter_by(host=host).\
all():
uuids.append(tuple[0])
return uuids
@require_admin_context
def instance_get_all_by_host_and_node(context, host, node):
return _instances_fill_metadata(context,
_instance_get_all_query(context, joins=[]).filter_by(host=host).
filter_by(node=node).all(), manual_joins=[])
@require_admin_context
def instance_get_all_by_host_and_not_type(context, host, type_id=None):
return _instances_fill_metadata(context,
_instance_get_all_query(context).filter_by(host=host).
filter(models.Instance.instance_type_id != type_id).all())
# NOTE(jkoelker) This is only being left here for compat with floating
#                ips. Currently the network_api doesn't return floaters
#                in network_info. Once it starts returning the model, this
#                function and its call in compute/manager.py on 1829 can
#                go away
@require_context
def instance_get_floating_address(context, instance_id):
instance = instance_get(context, instance_id)
fixed_ips = fixed_ip_get_by_instance(context, instance['uuid'])
if not fixed_ips:
return None
# NOTE(tr3buchet): this only gets the first fixed_ip
# won't find floating ips associated with other fixed_ips
floating_ips = floating_ip_get_by_fixed_address(context,
fixed_ips[0]['address'])
if not floating_ips:
return None
# NOTE(vish): this just returns the first floating ip
return floating_ips[0]['address']
@require_context
def instance_floating_address_get_all(context, instance_uuid):
if not uuidutils.is_uuid_like(instance_uuid):
raise exception.InvalidUUID(uuid=instance_uuid)
floating_ips = model_query(context,
models.FloatingIp.address,
base_model=models.FloatingIp).\
join(models.FloatingIp.fixed_ip).\
filter_by(instance_uuid=instance_uuid)
return [floating_ip.address for floating_ip in floating_ips]
# NOTE(hanlind): This method can be removed as conductor RPC API moves to v2.0.
@require_admin_context
def instance_get_all_hung_in_rebooting(context, reboot_window):
reboot_window = (timeutils.utcnow() -
datetime.timedelta(seconds=reboot_window))
# NOTE(danms): this is only used in the _poll_rebooting_instances()
# call in compute/manager, so we can avoid the metadata lookups
# explicitly
return _instances_fill_metadata(context,
model_query(context, models.Instance).
filter(models.Instance.updated_at <= reboot_window).
filter_by(task_state=task_states.REBOOTING).all(),
manual_joins=[])
@require_context
def instance_update(context, instance_uuid, values):
instance_ref = _instance_update(context, instance_uuid, values)[1]
return instance_ref
@require_context
def instance_update_and_get_original(context, instance_uuid, values,
columns_to_join=None):
"""Set the given properties on an instance and update it. Return
a shallow copy of the original instance reference, as well as the
updated one.
:param context: = request context object
:param instance_uuid: = instance uuid
:param values: = dict containing column values
If "expected_task_state" exists in values, the update can only happen
when the task state before update matches expected_task_state. Otherwise
a UnexpectedTaskStateError is thrown.
:returns: a tuple of the form (old_instance_ref, new_instance_ref)
Raises NotFound if instance does not exist.
"""
return _instance_update(context, instance_uuid, values,
copy_old_instance=True,
columns_to_join=columns_to_join)
# NOTE(danms): This updates the instance's metadata list in-place and in
# the database to avoid stale data and refresh issues. It assumes the
# delete=True behavior of instance_metadata_update(...)
def _instance_metadata_update_in_place(context, instance, metadata_type, model,
metadata, session):
metadata = dict(metadata)
to_delete = []
for keyvalue in instance[metadata_type]:
key = keyvalue['key']
if key in metadata:
keyvalue['value'] = metadata.pop(key)
elif key not in metadata:
to_delete.append(keyvalue)
for condemned in to_delete:
condemned.soft_delete(session=session)
for key, value in metadata.iteritems():
newitem = model()
newitem.update({'key': key, 'value': value,
'instance_uuid': instance['uuid']})
session.add(newitem)
instance[metadata_type].append(newitem)
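# Example (illustrative; 'ctxt', 'instance_ref' and 'session' are
# placeholders): given an instance whose metadata rows are
# {'a': '1', 'b': '2'}, an in-place update with {'a': '3', 'c': '4'}
# updates the 'a' row, soft-deletes the 'b' row and adds a new 'c' row,
# mirroring the delete=True semantics of instance_metadata_update():
#
#     _instance_metadata_update_in_place(ctxt, instance_ref, 'metadata',
#                                        models.InstanceMetadata,
#                                        {'a': '3', 'c': '4'}, session)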
@_retry_on_deadlock
def _instance_update(context, instance_uuid, values, copy_old_instance=False,
columns_to_join=None):
session = get_session()
if not uuidutils.is_uuid_like(instance_uuid):
raise exception.InvalidUUID(instance_uuid)
with session.begin():
instance_ref = _instance_get_by_uuid(context, instance_uuid,
session=session,
columns_to_join=columns_to_join)
if "expected_task_state" in values:
# it is not a db column so always pop out
expected = values.pop("expected_task_state")
if not isinstance(expected, (tuple, list, set)):
expected = (expected,)
actual_state = instance_ref["task_state"]
if actual_state not in expected:
if actual_state == task_states.DELETING:
raise exception.UnexpectedDeletingTaskStateError(
actual=actual_state, expected=expected)
else:
raise exception.UnexpectedTaskStateError(
actual=actual_state, expected=expected)
if "expected_vm_state" in values:
expected = values.pop("expected_vm_state")
if not isinstance(expected, (tuple, list, set)):
expected = (expected,)
actual_state = instance_ref["vm_state"]
if actual_state not in expected:
raise exception.UnexpectedVMStateError(actual=actual_state,
expected=expected)
instance_hostname = instance_ref['hostname'] or ''
if ("hostname" in values and
values["hostname"].lower() != instance_hostname.lower()):
_validate_unique_server_name(context,
session,
values['hostname'])
if copy_old_instance:
old_instance_ref = copy.copy(instance_ref)
else:
old_instance_ref = None
metadata = values.get('metadata')
if metadata is not None:
_instance_metadata_update_in_place(context, instance_ref,
'metadata',
models.InstanceMetadata,
values.pop('metadata'),
session)
system_metadata = values.get('system_metadata')
if system_metadata is not None:
_instance_metadata_update_in_place(context, instance_ref,
'system_metadata',
models.InstanceSystemMetadata,
values.pop('system_metadata'),
session)
_handle_objects_related_type_conversions(values)
instance_ref.update(values, do_save=False)
session.add(instance_ref)
return (old_instance_ref, instance_ref)
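# Example (illustrative; 'ctxt' and 'instance_uuid' are placeholders): a
# compare-and-swap style update that only succeeds when the current task
# state matches the expected one:
#
#     _instance_update(ctxt, instance_uuid,
#                      {'task_state': None,
#                       'expected_task_state': task_states.REBOOTING})
#     # raises UnexpectedTaskStateError if the instance is not in
#     # task_state REBOOTING at the time of the update.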
def instance_add_security_group(context, instance_uuid, security_group_id):
"""Associate the given security group with the given instance."""
sec_group_ref = models.SecurityGroupInstanceAssociation()
sec_group_ref.update({'instance_uuid': instance_uuid,
'security_group_id': security_group_id})
sec_group_ref.save()
@require_context
def instance_remove_security_group(context, instance_uuid, security_group_id):
"""Disassociate the given security group from the given instance."""
model_query(context, models.SecurityGroupInstanceAssociation).\
filter_by(instance_uuid=instance_uuid).\
filter_by(security_group_id=security_group_id).\
soft_delete()
###################
@require_context
def instance_info_cache_get(context, instance_uuid):
"""Gets an instance info cache from the table.
:param instance_uuid: = uuid of the info cache's instance
:param session: = optional session object
"""
return model_query(context, models.InstanceInfoCache).\
filter_by(instance_uuid=instance_uuid).\
first()
@require_context
def instance_info_cache_update(context, instance_uuid, values):
"""Update an instance info cache record in the table.
:param instance_uuid: = uuid of info cache's instance
:param values: = dict containing column values to update
:param session: = optional session object
"""
session = get_session()
with session.begin():
info_cache = model_query(context, models.InstanceInfoCache,
session=session).\
filter_by(instance_uuid=instance_uuid).\
first()
if info_cache and info_cache['deleted']:
raise exception.InstanceInfoCacheNotFound(
instance_uuid=instance_uuid)
elif not info_cache:
# NOTE(tr3buchet): just in case someone blows away an instance's
# cache entry, re-create it.
info_cache = models.InstanceInfoCache()
values['instance_uuid'] = instance_uuid
try:
info_cache.update(values)
except db_exc.DBDuplicateEntry:
# NOTE(sirp): Possible race if two greenthreads attempt to
# recreate the instance cache entry at the same time. First one
# wins.
pass
return info_cache
@require_context
def instance_info_cache_delete(context, instance_uuid):
"""Deletes an existing instance_info_cache record
:param instance_uuid: = uuid of the instance tied to the cache record
:param session: = optional session object
"""
model_query(context, models.InstanceInfoCache).\
filter_by(instance_uuid=instance_uuid).\
soft_delete()
###################
def _instance_extra_create(context, values):
inst_extra_ref = models.InstanceExtra()
inst_extra_ref.update(values)
inst_extra_ref.save()
return inst_extra_ref
def instance_extra_update_by_uuid(context, instance_uuid, values):
return model_query(context, models.InstanceExtra).\
filter_by(instance_uuid=instance_uuid).\
update(values)
def _instance_extra_get_by_instance_uuid_query(context, instance_uuid):
return (model_query(context, models.InstanceExtra)
.filter_by(instance_uuid=instance_uuid))
def instance_extra_get_by_instance_uuid(context, instance_uuid,
columns=None):
query = _instance_extra_get_by_instance_uuid_query(
context, instance_uuid)
if columns is None:
columns = ['numa_topology', 'pci_requests']
for column in columns:
query = query.options(undefer(column))
instance_extra = query.first()
return instance_extra
###################
@require_context
def key_pair_create(context, values):
try:
key_pair_ref = models.KeyPair()
key_pair_ref.update(values)
key_pair_ref.save()
return key_pair_ref
except db_exc.DBDuplicateEntry:
raise exception.KeyPairExists(key_name=values['name'])
@require_context
def key_pair_destroy(context, user_id, name):
nova.context.authorize_user_context(context, user_id)
result = model_query(context, models.KeyPair).\
filter_by(user_id=user_id).\
filter_by(name=name).\
soft_delete()
if not result:
raise exception.KeypairNotFound(user_id=user_id, name=name)
@require_context
def key_pair_get(context, user_id, name):
nova.context.authorize_user_context(context, user_id)
result = model_query(context, models.KeyPair).\
filter_by(user_id=user_id).\
filter_by(name=name).\
first()
if not result:
raise exception.KeypairNotFound(user_id=user_id, name=name)
return result
@require_context
def key_pair_get_all_by_user(context, user_id):
nova.context.authorize_user_context(context, user_id)
return model_query(context, models.KeyPair, read_deleted="no").\
filter_by(user_id=user_id).\
all()
def key_pair_count_by_user(context, user_id):
nova.context.authorize_user_context(context, user_id)
return model_query(context, models.KeyPair, read_deleted="no").\
filter_by(user_id=user_id).\
count()
###################
@require_admin_context
def network_associate(context, project_id, network_id=None, force=False):
"""Associate a project with a network.
called by project_get_networks under certain conditions
and network manager add_network_to_project()
only associate if the project doesn't already have a network
or if force is True
force solves race condition where a fresh project has multiple instance
builds simultaneously picked up by multiple network hosts which attempt
to associate the project with multiple networks
force should only be used as a direct consequence of user request
all automated requests should not use force
"""
session = get_session()
with session.begin():
def network_query(project_filter, id=None):
filter_kwargs = {'project_id': project_filter}
if id is not None:
filter_kwargs['id'] = id
return model_query(context, models.Network, session=session,
read_deleted="no").\
filter_by(**filter_kwargs).\
with_lockmode('update').\
first()
if not force:
# find out if project has a network
network_ref = network_query(project_id)
if force or not network_ref:
# in force mode or project doesn't have a network so associate
# with a new network
# get new network
network_ref = network_query(None, network_id)
if not network_ref:
raise exception.NoMoreNetworks()
# associate with network
# NOTE(vish): if with_lockmode isn't supported, as in sqlite,
# then this has concurrency issues
network_ref['project_id'] = project_id
session.add(network_ref)
return network_ref
def _network_ips_query(context, network_id):
return model_query(context, models.FixedIp, read_deleted="no").\
filter_by(network_id=network_id)
@require_admin_context
def network_count_reserved_ips(context, network_id):
return _network_ips_query(context, network_id).\
filter_by(reserved=True).\
count()
@require_admin_context
def network_create_safe(context, values):
network_ref = models.Network()
network_ref['uuid'] = str(uuid.uuid4())
network_ref.update(values)
print("updating with values (a): %s" % (values))
print("updating with values (b): %s" % (network_ref.injected))
try:
network_ref.save()
return network_ref
except db_exc.DBDuplicateEntry:
raise exception.DuplicateVlan(vlan=values['vlan'])
@require_admin_context
def network_delete_safe(context, network_id):
session = get_session()
with session.begin():
result = model_query(context, models.FixedIp, session=session,
read_deleted="no").\
filter_by(network_id=network_id).\
filter_by(allocated=True).\
count()
if result != 0:
raise exception.NetworkInUse(network_id=network_id)
network_ref = _network_get(context, network_id=network_id,
session=session)
model_query(context, models.FixedIp, session=session,
read_deleted="no").\
filter_by(network_id=network_id).\
soft_delete()
session.delete(network_ref)
@require_admin_context
def network_disassociate(context, network_id, disassociate_host,
disassociate_project):
net_update = {}
if disassociate_project:
net_update['project_id'] = None
if disassociate_host:
net_update['host'] = None
network_update(context, network_id, net_update)
def _network_get(context, network_id, session=None, project_only='allow_none'):
result = model_query(context, models.Network, session=session,
project_only=project_only).\
filter_by(id=network_id).\
first()
if not result:
raise exception.NetworkNotFound(network_id=network_id)
return result
@require_context
def network_get(context, network_id, project_only='allow_none'):
return _network_get(context, network_id, project_only=project_only)
@require_context
def network_get_all(context, project_only):
result = model_query(context, models.Network, read_deleted="no",
project_only=project_only).all()
if not result:
raise exception.NoNetworksFound()
return result
@require_context
def network_get_all_by_uuids(context, network_uuids, project_only):
result = model_query(context, models.Network, read_deleted="no",
project_only=project_only).\
filter(models.Network.uuid.in_(network_uuids)).\
all()
if not result:
raise exception.NoNetworksFound()
# check if the result contains all the networks
# we are looking for
for network_uuid in network_uuids:
for network in result:
if network['uuid'] == network_uuid:
break
else:
if project_only:
raise exception.NetworkNotFoundForProject(
network_uuid=network_uuid, project_id=context.project_id)
raise exception.NetworkNotFound(network_id=network_uuid)
return result
# NOTE(vish): pylint complains because of the long method name, but
# it fits with the names of the rest of the methods
# pylint: disable=C0103
@require_admin_context
def network_get_associated_fixed_ips(context, network_id, host=None):
# FIXME(sirp): since this returns fixed_ips, this would be better named
# fixed_ip_get_all_by_network.
# NOTE(vish): The ugly joins here are to solve a performance issue and
# should be removed once we can add and remove leases
# without regenerating the whole list
vif_and = and_(models.VirtualInterface.id ==
models.FixedIp.virtual_interface_id,
models.VirtualInterface.deleted == 0)
inst_and = and_(models.Instance.uuid == models.FixedIp.instance_uuid,
models.Instance.deleted == 0)
session = get_session()
query = session.query(models.FixedIp.address,
models.FixedIp.instance_uuid,
models.FixedIp.network_id,
models.FixedIp.virtual_interface_id,
models.VirtualInterface.address,
models.Instance.hostname,
models.Instance.updated_at,
models.Instance.created_at,
models.FixedIp.allocated,
models.FixedIp.leased)
query = query.join((models.VirtualInterface, vif_and))
query = query.join((models.Instance, inst_and))
query = query.filter(models.FixedIp.deleted == 0)
query = query.filter(models.FixedIp.network_id == network_id)
query = query.filter(models.FixedIp.instance_uuid != null())
query = query.filter(models.FixedIp.virtual_interface_id != null())
if host:
query = query.filter(models.Instance.host == host)
result = query.all()
data = []
for datum in result:
cleaned = {}
cleaned['address'] = datum[0]
cleaned['instance_uuid'] = datum[1]
cleaned['network_id'] = datum[2]
cleaned['vif_id'] = datum[3]
cleaned['vif_address'] = datum[4]
cleaned['instance_hostname'] = datum[5]
cleaned['instance_updated'] = datum[6]
cleaned['instance_created'] = datum[7]
cleaned['allocated'] = datum[8]
cleaned['leased'] = datum[9]
# The interface-ordering subquery that used to supply an 11th column is
# not part of this query, so datum[10] would raise IndexError; treat the
# default_route flag as unavailable unless that column is re-added.
cleaned['default_route'] = len(datum) > 10 and datum[10] is not None
data.append(cleaned)
return data
def network_in_use_on_host(context, network_id, host):
fixed_ips = network_get_associated_fixed_ips(context, network_id, host)
return len(fixed_ips) > 0
def _network_get_query(context, session=None):
return model_query(context, models.Network, session=session,
read_deleted="no")
@require_admin_context
def network_get_by_uuid(context, uuid):
result = _network_get_query(context).filter_by(uuid=uuid).first()
if not result:
raise exception.NetworkNotFoundForUUID(uuid=uuid)
return result
@require_admin_context
def network_get_by_cidr(context, cidr):
result = _network_get_query(context).\
filter(or_(models.Network.cidr == cidr,
models.Network.cidr_v6 == cidr)).\
first()
if not result:
raise exception.NetworkNotFoundForCidr(cidr=cidr)
return result
@require_admin_context
def network_get_all_by_host(context, host):
session = get_session()
fixed_host_filter = or_(models.FixedIp.host == host,
and_(models.FixedIp.instance_uuid != null(),
models.Instance.host == host))
fixed_ip_query = model_query(context, models.FixedIp.network_id,
base_model=models.FixedIp,
session=session).\
outerjoin((models.Instance,
models.Instance.uuid ==
models.FixedIp.instance_uuid)).\
filter(fixed_host_filter)
# NOTE(vish): return networks that have host set
# or that have a fixed ip with host set
# or that have an instance with host set
host_filter = or_(models.Network.host == host,
models.Network.id.in_(fixed_ip_query.subquery()))
return _network_get_query(context, session=session).\
filter(host_filter).\
all()
@require_admin_context
def network_set_host(context, network_id, host_id):
session = get_session()
with session.begin():
network_ref = _network_get_query(context, session=session).\
filter_by(id=network_id).\
with_lockmode('update').\
first()
if not network_ref:
raise exception.NetworkNotFound(network_id=network_id)
# NOTE(vish): if with_lockmode isn't supported, as in sqlite,
# then this has concurrency issues
if not network_ref['host']:
network_ref['host'] = host_id
session.add(network_ref)
return network_ref['host']
@require_context
def network_update(context, network_id, values):
session = get_session()
with session.begin():
network_ref = _network_get(context, network_id, session=session)
network_ref.update(values)
try:
network_ref.save(session=session)
except db_exc.DBDuplicateEntry:
raise exception.DuplicateVlan(vlan=values['vlan'])
return network_ref
###################
@require_context
def quota_get(context, project_id, resource, user_id=None):
model = models.ProjectUserQuota if user_id else models.Quota
query = model_query(context, model).\
filter_by(project_id=project_id).\
filter_by(resource=resource)
if user_id:
query = query.filter_by(user_id=user_id)
result = query.first()
if not result:
if user_id:
raise exception.ProjectUserQuotaNotFound(project_id=project_id,
user_id=user_id)
else:
raise exception.ProjectQuotaNotFound(project_id=project_id)
return result
@require_context
def quota_get_all_by_project_and_user(context, project_id, user_id):
nova.context.authorize_project_context(context, project_id)
user_quotas = model_query(context, models.ProjectUserQuota.resource,
models.ProjectUserQuota.hard_limit,
base_model=models.ProjectUserQuota).\
filter_by(project_id=project_id).\
filter_by(user_id=user_id).\
all()
result = {'project_id': project_id, 'user_id': user_id}
for user_quota in user_quotas:
result[user_quota.resource] = user_quota.hard_limit
return result
@require_context
def quota_get_all_by_project(context, project_id):
nova.context.authorize_project_context(context, project_id)
rows = model_query(context, models.Quota, read_deleted="no").\
filter_by(project_id=project_id).\
all()
result = {'project_id': project_id}
for row in rows:
result[row.resource] = row.hard_limit
return result
@require_context
def quota_get_all(context, project_id):
nova.context.authorize_project_context(context, project_id)
result = model_query(context, models.ProjectUserQuota).\
filter_by(project_id=project_id).\
all()
return result
@require_admin_context
def quota_create(context, project_id, resource, limit, user_id=None):
per_user = user_id and resource not in PER_PROJECT_QUOTAS
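# A per-user quota row is only created for resources that are not
# project-wide, i.e. not listed in PER_PROJECT_QUOTAS.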
quota_ref = models.ProjectUserQuota() if per_user else models.Quota()
if per_user:
quota_ref.user_id = user_id
quota_ref.project_id = project_id
quota_ref.resource = resource
quota_ref.hard_limit = limit
try:
quota_ref.save()
except db_exc.DBDuplicateEntry:
raise exception.QuotaExists(project_id=project_id, resource=resource)
return quota_ref
@require_admin_context
def quota_update(context, project_id, resource, limit, user_id=None):
per_user = user_id and resource not in PER_PROJECT_QUOTAS
model = models.ProjectUserQuota if per_user else models.Quota
query = model_query(context, model).\
filter_by(project_id=project_id).\
filter_by(resource=resource)
if per_user:
query = query.filter_by(user_id=user_id)
result = query.update({'hard_limit': limit})
if not result:
if per_user:
raise exception.ProjectUserQuotaNotFound(project_id=project_id,
user_id=user_id)
else:
raise exception.ProjectQuotaNotFound(project_id=project_id)
###################
@require_context
def quota_class_get(context, class_name, resource):
result = model_query(context, models.QuotaClass, read_deleted="no").\
filter_by(class_name=class_name).\
filter_by(resource=resource).\
first()
if not result:
raise exception.QuotaClassNotFound(class_name=class_name)
return result
def quota_class_get_default(context):
rows = model_query(context, models.QuotaClass, read_deleted="no").\
filter_by(class_name=_DEFAULT_QUOTA_NAME).\
all()
result = {'class_name': _DEFAULT_QUOTA_NAME}
for row in rows:
result[row.resource] = row.hard_limit
return result
@require_context
def quota_class_get_all_by_name(context, class_name):
nova.context.authorize_quota_class_context(context, class_name)
rows = model_query(context, models.QuotaClass, read_deleted="no").\
filter_by(class_name=class_name).\
all()
result = {'class_name': class_name}
for row in rows:
result[row.resource] = row.hard_limit
return result
@require_admin_context
def quota_class_create(context, class_name, resource, limit):
quota_class_ref = models.QuotaClass()
quota_class_ref.class_name = class_name
quota_class_ref.resource = resource
quota_class_ref.hard_limit = limit
quota_class_ref.save()
return quota_class_ref
@require_admin_context
def quota_class_update(context, class_name, resource, limit):
result = model_query(context, models.QuotaClass, read_deleted="no").\
filter_by(class_name=class_name).\
filter_by(resource=resource).\
update({'hard_limit': limit})
if not result:
raise exception.QuotaClassNotFound(class_name=class_name)
###################
@require_context
def quota_usage_get(context, project_id, resource, user_id=None):
query = model_query(context, models.QuotaUsage, read_deleted="no").\
filter_by(project_id=project_id).\
filter_by(resource=resource)
if user_id:
if resource not in PER_PROJECT_QUOTAS:
result = query.filter_by(user_id=user_id).first()
else:
result = query.filter_by(user_id=None).first()
else:
result = query.first()
if not result:
raise exception.QuotaUsageNotFound(project_id=project_id)
return result
def _quota_usage_get_all(context, project_id, user_id=None):
nova.context.authorize_project_context(context, project_id)
query = model_query(context, models.QuotaUsage, read_deleted="no").\
filter_by(project_id=project_id)
result = {'project_id': project_id}
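# Illustrative shape of the returned dict (values are hypothetical):
#   {'project_id': 'p1', 'user_id': 'u1',
#    'instances': {'in_use': 2, 'reserved': 1},
#    'cores': {'in_use': 4, 'reserved': 2}}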
if user_id:
query = query.filter(or_(models.QuotaUsage.user_id == user_id,
models.QuotaUsage.user_id == null()))
result['user_id'] = user_id
rows = query.all()
for row in rows:
if row.resource in result:
result[row.resource]['in_use'] += row.in_use
result[row.resource]['reserved'] += row.reserved
else:
result[row.resource] = dict(in_use=row.in_use,
reserved=row.reserved)
return result
@require_context
def quota_usage_get_all_by_project_and_user(context, project_id, user_id):
return _quota_usage_get_all(context, project_id, user_id=user_id)
@require_context
def quota_usage_get_all_by_project(context, project_id):
return _quota_usage_get_all(context, project_id)
def _quota_usage_create(project_id, user_id, resource, in_use,
reserved, until_refresh, session=None):
quota_usage_ref = models.QuotaUsage()
quota_usage_ref.project_id = project_id
quota_usage_ref.user_id = user_id
quota_usage_ref.resource = resource
quota_usage_ref.in_use = in_use
quota_usage_ref.reserved = reserved
quota_usage_ref.until_refresh = until_refresh
# updated_at is needed for judgement of max_age
quota_usage_ref.updated_at = timeutils.utcnow()
quota_usage_ref.save(session=session)
return quota_usage_ref
@require_admin_context
def quota_usage_update(context, project_id, user_id, resource, **kwargs):
updates = {}
for key in ['in_use', 'reserved', 'until_refresh']:
if key in kwargs:
updates[key] = kwargs[key]
result = model_query(context, models.QuotaUsage, read_deleted="no").\
filter_by(project_id=project_id).\
filter_by(resource=resource).\
filter(or_(models.QuotaUsage.user_id == user_id,
models.QuotaUsage.user_id == null())).\
update(updates)
if not result:
raise exception.QuotaUsageNotFound(project_id=project_id)
###################
def _reservation_create(uuid, usage, project_id, user_id, resource,
delta, expire, session=None):
reservation_ref = models.Reservation()
reservation_ref.uuid = uuid
reservation_ref.usage_id = usage['id']
reservation_ref.project_id = project_id
reservation_ref.user_id = user_id
reservation_ref.resource = resource
reservation_ref.delta = delta
reservation_ref.expire = expire
reservation_ref.save(session=session)
return reservation_ref
###################
# NOTE(johannes): The quota code uses SQL locking to ensure races don't
# cause under or over counting of resources. To avoid deadlocks, this
# code always acquires the lock on quota_usages before acquiring the lock
# on reservations.
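# In practice this means _get_project_user_quota_usages() (which locks
# quota_usages rows via with_lockmode('update')) is always called before
# _quota_reservations_query() (which locks reservations rows), both in
# quota_reserve() and in reservation_commit()/reservation_rollback().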
def _get_project_user_quota_usages(context, session, project_id,
user_id):
rows = model_query(context, models.QuotaUsage,
read_deleted="no",
session=session).\
filter_by(project_id=project_id).\
with_lockmode('update').\
all()
proj_result = dict()
user_result = dict()
# Get the total count of in_use,reserved
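# proj_result aggregates usage across all users of the project, while
# user_result keeps the per-user QuotaUsage rows (plus project-wide rows
# with user_id None) keyed by resource name.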
for row in rows:
proj_result.setdefault(row.resource,
dict(in_use=0, reserved=0, total=0))
proj_result[row.resource]['in_use'] += row.in_use
proj_result[row.resource]['reserved'] += row.reserved
proj_result[row.resource]['total'] += (row.in_use + row.reserved)
if row.user_id is None or row.user_id == user_id:
user_result[row.resource] = row
return proj_result, user_result
def _create_quota_usage_if_missing(user_usages, resource, until_refresh,
project_id, user_id, session):
"""Creates a QuotaUsage record and adds to user_usages if not present.
:param user_usages: dict of resource keys to QuotaUsage records. This is
updated if resource is not in user_usages yet or
until_refresh is not None.
:param resource: The resource being checked for quota usage.
:param until_refresh: Count of reservations until usage is refreshed,
int or None
:param project_id: The project being checked for quota usage.
:param user_id: The user being checked for quota usage.
:param session: DB session holding a transaction lock.
:return: True if a new QuotaUsage record was created and added
to user_usages, False otherwise.
"""
new_usage = None
if resource not in user_usages:
user_id_to_use = user_id
if resource in PER_PROJECT_QUOTAS:
user_id_to_use = None
new_usage = _quota_usage_create(project_id, user_id_to_use, resource,
0, 0, until_refresh or None,
session=session)
user_usages[resource] = new_usage
return new_usage is not None
def _is_quota_refresh_needed(quota_usage, max_age):
"""Determines if a quota usage refresh is needed.
:param quota_usage: A QuotaUsage object for a given resource.
:param max_age: Number of seconds between subsequent usage refreshes.
:return: True if a refresh is needed, False otherwise.
"""
refresh = False
if quota_usage.in_use < 0:
# Negative in_use count indicates a desync, so try to
# heal from that...
refresh = True
elif quota_usage.until_refresh is not None:
quota_usage.until_refresh -= 1
if quota_usage.until_refresh <= 0:
refresh = True
elif max_age and (timeutils.utcnow() -
quota_usage.updated_at).seconds >= max_age:
refresh = True
return refresh
def _refresh_quota_usages(quota_usage, until_refresh, in_use):
"""Refreshes quota usage for the given resource.
:param quota_usage: A QuotaUsage object for a given resource.
:param until_refresh: Count of reservations until usage is refreshed,
int or None
:param in_use: Actual quota usage for the resource.
"""
if quota_usage.in_use != in_use:
LOG.info(_LI('quota_usages out of sync, updating. '
'project_id: %(project_id)s, '
'user_id: %(user_id)s, '
'resource: %(res)s, '
'tracked usage: %(tracked_use)s, '
'actual usage: %(in_use)s'),
{'project_id': quota_usage.project_id,
'user_id': quota_usage.user_id,
'res': quota_usage.resource,
'tracked_use': quota_usage.in_use,
'in_use': in_use})
# Update the usage
quota_usage.in_use = in_use
quota_usage.until_refresh = until_refresh or None
def _raise_overquota_exception(project_quotas, user_quotas, deltas, overs,
project_usages, user_usages):
"""Generates and raises an OverQuota exception.
:param project_quotas: dict of resource quotas (limits) for the project.
:param user_quotas: dict of resource quotas (limits) for the user.
:param deltas: dict of resource keys to positive/negative quota
changes for the resources in a given operation.
:param overs: list of resources that are over-quota for the
operation.
:param project_usages: dict of resource keys to QuotaUsage records for the
project.
:param user_usages: dict of resource keys to QuotaUsage records for the
user.
:raises: nova.exception.OverQuota
"""
if project_quotas == user_quotas:
usages = project_usages
else:
usages = user_usages
usages = dict((k, dict(in_use=v['in_use'], reserved=v['reserved']))
for k, v in usages.items())
headroom = dict((res, user_quotas[res] -
(usages[res]['in_use'] + usages[res]['reserved']))
for res in user_quotas.keys())
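# Illustrative headroom computation (hypothetical numbers): with a quota
# of 10 instances, 7 in use and 1 reserved, headroom['instances'] == 2.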
# If quota_cores is unlimited [-1]:
# - set cores headroom based on instances headroom:
if user_quotas.get('cores') == -1:
if deltas['cores']:
hc = headroom['instances'] * deltas['cores']
headroom['cores'] = hc / deltas['instances']
else:
headroom['cores'] = headroom['instances']
# If quota_ram is unlimited [-1]:
# - set ram headroom based on instances headroom:
if user_quotas.get('ram') == -1:
if deltas['ram']:
hr = headroom['instances'] * deltas['ram']
headroom['ram'] = hr / deltas['instances']
else:
headroom['ram'] = headroom['instances']
raise exception.OverQuota(overs=sorted(overs), quotas=user_quotas,
usages=usages, headroom=headroom)
def _calculate_overquota(project_quotas, user_quotas, deltas,
project_usages, user_usages):
"""Checks if any resources will go over quota based on the request.
:param project_quotas: dict of resource quotas (limits) for the project.
:param user_quotas: dict of resource quotas (limits) for the user.
:param deltas: dict of resource keys to positive/negative quota
changes for the resources in a given operation.
:param project_usages: dict of resource keys to QuotaUsage records for the
project.
:param user_usages: dict of resource keys to QuotaUsage records for the
user.
:return: list of resources that are over-quota for the
operation.
"""
overs = []
for res, delta in deltas.items():
# We can't go over-quota if we're not reserving anything or if
# we have unlimited quotas.
if user_quotas[res] >= 0 and delta >= 0:
# over if the project usage + delta is more than project quota
if project_quotas[res] < delta + project_usages[res]['total']:
overs.append(res)
# over if the user usage + delta is more than user quota
elif user_quotas[res] < delta + user_usages[res]['total']:
overs.append(res)
return overs
@require_context
@_retry_on_deadlock
def quota_reserve(context, resources, project_quotas, user_quotas, deltas,
expire, until_refresh, max_age, project_id=None,
user_id=None):
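# Overall flow: (1) lock and load the current usage records, (2) create
# missing QuotaUsage rows and refresh stale ones via the registered sync
# functions, (3) detect deltas that would drive usage negative or push
# project/user usage over quota, and (4) if nothing is over quota, create
# Reservation rows and bump the reserved counters inside the same
# transaction.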
elevated = context.elevated()
session = get_session()
with session.begin():
if project_id is None:
project_id = context.project_id
if user_id is None:
user_id = context.user_id
# Get the current usages
project_usages, user_usages = _get_project_user_quota_usages(
context, session, project_id, user_id)
# Handle usage refresh
work = set(deltas.keys())
while work:
resource = work.pop()
# Do we need to refresh the usage?
created = _create_quota_usage_if_missing(user_usages, resource,
until_refresh, project_id,
user_id, session)
refresh = created or _is_quota_refresh_needed(
user_usages[resource], max_age)
# OK, refresh the usage
if refresh:
# Grab the sync routine
sync = QUOTA_SYNC_FUNCTIONS[resources[resource].sync]
updates = sync(elevated, project_id, user_id, session)
for res, in_use in updates.items():
# Make sure we have a destination for the usage!
_create_quota_usage_if_missing(user_usages, res,
until_refresh, project_id,
user_id, session)
_refresh_quota_usages(user_usages[res], until_refresh,
in_use)
# Because more than one resource may be refreshed
# by the call to the sync routine, and we don't
# want to double-sync, we make sure all refreshed
# resources are dropped from the work set.
work.discard(res)
# NOTE(Vek): We make the assumption that the sync
# routine actually refreshes the
# resources that it is the sync routine
# for. We don't check, because this is
# a best-effort mechanism.
# Check for deltas that would go negative
unders = [res for res, delta in deltas.items()
if delta < 0 and
delta + user_usages[res].in_use < 0]
# Now, let's check the quotas
# NOTE(Vek): We're only concerned about positive increments.
# If a project has gone over quota, we want them to
# be able to reduce their usage without any
# problems.
for key, value in user_usages.items():
if key not in project_usages:
project_usages[key] = value
overs = _calculate_overquota(project_quotas, user_quotas, deltas,
project_usages, user_usages)
# NOTE(Vek): The quota check needs to be in the transaction,
# but the transaction doesn't fail just because
# we're over quota, so the OverQuota raise is
# outside the transaction. If we did the raise
# here, our usage updates would be discarded, but
# they're not invalidated by being over-quota.
# Create the reservations
if not overs:
reservations = []
for res, delta in deltas.items():
reservation = _reservation_create(
str(uuid.uuid4()),
user_usages[res],
project_id,
user_id,
res, delta, expire,
session=session)
reservations.append(reservation.uuid)
# Also update the reserved quantity
# NOTE(Vek): Again, we are only concerned here about
# positive increments. Here, though, we're
# worried about the following scenario:
#
# 1) User initiates resize down.
# 2) User allocates a new instance.
# 3) Resize down fails or is reverted.
# 4) User is now over quota.
#
# To prevent this, we only update the
# reserved value if the delta is positive.
if delta > 0:
user_usages[res].reserved += delta
# Apply updates to the usages table
for usage_ref in user_usages.values():
session.add(usage_ref)
if unders:
LOG.warning(_("Change will make usage less than 0 for the following "
"resources: %s"), unders)
if overs:
_raise_overquota_exception(project_quotas, user_quotas, deltas, overs,
project_usages, user_usages)
return reservations
def _quota_reservations_query(session, context, reservations):
"""Return the relevant reservations."""
# Get the listed reservations
return model_query(context, models.Reservation,
read_deleted="no",
session=session).\
filter(models.Reservation.uuid.in_(reservations)).\
with_lockmode('update')
@require_context
@_retry_on_deadlock
def reservation_commit(context, reservations, project_id=None, user_id=None):
session = get_session()
with session.begin():
_project_usages, user_usages = _get_project_user_quota_usages(
context, session, project_id, user_id)
reservation_query = _quota_reservations_query(session, context,
reservations)
for reservation in reservation_query.all():
usage = user_usages[reservation.resource]
if reservation.delta >= 0:
usage.reserved -= reservation.delta
usage.in_use += reservation.delta
reservation_query.soft_delete(synchronize_session=False)
@require_context
@_retry_on_deadlock
def reservation_rollback(context, reservations, project_id=None, user_id=None):
session = get_session()
with session.begin():
_project_usages, user_usages = _get_project_user_quota_usages(
context, session, project_id, user_id)
reservation_query = _quota_reservations_query(session, context,
reservations)
for reservation in reservation_query.all():
usage = user_usages[reservation.resource]
if reservation.delta >= 0:
usage.reserved -= reservation.delta
reservation_query.soft_delete(synchronize_session=False)
@require_admin_context
def quota_destroy_all_by_project_and_user(context, project_id, user_id):
session = get_session()
with session.begin():
model_query(context, models.ProjectUserQuota, session=session,
read_deleted="no").\
filter_by(project_id=project_id).\
filter_by(user_id=user_id).\
soft_delete(synchronize_session=False)
model_query(context, models.QuotaUsage,
session=session, read_deleted="no").\
filter_by(project_id=project_id).\
filter_by(user_id=user_id).\
soft_delete(synchronize_session=False)
model_query(context, models.Reservation,
session=session, read_deleted="no").\
filter_by(project_id=project_id).\
filter_by(user_id=user_id).\
soft_delete(synchronize_session=False)
@require_admin_context
def quota_destroy_all_by_project(context, project_id):
session = get_session()
with session.begin():
model_query(context, models.Quota, session=session,
read_deleted="no").\
filter_by(project_id=project_id).\
soft_delete(synchronize_session=False)
model_query(context, models.ProjectUserQuota, session=session,
read_deleted="no").\
filter_by(project_id=project_id).\
soft_delete(synchronize_session=False)
model_query(context, models.QuotaUsage,
session=session, read_deleted="no").\
filter_by(project_id=project_id).\
soft_delete(synchronize_session=False)
model_query(context, models.Reservation,
session=session, read_deleted="no").\
filter_by(project_id=project_id).\
soft_delete(synchronize_session=False)
@require_admin_context
@_retry_on_deadlock
def reservation_expire(context):
session = get_session()
with session.begin():
current_time = timeutils.utcnow()
reservation_query = model_query(context, models.Reservation,
session=session, read_deleted="no").\
filter(models.Reservation.expire < current_time)
for reservation in reservation_query.join(models.QuotaUsage).all():
if reservation.delta >= 0:
reservation.usage.reserved -= reservation.delta
session.add(reservation.usage)
reservation_query.soft_delete(synchronize_session=False)
###################
def _ec2_volume_get_query(context, session=None):
return model_query(context, models.VolumeIdMapping,
session=session, read_deleted='yes')
def _ec2_snapshot_get_query(context, session=None):
return model_query(context, models.SnapshotIdMapping,
session=session, read_deleted='yes')
@require_context
def ec2_volume_create(context, volume_uuid, id=None):
"""Create ec2 compatible volume by provided uuid."""
ec2_volume_ref = models.VolumeIdMapping()
ec2_volume_ref.update({'uuid': volume_uuid})
if id is not None:
ec2_volume_ref.update({'id': id})
ec2_volume_ref.save()
return ec2_volume_ref
@require_context
def ec2_volume_get_by_uuid(context, volume_uuid):
result = _ec2_volume_get_query(context).\
filter_by(uuid=volume_uuid).\
first()
if not result:
raise exception.VolumeNotFound(volume_id=volume_uuid)
return result
@require_context
def ec2_volume_get_by_id(context, volume_id):
result = _ec2_volume_get_query(context).\
filter_by(id=volume_id).\
first()
if not result:
raise exception.VolumeNotFound(volume_id=volume_id)
return result
@require_context
def ec2_snapshot_create(context, snapshot_uuid, id=None):
"""Create ec2 compatible snapshot by provided uuid."""
ec2_snapshot_ref = models.SnapshotIdMapping()
ec2_snapshot_ref.update({'uuid': snapshot_uuid})
if id is not None:
ec2_snapshot_ref.update({'id': id})
ec2_snapshot_ref.save()
return ec2_snapshot_ref
@require_context
def ec2_snapshot_get_by_ec2_id(context, ec2_id):
result = _ec2_snapshot_get_query(context).\
filter_by(id=ec2_id).\
first()
if not result:
raise exception.SnapshotNotFound(snapshot_id=ec2_id)
return result
@require_context
def ec2_snapshot_get_by_uuid(context, snapshot_uuid):
result = _ec2_snapshot_get_query(context).\
filter_by(uuid=snapshot_uuid).\
first()
if not result:
raise exception.SnapshotNotFound(snapshot_id=snapshot_uuid)
return result
###################
def _block_device_mapping_get_query(context, session=None,
columns_to_join=None, use_slave=False):
if columns_to_join is None:
columns_to_join = []
query = model_query(context, models.BlockDeviceMapping,
session=session, use_slave=use_slave)
for column in columns_to_join:
query = query.options(joinedload(column))
return query
def _scrub_empty_str_values(dct, keys_to_scrub):
"""Remove any keys found in sequence keys_to_scrub from the dict
if they have the value ''.
"""
for key in keys_to_scrub:
if key in dct and dct[key] == '':
del dct[key]
def _from_legacy_values(values, legacy, allow_updates=False):
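# Convert legacy block device mapping dicts to the new BlockDeviceDict
# format; when updates are allowed and the values are already safe to
# apply directly, they are passed through unchanged.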
if legacy:
if allow_updates and block_device.is_safe_for_update(values):
return values
else:
return block_device.BlockDeviceDict.from_legacy(values)
else:
return values
@require_context
def block_device_mapping_create(context, values, legacy=True):
_scrub_empty_str_values(values, ['volume_size'])
values = _from_legacy_values(values, legacy)
bdm_ref = models.BlockDeviceMapping()
bdm_ref.update(values)
bdm_ref.save()
return bdm_ref
@require_context
def block_device_mapping_update(context, bdm_id, values, legacy=True):
_scrub_empty_str_values(values, ['volume_size'])
values = _from_legacy_values(values, legacy, allow_updates=True)
query = _block_device_mapping_get_query(context).filter_by(id=bdm_id)
query.update(values)
return query.first()
def block_device_mapping_update_or_create(context, values, legacy=True):
_scrub_empty_str_values(values, ['volume_size'])
values = _from_legacy_values(values, legacy, allow_updates=True)
session = get_session()
with session.begin():
result = None
# NOTE(xqueralt): Only update a BDM when device_name was provided. We
# allow empty device names so they will be set later by the manager.
if values['device_name']:
query = _block_device_mapping_get_query(context, session=session)
result = query.filter_by(instance_uuid=values['instance_uuid'],
device_name=values['device_name']).first()
if result:
result.update(values)
else:
# Either the device_name doesn't exist in the database yet, or no
# device_name was provided. Both cases mean creating a new BDM.
result = models.BlockDeviceMapping(**values)
result.save(session=session)
# NOTE(xqueralt): Prevent from having multiple swap devices for the
# same instance. This will delete all the existing ones.
if block_device.new_format_is_swap(values):
query = _block_device_mapping_get_query(context, session=session)
query = query.filter_by(instance_uuid=values['instance_uuid'],
source_type='blank', guest_format='swap')
query = query.filter(models.BlockDeviceMapping.id != result.id)
query.soft_delete()
return result
@require_context
def block_device_mapping_get_all_by_instance(context, instance_uuid,
use_slave=False):
return _block_device_mapping_get_query(context, use_slave=use_slave).\
filter_by(instance_uuid=instance_uuid).\
all()
@require_context
def block_device_mapping_get_by_volume_id(context, volume_id,
columns_to_join=None):
return _block_device_mapping_get_query(context,
columns_to_join=columns_to_join).\
filter_by(volume_id=volume_id).\
first()
@require_context
def block_device_mapping_destroy(context, bdm_id):
_block_device_mapping_get_query(context).\
filter_by(id=bdm_id).\
soft_delete()
@require_context
def block_device_mapping_destroy_by_instance_and_volume(context, instance_uuid,
volume_id):
_block_device_mapping_get_query(context).\
filter_by(instance_uuid=instance_uuid).\
filter_by(volume_id=volume_id).\
soft_delete()
@require_context
def block_device_mapping_destroy_by_instance_and_device(context, instance_uuid,
device_name):
_block_device_mapping_get_query(context).\
filter_by(instance_uuid=instance_uuid).\
filter_by(device_name=device_name).\
soft_delete()
###################
def _security_group_create(context, values, session=None):
security_group_ref = models.SecurityGroup()
# FIXME(devcamcar): Unless I do this, rules fails with lazy load exception
# once save() is called. This will get cleaned up in next orm pass.
security_group_ref.rules
security_group_ref.update(values)
try:
security_group_ref.save(session=session)
except db_exc.DBDuplicateEntry:
raise exception.SecurityGroupExists(
project_id=values['project_id'],
security_group_name=values['name'])
return security_group_ref
def _security_group_get_query(context, session=None, read_deleted=None,
project_only=False, join_rules=True):
query = model_query(context, models.SecurityGroup, session=session,
read_deleted=read_deleted, project_only=project_only)
if join_rules:
query = query.options(joinedload_all('rules.grantee_group'))
return query
def _security_group_get_by_names(context, session, project_id, group_names):
"""Get security group models for a project by a list of names.
Raise SecurityGroupNotFoundForProject for a name not found.
"""
query = _security_group_get_query(context, session=session,
read_deleted="no", join_rules=False).\
filter_by(project_id=project_id).\
filter(models.SecurityGroup.name.in_(group_names))
sg_models = query.all()
if len(sg_models) == len(group_names):
return sg_models
# Find the first one missing and raise
group_names_from_models = [x.name for x in sg_models]
for group_name in group_names:
if group_name not in group_names_from_models:
raise exception.SecurityGroupNotFoundForProject(
project_id=project_id, security_group_id=group_name)
# Not Reached
@require_context
def security_group_get_all(context):
return _security_group_get_query(context).all()
@require_context
def security_group_get(context, security_group_id, columns_to_join=None):
query = _security_group_get_query(context, project_only=True).\
filter_by(id=security_group_id)
if columns_to_join is None:
columns_to_join = []
for column in columns_to_join:
if column.startswith('instances'):
query = query.options(joinedload_all(column))
result = query.first()
if not result:
raise exception.SecurityGroupNotFound(
security_group_id=security_group_id)
return result
@require_context
def security_group_get_by_name(context, project_id, group_name,
columns_to_join=None):
query = _security_group_get_query(context,
read_deleted="no", join_rules=False).\
filter_by(project_id=project_id).\
filter_by(name=group_name)
if columns_to_join is None:
columns_to_join = ['instances', 'rules.grantee_group']
for column in columns_to_join:
query = query.options(joinedload_all(column))
result = query.first()
if not result:
raise exception.SecurityGroupNotFoundForProject(
project_id=project_id, security_group_id=group_name)
return result
@require_context
def security_group_get_by_project(context, project_id):
return _security_group_get_query(context, read_deleted="no").\
filter_by(project_id=project_id).\
all()
@require_context
def security_group_get_by_instance(context, instance_uuid):
return _security_group_get_query(context, read_deleted="no").\
join(models.SecurityGroup.instances).\
filter_by(uuid=instance_uuid).\
all()
@require_context
def security_group_in_use(context, group_id):
session = get_session()
with session.begin():
# Are there any instances that haven't been deleted
# that include this group?
inst_assoc = model_query(context,
models.SecurityGroupInstanceAssociation,
read_deleted="no", session=session).\
filter_by(security_group_id=group_id).\
all()
for ia in inst_assoc:
num_instances = model_query(context, models.Instance,
session=session, read_deleted="no").\
filter_by(uuid=ia.instance_uuid).\
count()
if num_instances:
return True
return False
@require_context
def security_group_create(context, values):
return _security_group_create(context, values)
@require_context
def security_group_update(context, security_group_id, values,
columns_to_join=None):
session = get_session()
with session.begin():
query = model_query(context, models.SecurityGroup,
session=session).filter_by(id=security_group_id)
if columns_to_join:
for column in columns_to_join:
query = query.options(joinedload_all(column))
security_group_ref = query.first()
if not security_group_ref:
raise exception.SecurityGroupNotFound(
security_group_id=security_group_id)
security_group_ref.update(values)
name = security_group_ref['name']
project_id = security_group_ref['project_id']
try:
security_group_ref.save(session=session)
except db_exc.DBDuplicateEntry:
raise exception.SecurityGroupExists(
project_id=project_id,
security_group_name=name)
return security_group_ref
def security_group_ensure_default(context):
"""Ensure default security group exists for a project_id."""
try:
return _security_group_ensure_default(context)
except exception.SecurityGroupExists:
# NOTE(rpodolyaka): a concurrent transaction has succeeded first,
# suppress the error and proceed
return security_group_get_by_name(context, context.project_id,
'default')
def _security_group_ensure_default(context, session=None):
if session is None:
session = get_session()
with session.begin(subtransactions=True):
try:
default_group = _security_group_get_by_names(context,
session,
context.project_id,
['default'])[0]
except exception.NotFound:
values = {'name': 'default',
'description': 'default',
'user_id': context.user_id,
'project_id': context.project_id}
default_group = _security_group_create(context, values,
session=session)
usage = model_query(context, models.QuotaUsage,
read_deleted="no", session=session).\
filter_by(project_id=context.project_id).\
filter_by(user_id=context.user_id).\
filter_by(resource='security_groups')
# Create quota usage for auto created default security group
if not usage.first():
_quota_usage_create(context.project_id,
context.user_id,
'security_groups',
1, 0,
None,
session=session)
else:
usage.update({'in_use': int(usage.first().in_use) + 1})
default_rules = _security_group_rule_get_default_query(context,
session=session).all()
for default_rule in default_rules:
# This is suboptimal, it should be programmatic to know
# the values of the default_rule
rule_values = {'protocol': default_rule.protocol,
'from_port': default_rule.from_port,
'to_port': default_rule.to_port,
'cidr': default_rule.cidr,
'parent_group_id': default_group.id,
}
_security_group_rule_create(context,
rule_values,
session=session)
return default_group
@require_context
def security_group_destroy(context, security_group_id):
session = get_session()
with session.begin():
model_query(context, models.SecurityGroup,
session=session).\
filter_by(id=security_group_id).\
soft_delete()
model_query(context, models.SecurityGroupInstanceAssociation,
session=session).\
filter_by(security_group_id=security_group_id).\
soft_delete()
model_query(context, models.SecurityGroupIngressRule,
session=session).\
filter_by(group_id=security_group_id).\
soft_delete()
model_query(context, models.SecurityGroupIngressRule,
session=session).\
filter_by(parent_group_id=security_group_id).\
soft_delete()
def _security_group_count_by_project_and_user(context, project_id, user_id,
session=None):
nova.context.authorize_project_context(context, project_id)
return model_query(context, models.SecurityGroup, read_deleted="no",
session=session).\
filter_by(project_id=project_id).\
filter_by(user_id=user_id).\
count()
###################
def _security_group_rule_create(context, values, session=None):
security_group_rule_ref = models.SecurityGroupIngressRule()
security_group_rule_ref.update(values)
security_group_rule_ref.save(session=session)
return security_group_rule_ref
def _security_group_rule_get_query(context, session=None):
return model_query(context, models.SecurityGroupIngressRule,
session=session)
@require_context
def security_group_rule_get(context, security_group_rule_id):
result = (_security_group_rule_get_query(context).
filter_by(id=security_group_rule_id).
first())
if not result:
raise exception.SecurityGroupNotFoundForRule(
rule_id=security_group_rule_id)
return result
@require_context
def security_group_rule_get_by_security_group(context, security_group_id,
columns_to_join=None):
if columns_to_join is None:
columns_to_join = ['grantee_group.instances.system_metadata',
'grantee_group.instances.info_cache']
query = (_security_group_rule_get_query(context).
filter_by(parent_group_id=security_group_id))
for column in columns_to_join:
query = query.options(joinedload_all(column))
return query.all()
@require_context
def security_group_rule_get_by_security_group_grantee(context,
security_group_id):
return (_security_group_rule_get_query(context).
filter_by(group_id=security_group_id).
all())
@require_context
def security_group_rule_create(context, values):
return _security_group_rule_create(context, values)
@require_context
def security_group_rule_destroy(context, security_group_rule_id):
count = (_security_group_rule_get_query(context).
filter_by(id=security_group_rule_id).
soft_delete())
if count == 0:
raise exception.SecurityGroupNotFoundForRule(
rule_id=security_group_rule_id)
@require_context
def security_group_rule_count_by_group(context, security_group_id):
return (model_query(context, models.SecurityGroupIngressRule,
read_deleted="no").
filter_by(parent_group_id=security_group_id).
count())
#
###################
def _security_group_rule_get_default_query(context, session=None):
return model_query(context, models.SecurityGroupIngressDefaultRule,
session=session)
@require_context
def security_group_default_rule_get(context, security_group_rule_default_id):
result = _security_group_rule_get_default_query(context).\
filter_by(id=security_group_rule_default_id).\
first()
if not result:
raise exception.SecurityGroupDefaultRuleNotFound(
rule_id=security_group_rule_default_id)
return result
@require_admin_context
def security_group_default_rule_destroy(context,
security_group_rule_default_id):
session = get_session()
with session.begin():
count = _security_group_rule_get_default_query(context,
session=session).\
filter_by(id=security_group_rule_default_id).\
soft_delete()
if count == 0:
raise exception.SecurityGroupDefaultRuleNotFound(
rule_id=security_group_rule_default_id)
@require_admin_context
def security_group_default_rule_create(context, values):
security_group_default_rule_ref = models.SecurityGroupIngressDefaultRule()
security_group_default_rule_ref.update(values)
security_group_default_rule_ref.save()
return security_group_default_rule_ref
@require_context
def security_group_default_rule_list(context):
return _security_group_rule_get_default_query(context).\
all()
###################
@require_admin_context
def provider_fw_rule_create(context, rule):
fw_rule_ref = models.ProviderFirewallRule()
fw_rule_ref.update(rule)
fw_rule_ref.save()
return fw_rule_ref
@require_admin_context
def provider_fw_rule_get_all(context):
return model_query(context, models.ProviderFirewallRule).all()
@require_admin_context
def provider_fw_rule_destroy(context, rule_id):
session = get_session()
with session.begin():
session.query(models.ProviderFirewallRule).\
filter_by(id=rule_id).\
soft_delete()
###################
@require_context
def project_get_networks(context, project_id, associate=True):
# NOTE(tr3buchet): as before this function will associate
# a project with a network if it doesn't have one and
# associate is true
result = model_query(context, models.Network, read_deleted="no").\
filter_by(project_id=project_id).\
all()
if not result:
if not associate:
return []
return [network_associate(context, project_id)]
return result
###################
@require_admin_context
def migration_create(context, values):
migration = models.Migration()
migration.update(values)
migration.save()
return migration
@require_admin_context
def migration_update(context, id, values):
session = get_session()
with session.begin():
migration = _migration_get(context, id, session=session)
migration.update(values)
return migration
def _migration_get(context, id, session=None):
result = model_query(context, models.Migration, session=session,
read_deleted="yes").\
filter_by(id=id).\
first()
if not result:
raise exception.MigrationNotFound(migration_id=id)
return result
@require_admin_context
def migration_get(context, id):
return _migration_get(context, id)
@require_admin_context
def migration_get_by_instance_and_status(context, instance_uuid, status):
result = model_query(context, models.Migration, read_deleted="yes").\
filter_by(instance_uuid=instance_uuid).\
filter_by(status=status).\
first()
if not result:
raise exception.MigrationNotFoundByStatus(instance_id=instance_uuid,
status=status)
return result
@require_admin_context
def migration_get_unconfirmed_by_dest_compute(context, confirm_window,
dest_compute, use_slave=False):
confirm_window = (timeutils.utcnow() -
datetime.timedelta(seconds=confirm_window))
return model_query(context, models.Migration, read_deleted="yes",
use_slave=use_slave).\
filter(models.Migration.updated_at <= confirm_window).\
filter_by(status="finished").\
filter_by(dest_compute=dest_compute).\
all()
@require_admin_context
def migration_get_in_progress_by_host_and_node(context, host, node):
return model_query(context, models.Migration).\
filter(or_(and_(models.Migration.source_compute == host,
models.Migration.source_node == node),
and_(models.Migration.dest_compute == host,
models.Migration.dest_node == node))).\
filter(~models.Migration.status.in_(['confirmed', 'reverted',
'error'])).\
options(joinedload_all('instance.system_metadata')).\
all()
@require_admin_context
def migration_get_all_by_filters(context, filters):
query = model_query(context, models.Migration)
if "status" in filters:
query = query.filter(models.Migration.status == filters["status"])
if "host" in filters:
host = filters["host"]
query = query.filter(or_(models.Migration.source_compute == host,
models.Migration.dest_compute == host))
return query.all()
##################
def console_pool_create(context, values):
pool = models.ConsolePool()
pool.update(values)
try:
pool.save()
except db_exc.DBDuplicateEntry:
raise exception.ConsolePoolExists(
host=values["host"],
console_type=values["console_type"],
compute_host=values["compute_host"],
)
return pool
def console_pool_get_by_host_type(context, compute_host, host,
console_type):
result = model_query(context, models.ConsolePool, read_deleted="no").\
filter_by(host=host).\
filter_by(console_type=console_type).\
filter_by(compute_host=compute_host).\
options(joinedload('consoles')).\
first()
if not result:
raise exception.ConsolePoolNotFoundForHostType(
host=host, console_type=console_type,
compute_host=compute_host)
return result
def console_pool_get_all_by_host_type(context, host, console_type):
return model_query(context, models.ConsolePool, read_deleted="no").\
filter_by(host=host).\
filter_by(console_type=console_type).\
options(joinedload('consoles')).\
all()
def console_create(context, values):
console = models.Console()
console.update(values)
console.save()
return console
def console_delete(context, console_id):
session = get_session()
with session.begin():
# NOTE(mdragon): consoles are meant to be transient.
session.query(models.Console).\
filter_by(id=console_id).\
delete()
def console_get_by_pool_instance(context, pool_id, instance_uuid):
result = model_query(context, models.Console, read_deleted="yes").\
filter_by(pool_id=pool_id).\
filter_by(instance_uuid=instance_uuid).\
options(joinedload('pool')).\
first()
if not result:
raise exception.ConsoleNotFoundInPoolForInstance(
pool_id=pool_id, instance_uuid=instance_uuid)
return result
def console_get_all_by_instance(context, instance_uuid, columns_to_join=None):
query = model_query(context, models.Console, read_deleted="yes").\
filter_by(instance_uuid=instance_uuid)
if columns_to_join:
for column in columns_to_join:
query = query.options(joinedload(column))
return query.all()
def console_get(context, console_id, instance_uuid=None):
query = model_query(context, models.Console, read_deleted="yes").\
filter_by(id=console_id).\
options(joinedload('pool'))
if instance_uuid is not None:
query = query.filter_by(instance_uuid=instance_uuid)
result = query.first()
if not result:
if instance_uuid:
raise exception.ConsoleNotFoundForInstance(
console_id=console_id, instance_uuid=instance_uuid)
else:
raise exception.ConsoleNotFound(console_id=console_id)
return result
##################
@require_admin_context
def flavor_create(context, values, projects=None):
"""Create a new instance type. In order to pass in extra specs,
the values dict should contain a 'extra_specs' key/value pair:
{'extra_specs' : {'k1': 'v1', 'k2': 'v2', ...}}
"""
specs = values.get('extra_specs')
specs_refs = []
if specs:
for k, v in specs.iteritems():
specs_ref = models.InstanceTypeExtraSpecs()
specs_ref['key'] = k
specs_ref['value'] = v
specs_refs.append(specs_ref)
values['extra_specs'] = specs_refs
instance_type_ref = models.InstanceTypes()
instance_type_ref.update(values)
if projects is None:
projects = []
session = get_session()
with session.begin():
try:
instance_type_ref.save()
except db_exc.DBDuplicateEntry as e:
if 'flavorid' in e.columns:
raise exception.FlavorIdExists(flavor_id=values['flavorid'])
raise exception.FlavorExists(name=values['name'])
except Exception as e:
raise db_exc.DBError(e)
for project in set(projects):
access_ref = models.InstanceTypeProjects()
access_ref.update({"instance_type_id": instance_type_ref.id,
"project_id": project})
access_ref.save()
return _dict_with_extra_specs(instance_type_ref)
def _dict_with_extra_specs(inst_type_query):
"""Takes an instance or instance type query returned
by sqlalchemy and returns it as a dictionary, converting the
extra_specs entry from a list of dicts:
'extra_specs' : [{'key': 'k1', 'value': 'v1', ...}, ...]
to a single dict:
'extra_specs' : {'k1': 'v1'}
"""
inst_type_dict = dict(inst_type_query)
extra_specs = dict([(x['key'], x['value'])
for x in inst_type_query['extra_specs']])
inst_type_dict['extra_specs'] = extra_specs
return inst_type_dict
def _flavor_get_query(context, session=None, read_deleted=None):
query = model_query(context, models.InstanceTypes, session=session,
read_deleted=read_deleted).\
options(joinedload('extra_specs'))
if not context.is_admin:
the_filter = [models.InstanceTypes.is_public == true()]
the_filter.extend([
models.InstanceTypes.projects.any(project_id=context.project_id)
])
query = query.filter(or_(*the_filter))
return query
@require_context
def flavor_get_all(context, inactive=False, filters=None,
sort_key='flavorid', sort_dir='asc', limit=None,
marker=None):
"""Returns all flavors.
"""
filters = filters or {}
# FIXME(sirp): now that we have the `disabled` field for flavors, we
# should probably remove the use of `deleted` to mark inactive. `deleted`
# should mean truly deleted, e.g. we can safely purge the record out of the
# database.
read_deleted = "yes" if inactive else "no"
query = _flavor_get_query(context, read_deleted=read_deleted)
if 'min_memory_mb' in filters:
query = query.filter(
models.InstanceTypes.memory_mb >= filters['min_memory_mb'])
if 'min_root_gb' in filters:
query = query.filter(
models.InstanceTypes.root_gb >= filters['min_root_gb'])
if 'disabled' in filters:
query = query.filter(
models.InstanceTypes.disabled == filters['disabled'])
if 'is_public' in filters and filters['is_public'] is not None:
the_filter = [models.InstanceTypes.is_public == filters['is_public']]
# if filters['is_public'] and context.project_id is not None:
# the_filter.extend([
# models.InstanceTypes.projects.any(
# project_id=context.project_id, deleted=0)
# ])
if len(the_filter) > 1:
query = query.filter(or_(*the_filter))
else:
query = query.filter(the_filter[0])
marker_row = None
if marker is not None:
marker_row = _flavor_get_query(context, read_deleted=read_deleted).\
filter_by(flavorid=marker).\
first()
if not marker_row:
raise exception.MarkerNotFound(marker)
# query = sqlalchemyutils.paginate_query(query, models.InstanceTypes, limit,
# [sort_key, 'id'],
# marker=marker_row,
# sort_dir=sort_dir)
query = RiakModelQuery(models.InstanceTypes)
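# NOTE: RiakModelQuery re-queries all InstanceTypes directly; the
# SQLAlchemy filters and pagination built above are not applied to it.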
inst_types = query.all()
return [_dict_with_extra_specs(i) for i in inst_types]
def _flavor_get_id_from_flavor_query(context, flavor_id, session=None):
return model_query(context, models.InstanceTypes.id, read_deleted="no",
session=session, base_model=models.InstanceTypes).\
filter_by(flavorid=flavor_id)
def _flavor_get_id_from_flavor(context, flavor_id, session=None):
result = _flavor_get_id_from_flavor_query(context, flavor_id,
session=session).\
first()
if not result:
raise exception.FlavorNotFound(flavor_id=flavor_id)
return result[0]
@require_context
def flavor_get(context, id):
"""Returns a dict describing specific flavor."""
result = _flavor_get_query(context).\
filter_by(id=id).\
first()
if not result:
raise exception.FlavorNotFound(flavor_id=id)
return _dict_with_extra_specs(result)
@require_context
def flavor_get_by_name(context, name):
"""Returns a dict describing specific flavor."""
result = _flavor_get_query(context).\
filter_by(name=name).\
first()
if not result:
raise exception.FlavorNotFoundByName(flavor_name=name)
return _dict_with_extra_specs(result)
@require_context
def flavor_get_by_flavor_id(context, flavor_id, read_deleted):
"""Returns a dict describing specific flavor_id."""
result = _flavor_get_query(context, read_deleted=read_deleted).\
filter_by(flavorid=flavor_id).\
order_by(asc("deleted"), asc("id")).\
first()
if not result:
raise exception.FlavorNotFound(flavor_id=flavor_id)
return _dict_with_extra_specs(result)
@require_admin_context
def flavor_destroy(context, name):
"""Marks specific flavor as deleted."""
session = get_session()
with session.begin():
ref = model_query(context, models.InstanceTypes, session=session,
read_deleted="no").\
filter_by(name=name).\
first()
if not ref:
raise exception.FlavorNotFoundByName(flavor_name=name)
ref.soft_delete(session=session)
model_query(context, models.InstanceTypeExtraSpecs,
session=session, read_deleted="no").\
filter_by(instance_type_id=ref['id']).\
soft_delete()
model_query(context, models.InstanceTypeProjects,
session=session, read_deleted="no").\
filter_by(instance_type_id=ref['id']).\
soft_delete()
def _flavor_access_query(context, session=None):
return model_query(context, models.InstanceTypeProjects, session=session,
read_deleted="no")
@require_admin_context
def flavor_access_get_by_flavor_id(context, flavor_id):
"""Get flavor access list by flavor id."""
instance_type_id_subq = \
_flavor_get_id_from_flavor_query(context, flavor_id)
access_refs = _flavor_access_query(context).\
filter_by(instance_type_id=instance_type_id_subq).\
all()
return access_refs
@require_admin_context
def flavor_access_add(context, flavor_id, project_id):
"""Add given tenant to the flavor access list."""
instance_type_id = _flavor_get_id_from_flavor(context, flavor_id)
access_ref = models.InstanceTypeProjects()
access_ref.update({"instance_type_id": instance_type_id,
"project_id": project_id})
try:
access_ref.save()
except db_exc.DBDuplicateEntry:
raise exception.FlavorAccessExists(flavor_id=flavor_id,
project_id=project_id)
return access_ref
@require_admin_context
def flavor_access_remove(context, flavor_id, project_id):
"""Remove given tenant from the flavor access list."""
instance_type_id = _flavor_get_id_from_flavor(context, flavor_id)
count = _flavor_access_query(context).\
filter_by(instance_type_id=instance_type_id).\
filter_by(project_id=project_id).\
soft_delete(synchronize_session=False)
if count == 0:
raise exception.FlavorAccessNotFound(flavor_id=flavor_id,
project_id=project_id)
def _flavor_extra_specs_get_query(context, flavor_id, session=None):
instance_type_id_subq = \
_flavor_get_id_from_flavor_query(context, flavor_id)
return model_query(context, models.InstanceTypeExtraSpecs, session=session,
read_deleted="no").\
filter_by(instance_type_id=instance_type_id_subq)
@require_context
def flavor_extra_specs_get(context, flavor_id):
rows = _flavor_extra_specs_get_query(context, flavor_id).all()
return dict([(row['key'], row['value']) for row in rows])
@require_context
def flavor_extra_specs_get_item(context, flavor_id, key):
result = _flavor_extra_specs_get_query(context, flavor_id).\
filter(models.InstanceTypeExtraSpecs.key == key).\
first()
if not result:
raise exception.FlavorExtraSpecsNotFound(
extra_specs_key=key, flavor_id=flavor_id)
return {result["key"]: result["value"]}
@require_context
def flavor_extra_specs_delete(context, flavor_id, key):
result = _flavor_extra_specs_get_query(context, flavor_id).\
filter(models.InstanceTypeExtraSpecs.key == key).\
soft_delete(synchronize_session=False)
# did not find the extra spec
if result == 0:
raise exception.FlavorExtraSpecsNotFound(
extra_specs_key=key, flavor_id=flavor_id)
@require_context
def flavor_extra_specs_update_or_create(context, flavor_id, specs,
max_retries=10):
for attempt in xrange(max_retries):
try:
session = get_session()
with session.begin():
instance_type_id = _flavor_get_id_from_flavor(context,
flavor_id, session)
spec_refs = model_query(context, models.InstanceTypeExtraSpecs,
session=session, read_deleted="no").\
filter_by(instance_type_id=instance_type_id).\
filter(models.InstanceTypeExtraSpecs.key.in_(specs.keys())).\
all()
existing_keys = set()
for spec_ref in spec_refs:
key = spec_ref["key"]
existing_keys.add(key)
spec_ref.update({"value": specs[key]})
for key, value in specs.iteritems():
if key in existing_keys:
continue
spec_ref = models.InstanceTypeExtraSpecs()
spec_ref.update({"key": key, "value": value,
"instance_type_id": instance_type_id})
session.add(spec_ref)
return specs
except db_exc.DBDuplicateEntry:
# a concurrent transaction has been committed,
# try again unless this was the last attempt
if attempt == max_retries - 1:
raise exception.FlavorExtraSpecUpdateCreateFailed(
id=flavor_id, retries=max_retries)
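# Illustrative usage sketch (not part of the original module; the names
# and values below are hypothetical). The function above retries on
# DBDuplicateEntry because a concurrent request may insert the same
# extra-spec key between the SELECT and the INSERT:
#
#   flavor_extra_specs_update_or_create(ctxt, '42',
#                                       {'hw:cpu_policy': 'dedicated'})
#
# Existing keys are updated in place, unseen keys are inserted, and the
# whole transaction is retried up to max_retries times before giving up.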
####################
@require_admin_context
def cell_create(context, values):
cell = models.Cell()
cell.update(values)
try:
cell.save()
except db_exc.DBDuplicateEntry:
raise exception.CellExists(name=values['name'])
return cell
def _cell_get_by_name_query(context, cell_name, session=None):
return model_query(context, models.Cell,
session=session).filter_by(name=cell_name)
@require_admin_context
def cell_update(context, cell_name, values):
session = get_session()
with session.begin():
cell_query = _cell_get_by_name_query(context, cell_name,
session=session)
if not cell_query.update(values):
raise exception.CellNotFound(cell_name=cell_name)
cell = cell_query.first()
return cell
@require_admin_context
def cell_delete(context, cell_name):
return _cell_get_by_name_query(context, cell_name).soft_delete()
@require_admin_context
def cell_get(context, cell_name):
result = _cell_get_by_name_query(context, cell_name).first()
if not result:
raise exception.CellNotFound(cell_name=cell_name)
return result
@require_admin_context
def cell_get_all(context):
return model_query(context, models.Cell, read_deleted="no").all()
########################
# User-provided metadata
def _instance_metadata_get_multi(context, instance_uuids,
session=None, use_slave=False):
if not instance_uuids:
return []
return model_query(context, models.InstanceMetadata,
session=session, use_slave=use_slave).\
filter(
models.InstanceMetadata.instance_uuid.in_(instance_uuids))
def _instance_metadata_get_query(context, instance_uuid, session=None):
return model_query(context, models.InstanceMetadata, session=session,
read_deleted="no").\
filter_by(instance_uuid=instance_uuid)
@require_context
def instance_metadata_get(context, instance_uuid):
rows = _instance_metadata_get_query(context, instance_uuid).all()
return dict((row['key'], row['value']) for row in rows)
@require_context
@_retry_on_deadlock
def instance_metadata_delete(context, instance_uuid, key):
_instance_metadata_get_query(context, instance_uuid).\
filter_by(key=key).\
soft_delete()
@require_context
@_retry_on_deadlock
def instance_metadata_update(context, instance_uuid, metadata, delete):
all_keys = metadata.keys()
session = get_session()
with session.begin(subtransactions=True):
if delete:
_instance_metadata_get_query(context, instance_uuid,
session=session).\
filter(~models.InstanceMetadata.key.in_(all_keys)).\
soft_delete(synchronize_session=False)
already_existing_keys = []
meta_refs = _instance_metadata_get_query(context, instance_uuid,
session=session).\
filter(models.InstanceMetadata.key.in_(all_keys)).\
all()
for meta_ref in meta_refs:
already_existing_keys.append(meta_ref.key)
meta_ref.update({"value": metadata[meta_ref.key]})
new_keys = set(all_keys) - set(already_existing_keys)
for key in new_keys:
meta_ref = models.InstanceMetadata()
meta_ref.update({"key": key, "value": metadata[key],
"instance_uuid": instance_uuid})
session.add(meta_ref)
return metadata
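# Illustrative usage sketch (not part of the original module; the uuid
# and keys are hypothetical). With delete=True any existing key missing
# from the new mapping is soft-deleted; with delete=False only the keys
# supplied are updated or created:
#
#   instance_metadata_update(ctxt, 'aaaa-bbbb-cccc', {'role': 'web'},
#                            delete=True)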
#######################
# System-owned metadata
def _instance_system_metadata_get_multi(context, instance_uuids,
session=None, use_slave=False):
print("[DEBUG] from _instance_system_metadata_get_multi")
print("[DEBUG] instance_uuids => %s" % (instance_uuids))
result = []
for instance_uuid in instance_uuids:
query = model_query(context, models.InstanceSystemMetadata,
session=session, use_slave=use_slave).\
filter(models.InstanceSystemMetadata.instance_uuid==instance_uuid)
result += query.all()
return result
# if not instance_uuids:
# return []
# return model_query(context, models.InstanceSystemMetadata,
# session=session, use_slave=use_slave).\
# filter(
# models.InstanceSystemMetadata.instance_uuid.in_(instance_uuids))
def _instance_system_metadata_get_query(context, instance_uuid, session=None):
return model_query(context, models.InstanceSystemMetadata,
session=session).\
filter_by(instance_uuid=instance_uuid)
@require_context
def instance_system_metadata_get(context, instance_uuid):
rows = _instance_system_metadata_get_query(context, instance_uuid).all()
return dict((row['key'], row['value']) for row in rows)
@require_context
def instance_system_metadata_update(context, instance_uuid, metadata, delete):
all_keys = metadata.keys()
session = get_session()
with session.begin(subtransactions=True):
if delete:
_instance_system_metadata_get_query(context, instance_uuid,
session=session).\
filter(~models.InstanceSystemMetadata.key.in_(all_keys)).\
soft_delete(synchronize_session=False)
already_existing_keys = []
meta_refs = _instance_system_metadata_get_query(context, instance_uuid,
session=session).\
filter(models.InstanceSystemMetadata.key.in_(all_keys)).\
all()
for meta_ref in meta_refs:
already_existing_keys.append(meta_ref.key)
meta_ref.update({"value": metadata[meta_ref.key]})
new_keys = set(all_keys) - set(already_existing_keys)
for key in new_keys:
meta_ref = models.InstanceSystemMetadata()
meta_ref.update({"key": key, "value": metadata[key],
"instance_uuid": instance_uuid})
session.add(meta_ref)
return metadata
####################
@require_admin_context
def agent_build_create(context, values):
agent_build_ref = models.AgentBuild()
agent_build_ref.update(values)
try:
agent_build_ref.save()
except db_exc.DBDuplicateEntry:
raise exception.AgentBuildExists(hypervisor=values['hypervisor'],
os=values['os'], architecture=values['architecture'])
return agent_build_ref
@require_admin_context
def agent_build_get_by_triple(context, hypervisor, os, architecture):
return model_query(context, models.AgentBuild, read_deleted="no").\
filter_by(hypervisor=hypervisor).\
filter_by(os=os).\
filter_by(architecture=architecture).\
first()
@require_admin_context
def agent_build_get_all(context, hypervisor=None):
if hypervisor:
return model_query(context, models.AgentBuild, read_deleted="no").\
filter_by(hypervisor=hypervisor).\
all()
else:
return model_query(context, models.AgentBuild, read_deleted="no").\
all()
@require_admin_context
def agent_build_destroy(context, agent_build_id):
rows_affected = model_query(context, models.AgentBuild).filter_by(
id=agent_build_id).soft_delete()
if rows_affected == 0:
raise exception.AgentBuildNotFound(id=agent_build_id)
@require_admin_context
def agent_build_update(context, agent_build_id, values):
rows_affected = model_query(context, models.AgentBuild).\
filter_by(id=agent_build_id).\
update(values)
if rows_affected == 0:
raise exception.AgentBuildNotFound(id=agent_build_id)
####################
@require_context
def bw_usage_get(context, uuid, start_period, mac, use_slave=False):
return model_query(context, models.BandwidthUsage, read_deleted="yes",
use_slave=use_slave).\
filter_by(start_period=start_period).\
filter_by(uuid=uuid).\
filter_by(mac=mac).\
first()
@require_context
def bw_usage_get_by_uuids(context, uuids, start_period, use_slave=False):
return (
model_query(context, models.BandwidthUsage, read_deleted="yes",
use_slave=use_slave).
filter(models.BandwidthUsage.uuid.in_(uuids)).
filter_by(start_period=start_period).
all()
)
@require_context
@_retry_on_deadlock
def bw_usage_update(context, uuid, mac, start_period, bw_in, bw_out,
last_ctr_in, last_ctr_out, last_refreshed=None):
session = get_session()
if last_refreshed is None:
last_refreshed = timeutils.utcnow()
# NOTE(comstud): More often than not, we'll be updating records vs
# creating records. Optimize accordingly, trying to update existing
# records. Fall back to creation when no rows are updated.
with session.begin():
values = {'last_refreshed': last_refreshed,
'last_ctr_in': last_ctr_in,
'last_ctr_out': last_ctr_out,
'bw_in': bw_in,
'bw_out': bw_out}
rows = model_query(context, models.BandwidthUsage,
session=session, read_deleted="yes").\
filter_by(start_period=start_period).\
filter_by(uuid=uuid).\
filter_by(mac=mac).\
update(values, synchronize_session=False)
if rows:
return
bwusage = models.BandwidthUsage()
bwusage.start_period = start_period
bwusage.uuid = uuid
bwusage.mac = mac
bwusage.last_refreshed = last_refreshed
bwusage.bw_in = bw_in
bwusage.bw_out = bw_out
bwusage.last_ctr_in = last_ctr_in
bwusage.last_ctr_out = last_ctr_out
try:
bwusage.save(session=session)
except db_exc.DBDuplicateEntry:
# NOTE(sirp): Possible race if two greenthreads attempt to create
# the usage entry at the same time. First one wins.
pass
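# Illustrative note (not part of the original module; values are
# hypothetical). bw_usage_update() follows an "update first, insert on
# miss" pattern:
#
#   bw_usage_update(ctxt, instance_uuid, 'fa:16:3e:00:00:01',
#                   start_period, bw_in=1024, bw_out=2048,
#                   last_ctr_in=10, last_ctr_out=20)
#
# If the UPDATE matches no row, a new BandwidthUsage row is inserted; a
# DBDuplicateEntry raised by a racing greenthread is ignored on purpose,
# so the first writer wins.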
####################
@require_context
def vol_get_usage_by_time(context, begin):
"""Return volumes usage that have been updated after a specified time."""
return model_query(context, models.VolumeUsage, read_deleted="yes").\
filter(or_(models.VolumeUsage.tot_last_refreshed == null(),
models.VolumeUsage.tot_last_refreshed > begin,
models.VolumeUsage.curr_last_refreshed == null(),
models.VolumeUsage.curr_last_refreshed > begin,
)).\
all()
@require_context
def vol_usage_update(context, id, rd_req, rd_bytes, wr_req, wr_bytes,
instance_id, project_id, user_id, availability_zone,
update_totals=False):
session = get_session()
refreshed = timeutils.utcnow()
with session.begin():
values = {}
# NOTE(dricco): We will be mostly updating current usage records vs
# updating total or creating records. Optimize accordingly.
if not update_totals:
values = {'curr_last_refreshed': refreshed,
'curr_reads': rd_req,
'curr_read_bytes': rd_bytes,
'curr_writes': wr_req,
'curr_write_bytes': wr_bytes,
'instance_uuid': instance_id,
'project_id': project_id,
'user_id': user_id,
'availability_zone': availability_zone}
else:
values = {'tot_last_refreshed': refreshed,
'tot_reads': models.VolumeUsage.tot_reads + rd_req,
'tot_read_bytes': models.VolumeUsage.tot_read_bytes +
rd_bytes,
'tot_writes': models.VolumeUsage.tot_writes + wr_req,
'tot_write_bytes': models.VolumeUsage.tot_write_bytes +
wr_bytes,
'curr_reads': 0,
'curr_read_bytes': 0,
'curr_writes': 0,
'curr_write_bytes': 0,
'instance_uuid': instance_id,
'project_id': project_id,
'user_id': user_id,
'availability_zone': availability_zone}
current_usage = model_query(context, models.VolumeUsage,
session=session, read_deleted="yes").\
filter_by(volume_id=id).\
first()
if current_usage:
if (rd_req < current_usage['curr_reads'] or
rd_bytes < current_usage['curr_read_bytes'] or
wr_req < current_usage['curr_writes'] or
wr_bytes < current_usage['curr_write_bytes']):
LOG.info(_("Volume(%s) has lower stats then what is in "
"the database. Instance must have been rebooted "
"or crashed. Updating totals.") % id)
if not update_totals:
values['tot_reads'] = (models.VolumeUsage.tot_reads +
current_usage['curr_reads'])
values['tot_read_bytes'] = (
models.VolumeUsage.tot_read_bytes +
current_usage['curr_read_bytes'])
values['tot_writes'] = (models.VolumeUsage.tot_writes +
current_usage['curr_writes'])
values['tot_write_bytes'] = (
models.VolumeUsage.tot_write_bytes +
current_usage['curr_write_bytes'])
else:
values['tot_reads'] = (models.VolumeUsage.tot_reads +
current_usage['curr_reads'] +
rd_req)
values['tot_read_bytes'] = (
models.VolumeUsage.tot_read_bytes +
current_usage['curr_read_bytes'] + rd_bytes)
values['tot_writes'] = (models.VolumeUsage.tot_writes +
current_usage['curr_writes'] +
wr_req)
values['tot_write_bytes'] = (
models.VolumeUsage.tot_write_bytes +
current_usage['curr_write_bytes'] + wr_bytes)
current_usage.update(values)
current_usage.save(session=session)
session.refresh(current_usage)
return current_usage
vol_usage = models.VolumeUsage()
vol_usage.volume_id = id
vol_usage.instance_uuid = instance_id
vol_usage.project_id = project_id
vol_usage.user_id = user_id
vol_usage.availability_zone = availability_zone
if not update_totals:
vol_usage.curr_last_refreshed = refreshed
vol_usage.curr_reads = rd_req
vol_usage.curr_read_bytes = rd_bytes
vol_usage.curr_writes = wr_req
vol_usage.curr_write_bytes = wr_bytes
else:
vol_usage.tot_last_refreshed = refreshed
vol_usage.tot_reads = rd_req
vol_usage.tot_read_bytes = rd_bytes
vol_usage.tot_writes = wr_req
vol_usage.tot_write_bytes = wr_bytes
vol_usage.save(session=session)
return vol_usage
####################
def s3_image_get(context, image_id):
"""Find local s3 image represented by the provided id."""
result = model_query(context, models.S3Image, read_deleted="yes").\
filter_by(id=image_id).\
first()
if not result:
raise exception.ImageNotFound(image_id=image_id)
return result
def s3_image_get_by_uuid(context, image_uuid):
"""Find local s3 image represented by the provided uuid."""
result = model_query(context, models.S3Image, read_deleted="yes").\
filter_by(uuid=image_uuid).\
first()
if not result:
raise exception.ImageNotFound(image_id=image_uuid)
return result
def s3_image_create(context, image_uuid):
"""Create local s3 image represented by provided uuid."""
try:
s3_image_ref = models.S3Image()
s3_image_ref.update({'uuid': image_uuid})
s3_image_ref.save()
except Exception as e:
raise db_exc.DBError(e)
return s3_image_ref
####################
def _aggregate_get_query(context, model_class, id_field=None, id=None,
session=None, read_deleted=None):
columns_to_join = {models.Aggregate: ['_hosts', '_metadata']}
query = model_query(context, model_class, session=session,
read_deleted=read_deleted)
for c in columns_to_join.get(model_class, []):
query = query.options(joinedload(c))
if id and id_field:
query = query.filter(id_field == id)
return query
def aggregate_create(context, values, metadata=None):
session = get_session()
query = _aggregate_get_query(context,
models.Aggregate,
models.Aggregate.name,
values['name'],
session=session,
read_deleted='no')
aggregate = query.first()
if not aggregate:
aggregate = models.Aggregate()
aggregate.update(values)
aggregate.save(session=session)
# We don't want these to be lazy loaded later. We know there is
# nothing here since we just created this aggregate.
aggregate._hosts = []
aggregate._metadata = []
else:
raise exception.AggregateNameExists(aggregate_name=values['name'])
if metadata:
aggregate_metadata_add(context, aggregate.id, metadata)
return aggregate_get(context, aggregate.id)
def aggregate_get(context, aggregate_id):
query = _aggregate_get_query(context,
models.Aggregate,
models.Aggregate.id,
aggregate_id)
aggregate = query.first()
if not aggregate:
raise exception.AggregateNotFound(aggregate_id=aggregate_id)
return aggregate
def aggregate_get_by_host(context, host, key=None):
"""Return rows that match host (mandatory) and metadata key (optional).
    :param host: Matches host, and is required.
    :param key: Matches metadata key, if not None.
"""
query = model_query(context, models.Aggregate)
query = query.options(joinedload('_hosts'))
query = query.options(joinedload('_metadata'))
query = query.join('_hosts')
query = query.filter(models.AggregateHost.host == host)
if key:
query = query.join("_metadata").filter(
models.AggregateMetadata.key == key)
return query.all()
def aggregate_metadata_get_by_host(context, host, key=None):
query = model_query(context, models.Aggregate)
query = query.join("_hosts")
query = query.join("_metadata")
query = query.filter(models.AggregateHost.host == host)
query = query.options(contains_eager("_metadata"))
if key:
query = query.filter(models.AggregateMetadata.key == key)
rows = query.all()
metadata = collections.defaultdict(set)
for agg in rows:
for kv in agg._metadata:
metadata[kv['key']].add(kv['value'])
return dict(metadata)
def aggregate_metadata_get_by_metadata_key(context, aggregate_id, key):
query = model_query(context, models.Aggregate)
query = query.join("_metadata")
query = query.filter(models.Aggregate.id == aggregate_id)
query = query.options(contains_eager("_metadata"))
query = query.filter(models.AggregateMetadata.key == key)
rows = query.all()
metadata = collections.defaultdict(set)
for agg in rows:
for kv in agg._metadata:
metadata[kv['key']].add(kv['value'])
return dict(metadata)
def aggregate_host_get_by_metadata_key(context, key):
rows = aggregate_get_by_metadata_key(context, key)
metadata = collections.defaultdict(set)
for agg in rows:
for agghost in agg._hosts:
metadata[agghost.host].add(agg._metadata[0]['value'])
return dict(metadata)
def aggregate_get_by_metadata_key(context, key):
"""Return rows that match metadata key.
:param key Matches metadata key.
"""
query = model_query(context, models.Aggregate)
query = query.join("_metadata")
query = query.filter(models.AggregateMetadata.key == key)
query = query.options(contains_eager("_metadata"))
query = query.options(joinedload("_hosts"))
return query.all()
def aggregate_update(context, aggregate_id, values):
session = get_session()
if "name" in values:
aggregate_by_name = (_aggregate_get_query(context,
models.Aggregate,
models.Aggregate.name,
values['name'],
session=session,
read_deleted='no').first())
if aggregate_by_name and aggregate_by_name.id != aggregate_id:
# there is another aggregate with the new name
raise exception.AggregateNameExists(aggregate_name=values['name'])
aggregate = (_aggregate_get_query(context,
models.Aggregate,
models.Aggregate.id,
aggregate_id,
session=session).first())
set_delete = True
if aggregate:
if "availability_zone" in values:
az = values.pop('availability_zone')
if 'metadata' not in values:
values['metadata'] = {'availability_zone': az}
set_delete = False
else:
values['metadata']['availability_zone'] = az
metadata = values.get('metadata')
if metadata is not None:
aggregate_metadata_add(context,
aggregate_id,
values.pop('metadata'),
set_delete=set_delete)
aggregate.update(values)
aggregate.save(session=session)
values['metadata'] = metadata
return aggregate_get(context, aggregate.id)
else:
raise exception.AggregateNotFound(aggregate_id=aggregate_id)
def aggregate_delete(context, aggregate_id):
session = get_session()
with session.begin():
count = _aggregate_get_query(context,
models.Aggregate,
models.Aggregate.id,
aggregate_id,
session=session).\
soft_delete()
if count == 0:
raise exception.AggregateNotFound(aggregate_id=aggregate_id)
# Delete Metadata
model_query(context,
models.AggregateMetadata, session=session).\
filter_by(aggregate_id=aggregate_id).\
soft_delete()
def aggregate_get_all(context):
return _aggregate_get_query(context, models.Aggregate).all()
def _aggregate_metadata_get_query(context, aggregate_id, session=None,
read_deleted="yes"):
return model_query(context,
models.AggregateMetadata,
read_deleted=read_deleted,
session=session).\
filter_by(aggregate_id=aggregate_id)
@require_aggregate_exists
def aggregate_metadata_get(context, aggregate_id):
rows = model_query(context,
models.AggregateMetadata).\
filter_by(aggregate_id=aggregate_id).all()
return dict([(r['key'], r['value']) for r in rows])
@require_aggregate_exists
def aggregate_metadata_delete(context, aggregate_id, key):
count = _aggregate_get_query(context,
models.AggregateMetadata,
models.AggregateMetadata.aggregate_id,
aggregate_id).\
filter_by(key=key).\
soft_delete()
if count == 0:
raise exception.AggregateMetadataNotFound(aggregate_id=aggregate_id,
metadata_key=key)
@require_aggregate_exists
def aggregate_metadata_add(context, aggregate_id, metadata, set_delete=False,
max_retries=10):
all_keys = metadata.keys()
for attempt in xrange(max_retries):
try:
session = get_session()
with session.begin():
query = _aggregate_metadata_get_query(context, aggregate_id,
read_deleted='no',
session=session)
if set_delete:
query.filter(~models.AggregateMetadata.key.in_(all_keys)).\
soft_delete(synchronize_session=False)
query = \
query.filter(models.AggregateMetadata.key.in_(all_keys))
already_existing_keys = set()
for meta_ref in query.all():
key = meta_ref.key
meta_ref.update({"value": metadata[key]})
already_existing_keys.add(key)
new_entries = []
for key, value in metadata.iteritems():
if key in already_existing_keys:
continue
new_entries.append({"key": key,
"value": value,
"aggregate_id": aggregate_id})
if new_entries:
session.execute(
models.AggregateMetadata.__table__.insert(),
new_entries)
return metadata
except db_exc.DBDuplicateEntry:
# a concurrent transaction has been committed,
# try again unless this was the last attempt
with excutils.save_and_reraise_exception() as ctxt:
if attempt < max_retries - 1:
ctxt.reraise = False
else:
msg = _("Add metadata failed for aggregate %(id)s after "
"%(retries)s retries") % {"id": aggregate_id,
"retries": max_retries}
LOG.warn(msg)
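# Illustrative usage sketch (not part of the original module; values are
# hypothetical). Existing keys are updated, new keys are bulk-inserted,
# and with set_delete=True keys absent from the mapping are soft-deleted;
# concurrent duplicate inserts are retried up to max_retries times:
#
#   aggregate_metadata_add(ctxt, agg_id,
#                          {'availability_zone': 'az1', 'ssd': 'true'})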
@require_aggregate_exists
def aggregate_host_get_all(context, aggregate_id):
rows = model_query(context,
models.AggregateHost).\
filter_by(aggregate_id=aggregate_id).all()
return [r.host for r in rows]
@require_aggregate_exists
def aggregate_host_delete(context, aggregate_id, host):
count = _aggregate_get_query(context,
models.AggregateHost,
models.AggregateHost.aggregate_id,
aggregate_id).\
filter_by(host=host).\
soft_delete()
if count == 0:
raise exception.AggregateHostNotFound(aggregate_id=aggregate_id,
host=host)
@require_aggregate_exists
def aggregate_host_add(context, aggregate_id, host):
host_ref = models.AggregateHost()
host_ref.update({"host": host, "aggregate_id": aggregate_id})
try:
host_ref.save()
except db_exc.DBDuplicateEntry:
raise exception.AggregateHostExists(host=host,
aggregate_id=aggregate_id)
return host_ref
################
def instance_fault_create(context, values):
"""Create a new InstanceFault."""
fault_ref = models.InstanceFault()
fault_ref.update(values)
fault_ref.save()
return dict(fault_ref.iteritems())
def instance_fault_get_by_instance_uuids(context, instance_uuids):
"""Get all instance faults for the provided instance_uuids."""
if not instance_uuids:
return {}
rows = model_query(context, models.InstanceFault, read_deleted='no').\
filter(models.InstanceFault.instance_uuid.in_(
instance_uuids)).\
order_by(desc("created_at"), desc("id")).\
all()
output = {}
for instance_uuid in instance_uuids:
output[instance_uuid] = []
for row in rows:
data = dict(row.iteritems())
output[row['instance_uuid']].append(data)
return output
##################
def action_start(context, values):
convert_objects_related_datetimes(values, 'start_time')
action_ref = models.InstanceAction()
action_ref.update(values)
action_ref.save()
return action_ref
def action_finish(context, values):
convert_objects_related_datetimes(values, 'start_time', 'finish_time')
session = get_session()
with session.begin():
action_ref = model_query(context, models.InstanceAction,
session=session).\
filter_by(instance_uuid=values['instance_uuid']).\
filter_by(request_id=values['request_id']).\
first()
if not action_ref:
raise exception.InstanceActionNotFound(
request_id=values['request_id'],
instance_uuid=values['instance_uuid'])
action_ref.update(values)
return action_ref
def actions_get(context, instance_uuid):
"""Get all instance actions for the provided uuid."""
actions = model_query(context, models.InstanceAction).\
filter_by(instance_uuid=instance_uuid).\
order_by(desc("created_at"), desc("id")).\
all()
return actions
def action_get_by_request_id(context, instance_uuid, request_id):
"""Get the action by request_id and given instance."""
action = _action_get_by_request_id(context, instance_uuid, request_id)
return action
def _action_get_by_request_id(context, instance_uuid, request_id,
session=None):
result = model_query(context, models.InstanceAction, session=session).\
filter_by(instance_uuid=instance_uuid).\
filter_by(request_id=request_id).\
first()
return result
def action_event_start(context, values):
"""Start an event on an instance action."""
convert_objects_related_datetimes(values, 'start_time')
session = get_session()
with session.begin():
action = _action_get_by_request_id(context, values['instance_uuid'],
values['request_id'], session)
if not action:
raise exception.InstanceActionNotFound(
request_id=values['request_id'],
instance_uuid=values['instance_uuid'])
values['action_id'] = action['id']
event_ref = models.InstanceActionEvent()
event_ref.update(values)
session.add(event_ref)
return event_ref
def action_event_finish(context, values):
"""Finish an event on an instance action."""
convert_objects_related_datetimes(values, 'start_time', 'finish_time')
session = get_session()
with session.begin():
action = _action_get_by_request_id(context, values['instance_uuid'],
values['request_id'], session)
if not action:
raise exception.InstanceActionNotFound(
request_id=values['request_id'],
instance_uuid=values['instance_uuid'])
event_ref = model_query(context, models.InstanceActionEvent,
session=session).\
filter_by(action_id=action['id']).\
filter_by(event=values['event']).\
first()
if not event_ref:
raise exception.InstanceActionEventNotFound(action_id=action['id'],
event=values['event'])
event_ref.update(values)
if values['result'].lower() == 'error':
action.update({'message': 'Error'})
return event_ref
def action_events_get(context, action_id):
events = model_query(context, models.InstanceActionEvent).\
filter_by(action_id=action_id).\
order_by(desc("created_at"), desc("id")).\
all()
return events
def action_event_get_by_id(context, action_id, event_id):
event = model_query(context, models.InstanceActionEvent).\
filter_by(action_id=action_id).\
filter_by(id=event_id).\
first()
return event
##################
@require_context
def ec2_instance_create(context, instance_uuid, id=None):
"""Create ec2 compatible instance by provided uuid."""
ec2_instance_ref = models.InstanceIdMapping()
ec2_instance_ref.update({'uuid': instance_uuid}, do_save=False)
if id is not None:
ec2_instance_ref.update({'id': id}, do_save=False)
ec2_instance_ref.save()
return ec2_instance_ref
@require_context
def ec2_instance_get_by_uuid(context, instance_uuid):
result = _ec2_instance_get_query(context).\
filter_by(uuid=instance_uuid).\
first()
if not result:
raise exception.InstanceNotFound(instance_id=instance_uuid)
return result
@require_context
def get_ec2_instance_id_by_uuid(context, instance_id):
result = ec2_instance_get_by_uuid(context, instance_id)
return result['id']
@require_context
def ec2_instance_get_by_id(context, instance_id):
result = _ec2_instance_get_query(context).\
filter_by(id=instance_id).\
first()
if not result:
raise exception.InstanceNotFound(instance_id=instance_id)
return result
@require_context
def get_instance_uuid_by_ec2_id(context, ec2_id):
result = ec2_instance_get_by_id(context, ec2_id)
return result['uuid']
def _ec2_instance_get_query(context, session=None):
return model_query(context,
models.InstanceIdMapping,
session=session,
read_deleted='yes')
def _task_log_get_query(context, task_name, period_beginning,
period_ending, host=None, state=None, session=None):
query = model_query(context, models.TaskLog, session=session).\
filter_by(task_name=task_name).\
filter_by(period_beginning=period_beginning).\
filter_by(period_ending=period_ending)
if host is not None:
query = query.filter_by(host=host)
if state is not None:
query = query.filter_by(state=state)
return query
@require_admin_context
def task_log_get(context, task_name, period_beginning, period_ending, host,
state=None):
return _task_log_get_query(context, task_name, period_beginning,
period_ending, host, state).first()
@require_admin_context
def task_log_get_all(context, task_name, period_beginning, period_ending,
host=None, state=None):
return _task_log_get_query(context, task_name, period_beginning,
period_ending, host, state).all()
@require_admin_context
def task_log_begin_task(context, task_name, period_beginning, period_ending,
host, task_items=None, message=None):
task = models.TaskLog()
task.task_name = task_name
task.period_beginning = period_beginning
task.period_ending = period_ending
task.host = host
task.state = "RUNNING"
if message:
task.message = message
if task_items:
task.task_items = task_items
try:
task.save()
except db_exc.DBDuplicateEntry:
raise exception.TaskAlreadyRunning(task_name=task_name, host=host)
@require_admin_context
def task_log_end_task(context, task_name, period_beginning, period_ending,
host, errors, message=None):
values = dict(state="DONE", errors=errors)
if message:
values["message"] = message
session = get_session()
with session.begin():
rows = _task_log_get_query(context, task_name, period_beginning,
period_ending, host, session=session).\
update(values)
if rows == 0:
# It's not running!
raise exception.TaskNotRunning(task_name=task_name, host=host)
def _get_default_deleted_value(table):
# TODO(dripton): It would be better to introspect the actual default value
# from the column, but I don't see a way to do that in the low-level APIs
# of SQLAlchemy 0.7. 0.8 has better introspection APIs, which we should
# use when Nova is ready to require 0.8.
# NOTE(mikal): this is a little confusing. This method returns the value
# that a _not_deleted_ row would have.
deleted_column_type = table.c.deleted.type
if isinstance(deleted_column_type, Integer):
return 0
elif isinstance(deleted_column_type, Boolean):
return False
elif isinstance(deleted_column_type, String):
return ""
else:
return None
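# Illustrative note (not part of the original module): the helper above
# maps the type of the ``deleted`` column to the value a live (not yet
# deleted) row carries, e.g. Integer -> 0, Boolean -> False, String -> "",
# which the archiving code below uses to select only soft-deleted rows.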
@require_admin_context
def archive_deleted_rows_for_table(context, tablename, max_rows):
"""Move up to max_rows rows from one tables to the corresponding
shadow table. The context argument is only used for the decorator.
:returns: number of rows archived
"""
# NOTE(guochbo): There is a circular import, nova.db.sqlalchemy.utils
# imports nova.db.sqlalchemy.api.
from nova.db.sqlalchemy import utils as db_utils
engine = get_engine()
conn = engine.connect()
metadata = MetaData()
metadata.bind = engine
table = Table(tablename, metadata, autoload=True)
default_deleted_value = _get_default_deleted_value(table)
shadow_tablename = _SHADOW_TABLE_PREFIX + tablename
rows_archived = 0
try:
shadow_table = Table(shadow_tablename, metadata, autoload=True)
except NoSuchTableError:
# No corresponding shadow table; skip it.
return rows_archived
if tablename == "dns_domains":
# We have one table (dns_domains) where the key is called
# "domain" rather than "id"
column = table.c.domain
else:
column = table.c.id
    # NOTE(guochbo): Use InsertFromSelect and DeleteFromSelect to avoid
    # the database's limit on the maximum number of parameters in one
    # SQL statement.
query_insert = sql.select([table],
table.c.deleted != default_deleted_value).\
order_by(column).limit(max_rows)
query_delete = sql.select([column],
table.c.deleted != default_deleted_value).\
order_by(column).limit(max_rows)
insert_statement = sqlalchemyutils.InsertFromSelect(
shadow_table, query_insert)
delete_statement = db_utils.DeleteFromSelect(table, query_delete, column)
try:
# Group the insert and delete in a transaction.
with conn.begin():
conn.execute(insert_statement)
result_delete = conn.execute(delete_statement)
except db_exc.DBError:
# TODO(ekudryashova): replace by DBReferenceError when db layer
# raise it.
# A foreign key constraint keeps us from deleting some of
# these rows until we clean up a dependent table. Just
# skip this table for now; we'll come back to it later.
msg = _("IntegrityError detected when archiving table %s") % tablename
LOG.warn(msg)
return rows_archived
rows_archived = result_delete.rowcount
return rows_archived
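# Illustrative note (not part of the original module; table names are
# hypothetical). The archiving step above builds a statement pair of
# roughly this shape, grouped in one transaction:
#
#   INSERT INTO shadow_instances
#       SELECT * FROM instances WHERE deleted != 0
#       ORDER BY id LIMIT :max_rows;
#   DELETE FROM instances WHERE id IN
#       (SELECT id FROM instances WHERE deleted != 0
#        ORDER BY id LIMIT :max_rows);
#
# Selecting the rows server-side avoids hitting the database's limit on
# the number of bound parameters; a foreign-key error simply skips the
# table for this pass.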
@require_admin_context
def archive_deleted_rows(context, max_rows=None):
"""Move up to max_rows rows from production tables to the corresponding
shadow tables.
:returns: Number of rows archived.
"""
# The context argument is only used for the decorator.
tablenames = []
for model_class in models.__dict__.itervalues():
if hasattr(model_class, "__tablename__"):
tablenames.append(model_class.__tablename__)
rows_archived = 0
for tablename in tablenames:
rows_archived += archive_deleted_rows_for_table(context, tablename,
max_rows=max_rows - rows_archived)
if rows_archived >= max_rows:
break
return rows_archived
####################
def _instance_group_get_query(context, model_class, id_field=None, id=None,
session=None, read_deleted=None):
columns_to_join = {models.InstanceGroup: ['_policies', '_members']}
query = model_query(context, model_class, session=session,
read_deleted=read_deleted)
for c in columns_to_join.get(model_class, []):
query = query.options(joinedload(c))
if id and id_field:
query = query.filter(id_field == id)
return query
def instance_group_create(context, values, policies=None,
members=None):
"""Create a new group."""
uuid = values.get('uuid', None)
if uuid is None:
uuid = uuidutils.generate_uuid()
values['uuid'] = uuid
session = get_session()
with session.begin():
try:
group = models.InstanceGroup()
group.update(values)
group.save(session=session)
except db_exc.DBDuplicateEntry:
raise exception.InstanceGroupIdExists(group_uuid=uuid)
# We don't want these to be lazy loaded later. We know there is
# nothing here since we just created this instance group.
group._policies = []
group._members = []
if policies:
_instance_group_policies_add(context, group.id, policies,
session=session)
if members:
_instance_group_members_add(context, group.id, members,
session=session)
return instance_group_get(context, uuid)
def instance_group_get(context, group_uuid):
"""Get a specific group by uuid."""
group = _instance_group_get_query(context,
models.InstanceGroup,
models.InstanceGroup.uuid,
group_uuid).\
first()
if not group:
raise exception.InstanceGroupNotFound(group_uuid=group_uuid)
return group
def instance_group_get_by_instance(context, instance_uuid):
session = get_session()
with session.begin():
group_member = model_query(context, models.InstanceGroupMember,
session=session).\
filter_by(instance_id=instance_uuid).\
first()
if not group_member:
raise exception.InstanceGroupNotFound(group_uuid='')
group = _instance_group_get_query(context, models.InstanceGroup,
models.InstanceGroup.id,
group_member.group_id,
session=session).first()
if not group:
raise exception.InstanceGroupNotFound(
group_uuid=group_member.group_id)
return group
def instance_group_update(context, group_uuid, values):
"""Update the attributes of an group.
If values contains a metadata key, it updates the aggregate metadata
too. Similarly for the policies and members.
"""
session = get_session()
with session.begin():
group = model_query(context,
models.InstanceGroup,
session=session).\
filter_by(uuid=group_uuid).\
first()
if not group:
raise exception.InstanceGroupNotFound(group_uuid=group_uuid)
policies = values.get('policies')
if policies is not None:
_instance_group_policies_add(context,
group.id,
values.pop('policies'),
set_delete=True,
session=session)
members = values.get('members')
if members is not None:
_instance_group_members_add(context,
group.id,
values.pop('members'),
set_delete=True,
session=session)
group.update(values)
if policies:
values['policies'] = policies
if members:
values['members'] = members
def instance_group_delete(context, group_uuid):
"""Delete an group."""
session = get_session()
with session.begin():
group_id = _instance_group_id(context, group_uuid, session=session)
count = _instance_group_get_query(context,
models.InstanceGroup,
models.InstanceGroup.uuid,
group_uuid,
session=session).soft_delete()
if count == 0:
raise exception.InstanceGroupNotFound(group_uuid=group_uuid)
# Delete policies, metadata and members
instance_models = [models.InstanceGroupPolicy,
models.InstanceGroupMember]
for model in instance_models:
model_query(context, model, session=session).\
filter_by(group_id=group_id).\
soft_delete()
def instance_group_get_all(context):
"""Get all groups."""
return _instance_group_get_query(context, models.InstanceGroup).all()
def instance_group_get_all_by_project_id(context, project_id):
"""Get all groups."""
return _instance_group_get_query(context, models.InstanceGroup).\
filter_by(project_id=project_id).\
all()
def _instance_group_count_by_project_and_user(context, project_id,
user_id, session=None):
return model_query(context, models.InstanceGroup, read_deleted="no",
session=session).\
filter_by(project_id=project_id).\
filter_by(user_id=user_id).\
count()
def _instance_group_model_get_query(context, model_class, group_id,
session=None, read_deleted='no'):
return model_query(context,
model_class,
read_deleted=read_deleted,
session=session).\
filter_by(group_id=group_id)
def _instance_group_id(context, group_uuid, session=None):
"""Returns the group database ID for the group UUID."""
result = model_query(context,
models.InstanceGroup.id,
base_model=models.InstanceGroup,
session=session).\
filter_by(uuid=group_uuid).\
first()
if not result:
raise exception.InstanceGroupNotFound(group_uuid=group_uuid)
return result.id
def _instance_group_members_add(context, id, members, set_delete=False,
session=None):
if not session:
session = get_session()
all_members = set(members)
with session.begin(subtransactions=True):
query = _instance_group_model_get_query(context,
models.InstanceGroupMember,
id,
session=session)
if set_delete:
query.filter(~models.InstanceGroupMember.instance_id.in_(
all_members)).\
soft_delete(synchronize_session=False)
query = query.filter(
models.InstanceGroupMember.instance_id.in_(all_members))
already_existing = set()
for member_ref in query.all():
already_existing.add(member_ref.instance_id)
for instance_id in members:
if instance_id in already_existing:
continue
member_ref = models.InstanceGroupMember()
member_ref.update({'instance_id': instance_id,
'group_id': id})
session.add(member_ref)
return members
def instance_group_members_add(context, group_uuid, members,
set_delete=False):
id = _instance_group_id(context, group_uuid)
return _instance_group_members_add(context, id, members,
set_delete=set_delete)
def instance_group_member_delete(context, group_uuid, instance_id):
id = _instance_group_id(context, group_uuid)
count = _instance_group_get_query(context,
models.InstanceGroupMember,
models.InstanceGroupMember.group_id,
id).\
filter_by(instance_id=instance_id).\
soft_delete()
if count == 0:
raise exception.InstanceGroupMemberNotFound(group_uuid=group_uuid,
instance_id=instance_id)
def instance_group_members_get(context, group_uuid):
id = _instance_group_id(context, group_uuid)
instances = model_query(context,
models.InstanceGroupMember.instance_id,
base_model=models.InstanceGroupMember).\
filter_by(group_id=id).all()
return [instance[0] for instance in instances]
def _instance_group_policies_add(context, id, policies, set_delete=False,
session=None):
if not session:
session = get_session()
allpols = set(policies)
with session.begin(subtransactions=True):
query = _instance_group_model_get_query(context,
models.InstanceGroupPolicy,
id,
session=session)
if set_delete:
query.filter(~models.InstanceGroupPolicy.policy.in_(allpols)).\
soft_delete(synchronize_session=False)
query = query.filter(models.InstanceGroupPolicy.policy.in_(allpols))
already_existing = set()
for policy_ref in query.all():
already_existing.add(policy_ref.policy)
for policy in policies:
if policy in already_existing:
continue
policy_ref = models.InstanceGroupPolicy()
policy_ref.update({'policy': policy,
'group_id': id})
session.add(policy_ref)
return policies
def instance_group_policies_add(context, group_uuid, policies,
set_delete=False):
id = _instance_group_id(context, group_uuid)
return _instance_group_policies_add(context, id, policies,
set_delete=set_delete)
def instance_group_policy_delete(context, group_uuid, policy):
id = _instance_group_id(context, group_uuid)
count = _instance_group_get_query(context,
models.InstanceGroupPolicy,
models.InstanceGroupPolicy.group_id,
id).\
filter_by(policy=policy).\
soft_delete()
if count == 0:
raise exception.InstanceGroupPolicyNotFound(group_uuid=group_uuid,
policy=policy)
def instance_group_policies_get(context, group_uuid):
id = _instance_group_id(context, group_uuid)
policies = model_query(context,
models.InstanceGroupPolicy.policy,
base_model=models.InstanceGroupPolicy).\
filter_by(group_id=id).all()
return [policy[0] for policy in policies]
####################
@require_admin_context
def pci_device_get_by_addr(context, node_id, dev_addr):
pci_dev_ref = model_query(context, models.PciDevice).\
filter_by(compute_node_id=node_id).\
filter_by(address=dev_addr).\
first()
if not pci_dev_ref:
raise exception.PciDeviceNotFound(node_id=node_id, address=dev_addr)
return pci_dev_ref
@require_admin_context
def pci_device_get_by_id(context, id):
pci_dev_ref = model_query(context, models.PciDevice).\
filter_by(id=id).\
first()
if not pci_dev_ref:
raise exception.PciDeviceNotFoundById(id=id)
return pci_dev_ref
@require_admin_context
def pci_device_get_all_by_node(context, node_id):
return model_query(context, models.PciDevice).\
filter_by(compute_node_id=node_id).\
all()
@require_context
def pci_device_get_all_by_instance_uuid(context, instance_uuid):
return model_query(context, models.PciDevice).\
filter_by(status='allocated').\
filter_by(instance_uuid=instance_uuid).\
all()
def _instance_pcidevs_get_multi(context, instance_uuids, session=None):
return model_query(context, models.PciDevice, session=session).\
filter_by(status='allocated').\
filter(models.PciDevice.instance_uuid.in_(instance_uuids))
@require_admin_context
def pci_device_destroy(context, node_id, address):
result = model_query(context, models.PciDevice).\
filter_by(compute_node_id=node_id).\
filter_by(address=address).\
soft_delete()
if not result:
raise exception.PciDeviceNotFound(node_id=node_id, address=address)
@require_admin_context
def pci_device_update(context, node_id, address, values):
session = get_session()
with session.begin():
device = model_query(context, models.PciDevice, session=session,
read_deleted="no").\
filter_by(compute_node_id=node_id).\
filter_by(address=address).\
first()
if not device:
device = models.PciDevice()
device.update(values)
session.add(device)
return device
| badock/nova | nova/db/discovery/api.py | Python | apache-2.0 | 232,570 | 0.001303 |
"""LCM type definitions
This file automatically generated by lcm.
DO NOT MODIFY BY HAND!!!!
"""
import cStringIO as StringIO
import struct
import header
class piemos_field_cmd(object):
__slots__ = ["header", "isFlash", "isStart", "isLeft", "rfid_uid"]
def __init__(self):
self.header = None
self.isFlash = False
self.isStart = False
self.isLeft = False
self.rfid_uid = 0
def encode(self):
buf = StringIO.StringIO()
buf.write(piemos_field_cmd._get_packed_fingerprint())
self._encode_one(buf)
return buf.getvalue()
def _encode_one(self, buf):
assert self.header._get_packed_fingerprint() == header.header._get_packed_fingerprint()
self.header._encode_one(buf)
buf.write(struct.pack(">bbbq", self.isFlash, self.isStart, self.isLeft, self.rfid_uid))
def decode(data):
if hasattr(data, 'read'):
buf = data
else:
buf = StringIO.StringIO(data)
if buf.read(8) != piemos_field_cmd._get_packed_fingerprint():
raise ValueError("Decode error")
return piemos_field_cmd._decode_one(buf)
decode = staticmethod(decode)
def _decode_one(buf):
self = piemos_field_cmd()
self.header = header.header._decode_one(buf)
self.isFlash, self.isStart, self.isLeft, self.rfid_uid = struct.unpack(">bbbq", buf.read(11))
return self
_decode_one = staticmethod(_decode_one)
_hash = None
def _get_hash_recursive(parents):
if piemos_field_cmd in parents: return 0
newparents = parents + [piemos_field_cmd]
tmphash = (0x41930ef51bb056ba+ header.header._get_hash_recursive(newparents)) & 0xffffffffffffffff
tmphash = (((tmphash<<1)&0xffffffffffffffff) + (tmphash>>63)) & 0xffffffffffffffff
return tmphash
_get_hash_recursive = staticmethod(_get_hash_recursive)
_packed_fingerprint = None
def _get_packed_fingerprint():
if piemos_field_cmd._packed_fingerprint is None:
piemos_field_cmd._packed_fingerprint = struct.pack(">Q", piemos_field_cmd._get_hash_recursive([]))
return piemos_field_cmd._packed_fingerprint
_get_packed_fingerprint = staticmethod(_get_packed_fingerprint)
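# Illustrative round-trip sketch (not part of the generated file; the
# field values are hypothetical):
#
#   msg = piemos_field_cmd()
#   msg.header = header.header()
#   msg.isStart = True
#   msg.rfid_uid = 0x1234
#   data = msg.encode()                  # 8-byte fingerprint + payload
#   same = piemos_field_cmd.decode(data)
#
# decode() raises ValueError when the leading fingerprint does not match.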
| pioneers/topgear | python/forseti2/piemos_field_cmd.py | Python | apache-2.0 | 2,272 | 0.007042 |
#!/usr/bin/env python
from constants import CARTESIAN_COORDS
import colorsys
import sys
class Pattern(object):
center_x, center_y = 0, 0
i = 0
def next_frame(self):
self.i += 1
def get_color(self, x, y):
d = (x ** 2 + y ** 2) ** 0.5
d *= 0.1 # scale the bands
d -= 0.025 * self.i # frame step size
r, g, b = colorsys.hsv_to_rgb(d%1, 1, 1)
red = 255 * r
green = 255 * g
blue = 255 * b
c = (int(red), int(green), int(blue))
return c
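# Illustrative note (not part of the original script): get_color() maps a
# pixel's distance from the origin to a hue, so the output is a set of
# rainbow rings that drift outward by 0.025 of a hue cycle per frame.
# For example, at frame 0 a pixel at (3, 4) has d = 5, hue 0.5, i.e.
# cyan (0, 255, 255).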
p = Pattern()
for frame in range(6000):
for x, y in CARTESIAN_COORDS:
color = p.get_color(x, y)
r, g, b = color
sys.stdout.write(chr(r))
sys.stdout.write(chr(g))
sys.stdout.write(chr(b))
sys.stdout.flush()
p.next_frame()
| godlygeek/LightRender | simple_rainbow.py | Python | mit | 807 | 0.006196 |
"""SCons.Script.SConscript
This module defines the Python API provided to SConscript and SConstruct
files.
"""
#
# Copyright (c) 2001 - 2015 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from __future__ import division
__revision__ = "src/engine/SCons/Script/SConscript.py rel_2.4.0:3365:9259ea1c13d7 2015/09/21 14:03:43 bdbaddog"
import SCons
import SCons.Action
import SCons.Builder
import SCons.Defaults
import SCons.Environment
import SCons.Errors
import SCons.Node
import SCons.Node.Alias
import SCons.Node.FS
import SCons.Platform
import SCons.SConf
import SCons.Script.Main
import SCons.Tool
import SCons.Util
import collections
import os
import os.path
import re
import sys
import traceback
# The following variables used to live in this module. Some
# SConscript files out there may have referred to them directly as
# SCons.Script.SConscript.*. This is now supported by some special
# handling towards the bottom of the SConscript.__init__.py module.
#Arguments = {}
#ArgList = []
#BuildTargets = TargetList()
#CommandLineTargets = []
#DefaultTargets = []
class SConscriptReturn(Exception):
pass
launch_dir = os.path.abspath(os.curdir)
GlobalDict = None
# global exports set by Export():
global_exports = {}
# chdir flag
sconscript_chdir = 1
def get_calling_namespaces():
"""Return the locals and globals for the function that called
into this module in the current call stack."""
try: 1//0
except ZeroDivisionError:
# Don't start iterating with the current stack-frame to
# prevent creating reference cycles (f_back is safe).
frame = sys.exc_info()[2].tb_frame.f_back
# Find the first frame that *isn't* from this file. This means
# that we expect all of the SCons frames that implement an Export()
# or SConscript() call to be in this file, so that we can identify
# the first non-Script.SConscript frame as the user's local calling
# environment, and the locals and globals dictionaries from that
# frame as the calling namespaces. See the comment below preceding
# the DefaultEnvironmentCall block for even more explanation.
while frame.f_globals.get("__name__") == __name__:
frame = frame.f_back
return frame.f_locals, frame.f_globals
def compute_exports(exports):
"""Compute a dictionary of exports given one of the parameters
to the Export() function or the exports argument to SConscript()."""
loc, glob = get_calling_namespaces()
retval = {}
try:
for export in exports:
if SCons.Util.is_Dict(export):
retval.update(export)
else:
try:
retval[export] = loc[export]
except KeyError:
retval[export] = glob[export]
except KeyError, x:
raise SCons.Errors.UserError("Export of non-existent variable '%s'"%x)
return retval
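# Illustrative sketch (not part of the SCons source; names are
# hypothetical): if the calling SConscript frame defines a local
# variable ``env``, then
#
#   compute_exports(['env'])          # -> {'env': <the caller's env>}
#   compute_exports([{'debug': 1}])   # dict arguments are merged as-is
#
# A name that exists in neither the caller's locals nor globals raises
# SCons.Errors.UserError.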
class Frame(object):
"""A frame on the SConstruct/SConscript call stack"""
def __init__(self, fs, exports, sconscript):
self.globals = BuildDefaultGlobals()
self.retval = None
self.prev_dir = fs.getcwd()
self.exports = compute_exports(exports) # exports from the calling SConscript
# make sure the sconscript attr is a Node.
if isinstance(sconscript, SCons.Node.Node):
self.sconscript = sconscript
elif sconscript == '-':
self.sconscript = None
else:
self.sconscript = fs.File(str(sconscript))
# the SConstruct/SConscript call stack:
call_stack = []
# For documentation on the methods in this file, see the scons man-page
def Return(*vars, **kw):
retval = []
try:
fvars = SCons.Util.flatten(vars)
for var in fvars:
for v in var.split():
retval.append(call_stack[-1].globals[v])
except KeyError, x:
raise SCons.Errors.UserError("Return of non-existent variable '%s'"%x)
if len(retval) == 1:
call_stack[-1].retval = retval[0]
else:
call_stack[-1].retval = tuple(retval)
stop = kw.get('stop', True)
if stop:
raise SConscriptReturn
stack_bottom = '% Stack boTTom %' # hard to define a variable w/this name :)
def _SConscript(fs, *files, **kw):
top = fs.Top
sd = fs.SConstruct_dir.rdir()
exports = kw.get('exports', [])
# evaluate each SConscript file
results = []
for fn in files:
call_stack.append(Frame(fs, exports, fn))
old_sys_path = sys.path
try:
SCons.Script.sconscript_reading = SCons.Script.sconscript_reading + 1
if fn == "-":
exec sys.stdin in call_stack[-1].globals
else:
if isinstance(fn, SCons.Node.Node):
f = fn
else:
f = fs.File(str(fn))
_file_ = None
# Change directory to the top of the source
# tree to make sure the os's cwd and the cwd of
# fs match so we can open the SConscript.
fs.chdir(top, change_os_dir=1)
if f.rexists():
actual = f.rfile()
_file_ = open(actual.get_abspath(), "r")
elif f.srcnode().rexists():
actual = f.srcnode().rfile()
_file_ = open(actual.get_abspath(), "r")
elif f.has_src_builder():
# The SConscript file apparently exists in a source
# code management system. Build it, but then clear
# the builder so that it doesn't get built *again*
# during the actual build phase.
f.build()
f.built()
f.builder_set(None)
if f.exists():
_file_ = open(f.get_abspath(), "r")
if _file_:
# Chdir to the SConscript directory. Use a path
# name relative to the SConstruct file so that if
# we're using the -f option, we're essentially
# creating a parallel SConscript directory structure
# in our local directory tree.
#
# XXX This is broken for multiple-repository cases
# where the SConstruct and SConscript files might be
# in different Repositories. For now, cross that
# bridge when someone comes to it.
try:
src_dir = kw['src_dir']
except KeyError:
ldir = fs.Dir(f.dir.get_path(sd))
else:
ldir = fs.Dir(src_dir)
if not ldir.is_under(f.dir):
# They specified a source directory, but
# it's above the SConscript directory.
# Do the sensible thing and just use the
                            # SConscript directory.
ldir = fs.Dir(f.dir.get_path(sd))
try:
fs.chdir(ldir, change_os_dir=sconscript_chdir)
except OSError:
# There was no local directory, so we should be
# able to chdir to the Repository directory.
# Note that we do this directly, not through
# fs.chdir(), because we still need to
# interpret the stuff within the SConscript file
# relative to where we are logically.
fs.chdir(ldir, change_os_dir=0)
os.chdir(actual.dir.get_abspath())
# Append the SConscript directory to the beginning
# of sys.path so Python modules in the SConscript
# directory can be easily imported.
sys.path = [ f.dir.get_abspath() ] + sys.path
# This is the magic line that actually reads up
# and executes the stuff in the SConscript file.
# The locals for this frame contain the special
# bottom-of-the-stack marker so that any
# exceptions that occur when processing this
# SConscript can base the printed frames at this
# level and not show SCons internals as well.
call_stack[-1].globals.update({stack_bottom:1})
old_file = call_stack[-1].globals.get('__file__')
try:
del call_stack[-1].globals['__file__']
except KeyError:
pass
try:
try:
exec _file_ in call_stack[-1].globals
except SConscriptReturn:
pass
finally:
if old_file is not None:
                        call_stack[-1].globals.update({'__file__': old_file})
else:
SCons.Warnings.warn(SCons.Warnings.MissingSConscriptWarning,
"Ignoring missing SConscript '%s'" % f.get_internal_path())
finally:
SCons.Script.sconscript_reading = SCons.Script.sconscript_reading - 1
sys.path = old_sys_path
frame = call_stack.pop()
try:
fs.chdir(frame.prev_dir, change_os_dir=sconscript_chdir)
except OSError:
# There was no local directory, so chdir to the
# Repository directory. Like above, we do this
# directly.
fs.chdir(frame.prev_dir, change_os_dir=0)
rdir = frame.prev_dir.rdir()
rdir._create() # Make sure there's a directory there.
try:
os.chdir(rdir.get_abspath())
except OSError, e:
# We still couldn't chdir there, so raise the error,
# but only if actions are being executed.
#
# If the -n option was used, the directory would *not*
# have been created and we should just carry on and
# let things muddle through. This isn't guaranteed
# to work if the SConscript files are reading things
# from disk (for example), but it should work well
# enough for most configurations.
if SCons.Action.execute_actions:
raise e
results.append(frame.retval)
# if we only have one script, don't return a tuple
if len(results) == 1:
return results[0]
else:
return tuple(results)
def SConscript_exception(file=sys.stderr):
"""Print an exception stack trace just for the SConscript file(s).
This will show users who have Python errors where the problem is,
without cluttering the output with all of the internal calls leading
up to where we exec the SConscript."""
exc_type, exc_value, exc_tb = sys.exc_info()
tb = exc_tb
while tb and stack_bottom not in tb.tb_frame.f_locals:
tb = tb.tb_next
if not tb:
# We did not find our exec statement, so this was actually a bug
# in SCons itself. Show the whole stack.
tb = exc_tb
stack = traceback.extract_tb(tb)
try:
type = exc_type.__name__
except AttributeError:
type = str(exc_type)
if type[:11] == "exceptions.":
type = type[11:]
file.write('%s: %s:\n' % (type, exc_value))
for fname, line, func, text in stack:
file.write(' File "%s", line %d:\n' % (fname, line))
file.write(' %s\n' % text)
def annotate(node):
"""Annotate a node with the stack frame describing the
SConscript file and line number that created it."""
tb = sys.exc_info()[2]
while tb and stack_bottom not in tb.tb_frame.f_locals:
tb = tb.tb_next
if not tb:
# We did not find any exec of an SConscript file: what?!
raise SCons.Errors.InternalError("could not find SConscript stack frame")
node.creator = traceback.extract_stack(tb)[0]
# The following line would cause each Node to be annotated using the
# above function. Unfortunately, this is a *huge* performance hit, so
# leave this disabled until we find a more efficient mechanism.
#SCons.Node.Annotate = annotate
class SConsEnvironment(SCons.Environment.Base):
"""An Environment subclass that contains all of the methods that
are particular to the wrapper SCons interface and which aren't
(or shouldn't be) part of the build engine itself.
Note that not all of the methods of this class have corresponding
global functions, there are some private methods.
"""
#
# Private methods of an SConsEnvironment.
#
def _exceeds_version(self, major, minor, v_major, v_minor):
"""Return 1 if 'major' and 'minor' are greater than the version
in 'v_major' and 'v_minor', and 0 otherwise."""
return (major > v_major or (major == v_major and minor > v_minor))
def _get_major_minor_revision(self, version_string):
"""Split a version string into major, minor and (optionally)
revision parts.
This is complicated by the fact that a version string can be
something like 3.2b1."""
version = version_string.split(' ')[0].split('.')
v_major = int(version[0])
v_minor = int(re.match('\d+', version[1]).group())
if len(version) >= 3:
v_revision = int(re.match('\d+', version[2]).group())
else:
v_revision = 0
return v_major, v_minor, v_revision
def _get_SConscript_filenames(self, ls, kw):
"""
Convert the parameters passed to SConscript() calls into a list
of files and export variables. If the parameters are invalid,
throws SCons.Errors.UserError. Returns a tuple (l, e) where l
is a list of SConscript filenames and e is a list of exports.
"""
exports = []
if len(ls) == 0:
try:
dirs = kw["dirs"]
except KeyError:
raise SCons.Errors.UserError("Invalid SConscript usage - no parameters")
if not SCons.Util.is_List(dirs):
dirs = [ dirs ]
dirs = list(map(str, dirs))
name = kw.get('name', 'SConscript')
files = [os.path.join(n, name) for n in dirs]
elif len(ls) == 1:
files = ls[0]
elif len(ls) == 2:
files = ls[0]
exports = self.Split(ls[1])
else:
raise SCons.Errors.UserError("Invalid SConscript() usage - too many arguments")
if not SCons.Util.is_List(files):
files = [ files ]
if kw.get('exports'):
exports.extend(self.Split(kw['exports']))
variant_dir = kw.get('variant_dir') or kw.get('build_dir')
if variant_dir:
if len(files) != 1:
raise SCons.Errors.UserError("Invalid SConscript() usage - can only specify one SConscript with a variant_dir")
duplicate = kw.get('duplicate', 1)
src_dir = kw.get('src_dir')
if not src_dir:
src_dir, fname = os.path.split(str(files[0]))
files = [os.path.join(str(variant_dir), fname)]
else:
if not isinstance(src_dir, SCons.Node.Node):
src_dir = self.fs.Dir(src_dir)
fn = files[0]
if not isinstance(fn, SCons.Node.Node):
fn = self.fs.File(fn)
if fn.is_under(src_dir):
# Get path relative to the source directory.
fname = fn.get_path(src_dir)
files = [os.path.join(str(variant_dir), fname)]
else:
files = [fn.get_abspath()]
kw['src_dir'] = variant_dir
self.fs.VariantDir(variant_dir, src_dir, duplicate)
return (files, exports)
#
# Public methods of an SConsEnvironment. These get
# entry points in the global name space so they can be called
# as global functions.
#
def Configure(self, *args, **kw):
if not SCons.Script.sconscript_reading:
raise SCons.Errors.UserError("Calling Configure from Builders is not supported.")
kw['_depth'] = kw.get('_depth', 0) + 1
return SCons.Environment.Base.Configure(self, *args, **kw)
def Default(self, *targets):
SCons.Script._Set_Default_Targets(self, targets)
def EnsureSConsVersion(self, major, minor, revision=0):
"""Exit abnormally if the SCons version is not late enough."""
# split string to avoid replacement during build process
if SCons.__version__ == '__' + 'VERSION__':
SCons.Warnings.warn(SCons.Warnings.DevelopmentVersionWarning,
"EnsureSConsVersion is ignored for development version")
return
scons_ver = self._get_major_minor_revision(SCons.__version__)
if scons_ver < (major, minor, revision):
if revision:
scons_ver_string = '%d.%d.%d' % (major, minor, revision)
else:
scons_ver_string = '%d.%d' % (major, minor)
print "SCons %s or greater required, but you have SCons %s" % \
(scons_ver_string, SCons.__version__)
sys.exit(2)
def EnsurePythonVersion(self, major, minor):
"""Exit abnormally if the Python version is not late enough."""
if sys.version_info < (major, minor):
v = sys.version.split()[0]
print "Python %d.%d or greater required, but you have Python %s" %(major,minor,v)
sys.exit(2)
def Exit(self, value=0):
sys.exit(value)
def Export(self, *vars, **kw):
for var in vars:
global_exports.update(compute_exports(self.Split(var)))
global_exports.update(kw)
def GetLaunchDir(self):
global launch_dir
return launch_dir
def GetOption(self, name):
name = self.subst(name)
return SCons.Script.Main.GetOption(name)
def Help(self, text):
text = self.subst(text, raw=1)
SCons.Script.HelpFunction(text)
def Import(self, *vars):
try:
frame = call_stack[-1]
globals = frame.globals
exports = frame.exports
for var in vars:
var = self.Split(var)
for v in var:
if v == '*':
globals.update(global_exports)
globals.update(exports)
else:
if v in exports:
globals[v] = exports[v]
else:
globals[v] = global_exports[v]
except KeyError,x:
raise SCons.Errors.UserError("Import of non-existent variable '%s'"%x)
def SConscript(self, *ls, **kw):
if 'build_dir' in kw:
msg = """The build_dir keyword has been deprecated; use the variant_dir keyword instead."""
SCons.Warnings.warn(SCons.Warnings.DeprecatedBuildDirWarning, msg)
def subst_element(x, subst=self.subst):
if SCons.Util.is_List(x):
x = list(map(subst, x))
else:
x = subst(x)
return x
ls = list(map(subst_element, ls))
subst_kw = {}
for key, val in kw.items():
if SCons.Util.is_String(val):
val = self.subst(val)
elif SCons.Util.is_List(val):
result = []
for v in val:
if SCons.Util.is_String(v):
v = self.subst(v)
result.append(v)
val = result
subst_kw[key] = val
files, exports = self._get_SConscript_filenames(ls, subst_kw)
subst_kw['exports'] = exports
return _SConscript(self.fs, *files, **subst_kw)
def SConscriptChdir(self, flag):
global sconscript_chdir
sconscript_chdir = flag
def SetOption(self, name, value):
name = self.subst(name)
SCons.Script.Main.SetOption(name, value)
#
#
#
SCons.Environment.Environment = SConsEnvironment
def Configure(*args, **kw):
if not SCons.Script.sconscript_reading:
raise SCons.Errors.UserError("Calling Configure from Builders is not supported.")
kw['_depth'] = 1
return SCons.SConf.SConf(*args, **kw)
# It's very important that the DefaultEnvironmentCall() class stay in this
# file, with the get_calling_namespaces() function, the compute_exports()
# function, the Frame class and the SConsEnvironment.Export() method.
# These things make up the calling stack leading up to the actual global
# Export() or SConscript() call that the user issued. We want to allow
# users to export local variables that they define, like so:
#
# def func():
# x = 1
# Export('x')
#
# To support this, the get_calling_namespaces() function assumes that
# the *first* stack frame that's not from this file is the local frame
# for the Export() or SConscript() call.
_DefaultEnvironmentProxy = None
def get_DefaultEnvironmentProxy():
global _DefaultEnvironmentProxy
if not _DefaultEnvironmentProxy:
default_env = SCons.Defaults.DefaultEnvironment()
_DefaultEnvironmentProxy = SCons.Environment.NoSubstitutionProxy(default_env)
return _DefaultEnvironmentProxy
class DefaultEnvironmentCall(object):
"""A class that implements "global function" calls of
Environment methods by fetching the specified method from the
DefaultEnvironment's class. Note that this uses an intermediate
proxy class instead of calling the DefaultEnvironment method
directly so that the proxy can override the subst() method and
thereby prevent expansion of construction variables (since from
the user's point of view this was called as a global function,
with no associated construction environment)."""
def __init__(self, method_name, subst=0):
self.method_name = method_name
if subst:
self.factory = SCons.Defaults.DefaultEnvironment
else:
self.factory = get_DefaultEnvironmentProxy
def __call__(self, *args, **kw):
env = self.factory()
method = getattr(env, self.method_name)
return method(*args, **kw)
def BuildDefaultGlobals():
"""
Create a dictionary containing all the default globals for
SConstruct and SConscript files.
"""
global GlobalDict
if GlobalDict is None:
GlobalDict = {}
import SCons.Script
d = SCons.Script.__dict__
def not_a_module(m, d=d, mtype=type(SCons.Script)):
return not isinstance(d[m], mtype)
for m in filter(not_a_module, dir(SCons.Script)):
GlobalDict[m] = d[m]
return GlobalDict.copy()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| Uli1/mapnik | scons/scons-local-2.4.0/SCons/Script/SConscript.py | Python | lgpl-2.1 | 24,468 | 0.002738 |
import os
import re
import cx_Oracle
import collections
import datetime
earContentionCode = [2200,2210,2220,3140,3150,4130,4210,4700,4920,5000,5010,5710,6850]
#Primary query: find all claims/contentions where the participant has at least one contention with an ear-related contention code.
#Organize them based first by participant id, then claim id and finally by profile date descending.
SQL="select rcc.ptcpnt_vet_id, \
bnft_claim_id, \
date_of_claim, \
prfil_dt, \
claim_ro_number, \
cntntn_id, \
cntntn_clsfcn_id, \
cntntn_clmant_txt, \
p.dob, \
end_prdct_type_cd \
from combo_rating_corp_claim rcc \
left join ah2626_person p on p.ptcpnt_vet_id = rcc.ptcpnt_vet_id \
inner join v_ear_claim_source cs on cs.vet_id = rcc.ptcpnt_vet_id and cs.claim_id = rcc.bnft_claim_id \
where prfil_dt >= date_of_claim \
order by rcc.ptcpnt_vet_id desc,bnft_claim_id,prfil_dt"
class AggregateContention:
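    """Container for the aggregated per-claim output fields (claim metadata plus
    contention-code counters) written to DEVELOPER.V_EAR_AGGREGATE_CONTENTION."""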
def __init__(self):
self.VET_ID = None
self.CLAIM_ID = None
self.DOB = 0
self.END_PRODUCT_CODE = None
self.RO_NUMBER = 0
self.CLAIM_DATE = None
self.MAX_PROFILE_DATE = None
self.CONTENTION_COUNT = 0
self.EAR_CONTENTION_COUNT = 0
self.C2200 = 0
self.C2210 = 0
self.C2220 = 0
self.C3140 = 0
self.C3150 = 0
self.C4130 = 0
self.C4210 = 0
self.C4700 = 0
self.C4920 = 0
self.C5000 = 0
self.C5010 = 0
self.C5710 = 0
self.C6850 = 0
self.TXT_LOSS = 0
self.TXT_TINITU = 0
def __str__(self):
from pprint import pprint
return str(vars(self))
class Contention:
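    """Convenience wrapper mapping one row of the contention query onto named attributes."""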
def __init__(self, ptcpnt_vet_id, bnft_claim_id, claim_date, prfil_dt, claim_ro_number, cntntn_id, cntntn_clsfcn_id, cntntn_clmant_txt, dob, end_prdct_type_cd):
self.ptcpnt_vet_id = ptcpnt_vet_id
self.bnft_claim_id = bnft_claim_id
self.claim_date = claim_date
self.prfil_dt = prfil_dt
self.claim_ro_number = claim_ro_number
self.cntntn_id = cntntn_id
self.cntntn_clsfcn_id = cntntn_clsfcn_id
self.cntntn_clmant_txt = cntntn_clmant_txt
if not dob is None:
self.dob = int(dob)
else:
self.dob = None
self.end_prdct_type_cd = end_prdct_type_cd
def __str__(self):
from pprint import pprint
return str(vars(self))
connection = cx_Oracle.connect('developer/D3vVV0Rd@127.0.0.1:1521/DEV.BCDSS')
writeCursor = connection.cursor()
writeCursor.prepare('INSERT INTO DEVELOPER.V_EAR_AGGREGATE_CONTENTION (VET_ID, CLAIM_ID, END_PRODUCT_CODE, CLAIM_DATE, CONTENTION_COUNT, EAR_CONTENTION_COUNT, C2200,C2210, C2220,C3140,C3150,C4130,C4210,C4700,C4920,C5000,C5010,C5710, C6850, TXT_LOSS, TXT_TINITU, DOB, RO_NUMBER, MAX_PROFILE_DATE) \
VALUES (:VET_ID, :CLAIM_ID, :END_PRODUCT_CODE, :CLAIM_DATE, :CONTENTION_COUNT, :EAR_CONTENTION_COUNT, \
:C2200, :C2210, :C2220, :C3140, :C3150, :C4130 , :C4210, :C4700, :C4920, :C5000, :C5010, :C5710, :C6850, \
:TXT_LOSS, :TXT_TINITU, \
:DOB, :RO_NUMBER, :MAX_PROFILE_DATE)')
print(str(datetime.datetime.now()))
cursor = connection.cursor()
cursor.execute(SQL)
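#Aggregation state for the loop below: currParticipant/currBenefitClaim track group
#boundaries in the ordered result set, and the Counter objects accumulate per-claim totals.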
aggregateContention = None
counterAggregateContention = None
totalContentions = None
totalEarContentions = None
maxProfileDate = None
currBenefitClaim = -1
currParticipant = -1
counter = 0
for row in cursor:
if counter == 1000: #Commit every 1000 records. Improvement would be to look into aggregate inserts
connection.commit()
counter=0
contention = Contention(row[0],row[1],row[2],row[3],row[4],row[5],row[6], row[7], row[8], row[9]) #Map loose fields into a Contention object. (Contention is a convenience object)
if currBenefitClaim != contention.bnft_claim_id: #Process insert statement and reset aggregation variables when claim id changes
if currBenefitClaim != -1: #Skip if first time through
#Perform all aggregation calculations before inserting and resetting
aggregateContention.CONTENTION_COUNT = sum(totalContentions.values())
aggregateContention.EAR_CONTENTION_COUNT = sum(totalEarContentions.values())
aggregateContention.MAX_PROFILE_DATE = maxProfileDate[currBenefitClaim]
writeCursor.execute(None, {'VET_ID' :aggregateContention.VET_ID, 'CLAIM_ID' :aggregateContention.CLAIM_ID, 'END_PRODUCT_CODE' :aggregateContention.END_PRODUCT_CODE, 'CLAIM_DATE' :aggregateContention.CLAIM_DATE, 'CONTENTION_COUNT' :aggregateContention.CONTENTION_COUNT, 'EAR_CONTENTION_COUNT' :aggregateContention.EAR_CONTENTION_COUNT,
'C2200' :counterAggregateContention.C2200, 'C2210' :counterAggregateContention.C2210, 'C2220' :counterAggregateContention.C2220, 'C3140' :counterAggregateContention.C3140, 'C3150' :counterAggregateContention.C3150, 'C4130' :counterAggregateContention.C4130, 'C4210' :counterAggregateContention.C4210, 'C4700' :counterAggregateContention.C4700, 'C4920' :counterAggregateContention.C4920, 'C5000' :counterAggregateContention.C5000, 'C5010' :counterAggregateContention.C5010, 'C5710' :counterAggregateContention.C5710, 'C6850' :counterAggregateContention.C6850,
'TXT_LOSS' :counterAggregateContention.TXT_LOSS, 'TXT_TINITU' :counterAggregateContention.TXT_TINITU,
'DOB' :aggregateContention.DOB, 'RO_NUMBER' :aggregateContention.RO_NUMBER, 'MAX_PROFILE_DATE' :aggregateContention.MAX_PROFILE_DATE})
counter += 1
currBenefitClaim = contention.bnft_claim_id #Reset claim id
if currParticipant != contention.ptcpnt_vet_id :
currParticipant = contention.ptcpnt_vet_id #Reset participant id
counterAggregateContention = AggregateContention()
#Capture all claim/person level items that do not change per contention
aggregateContention = AggregateContention()
aggregateContention.VET_ID = contention.ptcpnt_vet_id
aggregateContention.CLAIM_ID = currBenefitClaim
aggregateContention.RO_NUMBER = contention.claim_ro_number
aggregateContention.DOB = contention.dob
aggregateContention.CLAIM_DATE = contention.claim_date
aggregateContention.END_PRODUCT_CODE = contention.end_prdct_type_cd
#Reset the counters
        totalContentions = collections.Counter()
        totalEarContentions = collections.Counter()
        maxProfileDate = collections.Counter()
maxProfileDate[currBenefitClaim] = contention.prfil_dt #If a claim has multiple profile dates, because of the sorting, we always end up with the most recent profile date
totalContentions[currBenefitClaim] += 1 #For every contention add one
if contention.cntntn_clsfcn_id in earContentionCode:
totalEarContentions[currBenefitClaim] +=1 #For any contention that is ear-related, add one
    #Scan the contention claimant text with a regex and count every contention whose text mentions the term
if re.search("Loss",contention.cntntn_clmant_txt,re.IGNORECASE):
counterAggregateContention.TXT_LOSS += 1
if re.search("Tinnitus",contention.cntntn_clmant_txt,re.IGNORECASE):
counterAggregateContention.TXT_TINITU += 1
    #Count occurrences of each ear-related contention classification code
if contention.cntntn_clsfcn_id == 2200:
counterAggregateContention.C2200 += 1
if contention.cntntn_clsfcn_id == 2210:
counterAggregateContention.C2210 += 1
if contention.cntntn_clsfcn_id == 2220:
counterAggregateContention.C2220 += 1
if contention.cntntn_clsfcn_id == 3140:
counterAggregateContention.C3140 += 1
if contention.cntntn_clsfcn_id == 3150:
counterAggregateContention.C3150 += 1
if contention.cntntn_clsfcn_id == 4130:
counterAggregateContention.C4130 += 1
if contention.cntntn_clsfcn_id == 4210:
counterAggregateContention.C4210 += 1
if contention.cntntn_clsfcn_id == 4700:
counterAggregateContention.C4700 += 1
if contention.cntntn_clsfcn_id == 4920:
counterAggregateContention.C4920 += 1
if contention.cntntn_clsfcn_id == 5000:
counterAggregateContention.C5000 += 1
if contention.cntntn_clsfcn_id == 5010:
counterAggregateContention.C5010 += 1
if contention.cntntn_clsfcn_id == 5710:
counterAggregateContention.C5710 += 1
if contention.cntntn_clsfcn_id == 6850:
counterAggregateContention.C6850 += 1
#This block runs after the for loop (note the dedent) so that the aggregate row for the final claim is also written.
aggregateContention.CONTENTION_COUNT = sum(totalContentions.values())
aggregateContention.EAR_CONTENTION_COUNT = sum(totalEarContentions.values())
aggregateContention.MAX_PROFILE_DATE = maxProfileDate[currBenefitClaim]
writeCursor.execute(None, {'VET_ID' :aggregateContention.VET_ID, 'CLAIM_ID' :aggregateContention.CLAIM_ID, 'END_PRODUCT_CODE' :aggregateContention.END_PRODUCT_CODE, 'CLAIM_DATE' :aggregateContention.CLAIM_DATE, 'CONTENTION_COUNT' :aggregateContention.CONTENTION_COUNT, 'EAR_CONTENTION_COUNT' :aggregateContention.EAR_CONTENTION_COUNT,
'C2200' :counterAggregateContention.C2200, 'C2210' :counterAggregateContention.C2210, 'C2220' :counterAggregateContention.C2220, 'C3140' :counterAggregateContention.C3140, 'C3150' :counterAggregateContention.C3150, 'C4130' :counterAggregateContention.C4130, 'C4210' :counterAggregateContention.C4210, 'C4700' :counterAggregateContention.C4700, 'C4920' :counterAggregateContention.C4920, 'C5000' :counterAggregateContention.C5000, 'C5010' :counterAggregateContention.C5010, 'C5710' :counterAggregateContention.C5710, 'C6850' :counterAggregateContention.C6850,
'TXT_LOSS' :counterAggregateContention.TXT_LOSS, 'TXT_TINITU' :counterAggregateContention.TXT_TINITU,
'DOB' :aggregateContention.DOB, 'RO_NUMBER' :aggregateContention.RO_NUMBER, 'MAX_PROFILE_DATE' :aggregateContention.MAX_PROFILE_DATE})
connection.commit()
print(str(datetime.datetime.now()))
writeCursor.close()
cursor.close()
connection.close()
| VHAINNOVATIONS/BCDS | Model/scripts/ear_validation/python/aggregateContention.py | Python | apache-2.0 | 9,448 | 0.037786 |
# -*- coding: utf-8 -*-
"""
***************************************************************************
Explode.py
---------------------
Date : August 2012
Copyright : (C) 2012 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'August 2012'
__copyright__ = '(C) 2012, Victor Olaya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
from sextante.core.GeoAlgorithm import GeoAlgorithm
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from qgis.core import *
from sextante.parameters.ParameterVector import ParameterVector
from sextante.core.QGisLayers import QGisLayers
from sextante.outputs.OutputVector import OutputVector
class Explode(GeoAlgorithm):
INPUT = "INPUT"
OUTPUT = "OUTPUT"
#===========================================================================
# def getIcon(self):
# return QtGui.QIcon(os.path.dirname(__file__) + "/../images/toolbox.png")
#===========================================================================
def processAlgorithm(self, progress):
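        # Split every input (multi)polyline feature into its individual two-point
        # segments, copying the original attributes onto each output segment.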
vlayer = QGisLayers.getObjectFromUri(self.getParameterValue(self.INPUT))
output = self.getOutputFromName(self.OUTPUT)
vprovider = vlayer.dataProvider()
fields = vprovider.fields()
writer = output.getVectorWriter(fields, QGis.WKBLineString, vlayer.crs() )
outFeat = QgsFeature()
inGeom = QgsGeometry()
nElement = 0
features = QGisLayers.features(vlayer)
nFeat = len(features)
for feature in features:
nElement += 1
progress.setPercentage((nElement*100)/nFeat)
inGeom = feature.geometry()
atMap = feature.attributes()
segments = self.extractAsSingleSegments( inGeom )
outFeat.setAttributes( atMap )
for segment in segments:
outFeat.setGeometry(segment)
writer.addFeature(outFeat)
del writer
def extractAsSingleSegments( self, geom ):
segments = []
if geom.isMultipart():
multi = geom.asMultiPolyline()
for polyline in multi:
segments.extend( self.getPolylineAsSingleSegments(polyline))
else:
segments.extend( self.getPolylineAsSingleSegments(geom.asPolyline()))
return segments
def getPolylineAsSingleSegments(self, polyline):
segments = []
for i in range(len(polyline)-1):
ptA = polyline[i]
ptB = polyline[i+1]
segment = QgsGeometry.fromPolyline([ptA, ptB])
segments.append(segment)
return segments
def defineCharacteristics(self):
self.name = "Explode lines"
self.group = "Vector geometry tools"
self.addParameter(ParameterVector(self.INPUT, "Input layer",ParameterVector.VECTOR_TYPE_LINE))
self.addOutput(OutputVector(self.OUTPUT, "Output layer"))
| slarosa/QGIS | python/plugins/sextante/algs/Explode.py | Python | gpl-2.0 | 3,641 | 0.006042 |
"""
Settings for Appsembler on CMS in Production.
"""
import sentry_sdk
from openedx.core.djangoapps.appsembler.settings.settings import production_common
def plugin_settings(settings):
"""
Appsembler CMS overrides for both production AND devstack.
Make sure those are compatible for devstack via defensive coding.
This file, however, won't run in test environments.
"""
production_common.plugin_settings(settings)
settings.APPSEMBLER_SECRET_KEY = settings.AUTH_TOKENS.get("APPSEMBLER_SECRET_KEY")
settings.INTERCOM_APP_ID = settings.AUTH_TOKENS.get("INTERCOM_APP_ID")
settings.INTERCOM_APP_SECRET = settings.AUTH_TOKENS.get("INTERCOM_APP_SECRET")
settings.FEATURES['ENABLE_COURSEWARE_INDEX'] = True
settings.FEATURES['ENABLE_LIBRARY_INDEX'] = True
settings.ELASTIC_FIELD_MAPPINGS = {
"start_date": {
"type": "date"
}
}
if settings.SENTRY_DSN:
sentry_sdk.set_tag('app', 'cms')
settings.SESSION_SERIALIZER = 'django.contrib.sessions.serializers.PickleSerializer'
settings.XQUEUE_WAITTIME_BETWEEN_REQUESTS = 5
settings.HIJACK_LOGIN_REDIRECT_URL = '/home'
| appsembler/edx-platform | openedx/core/djangoapps/appsembler/settings/settings/production_cms.py | Python | agpl-3.0 | 1,171 | 0.003416 |
from test import support
from test.support import bigaddrspacetest, MAX_Py_ssize_t
import unittest
import operator
import sys
class StrTest(unittest.TestCase):
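    """Tests for very large str objects; concatenation past MAX_Py_ssize_t must raise OverflowError."""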
@bigaddrspacetest
def test_concat(self):
s1 = 'x' * MAX_Py_ssize_t
self.assertRaises(OverflowError, operator.add, s1, '?')
@bigaddrspacetest
def test_optimized_concat(self):
x = 'x' * MAX_Py_ssize_t
try:
x = x + '?' # this statement uses a fast path in ceval.c
except OverflowError:
pass
else:
self.fail("should have raised OverflowError")
try:
x += '?' # this statement uses a fast path in ceval.c
except OverflowError:
pass
else:
self.fail("should have raised OverflowError")
self.assertEquals(len(x), MAX_Py_ssize_t)
### the following test is pending a patch
# (http://mail.python.org/pipermail/python-dev/2006-July/067774.html)
#@bigaddrspacetest
#def test_repeat(self):
# self.assertRaises(OverflowError, operator.mul, 'x', MAX_Py_ssize_t + 1)
def test_main():
support.run_unittest(StrTest)
if __name__ == '__main__':
if len(sys.argv) > 1:
support.set_memlimit(sys.argv[1])
test_main()
| MalloyPower/parsing-python | front-end/testsuite-python-lib/Python-3.1/Lib/test/test_bigaddrspace.py | Python | mit | 1,284 | 0.003894 |
#Copyright (c) 2011-2012 Litle & Co.
#
#Permission is hereby granted, free of charge, to any person
#obtaining a copy of this software and associated documentation
#files (the "Software"), to deal in the Software without
#restriction, including without limitation the rights to use,
#copy, modify, merge, publish, distribute, sublicense, and/or sell
#copies of the Software, and to permit persons to whom the
#Software is furnished to do so, subject to the following
#conditions:
#
#The above copyright notice and this permission notice shall be
#included in all copies or substantial portions of the Software.
#
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
#EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
#OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
#NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
#HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
#WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
#FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
#OTHER DEALINGS IN THE SOFTWARE.
import os, sys
lib_path = os.path.abspath('../all')
sys.path.append(lib_path)
from SetupTest import *
import unittest
class TestAuth(unittest.TestCase):
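    """Functional authorization tests sent through litleOnlineRequest using the shared SetupTest config."""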
def testSimpleAuthWithCard(self):
authorization = litleXmlFields.authorization()
authorization.orderId = '1234'
authorization.amount = 106
authorization.orderSource = 'ecommerce'
card = litleXmlFields.cardType()
card.number = "4100000000000000"
card.expDate = "1210"
card.type = 'VI'
authorization.card = card
litleXml = litleOnlineRequest(config)
response = litleXml.sendRequest(authorization)
self.assertEquals("000",response.response)
def testSimpleAuthWithPaypal(self):
authorization = litleXmlFields.authorization()
authorization.orderId = '12344'
authorization.amount = 106
authorization.orderSource = 'ecommerce'
paypal = litleXmlFields.payPal()
paypal.payerId = "1234"
paypal.token = "1234"
paypal.transactionId = '123456'
authorization.paypal = paypal
litleXml = litleOnlineRequest(config)
response = litleXml.sendRequest(authorization)
self.assertEquals("Approved",response.message)
def testSimpleAuthWithSecondaryAmountAndApplepay(self):
authorization = litleXmlFields.authorization()
authorization.orderId = '1234'
authorization.amount = 110
authorization.orderSource = 'ecommerce'
authorization.secondaryAmount = '10'
applepay = litleXmlFields.applepayType()
applepay.data = "4100000000000000"
applepay.signature = "sign"
applepay.version = '1'
header=litleXmlFields.applepayHeaderType()
header.applicationData='e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855'
header.ephemeralPublicKey ='e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855'
header.publicKeyHash='e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855'
header.transactionId='1024'
applepay.header=header
authorization.applepay = applepay
litleXml = litleOnlineRequest(config)
response = litleXml.sendRequest(authorization)
self.assertEquals("Insufficient Funds",response.message)
self.assertEquals(110,response.applepayResponse.transactionAmount)
def testPosWithoutCapabilityAndEntryMode(self):
authorization = litleXmlFields.authorization()
authorization.orderId = '123456'
authorization.amount = 106
authorization.orderSource = 'ecommerce'
pos = litleXmlFields.pos()
pos.cardholderId = "pin"
authorization.pos = pos
card = litleXmlFields.cardType()
card.number = "4100000000000002"
card.expDate = "1210"
card.type = 'VI'
card.cardValidationNum = '1213'
authorization.card = card
litle = litleOnlineRequest(config)
with self.assertRaises(Exception):
litle.sendRequest(authorization)
def testAccountUpdate(self):
authorization = litleXmlFields.authorization()
authorization.orderId = '12344'
authorization.amount = 106
authorization.orderSource = 'ecommerce'
card = litleXmlFields.cardType()
card.number = "4100100000000000"
card.expDate = "1210"
card.type = 'VI'
card.cardValidationNum = '1213'
authorization.card = card
litleXml = litleOnlineRequest(config)
response = litleXml.sendRequest(authorization)
self.assertEquals("4100100000000000",response.accountUpdater.originalCardInfo.number)
def testTrackData(self):
authorization = litleXmlFields.authorization()
authorization.id = 'AX54321678'
authorization.reportGroup = 'RG27'
authorization.orderId = '12z58743y1'
authorization.amount = 12522
authorization.orderSource = 'retail'
billToAddress = litleXmlFields.contact()
billToAddress.zip = '95032'
authorization.billToAddress = billToAddress
card = litleXmlFields.cardType()
card.track = "%B40000001^Doe/JohnP^06041...?;40001=0604101064200?"
authorization.card = card
pos = litleXmlFields.pos()
pos.capability = 'magstripe'
pos.entryMode = 'completeread'
pos.cardholderId = 'signature'
authorization.pos = pos
litleXml = litleOnlineRequest(config)
response = litleXml.sendRequest(authorization)
self.assertEquals('Approved', response.message)
def testListOfTaxAmounts(self):
authorization = litleXmlFields.authorization()
authorization.id = '12345'
authorization.reportGroup = 'Default'
authorization.orderId = '67890'
authorization.amount = 10000
authorization.orderSource = 'ecommerce'
enhanced = litleXmlFields.enhancedData()
dt1 = litleXmlFields.detailTax()
dt1.taxAmount = 100
enhanced.detailTax.append(dt1)
dt2 = litleXmlFields.detailTax()
dt2.taxAmount = 200
enhanced.detailTax.append(dt2)
authorization.enhancedData = enhanced
card = litleXmlFields.cardType()
card.number = '4100000000000001'
card.expDate = '1215'
card.type = 'VI'
authorization.card = card
litleXml = litleOnlineRequest(config)
response = litleXml.sendRequest(authorization)
self.assertEquals('Approved', response.message)
def suite():
suite = unittest.TestSuite()
suite = unittest.TestLoader().loadTestsFromTestCase(TestAuth)
return suite
if __name__ =='__main__':
unittest.main() | Rediker-Software/litle-sdk-for-python | litleSdkPythonTest/functional/TestAuth.py | Python | mit | 7,012 | 0.011266 |
import unittest
from probe import SearchApi, CompanyApi
class TestProbeAPI(unittest.TestCase):
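    """Integration tests for the probe SearchApi and CompanyApi clients (requires access to the probe API)."""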
def test_search_company_loanzen(self):
api = SearchApi()
companies = api.companies_get('1.1', filters='{"nameStartsWith": "loanzen"}')
#print type(companies.data), companies.data.companies
self.assertFalse(len(companies.data.companies) == 0)
def test_search_authorized_signatory(self):
api = SearchApi()
directors = api.authorized_signatories_get('1.1', filters='{"pan": "ANQPK6045G"}')
#print directors.data.authorized_signatories
self.assertFalse(len(directors.data.authorized_signatories) == 0)
def test_get_company_details_empty(self):
api = CompanyApi()
company = api.companies_cin_get('1.1', 'U24239DL2002PTC114413')
#print company.data.company
self.assertEquals(company.data.company.cin, 'U24239DL2002PTC114413')
def test_get_company_authorized_signatories(self):
api = CompanyApi()
signatories = api.companies_cin_authorized_signatories_get('1.1', 'U24239DL2002PTC114413')
#print signatories.data.authorized_signatories
self.assertFalse(len(signatories.data.authorized_signatories) == 0)
def test_get_company_charges(self):
api = CompanyApi()
charges = api.companies_cin_charges_get('1.1', 'U24239DL2002PTC114413')
#print charges.data.charges
self.assertFalse(len(charges.data.charges) == 0)
def test_get_company_financials(self):
api = CompanyApi()
financials = api.companies_cin_financials_get('1.1', 'U24239DL2002PTC114413')
print financials.data.financials
if __name__ == '__main__':
unittest.main() | loanzen/probe-py | probe/tests.py | Python | mit | 1,727 | 0.006369 |
"""
InaSAFE Disaster risk assessment tool developed by AusAid -
**GUI Keywords Dialog.**
Contact : ole.moller.nielsen@gmail.com
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
.. todo:: Check raster is single band
"""
__author__ = 'tim@linfiniti.com'
__version__ = '0.5.0'
__revision__ = '$Format:%H$'
__date__ = '21/02/2011'
__copyright__ = ('Copyright 2012, Australia Indonesia Facility for '
'Disaster Reduction')
from PyQt4 import QtGui, QtCore
from PyQt4.QtCore import pyqtSignature
from odict import OrderedDict
from safe_qgis.keywords_dialog_base import Ui_KeywordsDialogBase
from safe_qgis.keyword_io import KeywordIO
from safe_qgis.help import Help
from safe_qgis.utilities import getExceptionWithStacktrace
from safe_qgis.exceptions import InvalidParameterException
from safe.common.exceptions import InaSAFEError
# Don't remove this even if it is flagged as unused by your ide
# it is needed for qrc:/ url resolution. See Qt Resources docs.
import safe_qgis.resources # pylint: disable=W0611
#see if we can import pydev - see development docs for details
try:
from pydevd import * # pylint: disable=F0401
print 'Remote debugging is enabled.'
DEBUG = True
except ImportError:
print 'Debugging was disabled'
class KeywordsDialog(QtGui.QDialog, Ui_KeywordsDialogBase):
"""Dialog implementation class for the Risk In A Box keywords editor."""
def __init__(self, parent, iface, theDock=None):
"""Constructor for the dialog.
.. note:: In QtDesigner the advanced editor's predefined keywords
list should be shown in english always, so when adding entries to
cboKeyword, be sure to choose :safe_qgis:`Properties<<` and untick
the :safe_qgis:`translatable` property.
Args:
* parent - parent widget of this dialog
* iface - a Quantum GIS QGisAppInterface instance.
* theDock - Optional dock widget instance that we can notify of
changes to the keywords.
Returns:
not applicable
Raises:
no exceptions explicitly raised
"""
QtGui.QDialog.__init__(self, parent)
self.setupUi(self)
self.setWindowTitle(self.tr(
'InaSAFE %s Keywords Editor' % __version__))
self.keywordIO = KeywordIO()
# note the keys should remain untranslated as we need to write
# english to the keywords file. The keys will be written as user data
# in the combo entries.
# .. seealso:: http://www.voidspace.org.uk/python/odict.html
self.standardExposureList = OrderedDict([('population [density]',
self.tr('population [density]')),
('population [count]',
self.tr('population [count]')),
('building',
self.tr('building')),
('building [osm]',
self.tr('building [osm]')),
('building [sigab]',
self.tr('building [sigab]')),
('roads',
self.tr('roads'))])
self.standardHazardList = OrderedDict([('earthquake [MMI]',
self.tr('earthquake [MMI]')),
('tsunami [m]',
self.tr('tsunami [m]')),
('tsunami [wet/dry]',
self.tr('tsunami [wet/dry]')),
('tsunami [feet]',
self.tr('tsunami [feet]')),
('flood [m]',
self.tr('flood [m]')),
('flood [wet/dry]',
self.tr('flood [wet/dry]')),
('flood [feet]', self.tr('flood [feet]')),
                                               ('tephra [kg2/m2]',
self.tr('tephra [kg2/m2]'))])
# Save reference to the QGIS interface and parent
self.iface = iface
self.parent = parent
self.dock = theDock
# Set up things for context help
myButton = self.buttonBox.button(QtGui.QDialogButtonBox.Help)
QtCore.QObject.connect(myButton, QtCore.SIGNAL('clicked()'),
self.showHelp)
self.helpDialog = None
# set some inital ui state:
self.pbnAdvanced.setChecked(True)
self.pbnAdvanced.toggle()
self.radPredefined.setChecked(True)
self.adjustSize()
#myButton = self.buttonBox.button(QtGui.QDialogButtonBox.Ok)
#myButton.setEnabled(False)
self.layer = self.iface.activeLayer()
if self.layer:
self.loadStateFromKeywords()
def showHelp(self):
"""Load the help text for the keywords safe_qgis"""
if not self.helpDialog:
self.helpDialog = Help(self.iface.mainWindow(), 'keywords')
self.helpDialog.show()
# prevents actions being handled twice
@pyqtSignature('bool')
def on_pbnAdvanced_toggled(self, theFlag):
"""Automatic slot executed when the advanced button is toggled.
.. note:: some of the behaviour for hiding widgets is done using
the signal/slot editor in designer, so if you are trying to figure
out how the interactions work, look there too!
Args:
theFlag - boolean indicating the new checked state of the button
Returns:
None.
Raises:
no exceptions explicitly raised."""
if theFlag:
self.pbnAdvanced.setText(self.tr('Hide advanced editor'))
else:
self.pbnAdvanced.setText(self.tr('Show advanced editor'))
self.adjustSize()
# prevents actions being handled twice
@pyqtSignature('bool')
def on_radHazard_toggled(self, theFlag):
"""Automatic slot executed when the hazard radio is toggled.
Args:
theFlag - boolean indicating the new checked state of the button
Returns:
None.
Raises:
no exceptions explicitly raised."""
if not theFlag:
return
self.setCategory('hazard')
self.updateControlsFromList()
# prevents actions being handled twice
@pyqtSignature('bool')
def on_radExposure_toggled(self, theFlag):
"""Automatic slot executed when the hazard radio is toggled on.
Args:
theFlag - boolean indicating the new checked state of the button
Returns:
None.
Raises:
no exceptions explicitly raised."""
if not theFlag:
return
self.setCategory('exposure')
self.updateControlsFromList()
# prevents actions being handled twice
@pyqtSignature('int')
def on_cboSubcategory_currentIndexChanged(self, theIndex=None):
"""Automatic slot executed when the subcategory is changed.
When the user changes the subcategory, we will extract the
subcategory and dataype or unit (depending on if it is a hazard
or exposure subcategory) from the [] after the name.
Args:
None
Returns:
None.
Raises:
no exceptions explicitly raised."""
del theIndex
myItem = self.cboSubcategory.itemData(
self.cboSubcategory.currentIndex()).toString()
myText = str(myItem)
if myText == self.tr('Not Set'):
self.removeItemByKey('subcategory')
return
myTokens = myText.split(' ')
if len(myTokens) < 1:
self.removeItemByKey('subcategory')
return
mySubcategory = myTokens[0]
self.addListEntry('subcategory', mySubcategory)
# Some subcategories e.g. roads have no units or datatype
if len(myTokens) == 1:
return
myCategory = self.getValueForKey('category')
if 'hazard' == myCategory:
myUnits = myTokens[1].replace('[', '').replace(']', '')
self.addListEntry('unit', myUnits)
if 'exposure' == myCategory:
myDataType = myTokens[1].replace('[', '').replace(']', '')
self.addListEntry('datatype', myDataType)
# prevents actions being handled twice
def setSubcategoryList(self, theEntries, theSelectedItem=None):
"""Helper to populate the subcategory list based on category context.
Args:
* theEntries - an OrderedDict of subcategories. The dict entries
should be ('earthquake', self.tr('earthquake')). See
http://www.voidspace.org.uk/python/odict.html for info on
OrderedDict.
* theSelectedItem - optional parameter indicating which item
should be selected in the combo. If the selected item is not
in theList, it will be appended to it.
Returns:
None.
Raises:
no exceptions explicitly raised.
"""
        # To avoid triggering on_cboSubcategory_currentIndexChanged
# we block signals from the combo while updating it
self.cboSubcategory.blockSignals(True)
self.cboSubcategory.clear()
if (theSelectedItem is not None and
theSelectedItem not in theEntries.values() and
theSelectedItem not in theEntries.keys()):
# Add it to the OrderedList
theEntries[theSelectedItem] = theSelectedItem
myIndex = 0
mySelectedIndex = 0
for myKey, myValue in theEntries.iteritems():
if (myValue == theSelectedItem or
myKey == theSelectedItem):
mySelectedIndex = myIndex
myIndex += 1
self.cboSubcategory.addItem(myValue, myKey)
self.cboSubcategory.setCurrentIndex(mySelectedIndex)
self.cboSubcategory.blockSignals(False)
# prevents actions being handled twice
@pyqtSignature('')
def on_pbnAddToList1_clicked(self):
"""Automatic slot executed when the pbnAddToList1 button is pressed.
Args:
None
Returns:
None.
Raises:
no exceptions explicitly raised."""
myCurrentKey = self.tr(self.cboKeyword.currentText())
myCurrentValue = self.lePredefinedValue.text()
self.addListEntry(myCurrentKey, myCurrentValue)
self.updateControlsFromList()
# prevents actions being handled twice
@pyqtSignature('')
def on_pbnAddToList2_clicked(self):
"""Automatic slot executed when the pbnAddToList2 button is pressed.
Args:
None
Returns:
None.
Raises:
no exceptions explicitly raised."""
myCurrentKey = self.leKey.text()
myCurrentValue = self.leValue.text()
if myCurrentKey == 'category' and myCurrentValue == 'hazard':
self.radHazard.blockSignals(True)
self.radHazard.setChecked(True)
self.setSubcategoryList(self.standardHazardList)
self.radHazard.blockSignals(False)
elif myCurrentKey == 'category' and myCurrentValue == 'exposure':
self.radExposure.blockSignals(True)
self.radExposure.setChecked(True)
self.setSubcategoryList(self.standardExposureList)
self.radExposure.blockSignals(False)
elif myCurrentKey == 'category':
#.. todo:: notify the user their category is invalid
pass
self.addListEntry(myCurrentKey, myCurrentValue)
self.updateControlsFromList()
# prevents actions being handled twice
@pyqtSignature('')
def on_pbnRemove_clicked(self):
"""Automatic slot executed when the pbnRemove button is pressed.
It will remove any selected items in the keywords list.
Args:
None
Returns:
None.
Raises:
no exceptions explicitly raised."""
for myItem in self.lstKeywords.selectedItems():
self.lstKeywords.takeItem(self.lstKeywords.row(myItem))
self.updateControlsFromList()
def addListEntry(self, theKey, theValue):
"""Add an item to the keywords list given its key/value.
The key and value must both be valid, non empty strings
or an InvalidKVPException will be raised.
If an entry with the same key exists, it's value will be
replaced with theValue.
It will add the current key/value pair to the list if it is not
already present. The kvp will also be stored in the data of the
listwidgetitem as a simple string delimited with a bar ('|').
Args:
* theKey - string representing the key part of the key
value pair (kvp)
* theValue - string representing the value part of the key
value pair (kvp)
Returns:
None.
Raises:
no exceptions explicitly raised."""
if theKey is None or theKey == '':
return
if theValue is None or theValue == '':
return
myMessage = ''
if ':' in theKey:
theKey = theKey.replace(':', '.')
myMessage = self.tr('Colons are not allowed, replaced with "."')
if ':' in theValue:
theValue = theValue.replace(':', '.')
myMessage = self.tr('Colons are not allowed, replaced with "."')
if myMessage == '':
self.lblMessage.setText('')
self.lblMessage.hide()
else:
self.lblMessage.setText(myMessage)
self.lblMessage.show()
myItem = QtGui.QListWidgetItem(theKey + ':' + theValue)
# we are going to replace, so remove it if it exists already
self.removeItemByKey(theKey)
myData = theKey + '|' + theValue
myItem.setData(QtCore.Qt.UserRole, myData)
self.lstKeywords.insertItem(0, myItem)
def setCategory(self, theCategory):
"""Set the category radio button based on theCategory.
Args:
theCategory - a string which must be either 'hazard' or 'exposure'.
Returns:
False if the radio button could not be updated
Raises:
no exceptions explicitly raised."""
# convert from QString if needed
myCategory = str(theCategory)
if self.getValueForKey('category') == myCategory:
#nothing to do, go home
return True
if myCategory not in ['hazard', 'exposure']:
# .. todo:: report an error to the user
return False
# Special case when category changes, we start on a new slate!
if myCategory == 'hazard':
# only cause a toggle if we actually changed the category
# This will only really be apparent if user manually enters
# category as a keyword
self.reset()
self.radHazard.blockSignals(True)
self.radHazard.setChecked(True)
self.radHazard.blockSignals(False)
self.removeItemByKey('subcategory')
self.removeItemByKey('datatype')
self.addListEntry('category', 'hazard')
myList = self.standardHazardList
self.setSubcategoryList(myList)
else:
self.reset()
self.radExposure.blockSignals(True)
self.radExposure.setChecked(True)
self.radExposure.blockSignals(False)
self.removeItemByKey('subcategory')
self.removeItemByKey('unit')
self.addListEntry('category', 'exposure')
myList = self.standardExposureList
self.setSubcategoryList(myList)
return True
def reset(self, thePrimaryKeywordsOnlyFlag=True):
"""Reset all controls to a blank state.
Args:
thePrimaryKeywordsOnlyFlag - if True (the default), only
reset Subcategory, datatype and units.
Returns:
None
Raises:
no exceptions explicitly raised."""
self.cboSubcategory.clear()
self.removeItemByKey('subcategory')
self.removeItemByKey('datatype')
self.removeItemByKey('unit')
if not thePrimaryKeywordsOnlyFlag:
# Clear everything else too
self.lstKeywords.clear()
self.leKey.clear()
self.leValue.clear()
self.lePredefinedValue.clear()
self.leTitle.clear()
def removeItemByKey(self, theKey):
"""Remove an item from the kvp list given its key.
Args:
theKey - key of item to be removed.
Returns:
None
Raises:
no exceptions explicitly raised."""
for myCounter in range(self.lstKeywords.count()):
myExistingItem = self.lstKeywords.item(myCounter)
myText = myExistingItem.text()
myTokens = myText.split(':')
if len(myTokens) < 2:
break
myKey = myTokens[0]
if myKey == theKey:
# remove it since the key is already present
self.lstKeywords.takeItem(myCounter)
break
def removeItemByValue(self, theValue):
"""Remove an item from the kvp list given its key.
Args:
theValue - value of item to be removed.
Returns:
None
Raises:
no exceptions explicitly raised."""
for myCounter in range(self.lstKeywords.count()):
myExistingItem = self.lstKeywords.item(myCounter)
myText = myExistingItem.text()
myTokens = myText.split(':')
myValue = myTokens[1]
if myValue == theValue:
# remove it since the key is already present
self.lstKeywords.takeItem(myCounter)
break
def getValueForKey(self, theKey):
"""Check if our key list contains a specific key,
and return its value if present.
Args:
theKey- String representing the key to search for
Returns:
Value of key if matched otherwise none
Raises:
no exceptions explicitly raised."""
for myCounter in range(self.lstKeywords.count()):
myExistingItem = self.lstKeywords.item(myCounter)
myText = myExistingItem.text()
myTokens = myText.split(':')
myKey = str(myTokens[0]).strip()
myValue = str(myTokens[1]).strip()
if myKey == theKey:
return myValue
return None
def loadStateFromKeywords(self):
"""Set the ui state to match the keywords of the
currently active layer.
Args:
None
Returns:
None.
Raises:
no exceptions explicitly raised."""
try:
myKeywords = self.keywordIO.readKeywords(self.layer)
except InvalidParameterException:
# layer has no keywords file so just start with a blank slate
# so that subcategory gets populated nicely & we will assume
# exposure to start with
myKeywords = {'category': 'exposure'}
myLayerName = self.layer.name()
if 'title' not in myKeywords:
self.leTitle.setText(myLayerName)
self.lblLayerName.setText(myLayerName)
#if we have a category key, unpack it first so radio button etc get set
if 'category' in myKeywords:
self.setCategory(myKeywords['category'])
myKeywords.pop('category')
for myKey in myKeywords.iterkeys():
self.addListEntry(myKey, myKeywords[myKey])
# now make the rest of the safe_qgis reflect the list entries
self.updateControlsFromList()
def updateControlsFromList(self):
"""Set the ui state to match the keywords of the
currently active layer.
Args:
None
Returns:
None.
Raises:
no exceptions explicitly raised."""
mySubcategory = self.getValueForKey('subcategory')
myUnits = self.getValueForKey('unit')
myType = self.getValueForKey('datatype')
myTitle = self.getValueForKey('title')
if myTitle is not None:
self.leTitle.setText(myTitle)
elif self.layer is not None:
myLayerName = self.layer.name()
self.lblLayerName.setText(myLayerName)
else:
self.lblLayerName.setText('')
if self.radExposure.isChecked():
if mySubcategory is not None and myType is not None:
self.setSubcategoryList(self.standardExposureList,
mySubcategory + ' [' + myType + ']')
elif mySubcategory is not None:
self.setSubcategoryList(self.standardExposureList,
mySubcategory)
else:
self.setSubcategoryList(self.standardExposureList,
self.tr('Not Set'))
else:
if mySubcategory is not None and myUnits is not None:
self.setSubcategoryList(self.standardHazardList,
mySubcategory + ' [' + myUnits + ']')
elif mySubcategory is not None:
self.setSubcategoryList(self.standardHazardList,
mySubcategory)
else:
self.setSubcategoryList(self.standardHazardList,
self.tr('Not Set'))
# prevents actions being handled twice
@pyqtSignature('QString')
def on_leTitle_textEdited(self, theText):
"""Update the keywords list whenver the user changes the title.
This slot is not called is the title is changed programmatically.
Args:
None
Returns:
dict - a dictionary of keyword reflecting the state of the dialog.
Raises:
no exceptions explicitly raised."""
self.addListEntry('title', str(theText))
def getKeywords(self):
"""Obtain the state of the dialog as a keywords dict
Args:
None
Returns:
dict - a dictionary of keyword reflecting the state of the dialog.
Raises:
no exceptions explicitly raised."""
#make sure title is listed
if str(self.leTitle.text()) != '':
self.addListEntry('title', str(self.leTitle.text()))
myKeywords = {}
for myCounter in range(self.lstKeywords.count()):
myExistingItem = self.lstKeywords.item(myCounter)
myText = myExistingItem.text()
myTokens = myText.split(':')
myKey = str(myTokens[0]).strip()
myValue = str(myTokens[1]).strip()
myKeywords[myKey] = myValue
return myKeywords
def accept(self):
"""Automatic slot executed when the ok button is pressed.
It will write out the keywords for the layer that is active.
Args:
None
Returns:
None.
Raises:
no exceptions explicitly raised."""
myKeywords = self.getKeywords()
try:
self.keywordIO.writeKeywords(self.layer, myKeywords)
except InaSAFEError, e:
QtGui.QMessageBox.warning(self, self.tr('InaSAFE'),
((self.tr('An error was encountered when saving the keywords:\n'
'%s' % str(getExceptionWithStacktrace(e))))))
if self.dock is not None:
self.dock.getLayers()
self.close()
| ingenieroariel/inasafe | safe_qgis/keywords_dialog.py | Python | gpl-3.0 | 24,200 | 0.000579 |
#------------------------------------------------------------------------------
# Copyright (c) 2007, Riverbank Computing Limited
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD license.
# However, when used with the GPL version of PyQt the additional terms described in the PyQt GPL exception also apply
#
# Author: Riverbank Computing Limited
# Description: <Enthought pyface package component>
#------------------------------------------------------------------------------
# Standard library imports.
import sys
# Major package imports.
from pyface.qt import QtCore, QtGui
# Enthought library imports.
from traits.api import Bool, Event, provides, Unicode
# Local imports.
from pyface.i_python_editor import IPythonEditor, MPythonEditor
from pyface.key_pressed_event import KeyPressedEvent
from pyface.widget import Widget
from pyface.ui.qt4.code_editor.code_widget import AdvancedCodeWidget
@provides(IPythonEditor)
class PythonEditor(MPythonEditor, Widget):
""" The toolkit specific implementation of a PythonEditor. See the
IPythonEditor interface for the API documentation.
"""
#### 'IPythonEditor' interface ############################################
dirty = Bool(False)
path = Unicode
show_line_numbers = Bool(True)
#### Events ####
changed = Event
key_pressed = Event(KeyPressedEvent)
###########################################################################
# 'object' interface.
###########################################################################
def __init__(self, parent, **traits):
super(PythonEditor, self).__init__(**traits)
self.control = self._create_control(parent)
###########################################################################
# 'PythonEditor' interface.
###########################################################################
def load(self, path=None):
""" Loads the contents of the editor.
"""
if path is None:
path = self.path
# We will have no path for a new script.
if len(path) > 0:
f = open(self.path, 'r')
text = f.read()
f.close()
else:
text = ''
self.control.code.setPlainText(text)
self.dirty = False
def save(self, path=None):
""" Saves the contents of the editor.
"""
if path is None:
path = self.path
f = open(path, 'w')
f.write(self.control.code.toPlainText())
f.close()
self.dirty = False
def select_line(self, lineno):
""" Selects the specified line.
"""
self.control.code.set_line_column(lineno, 0)
self.control.code.moveCursor(QtGui.QTextCursor.EndOfLine,
QtGui.QTextCursor.KeepAnchor)
###########################################################################
# Trait handlers.
###########################################################################
def _path_changed(self):
self._changed_path()
def _show_line_numbers_changed(self):
if self.control is not None:
self.control.code.line_number_widget.setVisible(
self.show_line_numbers)
self.control.code.update_line_number_width()
###########################################################################
# Private interface.
###########################################################################
def _create_control(self, parent):
""" Creates the toolkit-specific control for the widget.
"""
self.control = control = AdvancedCodeWidget(parent)
self._show_line_numbers_changed()
# Install event filter to trap key presses.
event_filter = PythonEditorEventFilter(self, self.control)
self.control.installEventFilter(event_filter)
self.control.code.installEventFilter(event_filter)
# Connect signals for text changes.
control.code.modificationChanged.connect(self._on_dirty_changed)
control.code.textChanged.connect(self._on_text_changed)
# Load the editor's contents.
self.load()
return control
def _on_dirty_changed(self, dirty):
""" Called whenever a change is made to the dirty state of the
document.
"""
self.dirty = dirty
def _on_text_changed(self):
""" Called whenever a change is made to the text of the document.
"""
self.changed = True
class PythonEditorEventFilter(QtCore.QObject):
""" A thin wrapper around the advanced code widget to handle the key_pressed
Event.
"""
def __init__(self, editor, parent):
super(PythonEditorEventFilter, self).__init__(parent)
self.__editor = editor
def eventFilter(self, obj, event):
""" Reimplemented to trap key presses.
"""
if self.__editor.control and obj == self.__editor.control and \
event.type() == QtCore.QEvent.FocusOut:
# Hack for Traits UI compatibility.
self.__editor.control.emit(QtCore.SIGNAL('lostFocus'))
elif self.__editor.control and obj == self.__editor.control.code and \
event.type() == QtCore.QEvent.KeyPress:
# Pyface doesn't seem to be Unicode aware. Only keep the key code
# if it corresponds to a single Latin1 character.
kstr = event.text()
try:
kcode = ord(str(kstr))
except:
kcode = 0
mods = event.modifiers()
self.key_pressed = KeyPressedEvent(
alt_down = ((mods & QtCore.Qt.AltModifier) ==
QtCore.Qt.AltModifier),
control_down = ((mods & QtCore.Qt.ControlModifier) ==
QtCore.Qt.ControlModifier),
shift_down = ((mods & QtCore.Qt.ShiftModifier) ==
QtCore.Qt.ShiftModifier),
key_code = kcode,
event = event)
return super(PythonEditorEventFilter, self).eventFilter(obj, event)
| geggo/pyface | pyface/ui/qt4/python_editor.py | Python | bsd-3-clause | 6,248 | 0.004001 |
# -*- coding: utf-8 -*-
###############################################################################
#
# UpdateWalk
# Updates an existing walk action.
#
# Python versions 2.6, 2.7, 3.x
#
# Copyright 2014, Temboo Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class UpdateWalk(Choreography):
def __init__(self, temboo_session):
"""
Create a new instance of the UpdateWalk Choreo. A TembooSession object, containing a valid
set of Temboo credentials, must be supplied.
"""
super(UpdateWalk, self).__init__(temboo_session, '/Library/Facebook/Actions/Fitness/Walks/UpdateWalk')
def new_input_set(self):
return UpdateWalkInputSet()
def _make_result_set(self, result, path):
return UpdateWalkResultSet(result, path)
def _make_execution(self, session, exec_id, path):
return UpdateWalkChoreographyExecution(session, exec_id, path)
class UpdateWalkInputSet(InputSet):
"""
An InputSet with methods appropriate for specifying the inputs to the UpdateWalk
Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
"""
def set_AccessToken(self, value):
"""
Set the value of the AccessToken input for this Choreo. ((required, string) The access token retrieved from the final step of the OAuth process.)
"""
super(UpdateWalkInputSet, self)._set_input('AccessToken', value)
def set_ActionID(self, value):
"""
Set the value of the ActionID input for this Choreo. ((required, string) The id of the action to update.)
"""
super(UpdateWalkInputSet, self)._set_input('ActionID', value)
def set_Course(self, value):
"""
Set the value of the Course input for this Choreo. ((optional, string) The URL or ID for an Open Graph object representing the course.)
"""
super(UpdateWalkInputSet, self)._set_input('Course', value)
def set_EndTime(self, value):
"""
Set the value of the EndTime input for this Choreo. ((optional, date) The time that the user ended the action (e.g. 2013-06-24T18:53:35+0000).)
"""
super(UpdateWalkInputSet, self)._set_input('EndTime', value)
def set_ExpiresIn(self, value):
"""
Set the value of the ExpiresIn input for this Choreo. ((optional, integer) The amount of time (in milliseconds) from the publish_time that the action will expire.)
"""
super(UpdateWalkInputSet, self)._set_input('ExpiresIn', value)
def set_Message(self, value):
"""
Set the value of the Message input for this Choreo. ((optional, string) A message attached to this action. Setting this parameter requires enabling of message capabilities.)
"""
super(UpdateWalkInputSet, self)._set_input('Message', value)
def set_Place(self, value):
"""
Set the value of the Place input for this Choreo. ((optional, string) The URL or ID for an Open Graph object representing the location associated with this action.)
"""
super(UpdateWalkInputSet, self)._set_input('Place', value)
def set_Tags(self, value):
"""
Set the value of the Tags input for this Choreo. ((optional, string) A comma separated list of other profile IDs that also performed this action.)
"""
super(UpdateWalkInputSet, self)._set_input('Tags', value)
class UpdateWalkResultSet(ResultSet):
"""
A ResultSet with methods tailored to the values returned by the UpdateWalk Choreo.
The ResultSet object is used to retrieve the results of a Choreo execution.
"""
def getJSONFromString(self, str):
return json.loads(str)
def get_Response(self):
"""
Retrieve the value for the "Response" output from this Choreo execution. ((boolean) The response from Facebook.)
"""
return self._output.get('Response', None)
class UpdateWalkChoreographyExecution(ChoreographyExecution):
def _make_result_set(self, response, path):
return UpdateWalkResultSet(response, path)
| jordanemedlock/psychtruths | temboo/core/Library/Facebook/Actions/Fitness/Walks/UpdateWalk.py | Python | apache-2.0 | 4,891 | 0.005316 |
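# --- Editorial usage sketch (not part of the generated file above) ---
# A minimal illustration of how a generated Temboo Choreo such as UpdateWalk
# is typically executed. The import paths, the TembooSession constructor
# arguments, the execute_with_results() call and all credential/ID values are
# assumptions based on the structure of the generated code, not verified
# against the Temboo SDK.

from temboo.core.session import TembooSession
from temboo.core.Library.Facebook.Actions.Fitness.Walks.UpdateWalk import UpdateWalk

def update_walk_example():
    # Hypothetical credentials and action id; replace with real values.
    session = TembooSession('ACCOUNT_NAME', 'APP_NAME', 'APP_KEY')
    choreo = UpdateWalk(session)
    inputs = choreo.new_input_set()
    inputs.set_AccessToken('FACEBOOK_OAUTH_TOKEN')       # required
    inputs.set_ActionID('1234567890')                    # required
    inputs.set_Message('Evening walk along the river')   # optional
    results = choreo.execute_with_results(inputs)
    print(results.get_Response())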
import pyaf.Bench.TS_datasets as tsds
import tests.artificial.process_artificial_dataset as art
art.process_dataset(N=32, FREQ='D', seed=0, trendtype="PolyTrend", cycle_length=12, transform="Quantization", sigma=0.0, exog_count=0, ar_order=12)
| antoinecarme/pyaf | tests/artificial/transf_Quantization/trend_PolyTrend/cycle_12/ar_12/test_artificial_32_Quantization_PolyTrend_12_12_0.py | Python | bsd-3-clause | 267 | 0.086142 |
# encoding: utf-8
"""Tests for traitlets.traitlets."""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
#
# Adapted from enthought.traits, Copyright (c) Enthought, Inc.,
# also under the terms of the Modified BSD License.
import pickle
import re
import sys
from ._warnings import expected_warnings
from unittest import TestCase
import pytest
from pytest import mark
from traitlets import (
HasTraits, MetaHasTraits, TraitType, Any, Bool, CBytes, Dict, Enum,
Int, CInt, Long, CLong, Integer, Float, CFloat, Complex, Bytes, Unicode,
TraitError, Union, All, Undefined, Type, This, Instance, TCPAddress,
List, Tuple, ObjectName, DottedObjectName, CRegExp, link, directional_link,
ForwardDeclaredType, ForwardDeclaredInstance, validate, observe, default,
observe_compat, BaseDescriptor, HasDescriptors,
)
import six
def change_dict(*ordered_values):
change_names = ('name', 'old', 'new', 'owner', 'type')
return dict(zip(change_names, ordered_values))
#-----------------------------------------------------------------------------
# Helper classes for testing
#-----------------------------------------------------------------------------
class HasTraitsStub(HasTraits):
def notify_change(self, change):
self._notify_name = change['name']
self._notify_old = change['old']
self._notify_new = change['new']
self._notify_type = change['type']
#-----------------------------------------------------------------------------
# Test classes
#-----------------------------------------------------------------------------
class TestTraitType(TestCase):
def test_get_undefined(self):
class A(HasTraits):
a = TraitType
a = A()
with self.assertRaises(TraitError):
a.a
def test_set(self):
class A(HasTraitsStub):
a = TraitType
a = A()
a.a = 10
self.assertEqual(a.a, 10)
self.assertEqual(a._notify_name, 'a')
self.assertEqual(a._notify_old, Undefined)
self.assertEqual(a._notify_new, 10)
def test_validate(self):
class MyTT(TraitType):
def validate(self, inst, value):
return -1
class A(HasTraitsStub):
tt = MyTT
a = A()
a.tt = 10
self.assertEqual(a.tt, -1)
def test_default_validate(self):
class MyIntTT(TraitType):
def validate(self, obj, value):
if isinstance(value, int):
return value
self.error(obj, value)
class A(HasTraits):
tt = MyIntTT(10)
a = A()
self.assertEqual(a.tt, 10)
# Defaults are validated when the HasTraits is instantiated
class B(HasTraits):
tt = MyIntTT('bad default')
self.assertRaises(TraitError, B)
def test_info(self):
class A(HasTraits):
tt = TraitType
a = A()
self.assertEqual(A.tt.info(), 'any value')
def test_error(self):
class A(HasTraits):
tt = TraitType
a = A()
self.assertRaises(TraitError, A.tt.error, a, 10)
def test_deprecated_dynamic_initializer(self):
class A(HasTraits):
x = Int(10)
def _x_default(self):
return 11
class B(A):
x = Int(20)
class C(A):
def _x_default(self):
return 21
a = A()
self.assertEqual(a._trait_values, {})
self.assertEqual(a.x, 11)
self.assertEqual(a._trait_values, {'x': 11})
b = B()
self.assertEqual(b.x, 20)
self.assertEqual(b._trait_values, {'x': 20})
c = C()
self.assertEqual(c._trait_values, {})
self.assertEqual(c.x, 21)
self.assertEqual(c._trait_values, {'x': 21})
# Ensure that the base class remains unmolested when the _default
# initializer gets overridden in a subclass.
a = A()
c = C()
self.assertEqual(a._trait_values, {})
self.assertEqual(a.x, 11)
self.assertEqual(a._trait_values, {'x': 11})
def test_dynamic_initializer(self):
class A(HasTraits):
x = Int(10)
@default('x')
def _default_x(self):
return 11
class B(A):
x = Int(20)
class C(A):
@default('x')
def _default_x(self):
return 21
a = A()
self.assertEqual(a._trait_values, {})
self.assertEqual(a.x, 11)
self.assertEqual(a._trait_values, {'x': 11})
b = B()
self.assertEqual(b.x, 20)
self.assertEqual(b._trait_values, {'x': 20})
c = C()
self.assertEqual(c._trait_values, {})
self.assertEqual(c.x, 21)
self.assertEqual(c._trait_values, {'x': 21})
# Ensure that the base class remains unmolested when the _default
# initializer gets overridden in a subclass.
a = A()
c = C()
self.assertEqual(a._trait_values, {})
self.assertEqual(a.x, 11)
self.assertEqual(a._trait_values, {'x': 11})
def test_tag_metadata(self):
class MyIntTT(TraitType):
metadata = {'a': 1, 'b': 2}
a = MyIntTT(10).tag(b=3, c=4)
self.assertEqual(a.metadata, {'a': 1, 'b': 3, 'c': 4})
def test_metadata_localized_instance(self):
class MyIntTT(TraitType):
metadata = {'a': 1, 'b': 2}
a = MyIntTT(10)
b = MyIntTT(10)
a.metadata['c'] = 3
# make sure that changing a's metadata didn't change b's metadata
self.assertNotIn('c', b.metadata)
def test_union_metadata(self):
class Foo(HasTraits):
bar = (Int().tag(ta=1) | Dict().tag(ta=2, ti='b')).tag(ti='a')
foo = Foo()
# At this point, no value has been set for bar, so value-specific
# is not set.
self.assertEqual(foo.trait_metadata('bar', 'ta'), None)
self.assertEqual(foo.trait_metadata('bar', 'ti'), 'a')
foo.bar = {}
self.assertEqual(foo.trait_metadata('bar', 'ta'), 2)
self.assertEqual(foo.trait_metadata('bar', 'ti'), 'b')
foo.bar = 1
self.assertEqual(foo.trait_metadata('bar', 'ta'), 1)
self.assertEqual(foo.trait_metadata('bar', 'ti'), 'a')
def test_union_default_value(self):
class Foo(HasTraits):
bar = Union([Dict(), Int()], default_value=1)
foo = Foo()
self.assertEqual(foo.bar, 1)
def test_deprecated_metadata_access(self):
class MyIntTT(TraitType):
metadata = {'a': 1, 'b': 2}
a = MyIntTT(10)
with expected_warnings(["use the instance .metadata dictionary directly"]*2):
a.set_metadata('key', 'value')
v = a.get_metadata('key')
self.assertEqual(v, 'value')
with expected_warnings(["use the instance .help string directly"]*2):
a.set_metadata('help', 'some help')
v = a.get_metadata('help')
self.assertEqual(v, 'some help')
def test_trait_types_deprecated(self):
with expected_warnings(["Traits should be given as instances"]):
class C(HasTraits):
t = Int
def test_trait_types_list_deprecated(self):
with expected_warnings(["Traits should be given as instances"]):
class C(HasTraits):
t = List(Int)
def test_trait_types_tuple_deprecated(self):
with expected_warnings(["Traits should be given as instances"]):
class C(HasTraits):
t = Tuple(Int)
def test_trait_types_dict_deprecated(self):
with expected_warnings(["Traits should be given as instances"]):
class C(HasTraits):
t = Dict(Int)
class TestHasDescriptorsMeta(TestCase):
def test_metaclass(self):
self.assertEqual(type(HasTraits), MetaHasTraits)
class A(HasTraits):
a = Int()
a = A()
self.assertEqual(type(a.__class__), MetaHasTraits)
self.assertEqual(a.a,0)
a.a = 10
self.assertEqual(a.a,10)
class B(HasTraits):
b = Int()
b = B()
self.assertEqual(b.b,0)
b.b = 10
self.assertEqual(b.b,10)
class C(HasTraits):
c = Int(30)
c = C()
self.assertEqual(c.c,30)
c.c = 10
self.assertEqual(c.c,10)
def test_this_class(self):
class A(HasTraits):
t = This()
tt = This()
class B(A):
tt = This()
ttt = This()
self.assertEqual(A.t.this_class, A)
self.assertEqual(B.t.this_class, A)
self.assertEqual(B.tt.this_class, B)
self.assertEqual(B.ttt.this_class, B)
class TestHasDescriptors(TestCase):
def test_setup_instance(self):
class FooDescriptor(BaseDescriptor):
def instance_init(self, inst):
foo = inst.foo # instance should have the attr
class HasFooDescriptors(HasDescriptors):
fd = FooDescriptor()
def setup_instance(self, *args, **kwargs):
self.foo = kwargs.get('foo', None)
super(HasFooDescriptors, self).setup_instance(*args, **kwargs)
hfd = HasFooDescriptors(foo='bar')
class TestHasTraitsNotify(TestCase):
def setUp(self):
self._notify1 = []
self._notify2 = []
def notify1(self, name, old, new):
self._notify1.append((name, old, new))
def notify2(self, name, old, new):
self._notify2.append((name, old, new))
def test_notify_all(self):
class A(HasTraits):
a = Int()
b = Float()
a = A()
a.on_trait_change(self.notify1)
a.a = 0
self.assertEqual(len(self._notify1),0)
a.b = 0.0
self.assertEqual(len(self._notify1),0)
a.a = 10
self.assertTrue(('a',0,10) in self._notify1)
a.b = 10.0
self.assertTrue(('b',0.0,10.0) in self._notify1)
self.assertRaises(TraitError,setattr,a,'a','bad string')
self.assertRaises(TraitError,setattr,a,'b','bad string')
self._notify1 = []
a.on_trait_change(self.notify1,remove=True)
a.a = 20
a.b = 20.0
self.assertEqual(len(self._notify1),0)
def test_notify_one(self):
class A(HasTraits):
a = Int()
b = Float()
a = A()
a.on_trait_change(self.notify1, 'a')
a.a = 0
self.assertEqual(len(self._notify1),0)
a.a = 10
self.assertTrue(('a',0,10) in self._notify1)
self.assertRaises(TraitError,setattr,a,'a','bad string')
def test_subclass(self):
class A(HasTraits):
a = Int()
class B(A):
b = Float()
b = B()
self.assertEqual(b.a,0)
self.assertEqual(b.b,0.0)
b.a = 100
b.b = 100.0
self.assertEqual(b.a,100)
self.assertEqual(b.b,100.0)
def test_notify_subclass(self):
class A(HasTraits):
a = Int()
class B(A):
b = Float()
b = B()
b.on_trait_change(self.notify1, 'a')
b.on_trait_change(self.notify2, 'b')
b.a = 0
b.b = 0.0
self.assertEqual(len(self._notify1),0)
self.assertEqual(len(self._notify2),0)
b.a = 10
b.b = 10.0
self.assertTrue(('a',0,10) in self._notify1)
self.assertTrue(('b',0.0,10.0) in self._notify2)
def test_static_notify(self):
class A(HasTraits):
a = Int()
_notify1 = []
def _a_changed(self, name, old, new):
self._notify1.append((name, old, new))
a = A()
a.a = 0
# This is broken!!!
self.assertEqual(len(a._notify1),0)
a.a = 10
self.assertTrue(('a',0,10) in a._notify1)
class B(A):
b = Float()
_notify2 = []
def _b_changed(self, name, old, new):
self._notify2.append((name, old, new))
b = B()
b.a = 10
b.b = 10.0
self.assertTrue(('a',0,10) in b._notify1)
self.assertTrue(('b',0.0,10.0) in b._notify2)
def test_notify_args(self):
def callback0():
self.cb = ()
def callback1(name):
self.cb = (name,)
def callback2(name, new):
self.cb = (name, new)
def callback3(name, old, new):
self.cb = (name, old, new)
def callback4(name, old, new, obj):
self.cb = (name, old, new, obj)
class A(HasTraits):
a = Int()
a = A()
a.on_trait_change(callback0, 'a')
a.a = 10
self.assertEqual(self.cb,())
a.on_trait_change(callback0, 'a', remove=True)
a.on_trait_change(callback1, 'a')
a.a = 100
self.assertEqual(self.cb,('a',))
a.on_trait_change(callback1, 'a', remove=True)
a.on_trait_change(callback2, 'a')
a.a = 1000
self.assertEqual(self.cb,('a',1000))
a.on_trait_change(callback2, 'a', remove=True)
a.on_trait_change(callback3, 'a')
a.a = 10000
self.assertEqual(self.cb,('a',1000,10000))
a.on_trait_change(callback3, 'a', remove=True)
a.on_trait_change(callback4, 'a')
a.a = 100000
self.assertEqual(self.cb,('a',10000,100000,a))
self.assertEqual(len(a._trait_notifiers['a']['change']), 1)
a.on_trait_change(callback4, 'a', remove=True)
self.assertEqual(len(a._trait_notifiers['a']['change']), 0)
def test_notify_only_once(self):
class A(HasTraits):
listen_to = ['a']
a = Int(0)
b = 0
def __init__(self, **kwargs):
super(A, self).__init__(**kwargs)
self.on_trait_change(self.listener1, ['a'])
def listener1(self, name, old, new):
self.b += 1
class B(A):
c = 0
d = 0
def __init__(self, **kwargs):
super(B, self).__init__(**kwargs)
self.on_trait_change(self.listener2)
def listener2(self, name, old, new):
self.c += 1
def _a_changed(self, name, old, new):
self.d += 1
b = B()
b.a += 1
self.assertEqual(b.b, b.c)
self.assertEqual(b.b, b.d)
b.a += 1
self.assertEqual(b.b, b.c)
self.assertEqual(b.b, b.d)
class TestObserveDecorator(TestCase):
def setUp(self):
self._notify1 = []
self._notify2 = []
def notify1(self, change):
self._notify1.append(change)
def notify2(self, change):
self._notify2.append(change)
def test_notify_all(self):
class A(HasTraits):
a = Int()
b = Float()
a = A()
a.observe(self.notify1)
a.a = 0
self.assertEqual(len(self._notify1),0)
a.b = 0.0
self.assertEqual(len(self._notify1),0)
a.a = 10
change = change_dict('a', 0, 10, a, 'change')
self.assertTrue(change in self._notify1)
a.b = 10.0
change = change_dict('b', 0.0, 10.0, a, 'change')
self.assertTrue(change in self._notify1)
self.assertRaises(TraitError,setattr,a,'a','bad string')
self.assertRaises(TraitError,setattr,a,'b','bad string')
self._notify1 = []
a.unobserve(self.notify1)
a.a = 20
a.b = 20.0
self.assertEqual(len(self._notify1),0)
def test_notify_one(self):
class A(HasTraits):
a = Int()
b = Float()
a = A()
a.observe(self.notify1, 'a')
a.a = 0
self.assertEqual(len(self._notify1),0)
a.a = 10
change = change_dict('a', 0, 10, a, 'change')
self.assertTrue(change in self._notify1)
self.assertRaises(TraitError,setattr,a,'a','bad string')
def test_subclass(self):
class A(HasTraits):
a = Int()
class B(A):
b = Float()
b = B()
self.assertEqual(b.a,0)
self.assertEqual(b.b,0.0)
b.a = 100
b.b = 100.0
self.assertEqual(b.a,100)
self.assertEqual(b.b,100.0)
def test_notify_subclass(self):
class A(HasTraits):
a = Int()
class B(A):
b = Float()
b = B()
b.observe(self.notify1, 'a')
b.observe(self.notify2, 'b')
b.a = 0
b.b = 0.0
self.assertEqual(len(self._notify1),0)
self.assertEqual(len(self._notify2),0)
b.a = 10
b.b = 10.0
change = change_dict('a', 0, 10, b, 'change')
self.assertTrue(change in self._notify1)
change = change_dict('b', 0.0, 10.0, b, 'change')
self.assertTrue(change in self._notify2)
def test_static_notify(self):
class A(HasTraits):
a = Int()
b = Int()
_notify1 = []
_notify_any = []
@observe('a')
def _a_changed(self, change):
self._notify1.append(change)
@observe(All)
def _any_changed(self, change):
self._notify_any.append(change)
a = A()
a.a = 0
self.assertEqual(len(a._notify1),0)
a.a = 10
change = change_dict('a', 0, 10, a, 'change')
self.assertTrue(change in a._notify1)
a.b = 1
self.assertEqual(len(a._notify_any), 2)
change = change_dict('b', 0, 1, a, 'change')
self.assertTrue(change in a._notify_any)
class B(A):
b = Float()
_notify2 = []
@observe('b')
def _b_changed(self, change):
self._notify2.append(change)
b = B()
b.a = 10
b.b = 10.0
change = change_dict('a', 0, 10, b, 'change')
self.assertTrue(change in b._notify1)
change = change_dict('b', 0.0, 10.0, b, 'change')
self.assertTrue(change in b._notify2)
def test_notify_args(self):
def callback0():
self.cb = ()
def callback1(change):
self.cb = change
class A(HasTraits):
a = Int()
a = A()
a.on_trait_change(callback0, 'a')
a.a = 10
self.assertEqual(self.cb,())
a.unobserve(callback0, 'a')
a.observe(callback1, 'a')
a.a = 100
change = change_dict('a', 10, 100, a, 'change')
self.assertEqual(self.cb, change)
self.assertEqual(len(a._trait_notifiers['a']['change']), 1)
a.unobserve(callback1, 'a')
self.assertEqual(len(a._trait_notifiers['a']['change']), 0)
def test_notify_only_once(self):
class A(HasTraits):
listen_to = ['a']
a = Int(0)
b = 0
def __init__(self, **kwargs):
super(A, self).__init__(**kwargs)
self.observe(self.listener1, ['a'])
def listener1(self, change):
self.b += 1
class B(A):
c = 0
d = 0
def __init__(self, **kwargs):
super(B, self).__init__(**kwargs)
self.observe(self.listener2)
def listener2(self, change):
self.c += 1
@observe('a')
def _a_changed(self, change):
self.d += 1
b = B()
b.a += 1
self.assertEqual(b.b, b.c)
self.assertEqual(b.b, b.d)
b.a += 1
self.assertEqual(b.b, b.c)
self.assertEqual(b.b, b.d)
class TestHasTraits(TestCase):
def test_trait_names(self):
class A(HasTraits):
i = Int()
f = Float()
a = A()
self.assertEqual(sorted(a.trait_names()),['f','i'])
self.assertEqual(sorted(A.class_trait_names()),['f','i'])
self.assertTrue(a.has_trait('f'))
self.assertFalse(a.has_trait('g'))
def test_trait_metadata_deprecated(self):
with expected_warnings(['metadata should be set using the \.tag\(\) method']):
class A(HasTraits):
i = Int(config_key='MY_VALUE')
a = A()
self.assertEqual(a.trait_metadata('i','config_key'), 'MY_VALUE')
def test_trait_metadata(self):
class A(HasTraits):
i = Int().tag(config_key='MY_VALUE')
a = A()
self.assertEqual(a.trait_metadata('i','config_key'), 'MY_VALUE')
def test_trait_metadata_default(self):
class A(HasTraits):
i = Int()
a = A()
self.assertEqual(a.trait_metadata('i', 'config_key'), None)
self.assertEqual(a.trait_metadata('i', 'config_key', 'default'), 'default')
def test_traits(self):
class A(HasTraits):
i = Int()
f = Float()
a = A()
self.assertEqual(a.traits(), dict(i=A.i, f=A.f))
self.assertEqual(A.class_traits(), dict(i=A.i, f=A.f))
def test_traits_metadata(self):
class A(HasTraits):
i = Int().tag(config_key='VALUE1', other_thing='VALUE2')
f = Float().tag(config_key='VALUE3', other_thing='VALUE2')
j = Int(0)
a = A()
self.assertEqual(a.traits(), dict(i=A.i, f=A.f, j=A.j))
traits = a.traits(config_key='VALUE1', other_thing='VALUE2')
self.assertEqual(traits, dict(i=A.i))
# This passes, but it shouldn't because I am replicating a bug in
# traits.
traits = a.traits(config_key=lambda v: True)
self.assertEqual(traits, dict(i=A.i, f=A.f, j=A.j))
def test_traits_metadata_deprecated(self):
with expected_warnings(['metadata should be set using the \.tag\(\) method']*2):
class A(HasTraits):
i = Int(config_key='VALUE1', other_thing='VALUE2')
f = Float(config_key='VALUE3', other_thing='VALUE2')
j = Int(0)
a = A()
self.assertEqual(a.traits(), dict(i=A.i, f=A.f, j=A.j))
traits = a.traits(config_key='VALUE1', other_thing='VALUE2')
self.assertEqual(traits, dict(i=A.i))
# This passes, but it shouldn't because I am replicating a bug in
# traits.
traits = a.traits(config_key=lambda v: True)
self.assertEqual(traits, dict(i=A.i, f=A.f, j=A.j))
def test_init(self):
class A(HasTraits):
i = Int()
x = Float()
a = A(i=1, x=10.0)
self.assertEqual(a.i, 1)
self.assertEqual(a.x, 10.0)
def test_positional_args(self):
class A(HasTraits):
i = Int(0)
def __init__(self, i):
super(A, self).__init__()
self.i = i
a = A(5)
self.assertEqual(a.i, 5)
# should raise TypeError if no positional arg given
self.assertRaises(TypeError, A)
#-----------------------------------------------------------------------------
# Tests for specific trait types
#-----------------------------------------------------------------------------
class TestType(TestCase):
def test_default(self):
class B(object): pass
class A(HasTraits):
klass = Type(allow_none=True)
a = A()
self.assertEqual(a.klass, object)
a.klass = B
self.assertEqual(a.klass, B)
self.assertRaises(TraitError, setattr, a, 'klass', 10)
def test_default_options(self):
class B(object): pass
class C(B): pass
class A(HasTraits):
# Different possible combinations of options for default_value
# and klass. default_value=None is only valid with allow_none=True.
k1 = Type()
k2 = Type(None, allow_none=True)
k3 = Type(B)
k4 = Type(klass=B)
k5 = Type(default_value=None, klass=B, allow_none=True)
k6 = Type(default_value=C, klass=B)
self.assertIs(A.k1.default_value, object)
self.assertIs(A.k1.klass, object)
self.assertIs(A.k2.default_value, None)
self.assertIs(A.k2.klass, object)
self.assertIs(A.k3.default_value, B)
self.assertIs(A.k3.klass, B)
self.assertIs(A.k4.default_value, B)
self.assertIs(A.k4.klass, B)
self.assertIs(A.k5.default_value, None)
self.assertIs(A.k5.klass, B)
self.assertIs(A.k6.default_value, C)
self.assertIs(A.k6.klass, B)
a = A()
self.assertIs(a.k1, object)
self.assertIs(a.k2, None)
self.assertIs(a.k3, B)
self.assertIs(a.k4, B)
self.assertIs(a.k5, None)
self.assertIs(a.k6, C)
def test_value(self):
class B(object): pass
class C(object): pass
class A(HasTraits):
klass = Type(B)
a = A()
self.assertEqual(a.klass, B)
self.assertRaises(TraitError, setattr, a, 'klass', C)
self.assertRaises(TraitError, setattr, a, 'klass', object)
a.klass = B
def test_allow_none(self):
class B(object): pass
class C(B): pass
class A(HasTraits):
klass = Type(B)
a = A()
self.assertEqual(a.klass, B)
self.assertRaises(TraitError, setattr, a, 'klass', None)
a.klass = C
self.assertEqual(a.klass, C)
def test_validate_klass(self):
class A(HasTraits):
klass = Type('no strings allowed')
self.assertRaises(ImportError, A)
class A(HasTraits):
klass = Type('rub.adub.Duck')
self.assertRaises(ImportError, A)
def test_validate_default(self):
class B(object): pass
class A(HasTraits):
klass = Type('bad default', B)
self.assertRaises(ImportError, A)
class C(HasTraits):
klass = Type(None, B)
self.assertRaises(TraitError, C)
def test_str_klass(self):
class A(HasTraits):
klass = Type('ipython_genutils.ipstruct.Struct')
from ipython_genutils.ipstruct import Struct
a = A()
a.klass = Struct
self.assertEqual(a.klass, Struct)
self.assertRaises(TraitError, setattr, a, 'klass', 10)
def test_set_str_klass(self):
class A(HasTraits):
klass = Type()
a = A(klass='ipython_genutils.ipstruct.Struct')
from ipython_genutils.ipstruct import Struct
self.assertEqual(a.klass, Struct)
class TestInstance(TestCase):
def test_basic(self):
class Foo(object): pass
class Bar(Foo): pass
class Bah(object): pass
class A(HasTraits):
inst = Instance(Foo, allow_none=True)
a = A()
self.assertTrue(a.inst is None)
a.inst = Foo()
self.assertTrue(isinstance(a.inst, Foo))
a.inst = Bar()
self.assertTrue(isinstance(a.inst, Foo))
self.assertRaises(TraitError, setattr, a, 'inst', Foo)
self.assertRaises(TraitError, setattr, a, 'inst', Bar)
self.assertRaises(TraitError, setattr, a, 'inst', Bah())
def test_default_klass(self):
class Foo(object): pass
class Bar(Foo): pass
class Bah(object): pass
class FooInstance(Instance):
klass = Foo
class A(HasTraits):
inst = FooInstance(allow_none=True)
a = A()
self.assertTrue(a.inst is None)
a.inst = Foo()
self.assertTrue(isinstance(a.inst, Foo))
a.inst = Bar()
self.assertTrue(isinstance(a.inst, Foo))
self.assertRaises(TraitError, setattr, a, 'inst', Foo)
self.assertRaises(TraitError, setattr, a, 'inst', Bar)
self.assertRaises(TraitError, setattr, a, 'inst', Bah())
def test_unique_default_value(self):
class Foo(object): pass
class A(HasTraits):
inst = Instance(Foo,(),{})
a = A()
b = A()
self.assertTrue(a.inst is not b.inst)
def test_args_kw(self):
class Foo(object):
def __init__(self, c): self.c = c
class Bar(object): pass
class Bah(object):
def __init__(self, c, d):
self.c = c; self.d = d
class A(HasTraits):
inst = Instance(Foo, (10,))
a = A()
self.assertEqual(a.inst.c, 10)
class B(HasTraits):
inst = Instance(Bah, args=(10,), kw=dict(d=20))
b = B()
self.assertEqual(b.inst.c, 10)
self.assertEqual(b.inst.d, 20)
class C(HasTraits):
inst = Instance(Foo, allow_none=True)
c = C()
self.assertTrue(c.inst is None)
def test_bad_default(self):
class Foo(object): pass
class A(HasTraits):
inst = Instance(Foo)
a = A()
with self.assertRaises(TraitError):
a.inst
def test_instance(self):
class Foo(object): pass
def inner():
class A(HasTraits):
inst = Instance(Foo())
self.assertRaises(TraitError, inner)
class TestThis(TestCase):
def test_this_class(self):
class Foo(HasTraits):
this = This()
f = Foo()
self.assertEqual(f.this, None)
g = Foo()
f.this = g
self.assertEqual(f.this, g)
self.assertRaises(TraitError, setattr, f, 'this', 10)
def test_this_inst(self):
class Foo(HasTraits):
this = This()
f = Foo()
f.this = Foo()
self.assertTrue(isinstance(f.this, Foo))
def test_subclass(self):
class Foo(HasTraits):
t = This()
class Bar(Foo):
pass
f = Foo()
b = Bar()
f.t = b
b.t = f
self.assertEqual(f.t, b)
self.assertEqual(b.t, f)
def test_subclass_override(self):
class Foo(HasTraits):
t = This()
class Bar(Foo):
t = This()
f = Foo()
b = Bar()
f.t = b
self.assertEqual(f.t, b)
self.assertRaises(TraitError, setattr, b, 't', f)
def test_this_in_container(self):
class Tree(HasTraits):
value = Unicode()
leaves = List(This())
tree = Tree(
value='foo',
leaves=[Tree(value='bar'), Tree(value='buzz')]
)
with self.assertRaises(TraitError):
tree.leaves = [1, 2]
class TraitTestBase(TestCase):
"""A best testing class for basic trait types."""
def assign(self, value):
self.obj.value = value
def coerce(self, value):
return value
def test_good_values(self):
if hasattr(self, '_good_values'):
for value in self._good_values:
self.assign(value)
self.assertEqual(self.obj.value, self.coerce(value))
def test_bad_values(self):
if hasattr(self, '_bad_values'):
for value in self._bad_values:
try:
self.assertRaises(TraitError, self.assign, value)
except AssertionError:
assert False, value
def test_default_value(self):
if hasattr(self, '_default_value'):
self.assertEqual(self._default_value, self.obj.value)
def test_allow_none(self):
if (hasattr(self, '_bad_values') and hasattr(self, '_good_values') and
None in self._bad_values):
trait=self.obj.traits()['value']
try:
trait.allow_none = True
self._bad_values.remove(None)
#skip coerce. Allow None casts None to None.
self.assign(None)
self.assertEqual(self.obj.value,None)
self.test_good_values()
self.test_bad_values()
finally:
#tear down
trait.allow_none = False
self._bad_values.append(None)
def tearDown(self):
# restore default value after tests, if set
if hasattr(self, '_default_value'):
self.obj.value = self._default_value
class AnyTrait(HasTraits):
value = Any()
class AnyTraitTest(TraitTestBase):
obj = AnyTrait()
_default_value = None
_good_values = [10.0, 'ten', u'ten', [10], {'ten': 10},(10,), None, 1j]
_bad_values = []
class UnionTrait(HasTraits):
value = Union([Type(), Bool()])
class UnionTraitTest(TraitTestBase):
obj = UnionTrait(value='ipython_genutils.ipstruct.Struct')
_good_values = [int, float, True]
_bad_values = [[], (0,), 1j]
class OrTrait(HasTraits):
value = Bool() | Unicode()
class OrTraitTest(TraitTestBase):
obj = OrTrait()
_good_values = [True, False, 'ten']
_bad_values = [[], (0,), 1j]
class IntTrait(HasTraits):
value = Int(99, min=-100)
class TestInt(TraitTestBase):
obj = IntTrait()
_default_value = 99
_good_values = [10, -10]
_bad_values = ['ten', u'ten', [10], {'ten': 10}, (10,), None, 1j,
10.1, -10.1, '10L', '-10L', '10.1', '-10.1', u'10L',
u'-10L', u'10.1', u'-10.1', '10', '-10', u'10', -200]
if not six.PY3:
_bad_values.extend([long(10), long(-10), 10*sys.maxint, -10*sys.maxint])
class CIntTrait(HasTraits):
value = CInt('5')
class TestCInt(TraitTestBase):
obj = CIntTrait()
_default_value = 5
_good_values = ['10', '-10', u'10', u'-10', 10, 10.0, -10.0, 10.1]
_bad_values = ['ten', u'ten', [10], {'ten': 10},(10,),
None, 1j, '10.1', u'10.1']
def coerce(self, n):
return int(n)
class MinBoundCIntTrait(HasTraits):
value = CInt('5', min=3)
class TestMinBoundCInt(TestCInt):
obj = MinBoundCIntTrait()
_default_value = 5
_good_values = [3, 3.0, '3']
_bad_values = [2.6, 2, -3, -3.0]
class LongTrait(HasTraits):
value = Long(99 if six.PY3 else long(99))
class TestLong(TraitTestBase):
obj = LongTrait()
_default_value = 99 if six.PY3 else long(99)
_good_values = [10, -10]
_bad_values = ['ten', u'ten', [10], {'ten': 10},(10,),
None, 1j, 10.1, -10.1, '10', '-10', '10L', '-10L', '10.1',
'-10.1', u'10', u'-10', u'10L', u'-10L', u'10.1',
u'-10.1']
if not six.PY3:
# maxint undefined on py3, because int == long
_good_values.extend([long(10), long(-10), 10*sys.maxint, -10*sys.maxint])
_bad_values.extend([[long(10)], (long(10),)])
@mark.skipif(six.PY3, reason="not relevant on py3")
def test_cast_small(self):
"""Long casts ints to long"""
self.obj.value = 10
self.assertEqual(type(self.obj.value), long)
class MinBoundLongTrait(HasTraits):
value = Long(99 if six.PY3 else long(99), min=5)
class TestMinBoundLong(TraitTestBase):
obj = MinBoundLongTrait()
_default_value = 99 if six.PY3 else long(99)
_good_values = [5, 10]
_bad_values = [4, -10]
class MaxBoundLongTrait(HasTraits):
value = Long(5 if six.PY3 else long(5), max=10)
class TestMaxBoundLong(TraitTestBase):
obj = MaxBoundLongTrait()
_default_value = 5 if six.PY3 else long(5)
_good_values = [10, -2]
_bad_values = [11, 20]
class CLongTrait(HasTraits):
value = CLong('5')
class TestCLong(TraitTestBase):
obj = CLongTrait()
_default_value = 5 if six.PY3 else long(5)
_good_values = ['10', '-10', u'10', u'-10', 10, 10.0, -10.0, 10.1]
_bad_values = ['ten', u'ten', [10], {'ten': 10},(10,),
None, 1j, '10.1', u'10.1']
def coerce(self, n):
return int(n) if six.PY3 else long(n)
class MaxBoundCLongTrait(HasTraits):
value = CLong('5', max=10)
class TestMaxBoundCLong(TestCLong):
obj = MaxBoundCLongTrait()
_default_value = 5 if six.PY3 else long(5)
_good_values = [10, '10', 10.3]
_bad_values = [11.0, '11']
class IntegerTrait(HasTraits):
value = Integer(1)
class TestInteger(TestLong):
obj = IntegerTrait()
_default_value = 1
def coerce(self, n):
return int(n)
@mark.skipif(six.PY3, reason="not relevant on py3")
def test_cast_small(self):
"""Integer casts small longs to int"""
self.obj.value = long(100)
self.assertEqual(type(self.obj.value), int)
class MinBoundIntegerTrait(HasTraits):
value = Integer(5, min=3)
class TestMinBoundInteger(TraitTestBase):
obj = MinBoundIntegerTrait()
_default_value = 5
    _good_values = [3, 20]
_bad_values = [2, -10]
class MaxBoundIntegerTrait(HasTraits):
value = Integer(1, max=3)
class TestMaxBoundInteger(TraitTestBase):
obj = MaxBoundIntegerTrait()
_default_value = 1
    _good_values = [3, -2]
_bad_values = [4, 10]
class FloatTrait(HasTraits):
value = Float(99.0, max=200.0)
class TestFloat(TraitTestBase):
obj = FloatTrait()
_default_value = 99.0
_good_values = [10, -10, 10.1, -10.1]
_bad_values = ['ten', u'ten', [10], {'ten': 10}, (10,), None,
1j, '10', '-10', '10L', '-10L', '10.1', '-10.1', u'10',
u'-10', u'10L', u'-10L', u'10.1', u'-10.1', 201.0]
if not six.PY3:
_bad_values.extend([long(10), long(-10)])
class CFloatTrait(HasTraits):
value = CFloat('99.0', max=200.0)
class TestCFloat(TraitTestBase):
obj = CFloatTrait()
_default_value = 99.0
_good_values = [10, 10.0, 10.5, '10.0', '10', '-10', '10.0', u'10']
_bad_values = ['ten', u'ten', [10], {'ten': 10}, (10,), None, 1j,
200.1, '200.1']
def coerce(self, v):
return float(v)
class ComplexTrait(HasTraits):
value = Complex(99.0-99.0j)
class TestComplex(TraitTestBase):
obj = ComplexTrait()
_default_value = 99.0-99.0j
_good_values = [10, -10, 10.1, -10.1, 10j, 10+10j, 10-10j,
10.1j, 10.1+10.1j, 10.1-10.1j]
_bad_values = [u'10L', u'-10L', 'ten', [10], {'ten': 10},(10,), None]
if not six.PY3:
_bad_values.extend([long(10), long(-10)])
class BytesTrait(HasTraits):
value = Bytes(b'string')
class TestBytes(TraitTestBase):
obj = BytesTrait()
_default_value = b'string'
_good_values = [b'10', b'-10', b'10L',
b'-10L', b'10.1', b'-10.1', b'string']
_bad_values = [10, -10, 10.1, -10.1, 1j, [10],
['ten'],{'ten': 10},(10,), None, u'string']
if not six.PY3:
_bad_values.extend([long(10), long(-10)])
class UnicodeTrait(HasTraits):
value = Unicode(u'unicode')
class TestUnicode(TraitTestBase):
obj = UnicodeTrait()
_default_value = u'unicode'
_good_values = ['10', '-10', '10L', '-10L', '10.1',
'-10.1', '', u'', 'string', u'string', u"€"]
_bad_values = [10, -10, 10.1, -10.1, 1j,
[10], ['ten'], [u'ten'], {'ten': 10},(10,), None]
if not six.PY3:
_bad_values.extend([long(10), long(-10)])
class ObjectNameTrait(HasTraits):
value = ObjectName("abc")
class TestObjectName(TraitTestBase):
obj = ObjectNameTrait()
_default_value = "abc"
_good_values = ["a", "gh", "g9", "g_", "_G", u"a345_"]
_bad_values = [1, "", u"€", "9g", "!", "#abc", "aj@", "a.b", "a()", "a[0]",
None, object(), object]
if sys.version_info[0] < 3:
_bad_values.append(u"þ")
else:
_good_values.append(u"þ") # þ=1 is valid in Python 3 (PEP 3131).
class DottedObjectNameTrait(HasTraits):
value = DottedObjectName("a.b")
class TestDottedObjectName(TraitTestBase):
obj = DottedObjectNameTrait()
_default_value = "a.b"
_good_values = ["A", "y.t", "y765.__repr__", "os.path.join", u"os.path.join"]
_bad_values = [1, u"abc.€", "_.@", ".", ".abc", "abc.", ".abc.", None]
if sys.version_info[0] < 3:
_bad_values.append(u"t.þ")
else:
_good_values.append(u"t.þ")
class TCPAddressTrait(HasTraits):
value = TCPAddress()
class TestTCPAddress(TraitTestBase):
obj = TCPAddressTrait()
_default_value = ('127.0.0.1',0)
_good_values = [('localhost',0),('192.168.0.1',1000),('www.google.com',80)]
_bad_values = [(0,0),('localhost',10.0),('localhost',-1), None]
class ListTrait(HasTraits):
value = List(Int())
class TestList(TraitTestBase):
obj = ListTrait()
_default_value = []
_good_values = [[], [1], list(range(10)), (1,2)]
_bad_values = [10, [1,'a'], 'a']
def coerce(self, value):
if value is not None:
value = list(value)
return value
class Foo(object):
pass
class NoneInstanceListTrait(HasTraits):
value = List(Instance(Foo))
class TestNoneInstanceList(TraitTestBase):
obj = NoneInstanceListTrait()
_default_value = []
_good_values = [[Foo(), Foo()], []]
_bad_values = [[None], [Foo(), None]]
class InstanceListTrait(HasTraits):
value = List(Instance(__name__+'.Foo'))
class TestInstanceList(TraitTestBase):
obj = InstanceListTrait()
def test_klass(self):
"""Test that the instance klass is properly assigned."""
self.assertIs(self.obj.traits()['value']._trait.klass, Foo)
_default_value = []
_good_values = [[Foo(), Foo()], []]
_bad_values = [['1', 2,], '1', [Foo], None]
class UnionListTrait(HasTraits):
value = List(Int() | Bool())
class TestUnionListTrait(TraitTestBase):
obj = UnionListTrait()
_default_value = []
_good_values = [[True, 1], [False, True]]
_bad_values = [[1, 'True'], False]
class LenListTrait(HasTraits):
value = List(Int(), [0], minlen=1, maxlen=2)
class TestLenList(TraitTestBase):
obj = LenListTrait()
_default_value = [0]
_good_values = [[1], [1,2], (1,2)]
_bad_values = [10, [1,'a'], 'a', [], list(range(3))]
def coerce(self, value):
if value is not None:
value = list(value)
return value
class TupleTrait(HasTraits):
value = Tuple(Int(allow_none=True), default_value=(1,))
class TestTupleTrait(TraitTestBase):
obj = TupleTrait()
_default_value = (1,)
_good_values = [(1,), (0,), [1]]
_bad_values = [10, (1, 2), ('a'), (), None]
def coerce(self, value):
if value is not None:
value = tuple(value)
return value
def test_invalid_args(self):
self.assertRaises(TypeError, Tuple, 5)
self.assertRaises(TypeError, Tuple, default_value='hello')
t = Tuple(Int(), CBytes(), default_value=(1,5))
class LooseTupleTrait(HasTraits):
value = Tuple((1,2,3))
class TestLooseTupleTrait(TraitTestBase):
obj = LooseTupleTrait()
_default_value = (1,2,3)
_good_values = [(1,), [1], (0,), tuple(range(5)), tuple('hello'), ('a',5), ()]
_bad_values = [10, 'hello', {}, None]
def coerce(self, value):
if value is not None:
value = tuple(value)
return value
def test_invalid_args(self):
self.assertRaises(TypeError, Tuple, 5)
self.assertRaises(TypeError, Tuple, default_value='hello')
t = Tuple(Int(), CBytes(), default_value=(1,5))
class MultiTupleTrait(HasTraits):
value = Tuple(Int(), Bytes(), default_value=[99,b'bottles'])
class TestMultiTuple(TraitTestBase):
obj = MultiTupleTrait()
_default_value = (99,b'bottles')
_good_values = [(1,b'a'), (2,b'b')]
_bad_values = ((),10, b'a', (1,b'a',3), (b'a',1), (1, u'a'))
class CRegExpTrait(HasTraits):
value = CRegExp(r'')
class TestCRegExp(TraitTestBase):
def coerce(self, value):
return re.compile(value)
obj = CRegExpTrait()
_default_value = re.compile(r'')
_good_values = [r'\d+', re.compile(r'\d+')]
_bad_values = ['(', None, ()]
class DictTrait(HasTraits):
value = Dict()
def test_dict_assignment():
d = dict()
c = DictTrait()
c.value = d
d['a'] = 5
assert d == c.value
assert c.value is d
class UniformlyValidatedDictTrait(HasTraits):
value = Dict(trait=Unicode(),
default_value={'foo': '1'})
class TestInstanceUniformlyValidatedDict(TraitTestBase):
obj = UniformlyValidatedDictTrait()
_default_value = {'foo': '1'}
_good_values = [{'foo': '0', 'bar': '1'}]
_bad_values = [{'foo': 0, 'bar': '1'}]
class KeyValidatedDictTrait(HasTraits):
value = Dict(traits={'foo': Int()},
default_value={'foo': 1})
class TestInstanceKeyValidatedDict(TraitTestBase):
obj = KeyValidatedDictTrait()
_default_value = {'foo': 1}
_good_values = [{'foo': 0, 'bar': '1'}, {'foo': 0, 'bar': 1}]
_bad_values = [{'foo': '0', 'bar': '1'}]
class FullyValidatedDictTrait(HasTraits):
value = Dict(trait=Unicode(),
traits={'foo': Int()},
default_value={'foo': 1})
class TestInstanceFullyValidatedDict(TraitTestBase):
obj = FullyValidatedDictTrait()
_default_value = {'foo': 1}
_good_values = [{'foo': 0, 'bar': '1'}, {'foo': 1, 'bar': '2'}]
_bad_values = [{'foo': 0, 'bar': 1}, {'foo': '0', 'bar': '1'}]
def test_dict_default_value():
"""Check that the `{}` default value of the Dict traitlet constructor is
actually copied."""
class Foo(HasTraits):
d1 = Dict()
d2 = Dict()
foo = Foo()
assert foo.d1 == {}
assert foo.d2 == {}
assert foo.d1 is not foo.d2
class TestValidationHook(TestCase):
def test_parity_trait(self):
"""Verify that the early validation hook is effective"""
class Parity(HasTraits):
value = Int(0)
parity = Enum(['odd', 'even'], default_value='even')
@validate('value')
def _value_validate(self, proposal):
value = proposal['value']
if self.parity == 'even' and value % 2:
raise TraitError('Expected an even number')
if self.parity == 'odd' and (value % 2 == 0):
raise TraitError('Expected an odd number')
return value
u = Parity()
u.parity = 'odd'
u.value = 1 # OK
with self.assertRaises(TraitError):
u.value = 2 # Trait Error
u.parity = 'even'
u.value = 2 # OK
def test_multiple_validate(self):
"""Verify that we can register the same validator to multiple names"""
class OddEven(HasTraits):
odd = Int(1)
even = Int(0)
@validate('odd', 'even')
def check_valid(self, proposal):
if proposal['trait'].name == 'odd' and not proposal['value'] % 2:
raise TraitError('odd should be odd')
if proposal['trait'].name == 'even' and proposal['value'] % 2:
raise TraitError('even should be even')
u = OddEven()
u.odd = 3 # OK
with self.assertRaises(TraitError):
u.odd = 2 # Trait Error
u.even = 2 # OK
with self.assertRaises(TraitError):
u.even = 3 # Trait Error
class TestLink(TestCase):
def test_connect_same(self):
"""Verify two traitlets of the same type can be linked together using link."""
# Create two simple classes with Int traitlets.
class A(HasTraits):
value = Int()
a = A(value=9)
b = A(value=8)
        # Connect the two classes.
c = link((a, 'value'), (b, 'value'))
# Make sure the values are the same at the point of linking.
self.assertEqual(a.value, b.value)
# Change one of the values to make sure they stay in sync.
a.value = 5
self.assertEqual(a.value, b.value)
b.value = 6
self.assertEqual(a.value, b.value)
def test_link_different(self):
"""Verify two traitlets of different types can be linked together using link."""
# Create two simple classes with Int traitlets.
class A(HasTraits):
value = Int()
class B(HasTraits):
count = Int()
a = A(value=9)
b = B(count=8)
        # Connect the two classes.
c = link((a, 'value'), (b, 'count'))
# Make sure the values are the same at the point of linking.
self.assertEqual(a.value, b.count)
# Change one of the values to make sure they stay in sync.
a.value = 5
self.assertEqual(a.value, b.count)
b.count = 4
self.assertEqual(a.value, b.count)
def test_unlink(self):
"""Verify two linked traitlets can be unlinked."""
# Create two simple classes with Int traitlets.
class A(HasTraits):
value = Int()
a = A(value=9)
b = A(value=8)
# Connect the two classes.
c = link((a, 'value'), (b, 'value'))
a.value = 4
c.unlink()
# Change one of the values to make sure they don't stay in sync.
a.value = 5
self.assertNotEqual(a.value, b.value)
def test_callbacks(self):
"""Verify two linked traitlets have their callbacks called once."""
# Create two simple classes with Int traitlets.
class A(HasTraits):
value = Int()
class B(HasTraits):
count = Int()
a = A(value=9)
b = B(count=8)
# Register callbacks that count.
callback_count = []
def a_callback(name, old, new):
callback_count.append('a')
a.on_trait_change(a_callback, 'value')
def b_callback(name, old, new):
callback_count.append('b')
b.on_trait_change(b_callback, 'count')
# Connect the two classes.
c = link((a, 'value'), (b, 'count'))
# Make sure b's count was set to a's value once.
self.assertEqual(''.join(callback_count), 'b')
del callback_count[:]
# Make sure a's value was set to b's count once.
b.count = 5
self.assertEqual(''.join(callback_count), 'ba')
del callback_count[:]
# Make sure b's count was set to a's value once.
a.value = 4
self.assertEqual(''.join(callback_count), 'ab')
del callback_count[:]
class TestDirectionalLink(TestCase):
def test_connect_same(self):
"""Verify two traitlets of the same type can be linked together using directional_link."""
# Create two simple classes with Int traitlets.
class A(HasTraits):
value = Int()
a = A(value=9)
b = A(value=8)
        # Connect the two classes.
c = directional_link((a, 'value'), (b, 'value'))
# Make sure the values are the same at the point of linking.
self.assertEqual(a.value, b.value)
        # Change the value of the source and check that it synchronizes the target.
a.value = 5
self.assertEqual(b.value, 5)
        # Change the value of the target and check that it has no impact on the source.
b.value = 6
self.assertEqual(a.value, 5)
    def test_transform(self):
"""Test transform link."""
# Create two simple classes with Int traitlets.
class A(HasTraits):
value = Int()
a = A(value=9)
b = A(value=8)
        # Connect the two classes.
c = directional_link((a, 'value'), (b, 'value'), lambda x: 2 * x)
# Make sure the values are correct at the point of linking.
self.assertEqual(b.value, 2 * a.value)
        # Change the value of the source and check that it modifies the target.
a.value = 5
self.assertEqual(b.value, 10)
        # Change the value of the target and check that it has no impact on the source.
b.value = 6
self.assertEqual(a.value, 5)
def test_link_different(self):
"""Verify two traitlets of different types can be linked together using link."""
# Create two simple classes with Int traitlets.
class A(HasTraits):
value = Int()
class B(HasTraits):
count = Int()
a = A(value=9)
b = B(count=8)
        # Connect the two classes.
c = directional_link((a, 'value'), (b, 'count'))
# Make sure the values are the same at the point of linking.
self.assertEqual(a.value, b.count)
        # Change the value of the source and check that it synchronizes the target.
a.value = 5
self.assertEqual(b.count, 5)
        # Change the value of the target and check that it has no impact on the source.
        b.count = 6
self.assertEqual(a.value, 5)
def test_unlink(self):
"""Verify two linked traitlets can be unlinked."""
# Create two simple classes with Int traitlets.
class A(HasTraits):
value = Int()
a = A(value=9)
b = A(value=8)
# Connect the two classes.
c = directional_link((a, 'value'), (b, 'value'))
a.value = 4
c.unlink()
# Change one of the values to make sure they don't stay in sync.
a.value = 5
self.assertNotEqual(a.value, b.value)
class Pickleable(HasTraits):
i = Int()
@observe('i')
def _i_changed(self, change): pass
@validate('i')
def _i_validate(self, commit):
return commit['value']
j = Int()
def __init__(self):
with self.hold_trait_notifications():
self.i = 1
self.on_trait_change(self._i_changed, 'i')
def test_pickle_hastraits():
c = Pickleable()
for protocol in range(pickle.HIGHEST_PROTOCOL + 1):
p = pickle.dumps(c, protocol)
c2 = pickle.loads(p)
assert c2.i == c.i
assert c2.j == c.j
c.i = 5
for protocol in range(pickle.HIGHEST_PROTOCOL + 1):
p = pickle.dumps(c, protocol)
c2 = pickle.loads(p)
assert c2.i == c.i
assert c2.j == c.j
def test_hold_trait_notifications():
changes = []
class Test(HasTraits):
a = Integer(0)
b = Integer(0)
def _a_changed(self, name, old, new):
changes.append((old, new))
def _b_validate(self, value, trait):
if value != 0:
raise TraitError('Only 0 is a valid value')
return value
# Test context manager and nesting
t = Test()
with t.hold_trait_notifications():
with t.hold_trait_notifications():
t.a = 1
assert t.a == 1
assert changes == []
t.a = 2
assert t.a == 2
with t.hold_trait_notifications():
t.a = 3
assert t.a == 3
assert changes == []
t.a = 4
assert t.a == 4
assert changes == []
t.a = 4
assert t.a == 4
assert changes == []
assert changes == [(0, 4)]
# Test roll-back
try:
with t.hold_trait_notifications():
t.b = 1 # raises a Trait error
except:
pass
assert t.b == 0
class RollBack(HasTraits):
bar = Int()
def _bar_validate(self, value, trait):
if value:
raise TraitError('foobar')
return value
class TestRollback(TestCase):
def test_roll_back(self):
def assign_rollback():
RollBack(bar=1)
self.assertRaises(TraitError, assign_rollback)
class CacheModification(HasTraits):
foo = Int()
bar = Int()
def _bar_validate(self, value, trait):
self.foo = value
return value
def _foo_validate(self, value, trait):
self.bar = value
return value
def test_cache_modification():
CacheModification(foo=1)
CacheModification(bar=1)
class OrderTraits(HasTraits):
notified = Dict()
a = Unicode()
b = Unicode()
c = Unicode()
d = Unicode()
e = Unicode()
f = Unicode()
g = Unicode()
h = Unicode()
i = Unicode()
j = Unicode()
k = Unicode()
l = Unicode()
def _notify(self, name, old, new):
"""check the value of all traits when each trait change is triggered
This verifies that the values are not sensitive
to dict ordering when loaded from kwargs
"""
# check the value of the other traits
# when a given trait change notification fires
self.notified[name] = {
c: getattr(self, c) for c in 'abcdefghijkl'
}
def __init__(self, **kwargs):
self.on_trait_change(self._notify)
super(OrderTraits, self).__init__(**kwargs)
def test_notification_order():
d = {c:c for c in 'abcdefghijkl'}
obj = OrderTraits()
assert obj.notified == {}
obj = OrderTraits(**d)
notifications = {
c: d for c in 'abcdefghijkl'
}
assert obj.notified == notifications
###
# Traits for Forward Declaration Tests
###
class ForwardDeclaredInstanceTrait(HasTraits):
value = ForwardDeclaredInstance('ForwardDeclaredBar', allow_none=True)
class ForwardDeclaredTypeTrait(HasTraits):
value = ForwardDeclaredType('ForwardDeclaredBar', allow_none=True)
class ForwardDeclaredInstanceListTrait(HasTraits):
value = List(ForwardDeclaredInstance('ForwardDeclaredBar'))
class ForwardDeclaredTypeListTrait(HasTraits):
value = List(ForwardDeclaredType('ForwardDeclaredBar'))
###
# End Traits for Forward Declaration Tests
###
###
# Classes for Forward Declaration Tests
###
class ForwardDeclaredBar(object):
pass
class ForwardDeclaredBarSub(ForwardDeclaredBar):
pass
###
# End Classes for Forward Declaration Tests
###
###
# Forward Declaration Tests
###
class TestForwardDeclaredInstanceTrait(TraitTestBase):
obj = ForwardDeclaredInstanceTrait()
_default_value = None
_good_values = [None, ForwardDeclaredBar(), ForwardDeclaredBarSub()]
_bad_values = ['foo', 3, ForwardDeclaredBar, ForwardDeclaredBarSub]
class TestForwardDeclaredTypeTrait(TraitTestBase):
obj = ForwardDeclaredTypeTrait()
_default_value = None
_good_values = [None, ForwardDeclaredBar, ForwardDeclaredBarSub]
_bad_values = ['foo', 3, ForwardDeclaredBar(), ForwardDeclaredBarSub()]
class TestForwardDeclaredInstanceList(TraitTestBase):
obj = ForwardDeclaredInstanceListTrait()
def test_klass(self):
"""Test that the instance klass is properly assigned."""
self.assertIs(self.obj.traits()['value']._trait.klass, ForwardDeclaredBar)
_default_value = []
_good_values = [
[ForwardDeclaredBar(), ForwardDeclaredBarSub()],
[],
]
_bad_values = [
ForwardDeclaredBar(),
[ForwardDeclaredBar(), 3, None],
'1',
# Note that this is the type, not an instance.
[ForwardDeclaredBar],
[None],
None,
]
class TestForwardDeclaredTypeList(TraitTestBase):
obj = ForwardDeclaredTypeListTrait()
def test_klass(self):
"""Test that the instance klass is properly assigned."""
self.assertIs(self.obj.traits()['value']._trait.klass, ForwardDeclaredBar)
_default_value = []
_good_values = [
[ForwardDeclaredBar, ForwardDeclaredBarSub],
[],
]
_bad_values = [
ForwardDeclaredBar,
[ForwardDeclaredBar, 3],
'1',
# Note that this is an instance, not the type.
[ForwardDeclaredBar()],
[None],
None,
]
###
# End Forward Declaration Tests
###
class TestDynamicTraits(TestCase):
def setUp(self):
self._notify1 = []
def notify1(self, name, old, new):
self._notify1.append((name, old, new))
def test_notify_all(self):
class A(HasTraits):
pass
a = A()
self.assertTrue(not hasattr(a, 'x'))
self.assertTrue(not hasattr(a, 'y'))
# Dynamically add trait x.
a.add_traits(x=Int())
self.assertTrue(hasattr(a, 'x'))
self.assertTrue(isinstance(a, (A, )))
# Dynamically add trait y.
a.add_traits(y=Float())
self.assertTrue(hasattr(a, 'y'))
self.assertTrue(isinstance(a, (A, )))
self.assertEqual(a.__class__.__name__, A.__name__)
# Create a new instance and verify that x and y
# aren't defined.
b = A()
self.assertTrue(not hasattr(b, 'x'))
self.assertTrue(not hasattr(b, 'y'))
# Verify that notification works like normal.
a.on_trait_change(self.notify1)
a.x = 0
self.assertEqual(len(self._notify1), 0)
a.y = 0.0
self.assertEqual(len(self._notify1), 0)
a.x = 10
self.assertTrue(('x', 0, 10) in self._notify1)
a.y = 10.0
self.assertTrue(('y', 0.0, 10.0) in self._notify1)
self.assertRaises(TraitError, setattr, a, 'x', 'bad string')
self.assertRaises(TraitError, setattr, a, 'y', 'bad string')
self._notify1 = []
a.on_trait_change(self.notify1, remove=True)
a.x = 20
a.y = 20.0
self.assertEqual(len(self._notify1), 0)
def test_enum_no_default():
class C(HasTraits):
t = Enum(['a', 'b'])
c = C()
c.t = 'a'
assert c.t == 'a'
c = C()
with pytest.raises(TraitError):
t = c.t
c = C(t='b')
assert c.t == 'b'
def test_default_value_repr():
class C(HasTraits):
t = Type('traitlets.HasTraits')
t2 = Type(HasTraits)
n = Integer(0)
lis = List()
d = Dict()
assert C.t.default_value_repr() == "'traitlets.HasTraits'"
assert C.t2.default_value_repr() == "'traitlets.traitlets.HasTraits'"
assert C.n.default_value_repr() == '0'
assert C.lis.default_value_repr() == '[]'
assert C.d.default_value_repr() == '{}'
class TransitionalClass(HasTraits):
d = Any()
@default('d')
def _d_default(self):
return TransitionalClass
parent_super = False
calls_super = Integer(0)
@default('calls_super')
def _calls_super_default(self):
return -1
@observe('calls_super')
@observe_compat
def _calls_super_changed(self, change):
self.parent_super = change
parent_override = False
overrides = Integer(0)
@observe('overrides')
@observe_compat
def _overrides_changed(self, change):
self.parent_override = change
class SubClass(TransitionalClass):
def _d_default(self):
return SubClass
subclass_super = False
def _calls_super_changed(self, name, old, new):
self.subclass_super = True
super(SubClass, self)._calls_super_changed(name, old, new)
subclass_override = False
def _overrides_changed(self, name, old, new):
self.subclass_override = True
def test_subclass_compat():
obj = SubClass()
obj.calls_super = 5
assert obj.parent_super
assert obj.subclass_super
obj.overrides = 5
assert obj.subclass_override
assert not obj.parent_override
assert obj.d is SubClass
class DefinesHandler(HasTraits):
parent_called = False
trait = Integer()
@observe('trait')
def handler(self, change):
self.parent_called = True
class OverridesHandler(DefinesHandler):
child_called = False
@observe('trait')
def handler(self, change):
self.child_called = True
def test_subclass_override_observer():
obj = OverridesHandler()
obj.trait = 5
assert obj.child_called
assert not obj.parent_called
class DoesntRegisterHandler(DefinesHandler):
child_called = False
def handler(self, change):
self.child_called = True
def test_subclass_override_not_registered():
"""Subclass that overrides observer and doesn't re-register unregisters both"""
obj = DoesntRegisterHandler()
obj.trait = 5
assert not obj.child_called
assert not obj.parent_called
class AddsHandler(DefinesHandler):
child_called = False
@observe('trait')
def child_handler(self, change):
self.child_called = True
def test_subclass_add_observer():
obj = AddsHandler()
obj.trait = 5
assert obj.child_called
assert obj.parent_called
def test_observe_iterables():
class C(HasTraits):
i = Integer()
s = Unicode()
c = C()
recorded = {}
def record(change):
recorded['change'] = change
# observe with names=set
c.observe(record, names={'i', 's'})
c.i = 5
assert recorded['change'].name == 'i'
assert recorded['change'].new == 5
c.s = 'hi'
assert recorded['change'].name == 's'
assert recorded['change'].new == 'hi'
# observe with names=custom container with iter, contains
class MyContainer(object):
def __init__(self, container):
self.container = container
def __iter__(self):
return iter(self.container)
def __contains__(self, key):
return key in self.container
c.observe(record, names=MyContainer({'i', 's'}))
c.i = 10
assert recorded['change'].name == 'i'
assert recorded['change'].new == 10
c.s = 'ok'
assert recorded['change'].name == 's'
assert recorded['change'].new == 'ok'
def test_super_args():
class SuperRecorder(object):
def __init__(self, *args, **kwargs):
self.super_args = args
self.super_kwargs = kwargs
class SuperHasTraits(HasTraits, SuperRecorder):
i = Integer()
obj = SuperHasTraits('a1', 'a2', b=10, i=5, c='x')
assert obj.i == 5
assert not hasattr(obj, 'b')
assert not hasattr(obj, 'c')
assert obj.super_args == ('a1' , 'a2')
assert obj.super_kwargs == {'b': 10 , 'c': 'x'}
def test_super_bad_args():
class SuperHasTraits(HasTraits):
a = Integer()
if sys.version_info < (3,):
# Legacy Python, object.__init__ warns itself, instead of raising
w = ['object.__init__']
else:
w = ["Passing unrecoginized arguments"]
with expected_warnings(w):
obj = SuperHasTraits(a=1, b=2)
assert obj.a == 1
assert not hasattr(obj, 'b')
| unnikrishnankgs/va | venv/lib/python3.5/site-packages/traitlets/tests/test_traitlets.py | Python | bsd-2-clause | 66,467 | 0.005793 |
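# --- Editorial sketch: the core traitlets API exercised by the test suite above ---
# A minimal, self-contained example of @default, @validate and @observe on a
# HasTraits subclass, mirroring the patterns used in TestValidationHook and
# TestObserveDecorator. It is an illustration only, not part of the test file.

from traitlets import HasTraits, Int, TraitError, default, observe, validate

class Counter(HasTraits):
    value = Int()

    @default('value')
    def _value_default(self):
        # Dynamic default, computed lazily on first access.
        return 10

    @validate('value')
    def _value_validate(self, proposal):
        # Reject negative values before they are stored.
        if proposal['value'] < 0:
            raise TraitError('value must be non-negative')
        return proposal['value']

    @observe('value')
    def _value_changed(self, change):
        # change carries 'name', 'old', 'new', 'owner' and 'type'.
        print('value: %r -> %r' % (change['old'], change['new']))

if __name__ == '__main__':
    c = Counter()
    assert c.value == 10          # dynamic default
    c.value = 42                  # triggers the observer
    try:
        c.value = -1              # rejected by the validator
    except TraitError as err:
        print('rejected:', err)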
import os
import functools
import codecs
import pickle
from . import utils
class Memozo(object):
def __init__(self, path='./'):
self.base_path = path
memozo_file = os.path.join(self.base_path, utils.MEMOZO_FILE_NAME)
if not os.path.exists(memozo_file):
with codecs.open(memozo_file, 'w', encoding=utils.ENCODING) as f:
f.write('datetime\thash\tfile name\tfunction name\tparameters\n')
f.write('--------\t----\t---------\t-------------\t----------\n')
def __call__(self, name=None, ext='file'):
def wrapper(func):
_name = func.__name__ if name is None else name
@functools.wraps(func)
def _wrapper(*args, **kwargs):
bound_args = utils.get_bound_args(func, *args, **kwargs)
args_str = utils.get_args_str(bound_args)
sha1 = utils.get_hash(_name, func.__name__, args_str)
file_path = os.path.join(self.base_path, "{}_{}.{}".format(_name, sha1, ext))
if utils.log_exisits(self.base_path, _name, func.__name__, args_str) and os.path.exists(file_path):
with open(file_path, 'r') as f:
obj = f.readlines()
return obj
obj = func(*args, **kwargs)
with open(file_path, 'w') as f:
f.writelines(obj)
utils.write(self.base_path, _name, func.__name__, args_str)
return obj
return _wrapper
return wrapper
def codecs(self, name=None, ext='file', encoding=None):
def wrapper(func):
_name = func.__name__ if name is None else name
@functools.wraps(func)
def _wrapper(*args, **kwargs):
bound_args = utils.get_bound_args(func, *args, **kwargs)
args_str = utils.get_args_str(bound_args)
sha1 = utils.get_hash(_name, func.__name__, args_str)
file_path = os.path.join(self.base_path, "{}_{}.{}".format(_name, sha1, ext))
if utils.log_exisits(self.base_path, _name, func.__name__, args_str) and os.path.exists(file_path):
with codecs.open(file_path, 'r', encoding) as f:
obj = f.readlines()
return obj
obj = func(*args, **kwargs)
with codecs.open(file_path, 'w', encoding) as f:
f.writelines(obj)
utils.write(self.base_path, _name, func.__name__, args_str)
return obj
return _wrapper
return wrapper
def generator(self, name=None, ext='file', line_type='str', delimiter='\t'):
def wrapper(func):
_name = func.__name__ if name is None else name
@functools.wraps(func)
def _wrapper(*args, **kwargs):
# get cached data path
bound_args = utils.get_bound_args(func, *args, **kwargs)
args_str = utils.get_args_str(bound_args)
sha1 = utils.get_hash(_name, func.__name__, args_str)
file_path = os.path.join(self.base_path, "{}_{}.{}".format(_name, sha1, ext))
# if cached data exists, return generator using cached data
if utils.log_exisits(self.base_path, _name, func.__name__, args_str) and os.path.exists(file_path):
def gen_cached_data():
with codecs.open(file_path, 'r', utils.ENCODING) as f:
for line in f:
if line_type == 'tuple':
line = line.split(delimiter)
yield line
return gen_cached_data()
gen = func(*args, **kwargs)
# if no cached data exists, generator not only yield value but save value at each iteration
def generator_with_cache(gen, file_path):
with codecs.open(file_path, 'w', utils.ENCODING) as f:
for e in gen:
if line_type == 'str':
f.write(e)
elif line_type == 'tuple':
f.write(delimiter.join(e) + '\n')
yield e
utils.write(self.base_path, _name, func.__name__, args_str)
return generator_with_cache(gen, file_path)
return _wrapper
return wrapper
def pickle(self, name=None, ext='pickle', protocol=None):
def wrapper(func):
_name = func.__name__ if name is None else name
@functools.wraps(func)
def _wrapper(*args, **kwargs):
bound_args = utils.get_bound_args(func, *args, **kwargs)
args_str = utils.get_args_str(bound_args)
sha1 = utils.get_hash(_name, func.__name__, args_str)
file_path = os.path.join(self.base_path, "{}_{}.{}".format(_name, sha1, ext))
if utils.log_exisits(self.base_path, _name, func.__name__, args_str) and os.path.exists(file_path):
with open(file_path, 'rb') as f:
obj = pickle.load(f)
return obj
obj = func(*args, **kwargs)
with open(file_path, 'wb') as f:
pickle.dump(obj, f, protocol=protocol)
utils.write(self.base_path, _name, func.__name__, args_str)
return obj
return _wrapper
return wrapper
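# ---------------------------------------------------------------------------
# Illustrative usage sketch (not part of the library). It assumes the target
# directory already exists and that the decorated function returns a list of
# strings, which is what the plain `__call__` decorator reads and writes.
#
# m = Memozo(path='./data')
#
# @m(name='expensive_lines', ext='txt')
# def build_lines(n=3):
#     return ["line %d\n" % i for i in range(n)]
#
# lines = build_lines(3)   # first call runs the function and caches the file;
# lines = build_lines(3)   # later calls with the same arguments read the cache
# ---------------------------------------------------------------------------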
| sotetsuk/memozo | memozo/memozo.py | Python | mit | 5,664 | 0.002119 |
from .evaluate import Evaluator
from .dataset import GraphPropPredDataset
try:
from .dataset_pyg import PygGraphPropPredDataset
except ImportError:
pass
try:
from .dataset_dgl import DglGraphPropPredDataset
from .dataset_dgl import collate_dgl
except (ImportError, OSError):
pass
| snap-stanford/ogb | ogb/graphproppred/__init__.py | Python | mit | 302 | 0 |
# -*- encoding: utf-8 -*-
import csv
from allauth.socialaccount.models import SocialToken
from django.core.management.base import BaseCommand
from apps.filters.filter import get_api
import os
from django.conf import settings
from yaml import load
from apps.filters.models import Collection, Entry, Category
class Command(BaseCommand):
help = 'Load filter collections, categories and entries from the bundled YAML/CSV data files'
def handle(self, *args, **options):
folder = settings.APPS_DIR.path('filters', 'data').root
config_path = os.path.join(folder, 'collections.yaml')
assert os.path.exists(config_path)
with open(config_path, 'r') as fio:
config = load(fio.read())
for item in config:
collection, _ = Collection.objects.get_or_create(
title=item['name'],
)
if not collection.description:
collection.description = item['description']
collection.save()
with open(os.path.join(folder, item['file']), 'r') as fio:
reader = csv.DictReader(fio)
for i, row in enumerate(reader):
categories = []
for category in row['category'].split(','):
categories.append(Category.objects.get_or_create(title=category.strip())[0])
entry, _ = Entry.objects.get_or_create(value=row['value'], type=row['type'])
entry.category.add(*categories)
collection.entries.add(entry)
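# Illustrative invocation sketch (assumes this app is in INSTALLED_APPS and the
# YAML/CSV data files ship under apps/filters/data): the command runs like any
# other Django management command, e.g. `python manage.py load_data`.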
| WarmongeR1/feedly-filter | apps/filters/management/commands/load_data.py | Python | mit | 1,534 | 0.001304 |
#-----------------------------------------------------------------
# pycparser: cdecl.py
#
# Example of the CDECL tool using pycparser. CDECL "explains" C type
# declarations in plain English.
#
# The AST generated by pycparser from the given declaration is traversed
# recursively to build the explanation. Note that the declaration must be a
# valid external declaration in C. All the types used in it must be defined with
# typedef, or parsing will fail. The definition can be arbitrary - pycparser
# doesn't really care what the type is defined to be, only that it's a type.
#
# For example:
#
# c_decl = 'typedef int Node; const Node* (*ar)[10];'
#
# explain_c_declaration(c_decl)
# => ar is a pointer to array[10] of pointer to const Node
#
# struct and typedef are expanded when according arguments are set:
#
# explain_c_declaration(c_decl, expand_typedef=True)
# => ar is a pointer to array[10] of pointer to const int
#
# c_decl = 'struct P {int x; int y;} p;'
#
# explain_c_declaration(c_decl)
# => p is a struct P
#
# explain_c_declaration(c_decl, expand_struct=True)
# => p is a struct P containing {x is a int, y is a int}
#
# Eli Bendersky [http://eli.thegreenplace.net]
# License: BSD
#-----------------------------------------------------------------
import copy
import sys
# This is not required if you've installed pycparser into
# your site-packages/ with setup.py
#
sys.path.extend(['.', '..'])
from pycparser import c_parser, c_ast
def explain_c_declaration(c_decl, expand_struct=False, expand_typedef=False):
""" Parses the declaration in c_decl and returns a text
explanation as a string.
The last external node of the string is used, to allow
earlier typedefs for used types.
"""
parser = c_parser.CParser()
try:
node = parser.parse(c_decl, filename='<stdin>')
except c_parser.ParseError:
e = sys.exc_info()[1]
return "Parse error:" + str(e)
if (not isinstance(node, c_ast.FileAST) or
not isinstance(node.ext[-1], c_ast.Decl)
):
return "Not a valid declaration"
try:
expanded = expand_struct_typedef(node.ext[-1], node,
expand_struct=expand_struct,
expand_typedef=expand_typedef)
except Exception as e:
return "Not a valid declaration: " + str(e)
return _explain_decl_node(expanded)
def _explain_decl_node(decl_node):
""" Receives a c_ast.Decl note and returns its explanation in
English.
"""
storage = ' '.join(decl_node.storage) + ' ' if decl_node.storage else ''
return (decl_node.name +
" is a " +
storage +
_explain_type(decl_node.type))
def _explain_type(decl):
""" Recursively explains a type decl node
"""
typ = type(decl)
if typ == c_ast.TypeDecl:
quals = ' '.join(decl.quals) + ' ' if decl.quals else ''
return quals + _explain_type(decl.type)
elif typ == c_ast.Typename or typ == c_ast.Decl:
return _explain_type(decl.type)
elif typ == c_ast.IdentifierType:
return ' '.join(decl.names)
elif typ == c_ast.PtrDecl:
quals = ' '.join(decl.quals) + ' ' if decl.quals else ''
return quals + 'pointer to ' + _explain_type(decl.type)
elif typ == c_ast.ArrayDecl:
arr = 'array'
if decl.dim: arr += '[%s]' % decl.dim.value
return arr + " of " + _explain_type(decl.type)
elif typ == c_ast.FuncDecl:
if decl.args:
params = [_explain_type(param) for param in decl.args.params]
args = ', '.join(params)
else:
args = ''
return ('function(%s) returning ' % (args) +
_explain_type(decl.type))
elif typ == c_ast.Struct:
decls = [_explain_decl_node(mem_decl) for mem_decl in decl.decls]
members = ', '.join(decls)
return ('struct%s ' % (' ' + decl.name if decl.name else '') +
('containing {%s}' % members if members else ''))
def expand_struct_typedef(cdecl, file_ast, expand_struct=False, expand_typedef=False):
"""Expand struct & typedef in context of file_ast and return a new expanded node"""
decl_copy = copy.deepcopy(cdecl)
_expand_in_place(decl_copy, file_ast, expand_struct, expand_typedef)
return decl_copy
def _expand_in_place(decl, file_ast, expand_struct=False, expand_typedef=False):
"""Recursively expand struct & typedef in place, throw Exception if
undeclared struct or typedef are used
"""
typ = type(decl)
if typ in (c_ast.Decl, c_ast.TypeDecl, c_ast.PtrDecl, c_ast.ArrayDecl):
decl.type = _expand_in_place(decl.type, file_ast, expand_struct, expand_typedef)
elif typ == c_ast.Struct:
if not decl.decls:
struct = _find_struct(decl.name, file_ast)
if not struct:
raise Exception('using undeclared struct %s' % decl.name)
decl.decls = struct.decls
for i, mem_decl in enumerate(decl.decls):
decl.decls[i] = _expand_in_place(mem_decl, file_ast, expand_struct, expand_typedef)
if not expand_struct:
decl.decls = []
elif (typ == c_ast.IdentifierType and
decl.names[0] not in ('int', 'char')):
typedef = _find_typedef(decl.names[0], file_ast)
if not typedef:
raise Exception('using undeclared type %s' % decl.names[0])
if expand_typedef:
return typedef.type
return decl
def _find_struct(name, file_ast):
"""Receives a struct name and return declared struct object in file_ast
"""
for node in file_ast.ext:
if (type(node) == c_ast.Decl and
type(node.type) == c_ast.Struct and
node.type.name == name):
return node.type
def _find_typedef(name, file_ast):
"""Receives a type name and return typedef object in file_ast
"""
for node in file_ast.ext:
if type(node) == c_ast.Typedef and node.name == name:
return node
if __name__ == "__main__":
if len(sys.argv) > 1:
c_decl = sys.argv[1]
else:
c_decl = "char *(*(**foo[][8])())[];"
print("Explaining the declaration: " + c_decl + "\n")
print(explain_c_declaration(c_decl) + "\n")
| CtheSky/pycparser | examples/cdecl.py | Python | bsd-3-clause | 6,339 | 0.001893 |
# Copyright (C) 2011, 2012 Abhijit Mahabal
#
# This program is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with this
# program. If not, see <http://www.gnu.org/licenses/>
"""Exception classes in FARG core.
.. Note::
Exceptions specific to an individual application should live in that application's
directory.
"""
from itertools import takewhile
import traceback
class FargError(Exception):
"""Base class for untrappable errors (indicating bugs)."""
def __init__(self, msg=''):
Exception.__init__(self)
#: Message to be displayed.
self.msg = msg
self.stack_trace = list(takewhile((lambda x: x.find('FargError.__init__') == -1),
traceback.format_stack(limit=8)))
print('FargError: %s:%s' % (msg, self.stack_trace))
def __str__(self):
return 'FargError:' + self.msg + str(self.stack_trace)
class FargException(Exception):
"""Base class for FARG-specific exceptions."""
pass
class BatchModeStopException(Exception):
"""Base class of ways of stopping during batch mode.
Look at the subclasses (in this file) for details.
"""
def __init__(self, *, codelet_count):
Exception.__init__(self)
#: Number of codelets that had been run when the exception was raised.
self.codelet_count = codelet_count
class StoppingConditionMet(BatchModeStopException):
"""When a stopping condition is specified, this indicates that it has been reached."""
def __str__(self):
return 'StoppingConditionMet after %d codelets' % self.codelet_count
class SuccessfulCompletion(BatchModeStopException):
"""Raised when the problem has been fully solved.
What fully solved means depends on the application, of course. For Seqsee, this means
currently means "Sequence has been extended to all known terms.".
"""
pass
class AnswerFoundException(BatchModeStopException):
"""Raised by a subspace when it believes that an answer has been found."""
def __init__(self, answer, *, codelet_count):
BatchModeStopException.__init__(self, codelet_count=codelet_count)
self.answer = answer
class NoAnswerException(BatchModeStopException):
"""Raised by a subspace when it is realized that no answer is forthcoming."""
def __init__(self, *, codelet_count):
BatchModeStopException.__init__(self, codelet_count=codelet_count)
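# ---------------------------------------------------------------------------
# Illustrative sketch (hypothetical controller loop, not part of this module):
# batch-mode code is expected to catch these exceptions and read codelet_count.
# Note that AnswerFoundException is caught before its base class.
#
# try:
#     run_codelets()  # hypothetical function driving the batch run
# except AnswerFoundException as e:
#     print('answer %s found after %d codelets' % (e.answer, e.codelet_count))
# except BatchModeStopException as e:
#     print('stopped after %d codelets' % e.codelet_count)
# ---------------------------------------------------------------------------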
| amahabal/PySeqsee | farg/core/exceptions.py | Python | gpl-3.0 | 2,834 | 0.008116 |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
time_trigger_opts = [
cfg.IntOpt('min_interval',
default=60 * 60,
help='The minimum interval of two adjacent time points. '
'min_interval >= (max_window_time * 2)'),
cfg.IntOpt('min_window_time',
default=900,
help='The minimum window time'),
cfg.IntOpt('max_window_time',
default=1800,
help='The maximum window time'),
cfg.StrOpt('time_format',
default='calendar',
choices=['crontab', 'calendar'],
help='The type of time format which is used to compute time'),
cfg.IntOpt('trigger_poll_interval',
default=15,
help='Interval, in seconds, in which Karbor will poll for '
'trigger events'),
cfg.StrOpt('scheduling_strategy',
default='multi_node',
help='Time trigger scheduling strategy '
)
]
CONF = cfg.CONF
CONF.register_opts(time_trigger_opts)
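# Illustrative sketch only: once registered, the options are available as plain
# attributes on the global CONF object (values shown assume the defaults above).
#
# def window_is_valid(window_time):
#     return CONF.min_window_time <= window_time <= CONF.max_window_time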
| openstack/smaug | karbor/services/operationengine/engine/triggers/timetrigger/__init__.py | Python | apache-2.0 | 1,625 | 0 |
"""Module will classes related to PV row geometries"""
import numpy as np
from pvfactors.config import COLOR_DIC
from pvfactors.geometry.base import \
BaseSide, _coords_from_center_tilt_length, PVSegment
from shapely.geometry import GeometryCollection, LineString
from pvfactors.geometry.timeseries import \
TsShadeCollection, TsLineCoords, TsSurface
from pvlib.tools import cosd, sind
class TsPVRow(object):
"""Timeseries PV row class: this class is a vectorized version of the
PV row geometries. The coordinates and attributes (front and back sides)
are all vectorized."""
def __init__(self, ts_front_side, ts_back_side, xy_center, index=None,
full_pvrow_coords=None):
"""Initialize timeseries PV row with its front and back sides.
Parameters
----------
ts_front_side : :py:class:`~pvfactors.geometry.pvrow.TsSide`
Timeseries front side of the PV row
ts_back_side : :py:class:`~pvfactors.geometry.pvrow.TsSide`
Timeseries back side of the PV row
xy_center : tuple of float
x and y coordinates of the PV row center point (invariant)
index : int, optional
index of the PV row (Default = None)
full_pvrow_coords : \
:py:class:`~pvfactors.geometry.timeseries.TsLineCoords`, optional
Timeseries coordinates of the full PV row, end to end
(Default = None)
"""
self.front = ts_front_side
self.back = ts_back_side
self.xy_center = xy_center
self.index = index
self.full_pvrow_coords = full_pvrow_coords
@classmethod
def from_raw_inputs(cls, xy_center, width, rotation_vec,
cut, shaded_length_front, shaded_length_back,
index=None, param_names=None):
"""Create timeseries PV row using raw inputs.
Note: shading will always be zero when pv rows are flat.
Parameters
----------
xy_center : tuple of float
x and y coordinates of the PV row center point (invariant)
width : float
width of the PV rows [m]
rotation_vec : np.ndarray
Timeseries rotation values of the PV row [deg]
cut : dict
Discretization scheme of the PV row. Eg {'front': 2, 'back': 4}.
Will create segments of equal length on the designated sides.
shaded_length_front : np.ndarray
Timeseries values of front side shaded length [m]
shaded_length_back : np.ndarray
Timeseries values of back side shaded length [m]
index : int, optional
Index of the pv row (default = None)
param_names : list of str, optional
List of names of surface parameters to use when creating geometries
(Default = None)
Returns
-------
New timeseries PV row object
"""
# Calculate full pvrow coords
pvrow_coords = TsPVRow._calculate_full_coords(
xy_center, width, rotation_vec)
# Calculate normal vectors
dx = pvrow_coords.b2.x - pvrow_coords.b1.x
dy = pvrow_coords.b2.y - pvrow_coords.b1.y
normal_vec_front = np.array([-dy, dx])
# Calculate front side coords
ts_front = TsSide.from_raw_inputs(
xy_center, width, rotation_vec, cut.get('front', 1),
shaded_length_front, n_vector=normal_vec_front,
param_names=param_names)
# Calculate back side coords
ts_back = TsSide.from_raw_inputs(
xy_center, width, rotation_vec, cut.get('back', 1),
shaded_length_back, n_vector=-normal_vec_front,
param_names=param_names)
return cls(ts_front, ts_back, xy_center, index=index,
full_pvrow_coords=pvrow_coords)
@staticmethod
def _calculate_full_coords(xy_center, width, rotation):
"""Method to calculate the full PV row coordinaltes.
Parameters
----------
xy_center : tuple of float
x and y coordinates of the PV row center point (invariant)
width : float
width of the PV rows [m]
rotation : np.ndarray
Timeseries rotation values of the PV row [deg]
Returns
-------
coords: :py:class:`~pvfactors.geometry.timeseries.TsLineCoords`
Timeseries coordinates of full PV row
"""
x_center, y_center = xy_center
radius = width / 2.
# Calculate coords
x1 = radius * cosd(rotation + 180.) + x_center
y1 = radius * sind(rotation + 180.) + y_center
x2 = radius * cosd(rotation) + x_center
y2 = radius * sind(rotation) + y_center
coords = TsLineCoords.from_array(np.array([[x1, y1], [x2, y2]]))
return coords
def surfaces_at_idx(self, idx):
"""Get all PV surface geometries in timeseries PV row for a certain
index.
Parameters
----------
idx : int
Index to use to generate PV surface geometries
Returns
-------
list of :py:class:`~pvfactors.geometry.base.PVSurface` objects
List of PV surfaces
"""
pvrow = self.at(idx)
return pvrow.all_surfaces
def plot_at_idx(self, idx, ax, color_shaded=COLOR_DIC['pvrow_shaded'],
color_illum=COLOR_DIC['pvrow_illum'],
with_surface_index=False):
"""Plot timeseries PV row at a certain index.
Parameters
----------
idx : int
Index to use to plot timeseries PV rows
ax : :py:class:`matplotlib.pyplot.axes` object
Axes for plotting
color_shaded : str, optional
Color to use for plotting the shaded surfaces (Default =
COLOR_DIC['pvrow_shaded'])
color_illum : str, optional
Color to use for plotting the illuminated surfaces (Default =
COLOR_DIC['pvrow_illum'])
with_surface_index : bool, optional
Plot the surfaces with their index values (Default = False)
"""
pvrow = self.at(idx)
pvrow.plot(ax, color_shaded=color_shaded,
color_illum=color_illum, with_index=with_surface_index)
def at(self, idx):
"""Generate a PV row geometry for the desired index.
Parameters
----------
idx : int
Index to use to generate PV row geometry
Returns
-------
pvrow : :py:class:`~pvfactors.geometry.pvrow.PVRow`
"""
front_geom = self.front.at(idx)
back_geom = self.back.at(idx)
original_line = LineString(
self.full_pvrow_coords.as_array[:, :, idx])
pvrow = PVRow(front_side=front_geom, back_side=back_geom,
index=self.index, original_linestring=original_line)
return pvrow
def update_params(self, new_dict):
"""Update timeseries surface parameters of the PV row.
Parameters
----------
new_dict : dict
Parameters to add or update for the surfaces
"""
self.front.update_params(new_dict)
self.back.update_params(new_dict)
@property
def n_ts_surfaces(self):
"""Number of timeseries surfaces in the ts PV row"""
return self.front.n_ts_surfaces + self.back.n_ts_surfaces
@property
def all_ts_surfaces(self):
"""List of all timeseries surfaces"""
return self.front.all_ts_surfaces + self.back.all_ts_surfaces
@property
def centroid(self):
"""Centroid point of the timeseries pv row"""
centroid = (self.full_pvrow_coords.centroid
if self.full_pvrow_coords is not None else None)
return centroid
@property
def length(self):
"""Length of both sides of the timeseries PV row"""
return self.front.length + self.back.length
@property
def highest_point(self):
"""Timeseries point coordinates of highest point of PV row"""
high_pt = (self.full_pvrow_coords.highest_point
if self.full_pvrow_coords is not None else None)
return high_pt
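# ---------------------------------------------------------------------------
# Illustrative sketch (not part of pvfactors): building a timeseries PV row
# from raw inputs. The array values below are made up for demonstration only.
#
# import numpy as np
# rotation = np.array([20., 0., -30.])      # rotation per timestep [deg]
# shaded_front = np.array([0., 0., 0.5])    # front shaded length [m]
# shaded_back = np.array([1., 0., 0.3])     # back shaded length [m]
# ts_pvrow = TsPVRow.from_raw_inputs(
#     xy_center=(0., 2.), width=2., rotation_vec=rotation,
#     cut={'back': 3}, shaded_length_front=shaded_front,
#     shaded_length_back=shaded_back, index=0)
# pvrow_at_t0 = ts_pvrow.at(0)              # shapely-based PVRow for timestep 0
# ---------------------------------------------------------------------------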
class TsSide(object):
"""Timeseries side class: this class is a vectorized version of the
BaseSide geometries. The coordinates and attributes (list of segments,
normal vector) are all vectorized."""
def __init__(self, segments, n_vector=None):
"""Initialize timeseries side using list of timeseries segments.
Parameters
----------
segments : list of :py:class:`~pvfactors.geometry.pvrow.TsSegment`
List of timeseries segments of the side
n_vector : np.ndarray, optional
Timeseries normal vectors of the side (Default = None)
"""
self.list_segments = segments
self.n_vector = n_vector
@classmethod
def from_raw_inputs(cls, xy_center, width, rotation_vec, cut,
shaded_length, n_vector=None, param_names=None):
"""Create timeseries side using raw PV row inputs.
Note: shading will always be zero when PV rows are flat.
Parameters
----------
xy_center : tuple of float
x and y coordinates of the PV row center point (invariant)
width : float
width of the PV rows [m]
rotation_vec : np.ndarray
Timeseries rotation values of the PV row [deg]
cut : int
Discretization scheme of the PV side.
Will create segments of equal length.
shaded_length : np.ndarray
Timeseries values of side shaded length from lowest point [m]
n_vector : np.ndarray, optional
Timeseries normal vectors of the side
param_names : list of str, optional
List of names of surface parameters to use when creating geometries
(Default = None)
Returns
-------
New timeseries side object
"""
mask_tilted_to_left = rotation_vec >= 0
# Create Ts segments
x_center, y_center = xy_center
radius = width / 2.
segment_length = width / cut
is_not_flat = rotation_vec != 0.
# Calculate coords of shading point
r_shade = radius - shaded_length
x_sh = np.where(
mask_tilted_to_left,
r_shade * cosd(rotation_vec + 180.) + x_center,
r_shade * cosd(rotation_vec) + x_center)
y_sh = np.where(
mask_tilted_to_left,
r_shade * sind(rotation_vec + 180.) + y_center,
r_shade * sind(rotation_vec) + y_center)
# Calculate coords
list_segments = []
for i in range(cut):
# Calculate segment coords
r1 = radius - i * segment_length
r2 = radius - (i + 1) * segment_length
x1 = r1 * cosd(rotation_vec + 180.) + x_center
y1 = r1 * sind(rotation_vec + 180.) + y_center
x2 = r2 * cosd(rotation_vec + 180) + x_center
y2 = r2 * sind(rotation_vec + 180) + y_center
segment_coords = TsLineCoords.from_array(
np.array([[x1, y1], [x2, y2]]))
# Determine lowest and highest points of segment
x_highest = np.where(mask_tilted_to_left, x2, x1)
y_highest = np.where(mask_tilted_to_left, y2, y1)
x_lowest = np.where(mask_tilted_to_left, x1, x2)
y_lowest = np.where(mask_tilted_to_left, y1, y2)
# Calculate illum and shaded coords
x2_illum, y2_illum = x_highest, y_highest
x1_shaded, y1_shaded, x2_shaded, y2_shaded = \
x_lowest, y_lowest, x_lowest, y_lowest
mask_all_shaded = (y_sh > y_highest) & (is_not_flat)
mask_partial_shaded = (y_sh > y_lowest) & (~ mask_all_shaded) \
& (is_not_flat)
# Calculate second boundary point of shade
x2_shaded = np.where(mask_all_shaded, x_highest, x2_shaded)
x2_shaded = np.where(mask_partial_shaded, x_sh, x2_shaded)
y2_shaded = np.where(mask_all_shaded, y_highest, y2_shaded)
y2_shaded = np.where(mask_partial_shaded, y_sh, y2_shaded)
x1_illum = x2_shaded
y1_illum = y2_shaded
illum_coords = TsLineCoords.from_array(
np.array([[x1_illum, y1_illum], [x2_illum, y2_illum]]))
shaded_coords = TsLineCoords.from_array(
np.array([[x1_shaded, y1_shaded], [x2_shaded, y2_shaded]]))
# Create illuminated and shaded collections
is_shaded = False
illum = TsShadeCollection(
[TsSurface(illum_coords, n_vector=n_vector,
param_names=param_names, shaded=is_shaded)],
is_shaded)
is_shaded = True
shaded = TsShadeCollection(
[TsSurface(shaded_coords, n_vector=n_vector,
param_names=param_names, shaded=is_shaded)],
is_shaded)
# Create segment
segment = TsSegment(segment_coords, illum, shaded,
n_vector=n_vector, index=i)
list_segments.append(segment)
return cls(list_segments, n_vector=n_vector)
def surfaces_at_idx(self, idx):
"""Get all PV surface geometries in timeseries side for a certain
index.
Parameters
----------
idx : int
Index to use to generate PV surface geometries
Returns
-------
list of :py:class:`~pvfactors.geometry.base.PVSurface` objects
List of PV surfaces
"""
side_geom = self.at(idx)
return side_geom.all_surfaces
def at(self, idx):
"""Generate a side geometry for the desired index.
Parameters
----------
idx : int
Index to use to generate side geometry
Returns
-------
side : :py:class:`~pvfactors.geometry.base.BaseSide`
"""
list_geom_segments = []
for ts_seg in self.list_segments:
list_geom_segments.append(ts_seg.at(idx))
side = BaseSide(list_geom_segments)
return side
def plot_at_idx(self, idx, ax, color_shaded=COLOR_DIC['pvrow_shaded'],
color_illum=COLOR_DIC['pvrow_illum']):
"""Plot timeseries side at a certain index.
Parameters
----------
idx : int
Index to use to plot timeseries side
ax : :py:class:`matplotlib.pyplot.axes` object
Axes for plotting
color_shaded : str, optional
Color to use for plotting the shaded surfaces (Default =
COLOR_DIC['pvrow_shaded'])
color_illum : str, optional
Color to use for plotting the illuminated surfaces (Default =
COLOR_DIC['pvrow_illum'])
"""
side_geom = self.at(idx)
side_geom.plot(ax, color_shaded=color_shaded, color_illum=color_illum,
with_index=False)
@property
def shaded_length(self):
"""Timeseries shaded length of the side."""
length = 0.
for seg in self.list_segments:
length += seg.shaded.length
return length
@property
def length(self):
"""Timeseries length of side."""
length = 0.
for seg in self.list_segments:
length += seg.length
return length
def get_param_weighted(self, param):
"""Get timeseries parameter for the side, after weighting by
surface length.
Parameters
----------
param : str
Name of parameter
Returns
-------
np.ndarray
Weighted parameter values
"""
return self.get_param_ww(param) / self.length
def get_param_ww(self, param):
"""Get timeseries parameter from the side's surfaces with weight, i.e.
after multiplying by the surface lengths.
Parameters
----------
param: str
Surface parameter to return
Returns
-------
np.ndarray
Timeseries parameter values multiplied by weights
Raises
------
KeyError
if parameter name not in a surface parameters
"""
value = 0.
for seg in self.list_segments:
value += seg.get_param_ww(param)
return value
def update_params(self, new_dict):
"""Update timeseries surface parameters of the side.
Parameters
----------
new_dict : dict
Parameters to add or update for the surfaces
"""
for seg in self.list_segments:
seg.update_params(new_dict)
@property
def n_ts_surfaces(self):
"""Number of timeseries surfaces in the ts side"""
n_ts_surfaces = 0
for ts_segment in self.list_segments:
n_ts_surfaces += ts_segment.n_ts_surfaces
return n_ts_surfaces
@property
def all_ts_surfaces(self):
"""List of all timeseries surfaces"""
all_ts_surfaces = []
for ts_segment in self.list_segments:
all_ts_surfaces += ts_segment.all_ts_surfaces
return all_ts_surfaces
class TsSegment(object):
"""A TsSegment is a timeseries segment that has a timeseries shaded
collection and a timeseries illuminated collection."""
def __init__(self, coords, illum_collection, shaded_collection,
index=None, n_vector=None):
"""Initialize timeseries segment using segment coordinates and
timeseries illuminated and shaded surfaces.
Parameters
----------
coords : :py:class:`~pvfactors.geometry.timeseries.TsLineCoords`
Timeseries coordinates of full segment
illum_collection : \
:py:class:`~pvfactors.geometry.timeseries.TsShadeCollection`
Timeseries collection for illuminated part of segment
shaded_collection : \
:py:class:`~pvfactors.geometry.timeseries.TsShadeCollection`
Timeseries collection for shaded part of segment
index : int, optional
Index of segment (Default = None)
n_vector : np.ndarray, optional
Timeseries normal vectors of the side (Default = None)
"""
self.coords = coords
self.illum = illum_collection
self.shaded = shaded_collection
self.index = index
self.n_vector = n_vector
def surfaces_at_idx(self, idx):
"""Get all PV surface geometries in timeseries segment for a certain
index.
Parameters
----------
idx : int
Index to use to generate PV surface geometries
Returns
-------
list of :py:class:`~pvfactors.geometry.base.PVSurface` objects
List of PV surfaces
"""
segment = self.at(idx)
return segment.all_surfaces
def plot_at_idx(self, idx, ax, color_shaded=COLOR_DIC['pvrow_shaded'],
color_illum=COLOR_DIC['pvrow_illum']):
"""Plot timeseries segment at a certain index.
Parameters
----------
idx : int
Index to use to plot timeseries segment
ax : :py:class:`matplotlib.pyplot.axes` object
Axes for plotting
color_shaded : str, optional
Color to use for plotting the shaded surfaces (Default =
COLOR_DIC['pvrow_shaded'])
color_illum : str, optional
Color to use for plotting the illuminated surfaces (Default =
COLOR_DIC['pvrow_illum'])
"""
segment = self.at(idx)
segment.plot(ax, color_shaded=color_shaded, color_illum=color_illum,
with_index=False)
def at(self, idx):
"""Generate a PV segment geometry for the desired index.
Parameters
----------
idx : int
Index to use to generate PV segment geometry
Returns
-------
segment : :py:class:`~pvfactors.geometry.base.PVSegment`
"""
# Create illum collection
illum_collection = self.illum.at(idx)
# Create shaded collection
shaded_collection = self.shaded.at(idx)
# Create PV segment
segment = PVSegment(illum_collection=illum_collection,
shaded_collection=shaded_collection,
index=self.index)
return segment
@property
def length(self):
"""Timeseries length of segment."""
return self.illum.length + self.shaded.length
@property
def shaded_length(self):
"""Timeseries length of shaded part of segment."""
return self.shaded.length
@property
def centroid(self):
"""Timeseries point coordinates of the segment's centroid"""
return self.coords.centroid
def get_param_weighted(self, param):
"""Get timeseries parameter for the segment, after weighting by
surface length.
Parameters
----------
param : str
Name of parameter
Returns
-------
np.ndarray
Weighted parameter values
"""
return self.get_param_ww(param) / self.length
def get_param_ww(self, param):
"""Get timeseries parameter from the segment's surfaces with weight,
i.e. after multiplying by the surface lengths.
Parameters
----------
param: str
Surface parameter to return
Returns
-------
np.ndarray
Timeseries parameter values multiplied by weights
"""
return self.illum.get_param_ww(param) + self.shaded.get_param_ww(param)
def update_params(self, new_dict):
"""Update timeseries surface parameters of the segment.
Parameters
----------
new_dict : dict
Parameters to add or update for the surfaces
"""
self.illum.update_params(new_dict)
self.shaded.update_params(new_dict)
@property
def highest_point(self):
"""Timeseries point coordinates of highest point of segment"""
return self.coords.highest_point
@property
def lowest_point(self):
"""Timeseries point coordinates of lowest point of segment"""
return self.coords.lowest_point
@property
def all_ts_surfaces(self):
"""List of all timeseries surfaces in segment"""
return self.illum.list_ts_surfaces + self.shaded.list_ts_surfaces
@property
def n_ts_surfaces(self):
"""Number of timeseries surfaces in the segment"""
return self.illum.n_ts_surfaces + self.shaded.n_ts_surfaces
class PVRowSide(BaseSide):
"""A PV row side represents the whole surface of one side of a PV row.
At its core it will contain a fixed number of
:py:class:`~pvfactors.geometry.base.PVSegment` objects that will together
constitue one side of a PV row: a PV row side can also be
"discretized" into multiple segments"""
def __init__(self, list_segments=[]):
"""Initialize PVRowSide using its base class
:py:class:`pvfactors.geometry.base.BaseSide`
Parameters
----------
list_segments : list of :py:class:`~pvfactors.geometry.base.PVSegment`
List of PV segments for PV row side.
"""
super(PVRowSide, self).__init__(list_segments)
class PVRow(GeometryCollection):
"""A PV row is made of two PV row sides, a front and a back one."""
def __init__(self, front_side=PVRowSide(), back_side=PVRowSide(),
index=None, original_linestring=None):
"""Initialize PV row.
Parameters
----------
front_side : :py:class:`~pvfactors.geometry.pvrow.PVRowSide`, optional
Front side of the PV Row (Default = Empty PVRowSide)
back_side : :py:class:`~pvfactors.geometry.pvrow.PVRowSide`, optional
Back side of the PV Row (Default = Empty PVRowSide)
index : int, optional
Index of PV row (Default = None)
original_linestring : :py:class:`shapely.geometry.LineString`, optional
Full continuous linestring that the PV row will be made of
(Default = None)
"""
self.front = front_side
self.back = back_side
self.index = index
self.original_linestring = original_linestring
self._all_surfaces = None
super(PVRow, self).__init__([self.front, self.back])
@classmethod
def from_linestring_coords(cls, coords, shaded=False, normal_vector=None,
index=None, cut={}, param_names=[]):
"""Create a PV row with a single PV surface and using linestring
coordinates.
Parameters
----------
coords : list
List of linestring coordinates for the surface
shaded : bool, optional
Shading status desired for the PVRow sides (Default = False)
normal_vector : list, optional
Normal vector for the surface (Default = None)
index : int, optional
Index of PV row (Default = None)
cut : dict, optional
Scheme to decide how many segments to create on each side.
Eg {'front': 3, 'back': 2} will lead to 3 segments on front side
and 2 segments on back side. (Default = {})
param_names : list of str, optional
Names of the surface parameters, eg reflectivity, total incident
irradiance, temperature, etc. (Default = [])
Returns
-------
:py:class:`~pvfactors.geometry.pvrow.PVRow` object
"""
index_single_segment = 0
front_side = PVRowSide.from_linestring_coords(
coords, shaded=shaded, normal_vector=normal_vector,
index=index_single_segment, n_segments=cut.get('front', 1),
param_names=param_names)
if normal_vector is not None:
back_n_vec = - np.array(normal_vector)
else:
back_n_vec = - front_side.n_vector
back_side = PVRowSide.from_linestring_coords(
coords, shaded=shaded, normal_vector=back_n_vec,
index=index_single_segment, n_segments=cut.get('back', 1),
param_names=param_names)
return cls(front_side=front_side, back_side=back_side, index=index,
original_linestring=LineString(coords))
@classmethod
def from_center_tilt_width(cls, xy_center, tilt, width, surface_azimuth,
axis_azimuth, shaded=False, normal_vector=None,
index=None, cut={}, param_names=[]):
"""Create a PV row using mainly the coordinates of the line center,
a tilt angle, and its length.
Parameters
----------
xy_center : tuple
x, y coordinates of center point of desired linestring
tilt : float
surface tilt angle desired [deg]
width : float
desired width of the PV row, i.e. the length of its linestring [m]
surface_azimuth : float
Surface azimuth of PV surface [deg]
axis_azimuth : float
Axis azimuth of the PV surface, i.e. direction of axis of rotation
[deg]
shaded : bool, optional
Shading status desired for the PVRow sides (Default = False)
normal_vector : list, optional
Normal vector for the surface (Default = None)
index : int, optional
Index of PV row (Default = None)
cut : dict, optional
Scheme to decide how many segments to create on each side.
Eg {'front': 3, 'back': 2} will lead to 3 segments on front side
and 2 segments on back side. (Default = {})
param_names : list of str, optional
Names of the surface parameters, eg reflectivity, total incident
irradiance, temperature, etc. (Default = [])
Returns
-------
:py:class:`~pvfactors.geometry.pvrow.PVRow` object
"""
coords = _coords_from_center_tilt_length(xy_center, tilt, width,
surface_azimuth, axis_azimuth)
return cls.from_linestring_coords(coords, shaded=shaded,
normal_vector=normal_vector,
index=index, cut=cut,
param_names=param_names)
def plot(self, ax, color_shaded=COLOR_DIC['pvrow_shaded'],
color_illum=COLOR_DIC['pvrow_illum'], with_index=False):
"""Plot the surfaces of the PV Row.
Parameters
----------
ax : :py:class:`matplotlib.pyplot.axes` object
Axes for plotting
color_shaded : str, optional
Color to use for plotting the shaded surfaces (Default =
COLOR_DIC['pvrow_shaded'])
color_illum : str, optional
Color to use for plotting the illuminated surfaces (Default =
COLOR_DIC['pvrow_illum'])
with_index : bool
Flag to annotate surfaces with their indices (Default = False)
"""
self.front.plot(ax, color_shaded=color_shaded, color_illum=color_illum,
with_index=with_index)
self.back.plot(ax, color_shaded=color_shaded, color_illum=color_illum,
with_index=with_index)
@property
def boundary(self):
"""Boundaries of the PV Row's orginal linestring."""
return self.original_linestring.boundary
@property
def highest_point(self):
"""Highest point of the PV Row."""
b1, b2 = self.boundary
highest_point = b1 if b1.y > b2.y else b2
return highest_point
@property
def lowest_point(self):
"""Lowest point of the PV Row."""
b1, b2 = self.boundary
lowest_point = b1 if b1.y < b2.y else b2
return lowest_point
@property
def all_surfaces(self):
"""List of all the surfaces in the PV row."""
if self._all_surfaces is None:
self._all_surfaces = []
self._all_surfaces += self.front.all_surfaces
self._all_surfaces += self.back.all_surfaces
return self._all_surfaces
@property
def surface_indices(self):
"""List of all surface indices in the PV Row."""
list_indices = []
list_indices += self.front.surface_indices
list_indices += self.back.surface_indices
return list_indices
def update_params(self, new_dict):
"""Update surface parameters for both front and back sides.
Parameters
----------
new_dict : dict
Parameters to add or update for the surface
"""
self.front.update_params(new_dict)
self.back.update_params(new_dict)
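# ---------------------------------------------------------------------------
# Illustrative sketch (not part of pvfactors): creating a single PVRow geometry
# directly from its center point, tilt and width; the numbers are arbitrary.
#
# pvrow = PVRow.from_center_tilt_width(
#     xy_center=(0., 2.), tilt=20., width=2.,
#     surface_azimuth=180., axis_azimuth=90.,
#     index=0, cut={'front': 2})
# print(pvrow.length, pvrow.highest_point)
# ---------------------------------------------------------------------------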
| SunPower/pvfactors | pvfactors/geometry/pvrow.py | Python | bsd-3-clause | 31,109 | 0 |
# coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import os
from pants.backend.jvm.jar_dependency_utils import M2Coordinate, ResolvedJar
from pants.backend.jvm.tasks.classpath_products import ClasspathProducts
from pants_test.jvm.jvm_tool_task_test_base import JvmToolTaskTestBase
class JvmBinaryTaskTestBase(JvmToolTaskTestBase):
"""
:API: public
"""
def create_artifact(self, org, name, rev, classifier=None, ext=None, materialize=True):
"""
:API: public
:param string org: The maven dependency `groupId`.
:param string name: The maven dependency `artifactId`.
:param string rev: The maven dependency `version`.
:param string classifier: The maven dependency `classifier`.
:param string ext: There is no direct maven parallel, but the maven `packaging` value of the
depended-on artifact for simple cases, and in more complex cases the
extension of the artifact. For example, 'bundle' packaging implies an
extension of 'jar'. Defaults to 'jar'.
:param bool materialize: `False` to populate the returned resolved_jar with a `pants_path` that
does not exist; defaults to `True` and `touch`es the `pants_path`.
:returns: A resolved jar describing the artifact.
:rtype: :class:`pants.backend.jvm.jar_dependency_utils.ResolvedJar`
"""
coordinate = M2Coordinate(org=org, name=name, rev=rev, classifier=classifier, ext=ext)
cache_path = 'not/a/real/cache/path'
jar_name = coordinate.artifact_filename
pants_path = self.create_workdir_file(jar_name) if materialize else os.path.join(self.pants_workdir,
jar_name)
return ResolvedJar(coordinate=coordinate, cache_path=cache_path, pants_path=pants_path)
def iter_files(self, dir_path):
"""Returns an iterator over the files found under the given `dir_path`.
:API: public
:param string dir_path: The path of the directory tree to scan for files.
:returns: An iterator of the relative paths of files found under `dir_path`.
:rtype: :class:`collections.Iterator` of string
"""
for root_dir, _, files in os.walk(dir_path):
for f in files:
yield os.path.relpath(os.path.join(root_dir, f), dir_path)
def ensure_classpath_products(self, context):
"""Gets or creates the classpath products expected by `JvmBinaryTask`.
:API: public
:param context: The pants run context to get/create/associate classpath products with.
:type context: :class:`pants.goal.context.Context`
:returns: The classpath products associated with the given `context`
:rtype: :class:`pants.backend.jvm.tasks.classpath_products.ClasspathProducts`
"""
return context.products.get_data('runtime_classpath', init_func=ClasspathProducts.init_func(self.pants_workdir))
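# Illustrative sketch (hypothetical test case, not part of this base class): a
# concrete test could build a fake resolved jar and check it was materialized.
#
# class MyJvmBinaryTest(JvmBinaryTaskTestBase):
#   def test_create_artifact_materializes_jar(self):
#     jar = self.create_artifact(org='org.example', name='lib', rev='1.0')
#     self.assertTrue(os.path.exists(jar.pants_path))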
| manasapte/pants | tests/python/pants_test/backend/jvm/tasks/jvm_binary_task_test_base.py | Python | apache-2.0 | 3,154 | 0.006658 |
"""Graphical example illustrating improvement of convergence of KMeans
when cluster centers are initialized by KMeans++ algorithm.
In this example, 4 vertices of a rectangle are chosen: (0,0) (0,100) (10,0) (10,100).
There are 500 points normally distributed about each vertex.
Therefore, the ideal cluster centers for k=2 are the global minima, i.e. (5,0) and (5,100).
Written (W) 2014 Parijat Mazumdar
"""
from pylab import figure,clf,plot,linspace,pi,show
from numpy import array,ones,zeros,cos,sin,concatenate
from numpy.random import randn
from modshogun import *
k=2
num=500
d1=concatenate((randn(1,num),10.*randn(1,num)),0)
d2=concatenate((randn(1,num),10.*randn(1,num)),0)+array([[10.],[0.]])
d3=concatenate((randn(1,num),10.*randn(1,num)),0)+array([[0.],[100.]])
d4=concatenate((randn(1,num),10.*randn(1,num)),0)+array([[10.],[100.]])
traindata=concatenate((d1,d2,d3,d4),1)
feat_train=RealFeatures(traindata)
distance=EuclideanDistance(feat_train,feat_train)
kmeans=KMeans(k, distance, True)
kmeans.train()
centerspp=kmeans.get_cluster_centers()
radipp=kmeans.get_radiuses()
kmeans.set_use_kmeanspp(False)
kmeans.train()
centers=kmeans.get_cluster_centers()
radi=kmeans.get_radiuses()
figure('KMeans with KMeans++')
clf()
plot(d1[0],d1[1],'rx')
plot(d2[0],d2[1],'bx',hold=True)
plot(d3[0],d3[1],'gx',hold=True)
plot(d4[0],d4[1],'cx',hold=True)
plot(centerspp[0,:], centerspp[1,:], 'ko',hold=True)
for i in xrange(k):
t = linspace(0, 2*pi, 100)
plot(radipp[i]*cos(t)+centerspp[0,i],radipp[i]*sin(t)+centerspp[1,i],'k-', hold=True)
figure('KMeans w/o KMeans++')
clf()
plot(d1[0],d1[1],'rx')
plot(d2[0],d2[1],'bx',hold=True)
plot(d3[0],d3[1],'gx',hold=True)
plot(d4[0],d4[1],'cx',hold=True)
plot(centers[0,:], centers[1,:], 'ko',hold=True)
for i in xrange(k):
t = linspace(0, 2*pi, 100)
plot(radi[i]*cos(t)+centers[0,i],radi[i]*sin(t)+centers[1,i],'k-', hold=True)
show()
| abhiatgithub/shogun-toolbox | examples/undocumented/python_modular/graphical/cluster_kpp.py | Python | gpl-3.0 | 1,891 | 0.050238 |
from __main__ import settings
import logging
import datetime
import os
from pythonjsonlogger import jsonlogger
# To also capture disnake's internal logging, see https://docs.disnake.dev/en/latest/logging.html?highlight=logger
# Alternatively, configuring the root logger captures verbose output from every library in use.
if not os.path.exists("logs"):
os.makedirs("logs")
def setup_logger():
logger = logging.getLogger("mangologger")
if settings.debug:
logger.setLevel(logging.DEBUG)
else:
logger.setLevel(logging.INFO)
# Console Logging
if settings.debug:
consoleout = logging.StreamHandler()
logger.addHandler(consoleout)
# JSON file logging
timestamp = datetime.datetime.now().strftime("%Y-%m-%d__%I_%M%p")
filehandler = logging.FileHandler(filename=f"logs/mangolog_{timestamp}.log", encoding="utf-8", mode="w")
jsonformatter = jsonlogger.JsonFormatter()
filehandler.setFormatter(jsonformatter)
logger.addHandler(filehandler)
return logger
 | mdiller/MangoByte | cogs/utils/logger.py | Python | mit | 962 | 0.025988 |
# Copyright: 2013, Grigoriy Petukhov
# Author: Grigoriy Petukhov (http://lorien.name)
# License: BSD
from __future__ import absolute_import
#import email
import logging
#import urllib
#try:
#from cStringIO import StringIO
#except ImportError:
#from io import BytesIO as StringIO
#import threading
import random
#try:
#from urlparse import urlsplit, urlunsplit
#except ImportError:
#from urllib.parse import urlsplit, urlunsplit
#import pycurl
#import tempfile
#import os.path
#from ..base import UploadContent, UploadFile
#from .. import error
from ..response import Response
from ..tools.http import encode_cookies, smart_urlencode, normalize_unicode,\
normalize_http_values, normalize_post_data
from ..tools.user_agent import random_user_agent
from ..base import Grab
from grab.kit import Kit
logger = logging.getLogger('grab.transport.kit')
class KitTransport(object):
"""
Grab network transport powered with QtWebKit module
"""
def __init__(self):
self.kit = Kit()
#def setup_body_file(self, storage_dir, storage_filename):
#if storage_filename is None:
#handle, path = tempfile.mkstemp(dir=storage_dir)
#else:
#path = os.path.join(storage_dir, storage_filename)
#self.body_file = open(path, 'wb')
#self.body_path = path
def reset(self):
self.request_object = {
'url': None,
'cookies': {},
'method': None,
'data': None,
'user_agent': None,
}
self.response = None
#self.response_head_chunks = []
#self.response_body_chunks = []
#self.response_body_bytes_read = 0
#self.verbose_logging = False
#self.body_file = None
#self.body_path = None
## Maybe move to super-class???
self.request_head = ''
self.request_body = ''
self.request_log = ''
def process_config(self, grab):
self.request_object['url'] = grab.config['url']
self.request_object['method'] = grab.request_method.lower()
if grab.config['cookiefile']:
grab.load_cookies(grab.config['cookiefile'])
if grab.config['cookies']:
if not isinstance(grab.config['cookies'], dict):
raise error.GrabMisuseError('cookies option should be a dict')
self.request_object['cookies'] = grab.config['cookies']
if grab.request_method == 'POST':
if grab.config['multipart_post']:
raise NotImplementedError
elif grab.config['post']:
post_data = normalize_post_data(grab.config['post'], grab.config['charset'])
else:
post_data = None
self.request_object['data'] = post_data
if grab.config['user_agent'] is None:
if grab.config['user_agent_file'] is not None:
with open(grab.config['user_agent_file']) as inf:
lines = inf.read().splitlines()
grab.config['user_agent'] = random.choice(lines)
else:
pass
# I think that it does not make sense
# to create random user agents for webkit transport
#grab.config['user_agent'] = random_user_agent()
self.request_object['user_agent'] = grab.config['user_agent']
def request(self):
req = self.request_object
self.kit_response = self.kit.request(
url=req['url'],
cookies=req['cookies'],
method=req['method'],
data=req['data'],
user_agent=req['user_agent'],
)
def prepare_response(self, grab):
return self.kit_response
def extract_cookies(self):
"""
Extract cookies.
"""
return self.kit_response.cookies
def __getstate__(self):
"""
Reset the kit attribute, which cannot be pickled.
"""
state = self.__dict__.copy()
state['kit'] = None
return state
def __setstate__(self, state):
"""
Create pycurl instance after Grag instance was restored
from pickled state.
"""
state['kit'] = Kit()
self.__dict__ = state
class GrabKit(Grab):
def __init__(self, response_body=None, transport='grab.transport.curl.CurlTransport',
**kwargs):
super(GrabKit, self).__init__(response_body=response_body,
transport='grab.transport.kit.KitTransport',
**kwargs)
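# Illustrative sketch (assumes PyQt/QtWebKit and grab.kit are importable in the
# environment; the URL is a placeholder):
#
# g = GrabKit()
# g.go('http://example.com/')
# print(g.response.code)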
| boooka/GeoPowerOff | venv/lib/python2.7/site-packages/grab/transport/kit.py | Python | apache-2.0 | 4,620 | 0.009307 |
__version__ = '0.1.0'
__description__ = 'Gantt charts!'
| thomasleese/gantt-charts | ganttcharts/__init__.py | Python | mit | 56 | 0 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from datetime import datetime
from getpass import getuser
import json
from os import environ
from pipes import quote
from socket import gethostname
from time import time
from .exceptions import NodeLockedException
from .utils import cached_property, tempfile
from .utils.text import (
blue,
bold,
format_duration,
format_timestamp,
mark_for_translation as _,
parse_duration,
red,
wrap_question,
)
from .utils.ui import io
HARD_LOCK_PATH = "/tmp/bundlewrap.lock"
HARD_LOCK_FILE = HARD_LOCK_PATH + "/info"
SOFT_LOCK_PATH = "/tmp/bundlewrap.softlock.d"
SOFT_LOCK_FILE = "/tmp/bundlewrap.softlock.d/{id}"
def identity():
return environ.get('BW_IDENTITY', "{}@{}".format(
getuser(),
gethostname(),
))
class NodeLock(object):
def __init__(self, node, interactive=False, ignore=False):
self.node = node
self.ignore = ignore
self.interactive = interactive
def __enter__(self):
if self.node.os == 'kubernetes':
# no locking required
return self
with tempfile() as local_path:
if not self.ignore:
with io.job(_("{node} checking hard lock status").format(node=bold(self.node.name))):
result = self.node.run("mkdir " + quote(HARD_LOCK_PATH), may_fail=True)
if result.return_code != 0:
self.node.download(HARD_LOCK_FILE, local_path)
with open(local_path, 'r') as f:
try:
info = json.loads(f.read())
except:
io.stderr(_(
"{warning} corrupted lock on {node}: "
"unable to read or parse lock file contents "
"(clear it with `bw run {node} 'rm -R {path}'`)"
).format(
node=self.node.name,
path=HARD_LOCK_FILE,
warning=red(_("WARNING")),
))
info = {}
expired = False
try:
d = info['date']
except KeyError:
info['date'] = _("<unknown>")
info['duration'] = _("<unknown>")
else:
duration = datetime.now() - datetime.fromtimestamp(d)
info['date'] = format_timestamp(d)
info['duration'] = format_duration(duration)
if duration > parse_duration(environ.get('BW_HARDLOCK_EXPIRY', "8h")):
expired = True
io.debug("ignoring expired hard lock on {}".format(self.node.name))
if 'user' not in info:
info['user'] = _("<unknown>")
if expired or self.ignore or (self.interactive and io.ask(
self._warning_message_hard(info),
False,
epilogue=blue("?") + " " + bold(self.node.name),
)):
pass
else:
raise NodeLockedException(info)
with io.job(_("{node} uploading lock file").format(node=bold(self.node.name))):
if self.ignore:
self.node.run("mkdir -p " + quote(HARD_LOCK_PATH))
with open(local_path, 'w') as f:
f.write(json.dumps({
'date': time(),
'user': identity(),
}))
self.node.upload(local_path, HARD_LOCK_FILE)
return self
def __exit__(self, type, value, traceback):
if self.node.os == 'kubernetes':
# no locking required
return
with io.job(_("{node} removing hard lock").format(node=bold(self.node.name))):
result = self.node.run("rm -R {}".format(quote(HARD_LOCK_PATH)), may_fail=True)
if result.return_code != 0:
io.stderr(_("{x} {node} could not release hard lock").format(
node=bold(self.node.name),
x=red("!"),
))
def _warning_message_hard(self, info):
return wrap_question(
red(_("NODE LOCKED")),
_(
"Looks like somebody is currently using BundleWrap on this node.\n"
"You should let them finish or override the lock if it has gone stale.\n"
"\n"
"locked by {user}\n"
" since {date} ({duration} ago)"
).format(
user=bold(info['user']),
date=info['date'],
duration=info['duration'],
),
bold(_("Override lock?")),
prefix="{x} {node} ".format(node=bold(self.node.name), x=blue("?")),
)
@cached_property
def soft_locks(self):
return softlock_list(self.node)
@cached_property
def my_soft_locks(self):
for lock in self.soft_locks:
if lock['user'] == identity():
yield lock
@cached_property
def other_peoples_soft_locks(self):
for lock in self.soft_locks:
if lock['user'] != identity():
yield lock
def softlock_add(node, lock_id, comment="", expiry="8h", item_selectors=None):
assert node.os != 'kubernetes'
if "\n" in comment:
raise ValueError(_("Lock comments must not contain any newlines"))
if not item_selectors:
item_selectors = ["*"]
expiry_timedelta = parse_duration(expiry)
now = time()
expiry_timestamp = now + expiry_timedelta.days * 86400 + expiry_timedelta.seconds
content = json.dumps({
'comment': comment,
'date': now,
'expiry': expiry_timestamp,
'id': lock_id,
'items': item_selectors,
'user': identity(),
}, indent=None, sort_keys=True)
with tempfile() as local_path:
with open(local_path, 'w') as f:
f.write(content + "\n")
node.run("mkdir -p " + quote(SOFT_LOCK_PATH))
node.upload(local_path, SOFT_LOCK_FILE.format(id=lock_id), mode='0644')
node.repo.hooks.lock_add(node.repo, node, lock_id, item_selectors, expiry_timestamp, comment)
return lock_id
def softlock_list(node):
if node.os == 'kubernetes':
return []
with io.job(_("{} checking soft locks").format(bold(node.name))):
cat = node.run("cat {}".format(SOFT_LOCK_FILE.format(id="*")), may_fail=True)
if cat.return_code != 0:
return []
result = []
for line in cat.stdout.decode('utf-8').strip().split("\n"):
try:
result.append(json.loads(line.strip()))
except json.decoder.JSONDecodeError:
io.stderr(_(
"{x} {node} unable to parse soft lock file contents, ignoring: {line}"
).format(
x=red("!"),
node=bold(node.name),
line=line.strip(),
))
for lock in result[:]:
if lock['expiry'] < time():
io.debug(_("removing expired soft lock {id} from node {node}").format(
id=lock['id'],
node=node.name,
))
softlock_remove(node, lock['id'])
result.remove(lock)
return result
def softlock_remove(node, lock_id):
assert node.os != 'kubernetes'
io.debug(_("removing soft lock {id} from node {node}").format(
id=lock_id,
node=node.name,
))
node.run("rm {}".format(SOFT_LOCK_FILE.format(id=lock_id)))
node.repo.hooks.lock_remove(node.repo, node, lock_id)
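# ---------------------------------------------------------------------------
# Illustrative sketch (not part of bundlewrap): `node` is assumed to be a
# bundlewrap Node object obtained from a Repository.
#
# lock_id = softlock_add(node, "deploy42", comment="release window", expiry="2h")
# for lock in softlock_list(node):
#     print("{} held by {} since {}".format(
#         lock['id'], lock['user'], format_timestamp(lock['date'])))
# softlock_remove(node, lock_id)
# ---------------------------------------------------------------------------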
| timbuchwaldt/bundlewrap | bundlewrap/lock.py | Python | gpl-3.0 | 8,144 | 0.002456 |
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
from .base import TestExecutor
class ProcessTestExecutor(TestExecutor):
def __init__(self, *args, **kwargs):
TestExecutor.__init__(self, *args, **kwargs)
self.binary = self.browser.binary
self.interactive = self.browser.interactive
def setup(self, runner):
self.runner = runner
self.runner.send_message("init_succeeded")
return True
def is_alive(self):
return True
def do_test(self, test):
raise NotImplementedError
| indykish/servo | tests/wpt/harness/wptrunner/executors/process.py | Python | mpl-2.0 | 701 | 0 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import django.utils.timezone
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('training', '0004_auto_20160627_1106'),
]
operations = [
migrations.AddField(
model_name='trainesscourserecord',
name='createdby',
field=models.ForeignKey(related_name='createdby', default=4, to=settings.AUTH_USER_MODEL),
preserve_default=False,
),
migrations.AddField(
model_name='trainesscourserecord',
name='createtimestamp',
field=models.DateField(default=django.utils.timezone.now, null=True, verbose_name=b'Creation Timestamp', blank=True),
),
migrations.AddField(
model_name='trainesscourserecord',
name='modifiedby',
field=models.ForeignKey(related_name='modifiedby', default=4, to=settings.AUTH_USER_MODEL),
preserve_default=False,
),
migrations.AddField(
model_name='trainesscourserecord',
name='modifytimestamp',
field=models.DateField(default=django.utils.timezone.now, null=True, verbose_name=b'Modification Timestamp', blank=True),
),
]
| akademikbilisim/ab-kurs-kayit | abkayit/training/migrations/0005_auto_20160627_1414.py | Python | gpl-3.0 | 1,403 | 0.002851 |
from .patch import post_load
| OCA/server-tools | module_change_auto_install/__init__.py | Python | agpl-3.0 | 29 | 0 |
SPIDER_MODULES = ['censible_links.spiders']
DEFAULT_ITEM_CLASS = 'censible_links.items.Page'
| pmart123/censible_links | censible_links/settings.py | Python | bsd-3-clause | 93 | 0 |
'''
Tests for student activation and login
'''
import json
import unittest
from django.test import TestCase
from django.test.client import Client
from django.test.utils import override_settings
from django.conf import settings
from django.contrib.auth.models import User
from django.core.cache import cache
from django.core.urlresolvers import reverse, NoReverseMatch
from django.http import HttpResponseBadRequest, HttpResponse
import httpretty
from mock import patch
from social.apps.django_app.default.models import UserSocialAuth
from external_auth.models import ExternalAuthMap
from student.tests.factories import UserFactory, RegistrationFactory, UserProfileFactory
from student.views import login_oauth_token
from third_party_auth.tests.utils import (
ThirdPartyOAuthTestMixin,
ThirdPartyOAuthTestMixinFacebook,
ThirdPartyOAuthTestMixinGoogle
)
from xmodule.modulestore.tests.factories import CourseFactory
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
class LoginTest(TestCase):
'''
Test student.views.login_user() view
'''
def setUp(self):
super(LoginTest, self).setUp()
# Create one user and save it to the database
self.user = UserFactory.build(username='test', email='test@edx.org')
self.user.set_password('test_password')
self.user.save()
# Create a registration for the user
RegistrationFactory(user=self.user)
# Create a profile for the user
UserProfileFactory(user=self.user)
# Create the test client
self.client = Client()
cache.clear()
# Store the login url
try:
self.url = reverse('login_post')
except NoReverseMatch:
self.url = reverse('login')
def test_login_success(self):
response, mock_audit_log = self._login_response('test@edx.org', 'test_password', patched_audit_log='student.models.AUDIT_LOG')
self._assert_response(response, success=True)
self._assert_audit_log(mock_audit_log, 'info', [u'Login success', u'test@edx.org'])
@patch.dict("django.conf.settings.FEATURES", {'SQUELCH_PII_IN_LOGS': True})
def test_login_success_no_pii(self):
response, mock_audit_log = self._login_response('test@edx.org', 'test_password', patched_audit_log='student.models.AUDIT_LOG')
self._assert_response(response, success=True)
self._assert_audit_log(mock_audit_log, 'info', [u'Login success'])
self._assert_not_in_audit_log(mock_audit_log, 'info', [u'test@edx.org'])
def test_login_success_unicode_email(self):
unicode_email = u'test' + unichr(40960) + u'@edx.org'
self.user.email = unicode_email
self.user.save()
response, mock_audit_log = self._login_response(unicode_email, 'test_password', patched_audit_log='student.models.AUDIT_LOG')
self._assert_response(response, success=True)
self._assert_audit_log(mock_audit_log, 'info', [u'Login success', unicode_email])
def test_login_fail_no_user_exists(self):
nonexistent_email = u'not_a_user@edx.org'
response, mock_audit_log = self._login_response(nonexistent_email, 'test_password')
self._assert_response(response, success=False,
value='Email or password is incorrect')
self._assert_audit_log(mock_audit_log, 'warning', [u'Login failed', u'Unknown user email', nonexistent_email])
@patch.dict("django.conf.settings.FEATURES", {'ADVANCED_SECURITY': True})
def test_login_fail_incorrect_email_with_advanced_security(self):
nonexistent_email = u'not_a_user@edx.org'
response, mock_audit_log = self._login_response(nonexistent_email, 'test_password')
self._assert_response(response, success=False,
value='Email or password is incorrect')
self._assert_audit_log(mock_audit_log, 'warning', [u'Login failed', u'Unknown user email', nonexistent_email])
@patch.dict("django.conf.settings.FEATURES", {'SQUELCH_PII_IN_LOGS': True})
def test_login_fail_no_user_exists_no_pii(self):
nonexistent_email = u'not_a_user@edx.org'
response, mock_audit_log = self._login_response(nonexistent_email, 'test_password')
self._assert_response(response, success=False,
value='Email or password is incorrect')
self._assert_audit_log(mock_audit_log, 'warning', [u'Login failed', u'Unknown user email'])
self._assert_not_in_audit_log(mock_audit_log, 'warning', [nonexistent_email])
def test_login_fail_wrong_password(self):
response, mock_audit_log = self._login_response('test@edx.org', 'wrong_password')
self._assert_response(response, success=False,
value='Email or password is incorrect')
self._assert_audit_log(mock_audit_log, 'warning', [u'Login failed', u'password for', u'test@edx.org', u'invalid'])
@patch.dict("django.conf.settings.FEATURES", {'SQUELCH_PII_IN_LOGS': True})
def test_login_fail_wrong_password_no_pii(self):
response, mock_audit_log = self._login_response('test@edx.org', 'wrong_password')
self._assert_response(response, success=False,
value='Email or password is incorrect')
self._assert_audit_log(mock_audit_log, 'warning', [u'Login failed', u'password for', u'invalid'])
self._assert_not_in_audit_log(mock_audit_log, 'warning', [u'test@edx.org'])
def test_login_not_activated(self):
# De-activate the user
self.user.is_active = False
self.user.save()
# Should now be unable to login
response, mock_audit_log = self._login_response('test@edx.org', 'test_password')
self._assert_response(response, success=False,
value="This account has not been activated")
self._assert_audit_log(mock_audit_log, 'warning', [u'Login failed', u'Account not active for user'])
@patch.dict("django.conf.settings.FEATURES", {'SQUELCH_PII_IN_LOGS': True})
def test_login_not_activated_no_pii(self):
# De-activate the user
self.user.is_active = False
self.user.save()
# Should now be unable to login
response, mock_audit_log = self._login_response('test@edx.org', 'test_password')
self._assert_response(response, success=False,
value="This account has not been activated")
self._assert_audit_log(mock_audit_log, 'warning', [u'Login failed', u'Account not active for user'])
self._assert_not_in_audit_log(mock_audit_log, 'warning', [u'test'])
def test_login_unicode_email(self):
unicode_email = u'test@edx.org' + unichr(40960)
response, mock_audit_log = self._login_response(unicode_email, 'test_password')
self._assert_response(response, success=False)
self._assert_audit_log(mock_audit_log, 'warning', [u'Login failed', unicode_email])
def test_login_unicode_password(self):
unicode_password = u'test_password' + unichr(1972)
response, mock_audit_log = self._login_response('test@edx.org', unicode_password)
self._assert_response(response, success=False)
self._assert_audit_log(mock_audit_log, 'warning', [u'Login failed', u'password for', u'test@edx.org', u'invalid'])
def test_logout_logging(self):
response, _ = self._login_response('test@edx.org', 'test_password')
self._assert_response(response, success=True)
logout_url = reverse('logout')
with patch('student.models.AUDIT_LOG') as mock_audit_log:
response = self.client.post(logout_url)
self.assertEqual(response.status_code, 302)
self._assert_audit_log(mock_audit_log, 'info', [u'Logout', u'test'])
def test_login_user_info_cookie(self):
response, _ = self._login_response('test@edx.org', 'test_password')
self._assert_response(response, success=True)
# Verify the format of the "user info" cookie set on login
cookie = self.client.cookies[settings.EDXMKTG_USER_INFO_COOKIE_NAME]
user_info = json.loads(cookie.value)
# Check that the version is set
self.assertEqual(user_info["version"], settings.EDXMKTG_USER_INFO_COOKIE_VERSION)
# Check that the username and email are set
self.assertEqual(user_info["username"], self.user.username)
self.assertEqual(user_info["email"], self.user.email)
# Check that the URLs are absolute
for url in user_info["header_urls"].values():
self.assertIn("http://testserver/", url)
def test_logout_deletes_mktg_cookies(self):
response, _ = self._login_response('test@edx.org', 'test_password')
self._assert_response(response, success=True)
# Check that the marketing site cookies have been set
self.assertIn(settings.EDXMKTG_LOGGED_IN_COOKIE_NAME, self.client.cookies)
self.assertIn(settings.EDXMKTG_USER_INFO_COOKIE_NAME, self.client.cookies)
# Log out
logout_url = reverse('logout')
response = self.client.post(logout_url)
# Check that the marketing site cookies have been deleted
# (cookies are deleted by setting an expiration date in 1970)
for cookie_name in [settings.EDXMKTG_LOGGED_IN_COOKIE_NAME, settings.EDXMKTG_USER_INFO_COOKIE_NAME]:
cookie = self.client.cookies[cookie_name]
self.assertIn("01-Jan-1970", cookie.get('expires'))
@override_settings(
EDXMKTG_LOGGED_IN_COOKIE_NAME=u"unicode-logged-in",
EDXMKTG_USER_INFO_COOKIE_NAME=u"unicode-user-info",
)
def test_unicode_mktg_cookie_names(self):
        # When the logged-in cookie names are loaded from JSON files, they may
        # have type `unicode` instead of `str`, which can cause errors
        # when calling Django cookie manipulation functions.
response, _ = self._login_response('test@edx.org', 'test_password')
self._assert_response(response, success=True)
response = self.client.post(reverse('logout'))
self.assertRedirects(response, "/")
@patch.dict("django.conf.settings.FEATURES", {'SQUELCH_PII_IN_LOGS': True})
def test_logout_logging_no_pii(self):
response, _ = self._login_response('test@edx.org', 'test_password')
self._assert_response(response, success=True)
logout_url = reverse('logout')
with patch('student.models.AUDIT_LOG') as mock_audit_log:
response = self.client.post(logout_url)
self.assertEqual(response.status_code, 302)
self._assert_audit_log(mock_audit_log, 'info', [u'Logout'])
self._assert_not_in_audit_log(mock_audit_log, 'info', [u'test'])
def test_login_ratelimited_success(self):
# Try (and fail) logging in with fewer attempts than the limit of 30
# and verify that you can still successfully log in afterwards.
for i in xrange(20):
password = u'test_password{0}'.format(i)
response, _audit_log = self._login_response('test@edx.org', password)
self._assert_response(response, success=False)
# now try logging in with a valid password
response, _audit_log = self._login_response('test@edx.org', 'test_password')
self._assert_response(response, success=True)
def test_login_ratelimited(self):
        # try logging in 30 times, the default limit on the number of failed
        # login attempts in one 5-minute period before the rate gets limited
for i in xrange(30):
password = u'test_password{0}'.format(i)
self._login_response('test@edx.org', password)
# check to see if this response indicates that this was ratelimited
response, _audit_log = self._login_response('test@edx.org', 'wrong_password')
self._assert_response(response, success=False, value='Too many failed login attempts')
@patch.dict("django.conf.settings.FEATURES", {'PREVENT_CONCURRENT_LOGINS': True})
def test_single_session(self):
creds = {'email': 'test@edx.org', 'password': 'test_password'}
client1 = Client()
client2 = Client()
response = client1.post(self.url, creds)
self._assert_response(response, success=True)
# Reload the user from the database
self.user = User.objects.get(pk=self.user.pk)
self.assertEqual(self.user.profile.get_meta()['session_id'], client1.session.session_key)
# second login should log out the first
response = client2.post(self.url, creds)
self._assert_response(response, success=True)
try:
            # this test can be run with either lms or studio settings;
            # since studio does not have a dashboard url, we look for
            # another url that is login_required in that case
url = reverse('dashboard')
except NoReverseMatch:
url = reverse('upload_transcripts')
response = client1.get(url)
# client1 will be logged out
self.assertEqual(response.status_code, 302)
@patch.dict("django.conf.settings.FEATURES", {'PREVENT_CONCURRENT_LOGINS': True})
def test_single_session_with_no_user_profile(self):
"""
        Assert that a user login with CAS (Central Authentication Service) is
        redirected to the dashboard in the case of lms, or to upload_transcripts
        in the case of cms
"""
user = UserFactory.build(username='tester', email='tester@edx.org')
user.set_password('test_password')
user.save()
# Assert that no profile is created.
self.assertFalse(hasattr(user, 'profile'))
creds = {'email': 'tester@edx.org', 'password': 'test_password'}
client1 = Client()
client2 = Client()
response = client1.post(self.url, creds)
self._assert_response(response, success=True)
# Reload the user from the database
user = User.objects.get(pk=user.pk)
# Assert that profile is created.
self.assertTrue(hasattr(user, 'profile'))
# second login should log out the first
response = client2.post(self.url, creds)
self._assert_response(response, success=True)
try:
            # this test can be run with either lms or studio settings;
            # since studio does not have a dashboard url, we look for
            # another url that is login_required in that case
url = reverse('dashboard')
except NoReverseMatch:
url = reverse('upload_transcripts')
response = client1.get(url)
# client1 will be logged out
self.assertEqual(response.status_code, 302)
@patch.dict("django.conf.settings.FEATURES", {'PREVENT_CONCURRENT_LOGINS': True})
def test_single_session_with_url_not_having_login_required_decorator(self):
        # access the logout url; as it does not have the login_required decorator, it will avoid the redirect
        # and go inside enforce_single_login
creds = {'email': 'test@edx.org', 'password': 'test_password'}
client1 = Client()
client2 = Client()
response = client1.post(self.url, creds)
self._assert_response(response, success=True)
# Reload the user from the database
self.user = User.objects.get(pk=self.user.pk)
self.assertEqual(self.user.profile.get_meta()['session_id'], client1.session.session_key)
# second login should log out the first
response = client2.post(self.url, creds)
self._assert_response(response, success=True)
url = reverse('logout')
response = client1.get(url)
self.assertEqual(response.status_code, 302)
def test_change_enrollment_400(self):
"""
Tests that a 400 in change_enrollment doesn't lead to a 404
and in fact just logs in the user without incident
"""
# add this post param to trigger a call to change_enrollment
extra_post_params = {"enrollment_action": "enroll"}
with patch('student.views.change_enrollment') as mock_change_enrollment:
mock_change_enrollment.return_value = HttpResponseBadRequest("I am a 400")
response, _ = self._login_response(
'test@edx.org',
'test_password',
extra_post_params=extra_post_params,
)
response_content = json.loads(response.content)
self.assertIsNone(response_content["redirect_url"])
self._assert_response(response, success=True)
def test_change_enrollment_200_no_redirect(self):
"""
Tests "redirect_url" is None if change_enrollment returns a HttpResponse
with no content
"""
# add this post param to trigger a call to change_enrollment
extra_post_params = {"enrollment_action": "enroll"}
with patch('student.views.change_enrollment') as mock_change_enrollment:
mock_change_enrollment.return_value = HttpResponse()
response, _ = self._login_response(
'test@edx.org',
'test_password',
extra_post_params=extra_post_params,
)
response_content = json.loads(response.content)
self.assertIsNone(response_content["redirect_url"])
self._assert_response(response, success=True)
def _login_response(self, email, password, patched_audit_log='student.views.AUDIT_LOG', extra_post_params=None):
''' Post the login info '''
post_params = {'email': email, 'password': password}
if extra_post_params is not None:
post_params.update(extra_post_params)
with patch(patched_audit_log) as mock_audit_log:
result = self.client.post(self.url, post_params)
return result, mock_audit_log
def _assert_response(self, response, success=None, value=None):
'''
Assert that the response had status 200 and returned a valid
JSON-parseable dict.
If success is provided, assert that the response had that
value for 'success' in the JSON dict.
If value is provided, assert that the response contained that
value for 'value' in the JSON dict.
'''
self.assertEqual(response.status_code, 200)
try:
response_dict = json.loads(response.content)
except ValueError:
self.fail("Could not parse response content as JSON: %s"
% str(response.content))
if success is not None:
self.assertEqual(response_dict['success'], success)
if value is not None:
msg = ("'%s' did not contain '%s'" %
(str(response_dict['value']), str(value)))
self.assertTrue(value in response_dict['value'], msg)
def _assert_audit_log(self, mock_audit_log, level, log_strings):
"""
Check that the audit log has received the expected call as its last call.
"""
method_calls = mock_audit_log.method_calls
name, args, _kwargs = method_calls[-1]
self.assertEquals(name, level)
self.assertEquals(len(args), 1)
format_string = args[0]
for log_string in log_strings:
self.assertIn(log_string, format_string)
def _assert_not_in_audit_log(self, mock_audit_log, level, log_strings):
"""
        Check that the audit log did not receive the given strings in its last call.
"""
method_calls = mock_audit_log.method_calls
name, args, _kwargs = method_calls[-1]
self.assertEquals(name, level)
self.assertEquals(len(args), 1)
format_string = args[0]
for log_string in log_strings:
self.assertNotIn(log_string, format_string)
class ExternalAuthShibTest(ModuleStoreTestCase):
"""
Tests how login_user() interacts with ExternalAuth, in particular Shib
"""
def setUp(self):
super(ExternalAuthShibTest, self).setUp()
self.course = CourseFactory.create(
org='Stanford',
number='456',
display_name='NO SHIB',
user_id=self.user.id,
)
self.shib_course = CourseFactory.create(
org='Stanford',
number='123',
display_name='Shib Only',
enrollment_domain='shib:https://idp.stanford.edu/',
user_id=self.user.id,
)
self.user_w_map = UserFactory.create(email='withmap@stanford.edu')
self.extauth = ExternalAuthMap(external_id='withmap@stanford.edu',
external_email='withmap@stanford.edu',
external_domain='shib:https://idp.stanford.edu/',
external_credentials="",
user=self.user_w_map)
self.user_w_map.save()
self.extauth.save()
self.user_wo_map = UserFactory.create(email='womap@gmail.com')
self.user_wo_map.save()
@unittest.skipUnless(settings.FEATURES.get('AUTH_USE_SHIB'), "AUTH_USE_SHIB not set")
def test_login_page_redirect(self):
"""
Tests that when a shib user types their email address into the login page, they get redirected
to the shib login.
"""
response = self.client.post(reverse('login'), {'email': self.user_w_map.email, 'password': ''})
self.assertEqual(response.status_code, 200)
obj = json.loads(response.content)
self.assertEqual(obj, {
'success': False,
'redirect': reverse('shib-login'),
})
@unittest.skipUnless(settings.FEATURES.get('AUTH_USE_SHIB'), "AUTH_USE_SHIB not set")
def test_login_required_dashboard(self):
"""
        Tests the redirect when visiting the @login_required dashboard, which should always go to the
        normal login page, since there is no course context
"""
response = self.client.get(reverse('dashboard'))
self.assertEqual(response.status_code, 302)
self.assertEqual(response['Location'], 'http://testserver/login?next=/dashboard')
@unittest.skipUnless(settings.FEATURES.get('AUTH_USE_SHIB'), "AUTH_USE_SHIB not set")
def test_externalauth_login_required_course_context(self):
"""
        Tests the redirects when visiting a course-specific URL with @login_required.
Should vary by course depending on its enrollment_domain
"""
TARGET_URL = reverse('courseware', args=[self.course.id.to_deprecated_string()]) # pylint: disable=invalid-name
noshib_response = self.client.get(TARGET_URL, follow=True)
self.assertEqual(noshib_response.redirect_chain[-1],
('http://testserver/login?next={url}'.format(url=TARGET_URL), 302))
self.assertContains(noshib_response, ("Sign in or Register | {platform_name}"
.format(platform_name=settings.PLATFORM_NAME)))
self.assertEqual(noshib_response.status_code, 200)
TARGET_URL_SHIB = reverse('courseware', args=[self.shib_course.id.to_deprecated_string()]) # pylint: disable=invalid-name
shib_response = self.client.get(**{'path': TARGET_URL_SHIB,
'follow': True,
'REMOTE_USER': self.extauth.external_id,
'Shib-Identity-Provider': 'https://idp.stanford.edu/'})
# Test that the shib-login redirect page with ?next= and the desired page are part of the redirect chain
# The 'courseware' page actually causes a redirect itself, so it's not the end of the chain and we
# won't test its contents
self.assertEqual(shib_response.redirect_chain[-3],
('http://testserver/shib-login/?next={url}'.format(url=TARGET_URL_SHIB), 302))
self.assertEqual(shib_response.redirect_chain[-2],
('http://testserver{url}'.format(url=TARGET_URL_SHIB), 302))
self.assertEqual(shib_response.status_code, 200)
@httpretty.activate
class LoginOAuthTokenMixin(ThirdPartyOAuthTestMixin):
"""
Mixin with tests for the login_oauth_token view. A TestCase that includes
this must define the following:
BACKEND: The name of the backend from python-social-auth
USER_URL: The URL of the endpoint that the backend retrieves user data from
UID_FIELD: The field in the user data that the backend uses as the user id
"""
def setUp(self):
super(LoginOAuthTokenMixin, self).setUp()
self.url = reverse(login_oauth_token, kwargs={"backend": self.BACKEND})
def _assert_error(self, response, status_code, error):
"""Assert that the given response was a 400 with the given error code"""
self.assertEqual(response.status_code, status_code)
self.assertEqual(json.loads(response.content), {"error": error})
self.assertNotIn("partial_pipeline", self.client.session)
def test_success(self):
self._setup_provider_response(success=True)
response = self.client.post(self.url, {"access_token": "dummy"})
self.assertEqual(response.status_code, 204)
self.assertEqual(int(self.client.session['_auth_user_id']), self.user.id) # pylint: disable=no-member
def test_invalid_token(self):
self._setup_provider_response(success=False)
response = self.client.post(self.url, {"access_token": "dummy"})
self._assert_error(response, 401, "invalid_token")
def test_missing_token(self):
response = self.client.post(self.url)
self._assert_error(response, 400, "invalid_request")
def test_unlinked_user(self):
UserSocialAuth.objects.all().delete()
self._setup_provider_response(success=True)
response = self.client.post(self.url, {"access_token": "dummy"})
self._assert_error(response, 401, "invalid_token")
def test_get_method(self):
response = self.client.get(self.url, {"access_token": "dummy"})
self.assertEqual(response.status_code, 405)
# This is necessary because cms does not implement third party auth
@unittest.skipUnless(settings.FEATURES.get("ENABLE_THIRD_PARTY_AUTH"), "third party auth not enabled")
class LoginOAuthTokenTestFacebook(LoginOAuthTokenMixin, ThirdPartyOAuthTestMixinFacebook, TestCase):
"""Tests login_oauth_token with the Facebook backend"""
pass
# This is necessary because cms does not implement third party auth
@unittest.skipUnless(settings.FEATURES.get("ENABLE_THIRD_PARTY_AUTH"), "third party auth not enabled")
class LoginOAuthTokenTestGoogle(LoginOAuthTokenMixin, ThirdPartyOAuthTestMixinGoogle, TestCase):
"""Tests login_oauth_token with the Google backend"""
pass
| devs1991/test_edx_docmode | common/djangoapps/student/tests/test_login.py | Python | agpl-3.0 | 26,838 | 0.002757 |
try:
import simplejson
except ImportError:
import json as simplejson
from .meta import DocumentMeta, BaseDocumentSession
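# Module-level buffer: every JSONDocument created appends its kwargs here
# until Session.commit() dumps the collected objects to the filesystem.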
json_objects = []
class JSONDocument(object):
"""
JSON Document base class
"""
__metaclass__ = DocumentMeta
def __init__(self, **kwargs):
json_objects.append(kwargs)
class Session(BaseDocumentSession):
"""
A class featuring a database session
"""
def commit(self):
"""
Dumps the scraped data to the filesystem
"""
with open(self.file_name, 'w') as f:
simplejson.dump(json_objects, f)
def close(self):
        super(Session, self).close()
json_session = Session()
| bossiernesto/onyx | persistance/documents/json_doc.py | Python | bsd-3-clause | 703 | 0.004267 |
# Copyright (c) 2014, Canon Inc. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of Canon Inc. nor the names of
# its contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY CANON INC. AND ITS CONTRIBUTORS "AS IS" AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL CANON INC. AND ITS CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import json
import logging
import sys
import time
from webkitpy.common.system.autoinstall import AutoInstaller
from webkitpy.layout_tests.servers import http_server_base
_log = logging.getLogger(__name__)
def doc_root(port_obj):
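    """Return the WPT document root, taken from the wptserver_doc_root option if set, otherwise the default imported w3c path."""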
doc_root = port_obj.get_option("wptserver_doc_root")
if doc_root is None:
return port_obj.host.filesystem.join("imported", "w3c", "web-platform-tests")
return doc_root
def base_url(port_obj):
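    """Build the HTTP base URL of the WPT server from the imported config.json, defaulting to http://localhost:8800/ when no config file is found."""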
config_wk_filepath = port_obj._filesystem.join(port_obj.layout_tests_dir(), "imported", "w3c", "resources", "config.json")
if not port_obj.host.filesystem.isfile(config_wk_filepath):
# This should only be hit by webkitpy unit tests
_log.debug("No WPT config file found")
return "http://localhost:8800/"
json_data = port_obj._filesystem.read_text_file(config_wk_filepath)
config = json.loads(json_data)
ports = config["ports"]
return "http://" + config["host"] + ":" + str(ports["http"][0]) + "/"
class WebPlatformTestServer(http_server_base.HttpServerBase):
def __init__(self, port_obj, name, pidfile=None):
http_server_base.HttpServerBase.__init__(self, port_obj)
self._output_dir = port_obj.results_directory()
self._name = name
self._log_file_name = '%s_process_log.out.txt' % (self._name)
self._wsout = None
self._process = None
self._pid_file = pidfile
if not self._pid_file:
self._pid_file = self._filesystem.join(self._runtime_path, '%s.pid' % self._name)
self._servers_file = self._filesystem.join(self._runtime_path, '%s_servers.json' % (self._name))
self._stdout_data = None
self._stderr_data = None
self._filesystem = port_obj.host.filesystem
self._layout_root = port_obj.layout_tests_dir()
self._doc_root = self._filesystem.join(self._layout_root, doc_root(port_obj))
self._resources_files_to_copy = ['testharness.css', 'testharnessreport.js']
current_dir_path = self._filesystem.abspath(self._filesystem.split(__file__)[0])
self._start_cmd = ["python", self._filesystem.join(current_dir_path, "web_platform_test_launcher.py"), self._servers_file]
self._doc_root_path = self._filesystem.join(self._layout_root, self._doc_root)
def _install_modules(self):
modules_file_path = self._filesystem.join(self._doc_root_path, "..", "resources", "web-platform-tests-modules.json")
if not self._filesystem.isfile(modules_file_path):
_log.warning("Cannot read " + modules_file_path)
return
modules = json.loads(self._filesystem.read_text_file(modules_file_path))
for module in modules:
path = module["path"]
name = path.pop()
AutoInstaller(target_dir=self._filesystem.join(self._doc_root, self._filesystem.sep.join(path))).install(url=module["url"], url_subpath=module["url_subpath"], target_name=name)
def _copy_webkit_test_files(self):
_log.debug('Copying WebKit resources files')
for f in self._resources_files_to_copy:
webkit_filename = self._filesystem.join(self._layout_root, "resources", f)
if self._filesystem.isfile(webkit_filename):
self._filesystem.copyfile(webkit_filename, self._filesystem.join(self._doc_root, "resources", f))
_log.debug('Copying WebKit web platform server config.json')
config_wk_filename = self._filesystem.join(self._layout_root, "imported", "w3c", "resources", "config.json")
if self._filesystem.isfile(config_wk_filename):
config_json = self._filesystem.read_text_file(config_wk_filename).replace("%CERTS_DIR%", self._filesystem.join(self._output_dir, "_wpt_certs"))
self._filesystem.write_text_file(self._filesystem.join(self._doc_root, "config.json"), config_json)
wpt_testharnessjs_file = self._filesystem.join(self._doc_root, "resources", "testharness.js")
layout_tests_testharnessjs_file = self._filesystem.join(self._layout_root, "resources", "testharness.js")
# FIXME: Next line to be removed once all bots have wpt_testharnessjs_file updated correctly. See https://bugs.webkit.org/show_bug.cgi?id=152257.
self._filesystem.copyfile(layout_tests_testharnessjs_file, wpt_testharnessjs_file)
if (not self._filesystem.compare(wpt_testharnessjs_file, layout_tests_testharnessjs_file)):
_log.warning("\n//////////\nWPT tests are not using the same testharness.js file as other WebKit Layout tests.\nWebKit testharness.js might need to be updated according WPT testharness.js.\n//////////\n")
def _clean_webkit_test_files(self):
_log.debug('Cleaning WPT resources files')
for f in self._resources_files_to_copy:
wpt_filename = self._filesystem.join(self._doc_root, "resources", f)
if self._filesystem.isfile(wpt_filename):
self._filesystem.remove(wpt_filename)
_log.debug('Cleaning WPT web platform server config.json')
config_wpt_filename = self._filesystem.join(self._doc_root, "config.json")
if self._filesystem.isfile(config_wpt_filename):
self._filesystem.remove(config_wpt_filename)
def _prepare_config(self):
if self._filesystem.exists(self._output_dir):
output_log = self._filesystem.join(self._output_dir, self._log_file_name)
self._wsout = self._filesystem.open_text_file_for_writing(output_log)
self._install_modules()
self._copy_webkit_test_files()
def _spawn_process(self):
self._stdout_data = None
self._stderr_data = None
if self._wsout:
self._process = self._executive.popen(self._start_cmd, cwd=self._doc_root_path, shell=False, stdin=self._executive.PIPE, stdout=self._wsout, stderr=self._wsout)
else:
self._process = self._executive.popen(self._start_cmd, cwd=self._doc_root_path, shell=False, stdin=self._executive.PIPE, stdout=self._executive.PIPE, stderr=self._executive.STDOUT)
self._filesystem.write_text_file(self._pid_file, str(self._process.pid))
# Wait a second for the server to actually start so that tests do not start until server is running.
time.sleep(1)
return self._process.pid
def _stop_running_subservers(self):
if self._filesystem.exists(self._servers_file):
try:
json_data = self._filesystem.read_text_file(self._servers_file)
started_servers = json.loads(json_data)
for server in started_servers:
if self._executive.check_running_pid(server['pid']):
_log.warning('Killing server process (protocol: %s , port: %d, pid: %d).' % (server['protocol'], server['port'], server['pid']))
self._executive.kill_process(server['pid'])
finally:
self._filesystem.remove(self._servers_file)
def stop(self):
super(WebPlatformTestServer, self).stop()
        # In case of an orphaned pid, kill the running subservers if any are still alive.
self._stop_running_subservers()
def _stop_running_server(self):
_log.debug('Stopping %s server' % (self._name))
self._clean_webkit_test_files()
if self._process:
(self._stdout_data, self._stderr_data) = self._process.communicate(input='\n')
if self._wsout:
self._wsout.close()
self._wsout = None
if self._pid and self._executive.check_running_pid(self._pid):
_log.warning('Cannot stop %s server normally.' % (self._name))
_log.warning('Killing server launcher process (pid: %d).' % (self._pid))
self._executive.kill_process(self._pid)
self._remove_pid_file()
self._stop_running_subservers()
| teamfx/openjfx-9-dev-rt | modules/javafx.web/src/main/native/Tools/Scripts/webkitpy/layout_tests/servers/web_platform_test_server.py | Python | gpl-2.0 | 9,422 | 0.003927 |
"""
File: logmessages.py
Author: Fernando Crespo Gravalos (cees.project.official@gmail.com)
Date: 2014/06/16
"""
##############
# LOG MESSAGES
##############
## GENERAL ##
DB_ERROR = 'Database error. Check database log file.'
STORE_NOT_FOUND = 'Could not find store in cees database.'
TOKEN_NOT_FOUND = 'Could not find token in database.'
SCHEMA_NOT_FOUND = 'Could not validate request. Schema file not found.'
VALIDATION_ERROR = 'Data not valid.'
## LOGIN ##
LOGGED_IN = 'Shop assistant logged in as '
CREDENTIALS_NOT_FOUND = 'Could not find the email/password provided.'
## ARRIVALS ##
RFID_NOT_FOUND = 'Invalid identifier. RFID not found in cees database.'
CLIENT_NOT_ALLOWED = 'Client does not belong to this customer.'
SENDING_NOTIFICATION = 'Sending push notification.'
CLIENT_NOT_FOUND = 'Client not found as an arrival.'
## TOKEN ##
MISSING_TOKEN = 'There is no Authentication header in request.'
## GCM AND DEVICE REGISTRATION##
UNKNOWN_DEVICE = 'Device not found in database.'
REGISTRATION_NOT_FOUND = 'Registration not found in database.' | fcgravalos/CEES-API-v1.0 | logmessages.py | Python | apache-2.0 | 1,055 | 0.006635 |
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
#
# Copyright (c) 2009 Ars Aperta, Itaapy, Pierlis, Talend.
#
# Authors: Hervé Cauwelier <herve@itaapy.com>
# Romain Gauthier <romain@itaapy.com>
#
# This file is part of Lpod (see: http://lpod-project.org).
# Lpod is free software; you can redistribute it and/or modify it under
# the terms of either:
#
# a) the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option)
# any later version.
# Lpod is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Lpod. If not, see <http://www.gnu.org/licenses/>.
#
# b) the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Import from the standard library
from optparse import OptionParser
from sys import exit, stdout
# Import from lpod
from lpod import __version__
from lpod.document import odf_get_document
def show_styles(document, automatic=True, common=True, properties=False):
"""Show the different styles of a document and their properties.
"""
output = document.show_styles(automatic=automatic, common=common,
properties=properties)
# Print the styles
encoding = stdout.encoding if stdout.encoding is not None else 'utf-8'
stdout.write(output.encode(encoding))
stdout.flush()
def delete_styles(document, pretty=True):
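    """Delete the document's styles, save it, and report how many were removed."""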
n = document.delete_styles()
document.save(pretty=pretty)
print n, "styles removed (0 error, 0 warning)."
def merge_styles(document, from_file, pretty=True):
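    """Replace the styles of the document with those from `from_file` and save the result."""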
source = odf_get_document(from_file)
document.delete_styles()
document.merge_styles_from(source)
document.save(pretty=pretty)
print "Done (0 error, 0 warning)."
if __name__ == '__main__':
# Options initialisation
usage = '%prog <file>'
description = 'A command line interface to manipulate styles of ' \
'OpenDocument files.'
parser = OptionParser(usage, version=__version__,
description=description)
# --automatic
parser.add_option('-a', '--automatic', dest='automatic',
action='store_true', default=False,
help="show automatic styles only")
# --common
parser.add_option('-c', '--common', dest='common', action='store_true',
default=False, help="show common styles only")
# --properties
parser.add_option('-p', '--properties', dest='properties',
action='store_true', help="show properties of styles")
# --delete
parser.add_option('-d', '--delete', dest='delete',
action='store_true', help="delete all styles (except default)")
# --merge
help = ('copy styles from FILE to <file>. Any style with the same name '
'will be replaced.')
parser.add_option('-m', '--merge-styles-from', dest='merge',
action='store', metavar='FILE', help=help)
# Parse options
options, args = parser.parse_args()
if len(args) != 1:
parser.print_help()
exit(1)
document = odf_get_document(args[0])
if options.delete:
delete_styles(document)
elif options.merge:
merge_styles(document, options.merge)
else:
automatic = options.automatic
common = options.common
if not automatic ^ common:
automatic, common = True, True
show_styles(document, automatic=automatic, common=common,
properties=options.properties)
| kiniou/blender-smooth-slides | tools/lpod/scripts/lpod-style.py | Python | gpl-3.0 | 3,854 | 0.003374 |
import random
import unittest
from hearthbreaker.cards.spells.neutral import TheCoin
from tests.agents.testing_agents import OneCardPlayingAgent, MinionAttackingAgent, CardTestingAgent, \
PlayAndAttackAgent
from tests.testing_utils import generate_game_for
from hearthbreaker.cards import *
from hearthbreaker.constants import MINION_TYPE
from hearthbreaker.agents.basic_agents import PredictableAgent, DoNothingAgent
class TestShaman(unittest.TestCase):
def setUp(self):
random.seed(1857)
def test_AlAkirTheWindlord(self):
game = generate_game_for(AlAkirTheWindlord, StonetuskBoar, OneCardPlayingAgent, DoNothingAgent)
for turn in range(0, 15):
game.play_single_turn()
self.assertEqual(1, len(game.players[0].minions))
self.assertEqual("Al'Akir the Windlord", game.players[0].minions[0].card.name)
self.assertTrue(game.players[0].minions[0].windfury())
self.assertTrue(game.players[0].minions[0].charge())
self.assertTrue(game.players[0].minions[0].divine_shield)
self.assertTrue(game.players[0].minions[0].taunt)
def test_DustDevil(self):
game = generate_game_for(DustDevil, StonetuskBoar, OneCardPlayingAgent, DoNothingAgent)
game.play_single_turn()
self.assertEqual(1, len(game.players[0].minions))
self.assertEqual("Dust Devil", game.players[0].minions[0].card.name)
self.assertTrue(game.players[0].minions[0].windfury())
self.assertEqual(2, game.players[0].upcoming_overload)
game.play_single_turn()
        # Overload should cause us to start this turn with 0 mana
game.play_single_turn()
self.assertEqual(1, len(game.players[0].minions))
self.assertEqual(0, game.players[0].upcoming_overload)
self.assertEqual(0, game.players[0].mana)
self.assertEqual(2, game.players[0].max_mana)
def test_EarthElemental(self):
game = generate_game_for(EarthElemental, StonetuskBoar, OneCardPlayingAgent, DoNothingAgent)
# Earth Elemental should be played
for turn in range(0, 9):
game.play_single_turn()
self.assertEqual(1, len(game.players[0].minions))
self.assertEqual("Earth Elemental", game.players[0].minions[0].card.name)
self.assertTrue(game.players[0].minions[0].taunt)
self.assertEqual(3, game.players[0].upcoming_overload)
def test_FireElemental(self):
game = generate_game_for(FireElemental, StonetuskBoar, OneCardPlayingAgent, DoNothingAgent)
for turn in range(0, 10):
game.play_single_turn()
self.assertEqual(30, game.players[1].hero.health)
# Fire Elemental should be played, and its battlecry dealing three damage to opponent
game.play_single_turn()
self.assertEqual(1, len(game.players[0].minions))
self.assertEqual("Fire Elemental", game.players[0].minions[0].card.name)
self.assertEqual(27, game.players[1].hero.health)
def test_FlametongueTotem(self):
game = generate_game_for(StonetuskBoar, StonetuskBoar, OneCardPlayingAgent, DoNothingAgent)
for turn in range(0, 5):
game.play_single_turn()
# There should be three Stonetusk Boars on the board
self.assertEqual(3, len(game.players[0].minions))
# add a new Flametongue Totem at index 1
totem = FlametongueTotem()
totem.summon(game.players[0], game, 1)
# The minions to either side should have their attack increased
self.assertEqual(4, len(game.players[0].minions))
self.assertEqual(3, game.players[0].minions[0].calculate_attack())
self.assertEqual(3, game.players[0].minions[2].calculate_attack())
self.assertEqual(1, game.players[0].minions[3].calculate_attack())
# When removing the minion at index 0, we should not get an error
game.players[0].minions[0].die(None)
game.players[0].minions[0].activate_delayed()
self.assertEqual(3, len(game.players[0].minions))
# When removing the minion at index 1, we should have a new minion at index 1,
# and its attack should be increased
game.players[0].minions[1].die(None)
game.players[0].minions[1].activate_delayed()
self.assertEqual(2, len(game.players[0].minions))
self.assertEqual(3, game.players[0].minions[1].calculate_attack())
# Silencing this minion should have no effect on its attack
game.players[0].minions[1].silence()
self.assertEqual(3, game.players[0].minions[1].calculate_attack())
# We should be able to add a boar on either side of the wolf, and their attack should be increased
# The attack of the boar which used to be next to the wolf should decrease
boar = StonetuskBoar()
boar.summon(game.players[0], game, 0)
boar.summon(game.players[0], game, 2)
self.assertEqual(4, len(game.players[0].minions))
self.assertEqual(3, game.players[0].minions[0].calculate_attack())
self.assertEqual(3, game.players[0].minions[2].calculate_attack())
self.assertEqual(1, game.players[0].minions[3].calculate_attack())
# Add a new boar on the left of the totem since we haven't tested that yet
boar.summon(game.players[0], game, 1)
self.assertEqual(5, len(game.players[0].minions))
self.assertEqual(1, game.players[0].minions[0].calculate_attack())
self.assertEqual(3, game.players[0].minions[1].calculate_attack())
game.players[0].minions[1].die(None)
game.players[0].minions[1].activate_delayed()
self.assertEqual(4, len(game.players[0].minions))
self.assertEqual(3, game.players[0].minions[0].calculate_attack())
# If the totem is silenced, then the boars to either side should no longer have increased attack
game.players[0].minions[1].silence()
self.assertEqual(1, game.players[0].minions[0].calculate_attack())
self.assertEqual(1, game.players[0].minions[2].calculate_attack())
self.assertEqual(1, game.players[0].minions[3].calculate_attack())
def test_ManaTideTotem(self):
game = generate_game_for([ManaTideTotem, WarGolem], StonetuskBoar, OneCardPlayingAgent, DoNothingAgent)
for turn in range(0, 4):
game.play_single_turn()
self.assertEqual(25, game.players[0].deck.left)
self.assertEqual(0, len(game.players[0].minions))
# Mana Tide Totem should be played, and we should draw a card at the end of turn
game.play_single_turn()
self.assertEqual(1, len(game.players[0].minions))
self.assertEqual("Mana Tide Totem", game.players[0].minions[0].card.name)
self.assertEqual(23, game.players[0].deck.left)
game.play_single_turn()
# Silence, we should only draw one card next turn
game.players[0].minions[0].silence()
game.play_single_turn()
self.assertEqual(1, len(game.players[0].minions))
self.assertEqual(22, game.players[0].deck.left)
def test_UnboundElemental(self):
game = generate_game_for([UnboundElemental, DustDevil, DustDevil], StonetuskBoar, OneCardPlayingAgent,
DoNothingAgent)
for turn in range(0, 6):
game.play_single_turn()
self.assertEqual(1, len(game.players[0].minions))
self.assertEqual("Unbound Elemental", game.players[0].minions[0].card.name)
self.assertEqual(2, game.players[0].minions[0].calculate_attack())
self.assertEqual(4, game.players[0].minions[0].calculate_max_health())
# One Dust Devil should be played, giving the Unbound Elemental +1/+1
game.play_single_turn()
self.assertEqual(2, len(game.players[0].minions))
self.assertEqual(3, game.players[0].minions[-1].calculate_attack())
self.assertEqual(5, game.players[0].minions[-1].calculate_max_health())
# Test the silence
game.players[0].minions[-1].silence()
self.assertEqual(2, game.players[0].minions[-1].calculate_attack())
self.assertEqual(4, game.players[0].minions[-1].calculate_max_health())
# Another Dust Devil, nothing should happen because of silence
game.play_single_turn()
game.play_single_turn()
self.assertEqual(3, len(game.players[0].minions))
self.assertEqual(2, game.players[0].minions[-1].calculate_attack())
self.assertEqual(4, game.players[0].minions[-1].calculate_max_health())
def test_Windspeaker(self):
game = generate_game_for([StonetuskBoar, Windspeaker], StonetuskBoar, OneCardPlayingAgent, DoNothingAgent)
for turn in range(0, 6):
game.play_single_turn()
self.assertEqual(1, len(game.players[0].minions))
self.assertEqual("Stonetusk Boar", game.players[0].minions[0].card.name)
self.assertFalse(game.players[0].minions[0].windfury())
# Windspeaker should be played, giving the boar windfury
game.play_single_turn()
self.assertEqual(2, len(game.players[0].minions))
self.assertEqual("Windspeaker", game.players[0].minions[0].card.name)
self.assertTrue(game.players[0].minions[1].windfury())
def test_AncestralHealing(self):
game = generate_game_for([FlametongueTotem, AncestralHealing], StonetuskBoar,
OneCardPlayingAgent, DoNothingAgent)
for turn in range(0, 4):
game.play_single_turn()
self.assertEqual(1, len(game.players[0].minions))
self.assertEqual("Flametongue Totem", game.players[0].minions[0].card.name)
self.assertEqual(3, game.players[0].minions[0].health)
self.assertFalse(game.players[0].minions[0].taunt)
game.players[0].minions[0].health = 1
game.play_single_turn()
self.assertEqual(3, game.players[0].minions[0].health)
self.assertTrue(game.players[0].minions[0].taunt)
def test_AncestralSpirit(self):
game = generate_game_for([ArgentCommander, AncestralSpirit], StonetuskBoar, OneCardPlayingAgent, DoNothingAgent)
for turn in range(0, 11):
game.play_single_turn()
self.assertEqual(1, len(game.players[0].minions))
self.assertEqual("Argent Commander", game.players[0].minions[0].card.name)
self.assertEqual(2, game.players[0].minions[0].health)
self.assertTrue(game.players[0].minions[0].divine_shield)
game.play_single_turn()
# Ancestral Spirit should be played on the Argent Commander
game.play_single_turn()
self.assertEqual(1, len(game.players[0].minions))
game.players[0].minions[0].health = 1
game.players[0].minions[0].divine_shield = False
# Let the minion die in order to test Ancestral Spirit
commander = game.players[0].minions[0]
commander.die(None)
commander.activate_delayed()
self.assertEqual(1, len(game.players[0].minions))
self.assertEqual("Argent Commander", game.players[0].minions[0].card.name)
self.assertEqual(2, game.players[0].minions[0].health)
self.assertTrue(game.players[0].minions[0].divine_shield)
def test_AncestralSpiritDeathrattle(self):
game = generate_game_for([LootHoarder, AncestralSpirit], StonetuskBoar, OneCardPlayingAgent, DoNothingAgent)
for turn in range(0, 5):
game.play_single_turn()
self.assertEqual(1, len(game.players[0].minions))
self.assertEqual(4, len(game.players[0].hand))
loot = game.players[0].minions[0]
loot.die(None)
loot.activate_delayed()
self.assertEqual(1, len(game.players[0].minions))
self.assertEqual(5, len(game.players[0].hand))
def test_Bloodlust(self):
game = generate_game_for([StonetuskBoar, StonetuskBoar, StonetuskBoar, StonetuskBoar, Bloodlust], StonetuskBoar,
MinionAttackingAgent, DoNothingAgent)
for turn in range(0, 8):
game.play_single_turn()
self.assertEqual(4, len(game.players[0].minions))
self.assertEqual(20, game.players[1].hero.health)
# Bloodlust should be played, resulting in 4 * 4 = 16 damage
game.play_single_turn()
self.assertEqual(4, game.players[1].hero.health)
# Attack power should be back to normal
self.assertEqual(1, game.players[0].minions[0].calculate_attack())
def test_EarthShock(self):
game = generate_game_for(EarthShock, ArgentSquire, OneCardPlayingAgent, OneCardPlayingAgent)
for turn in range(0, 2):
game.play_single_turn()
self.assertEqual(1, len(game.players[1].minions))
self.assertTrue(game.players[1].minions[0].divine_shield)
# Earth Shock should be played, resulting in silence which removes the divine shield and then 1 damage
game.play_single_turn()
self.assertEqual(0, len(game.players[1].minions))
def test_FarSight(self):
game = generate_game_for(FarSight, StonetuskBoar, OneCardPlayingAgent, DoNothingAgent)
for turn in range(0, 5):
game.play_single_turn()
# Far Sight should have been played, our latest card should cost 3 - 3 = 0
self.assertEqual(0, game.players[0].hand[-1].mana_cost())
self.assertEqual(3, game.players[0].hand[0].mana_cost())
# Draw a card to make sure the new card doesn't get the effect
game.players[0].draw()
self.assertEqual(3, game.players[0].hand[-1].mana_cost())
# Our old card shouldn't have been affected
self.assertEqual(0, game.players[0].hand[-2].mana_cost())
def test_FeralSpirit(self):
game = generate_game_for(FeralSpirit, StonetuskBoar, CardTestingAgent, DoNothingAgent)
for turn in range(0, 5):
game.play_single_turn()
self.assertEqual(2, len(game.players[0].minions))
self.assertEqual(2, game.players[0].minions[0].calculate_attack())
self.assertEqual(3, game.players[0].minions[0].health)
self.assertTrue(game.players[0].minions[0].taunt)
self.assertEqual("Spirit Wolf", game.players[0].minions[0].card.name)
self.assertEqual(2, game.players[0].minions[0].card.mana)
self.assertEqual(2, game.players[0].minions[1].calculate_attack())
self.assertEqual(3, game.players[0].minions[1].health)
self.assertTrue(game.players[0].minions[1].taunt)
self.assertEqual("Spirit Wolf", game.players[0].minions[1].card.name)
self.assertEqual(2, game.players[0].minions[1].card.mana)
self.assertEqual(2, game.players[0].upcoming_overload)
def test_VitalityTotem(self):
game = generate_game_for(VitalityTotem, StonetuskBoar, CardTestingAgent, DoNothingAgent)
for turn in range(0, 2):
game.play_single_turn()
game.players[0].hero.health = 20
game.play_single_turn()
game.play_single_turn()
self.assertEqual(24, game.players[0].hero.health)
self.assertEqual(0, game.players[0].minions[0].calculate_attack())
self.assertEqual(3, game.players[0].minions[0].health)
game.play_single_turn()
game.play_single_turn()
# player now has two vitality totems in play
self.assertEqual(30, game.players[0].hero.health)
self.assertEqual(2, len(game.players[0].minions))
def test_ForkedLightning(self):
game = generate_game_for(ForkedLightning, StonetuskBoar, CardTestingAgent, OneCardPlayingAgent)
for turn in range(0, 4):
game.play_single_turn()
# Nothing should have happened yet, since the opponent haven't got 2 minions until now
self.assertEqual(2, len(game.players[1].minions))
# Forked Lightning should be played
game.play_single_turn()
self.assertEqual(0, len(game.players[1].minions))
self.assertEqual(2, game.players[0].upcoming_overload)
def test_FrostShock(self):
game = generate_game_for(FrostShock, StonetuskBoar, CardTestingAgent, DoNothingAgent)
# Frost Shock should be played
game.play_single_turn()
self.assertEqual(29, game.players[1].hero.health)
self.assertTrue(game.players[1].hero.frozen)
def test_Hex(self):
game = generate_game_for(ChillwindYeti, Hex, OneCardPlayingAgent, CardTestingAgent)
for turn in range(0, 7):
game.play_single_turn()
self.assertEqual(1, len(game.players[0].minions))
self.assertFalse(game.players[0].minions[0].taunt)
self.assertEqual(4, game.players[0].minions[0].calculate_attack())
self.assertEqual(5, game.players[0].minions[0].health)
self.assertEqual("Chillwind Yeti", game.players[0].minions[0].card.name)
game.play_single_turn()
self.assertEqual(1, len(game.players[0].minions))
self.assertTrue(game.players[0].minions[0].taunt)
self.assertEqual(0, game.players[0].minions[0].calculate_attack())
self.assertEqual(1, game.players[0].minions[0].health)
self.assertEqual("Frog", game.players[0].minions[0].card.name)
self.assertEqual(MINION_TYPE.BEAST, game.players[0].minions[0].card.minion_type)
def test_LavaBurst(self):
game = generate_game_for(LavaBurst, StonetuskBoar, CardTestingAgent, DoNothingAgent)
for turn in range(0, 4):
game.play_single_turn()
self.assertEqual(30, game.players[1].hero.health)
game.play_single_turn()
self.assertEqual(25, game.players[1].hero.health)
self.assertEqual(2, game.players[0].upcoming_overload)
def test_LightningBolt(self):
game = generate_game_for(LightningBolt, StonetuskBoar, CardTestingAgent, DoNothingAgent)
self.assertEqual(30, game.players[1].hero.health)
game.play_single_turn()
self.assertEqual(27, game.players[1].hero.health)
self.assertEqual(1, game.players[0].upcoming_overload)
def test_LightningStorm(self):
game = generate_game_for(LightningStorm, Shieldbearer, CardTestingAgent, PlayAndAttackAgent)
for turn in range(0, 4):
game.play_single_turn()
# Lightning Storm should be played
game.play_single_turn()
self.assertEqual(3, len(game.players[1].minions))
self.assertEqual(1, game.players[1].minions[0].health)
self.assertEqual(2, game.players[1].minions[1].health)
self.assertEqual(2, game.players[1].minions[2].health)
self.assertEqual(2, game.players[0].upcoming_overload)
def test_RockbiterWeapon(self):
game = generate_game_for(RockbiterWeapon, Shieldbearer, PlayAndAttackAgent, DoNothingAgent)
self.assertEqual(30, game.players[1].hero.health)
# Rockbiter Weapon should be played and used
game.play_single_turn()
self.assertEqual(27, game.players[1].hero.health)
def test_RockbiterWeapon_and_Hex(self):
game = generate_game_for([IronfurGrizzly, RockbiterWeapon, Hex], StonetuskBoar,
CardTestingAgent, DoNothingAgent)
for turn in range(7):
game.play_single_turn()
self.assertEqual(1, len(game.current_player.minions))
self.assertEqual("Frog", game.current_player.minions[0].card.name)
def test_RockbiterWeapon_and_BaronGeddon(self):
game = generate_game_for([BaronGeddon, RecklessRocketeer, RockbiterWeapon], StonetuskBoar,
PlayAndAttackAgent, DoNothingAgent)
for turn in range(15):
game.play_single_turn()
self.assertEqual(1, len(game.current_player.minions))
self.assertEqual("Baron Geddon", game.current_player.minions[0].card.name)
self.assertEqual(11, game.other_player.hero.health)
def test_TotemicMight(self):
game = generate_game_for([TotemicMight, StonetuskBoar], Shieldbearer, PredictableAgent, DoNothingAgent)
for turn in range(0, 2):
game.play_single_turn()
self.assertEqual(1, len(game.players[0].minions))
self.assertEqual("Stonetusk Boar", game.players[0].minions[0].card.name)
# Hero power and Totemic Might should be played
game.play_single_turn()
self.assertEqual(2, len(game.players[0].minions))
self.assertEqual(1, game.players[0].minions[0].calculate_max_health())
self.assertEqual("Stoneclaw Totem", game.players[0].minions[1].card.name)
self.assertEqual(4, game.players[0].minions[1].calculate_max_health())
def test_Windfury(self):
game = generate_game_for(Windfury, StonetuskBoar, CardTestingAgent, OneCardPlayingAgent)
for turn in range(0, 2):
game.play_single_turn()
self.assertFalse(game.players[1].minions[0].windfury())
# Windfury should be played
game.play_single_turn()
self.assertTrue(game.players[1].minions[0].windfury())
def test_Doomhammer(self):
game = generate_game_for(Doomhammer, StonetuskBoar, PlayAndAttackAgent, DoNothingAgent)
for turn in range(0, 8):
game.play_single_turn()
self.assertEqual(30, game.players[1].hero.health)
self.assertFalse(game.players[0].hero.windfury())
# Doomhammer should be played
game.play_single_turn()
self.assertTrue(game.players[0].hero.windfury())
self.assertEqual(2, game.players[0].weapon.base_attack)
self.assertEqual(6, game.players[0].weapon.durability)
self.assertEqual(2, game.players[0].upcoming_overload)
self.assertEqual(26, game.players[1].hero.health)
def test_StormforgedAxe(self):
game = generate_game_for(StormforgedAxe, StonetuskBoar, CardTestingAgent, DoNothingAgent)
for turn in range(0, 3):
game.play_single_turn()
self.assertEqual(2, game.players[0].weapon.base_attack)
self.assertEqual(3, game.players[0].weapon.durability)
self.assertEqual(1, game.players[0].upcoming_overload)
def test_Crackle(self):
game = generate_game_for(Crackle, StonetuskBoar, CardTestingAgent, DoNothingAgent)
for turn in range(0, 3):
game.play_single_turn()
self.assertEqual(25, game.players[1].hero.health)
self.assertEqual(1, game.players[0].upcoming_overload)
def test_SiltfinSpiritwalker(self):
game = generate_game_for([MurlocTidecaller, MurlocTidehunter, SiltfinSpiritwalker, Deathwing],
[MurlocTidecaller, Hellfire, BaneOfDoom], OneCardPlayingAgent, OneCardPlayingAgent)
for turn in range(6):
game.play_single_turn()
self.assertEqual(3, len(game.other_player.minions))
self.assertEqual(1, len(game.current_player.minions))
# Play Siltfin
game.play_single_turn()
self.assertEqual(4, len(game.current_player.minions))
self.assertEqual(1, len(game.other_player.minions))
self.assertEqual(4, len(game.current_player.hand))
self.assertEqual(7, len(game.other_player.hand))
# Hellfire will kill all the murlocs but the siltfin.
game.play_single_turn()
self.assertEqual(1, len(game.other_player.minions))
self.assertEqual(7, len(game.other_player.hand))
self.assertEqual(0, len(game.current_player.minions))
self.assertEqual(7, len(game.current_player.hand))
def test_WhirlingZapOMatic(self):
game = generate_game_for(WhirlingZapomatic, StonetuskBoar, OneCardPlayingAgent, DoNothingAgent)
for turn in range(0, 3):
game.play_single_turn()
self.assertEqual(1, len(game.players[0].minions))
self.assertEqual("Whirling Zap-o-matic", game.players[0].minions[0].card.name)
self.assertTrue(game.players[0].minions[0].windfury())
def test_DunemaulShaman(self):
game = generate_game_for(DunemaulShaman,
[StonetuskBoar, GoldshireFootman, SilverbackPatriarch, MogushanWarden],
PlayAndAttackAgent, OneCardPlayingAgent)
for turn in range(7):
game.play_single_turn()
self.assertEqual(1, len(game.current_player.minions))
self.assertEqual(3, len(game.other_player.minions))
game.play_single_turn()
# The shaman's forgetful ability triggers once. It hits the warden one time (its intended target)
# and the footman one time (after triggering forgetful)
game.play_single_turn()
self.assertEqual(2, len(game.current_player.minions))
self.assertEqual(3, len(game.other_player.minions))
self.assertEqual("Mogu'shan Warden", game.other_player.minions[0].card.name)
self.assertEqual("Silverback Patriarch", game.other_player.minions[1].card.name)
self.assertEqual("Stonetusk Boar", game.other_player.minions[2].card.name)
self.assertEqual(30, game.other_player.hero.health)
def test_Powermace(self):
game = generate_game_for([Powermace, SpiderTank, SpiderTank], Wisp, PlayAndAttackAgent, DoNothingAgent)
for turn in range(0, 6):
game.play_single_turn()
self.assertEqual(0, len(game.players[0].minions))
self.assertEqual(27, game.players[1].hero.health)
self.assertEqual(3, game.players[0].weapon.base_attack)
self.assertEqual(1, game.players[0].weapon.durability)
game.play_single_turn()
self.assertEqual(1, len(game.players[0].minions))
self.assertEqual(24, game.players[1].hero.health)
self.assertEqual(5, game.players[0].minions[0].calculate_attack())
self.assertEqual(6, game.players[0].minions[0].health)
def test_Neptulon(self):
game = generate_game_for([TheCoin, TheCoin, TheCoin, TheCoin, TheCoin, TheCoin, TheCoin, TheCoin, TheCoin,
Neptulon], Wisp, CardTestingAgent, DoNothingAgent)
for turn in range(0, 12):
game.play_single_turn()
self.assertEqual(0, len(game.players[0].minions))
self.assertEqual(0, len(game.players[0].hand))
game.play_single_turn()
self.assertEqual(1, len(game.players[0].minions))
self.assertEqual(4, len(game.players[0].hand))
self.assertEqual("Siltfin Spiritwalker", game.players[0].hand[0].name)
self.assertEqual("Murloc Tidecaller", game.players[0].hand[1].name)
self.assertEqual("Grimscale Oracle", game.players[0].hand[2].name)
self.assertEqual("Coldlight Seer", game.players[0].hand[3].name)
def test_AncestorsCall(self):
game = generate_game_for([AncestorsCall, StonetuskBoar], [Doomguard, Soulfire],
OneCardPlayingAgent, OneCardPlayingAgent)
for turn in range(6):
game.play_single_turn()
game.play_single_turn()
self.assertEqual(1, len(game.current_player.minions))
self.assertEqual("Stonetusk Boar", game.current_player.minions[0].card.name)
self.assertEqual(1, len(game.other_player.minions))
self.assertEqual("Doomguard", game.other_player.minions[0].card.name)
self.assertEqual(5, len(game.current_player.hand))
self.assertEqual(7, len(game.other_player.hand))
def test_LavaShock(self):
game = generate_game_for([Doomhammer, LightningBolt, LavaShock], StonetuskBoar,
CardTestingAgent, DoNothingAgent)
for turn in range(11):
game.play_single_turn()
# The player should have been able to do everything AND have three mana left over
self.assertEqual(25, game.other_player.hero.health)
self.assertEqual(3, game.current_player.mana)
def test_FireguardDestroyer(self):
game = generate_game_for(FireguardDestroyer, Wisp, OneCardPlayingAgent, DoNothingAgent)
for turn in range(0, 8):
game.play_single_turn()
self.assertEqual(1, len(game.players[0].minions))
self.assertEqual(6, game.players[0].minions[0].calculate_attack())
self.assertEqual(6, game.players[0].minions[0].health)
game.play_single_turn()
game.play_single_turn()
self.assertEqual(2, len(game.players[0].minions))
self.assertEqual(5, game.players[0].minions[0].calculate_attack())
self.assertEqual(6, game.players[0].minions[0].health)
game.play_single_turn()
game.play_single_turn()
self.assertEqual(3, len(game.players[0].minions))
self.assertEqual(5, game.players[0].minions[0].calculate_attack())
self.assertEqual(6, game.players[0].minions[0].health)
game.play_single_turn()
game.play_single_turn()
self.assertEqual(4, len(game.players[0].minions))
self.assertEqual(5, game.players[0].minions[0].calculate_attack())
self.assertEqual(6, game.players[0].minions[0].health)
game.play_single_turn()
game.play_single_turn()
self.assertEqual(5, len(game.players[0].minions))
self.assertEqual(6, game.players[0].minions[0].calculate_attack())
self.assertEqual(6, game.players[0].minions[0].health)
game.play_single_turn()
game.play_single_turn()
self.assertEqual(6, len(game.players[0].minions))
self.assertEqual(4, game.players[0].minions[0].calculate_attack())
self.assertEqual(6, game.players[0].minions[0].health)
game.play_single_turn()
game.play_single_turn()
self.assertEqual(7, len(game.players[0].minions)) # Well, I was trying to get a 7/6 but no luck
self.assertEqual(5, game.players[0].minions[0].calculate_attack())
self.assertEqual(6, game.players[0].minions[0].health)
def test_AncestralKnowledge(self):
game = generate_game_for(AncestralKnowledge, StonetuskBoar, CardTestingAgent, DoNothingAgent)
for turn in range(0, 3):
game.play_single_turn()
self.assertEqual(6, len(game.current_player.hand))
self.assertEqual(2, game.current_player.upcoming_overload)
| kingoflolz/hearthbreaker | tests/card_tests/shaman_tests.py | Python | mit | 30,173 | 0.002453 |
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import copy
import mock
import six
from heat.common import exception
from heat.common import template_format
from heat.engine.resources.openstack.manila import share as mshare
from heat.engine import rsrc_defn
from heat.engine import scheduler
from heat.tests import common
from heat.tests import utils
manila_template = """
heat_template_version: 2015-04-30
resources:
test_share:
type: OS::Manila::Share
properties:
share_protocol: NFS
size: 1
access_rules:
- access_to: 127.0.0.1
access_type: ip
access_level: ro
name: basic_test_share
description: basic test share
is_public: True
metadata: {"key": "value"}
"""
class DummyShare(object):
def __init__(self):
self.availability_zone = 'az'
self.host = 'host'
self.export_locations = 'el'
self.share_server_id = 'id'
self.created_at = 'ca'
self.status = 's'
self.project_id = 'p_id'
class ManilaShareTest(common.HeatTestCase):
def setUp(self):
super(ManilaShareTest, self).setUp()
self.fake_share = mock.MagicMock(id="test_share_id")
self.available_share = mock.MagicMock(
id="test_share_id",
status=mshare.ManilaShare.STATUS_AVAILABLE)
self.failed_share = mock.MagicMock(
id="test_share_id",
status=mshare.ManilaShare.STATUS_ERROR)
self.deleting_share = mock.MagicMock(
id="test_share_id",
status=mshare.ManilaShare.STATUS_DELETING)
def _init_share(self, stack_name):
tmp = template_format.parse(manila_template)
self.stack = utils.parse_stack(tmp, stack_name=stack_name)
res_def = self.stack.t.resource_definitions(self.stack)["test_share"]
share = mshare.ManilaShare("test_share", res_def, self.stack)
# replace clients and plugins with mocks
mock_client = mock.MagicMock()
client = mock.MagicMock(return_value=mock_client)
share.client = client
mock_plugin = mock.MagicMock()
client_plugin = mock.MagicMock(return_value=mock_plugin)
share.client_plugin = client_plugin
return share
def _create_share(self, stack_name):
share = self._init_share(stack_name)
share.client().shares.create.return_value = self.fake_share
share.client().shares.get.return_value = self.available_share
scheduler.TaskRunner(share.create)()
return share
def test_share_create(self):
share = self._create_share("stack_share_create")
expected_state = (share.CREATE, share.COMPLETE)
self.assertEqual(expected_state, share.state,
"Share is not in expected state")
self.assertEqual(self.fake_share.id, share.resource_id,
"Expected share ID was not propagated to share")
share.client().shares.allow.assert_called_once_with(
access="127.0.0.1", access_level="ro",
share=share.resource_id, access_type="ip")
args, kwargs = share.client().shares.create.call_args
message_end = " parameter was not passed to manila client"
self.assertEqual(u"NFS", kwargs["share_proto"],
"Share protocol" + message_end)
self.assertEqual(1, kwargs["size"], "Share size" + message_end)
self.assertEqual("basic_test_share", kwargs["name"],
"Share name" + message_end)
self.assertEqual("basic test share", kwargs["description"],
"Share description" + message_end)
self.assertEqual({u"key": u"value"}, kwargs["metadata"],
"Metadata" + message_end)
self.assertTrue(kwargs["is_public"])
share.client().shares.get.assert_called_once_with(self.fake_share.id)
self.assertEqual('shares', share.entity)
def test_share_create_fail(self):
share = self._init_share("stack_share_create_fail")
share.client().shares.get.return_value = self.failed_share
exc = self.assertRaises(exception.ResourceInError,
share.check_create_complete,
self.failed_share)
self.assertIn("Error during creation", six.text_type(exc))
def test_share_create_unknown_status(self):
share = self._init_share("stack_share_create_unknown")
share.client().shares.get.return_value = self.deleting_share
exc = self.assertRaises(exception.ResourceUnknownStatus,
share.check_create_complete,
self.deleting_share)
self.assertIn("Unknown status", six.text_type(exc))
def test_share_check(self):
share = self._create_share("stack_share_check")
scheduler.TaskRunner(share.check)()
expected_state = (share.CHECK, share.COMPLETE)
self.assertEqual(expected_state, share.state,
"Share is not in expected state")
def test_share_check_fail(self):
share = self._create_share("stack_share_check_fail")
share.client().shares.get.return_value = self.failed_share
exc = self.assertRaises(exception.ResourceFailure,
scheduler.TaskRunner(share.check))
self.assertIn("Error: resources.test_share: 'status': expected "
"'['available']'", six.text_type(exc))
def test_share_update(self):
share = self._create_share("stack_share_update")
updated_share_props = copy.deepcopy(share.properties.data)
updated_share_props[mshare.ManilaShare.DESCRIPTION] = "desc"
updated_share_props[mshare.ManilaShare.NAME] = "name"
updated_share_props[mshare.ManilaShare.IS_PUBLIC] = True
share.client().shares.update.return_value = None
after = rsrc_defn.ResourceDefinition(share.name, share.type(),
updated_share_props)
scheduler.TaskRunner(share.update, after)()
kwargs = {
"display_name": "name",
"display_description": "desc",
}
share.client().shares.update.assert_called_once_with(
share.resource_id, **kwargs)
def test_share_update_access_rules(self):
share = self._create_share("stack_share_update_access_rules")
updated_share_props = copy.deepcopy(share.properties.data)
updated_share_props[mshare.ManilaShare.ACCESS_RULES] = [
{mshare.ManilaShare.ACCESS_TO: "127.0.0.2",
mshare.ManilaShare.ACCESS_TYPE: "ip",
mshare.ManilaShare.ACCESS_LEVEL: "ro"}]
share.client().shares.deny.return_value = None
current_rule = {
mshare.ManilaShare.ACCESS_TO: "127.0.0.1",
mshare.ManilaShare.ACCESS_TYPE: "ip",
mshare.ManilaShare.ACCESS_LEVEL: "ro",
"id": "test_access_rule"
}
rule_tuple = collections.namedtuple("DummyRule",
list(current_rule.keys()))
share.client().shares.access_list.return_value = [
rule_tuple(**current_rule)]
after = rsrc_defn.ResourceDefinition(share.name, share.type(),
updated_share_props)
scheduler.TaskRunner(share.update, after)()
share.client().shares.access_list.assert_called_once_with(
share.resource_id)
share.client().shares.allow.assert_called_with(
share=share.resource_id, access_type="ip",
access="127.0.0.2", access_level="ro")
share.client().shares.deny.assert_called_once_with(
share=share.resource_id, id="test_access_rule")
def test_share_update_metadata(self):
share = self._create_share("stack_share_update_metadata")
updated_share_props = copy.deepcopy(share.properties.data)
updated_share_props[mshare.ManilaShare.METADATA] = {
"fake_key": "fake_value"}
share.client().shares.update_all_metadata.return_value = None
after = rsrc_defn.ResourceDefinition(share.name, share.type(),
updated_share_props)
scheduler.TaskRunner(share.update, after)()
share.client().shares.update_all_metadata.assert_called_once_with(
share.resource_id,
updated_share_props[mshare.ManilaShare.METADATA])
def test_attributes(self):
share = self._create_share("share")
share.client().shares.get.return_value = DummyShare()
self.assertEqual('az', share.FnGetAtt('availability_zone'))
self.assertEqual('host', share.FnGetAtt('host'))
self.assertEqual('el', share.FnGetAtt('export_locations'))
self.assertEqual('id', share.FnGetAtt('share_server_id'))
self.assertEqual('ca', share.FnGetAtt('created_at'))
self.assertEqual('s', share.FnGetAtt('status'))
self.assertEqual('p_id', share.FnGetAtt('project_id'))
| cwolferh/heat-scratch | heat/tests/openstack/manila/test_share.py | Python | apache-2.0 | 9,631 | 0 |
#!/usr/bin/env python3
"""Server for multithreaded (asynchronous) chat application."""
from socket import AF_INET, socket, SOCK_STREAM
from threading import Thread
def accept_incoming_connections():
"""Sets up handling for incoming clients."""
while True:
        client, client_address = SERVER.accept()  # client_address is an (ip, port) tuple
        print("client {}:{} has connected to the server".format(client_address[0], client_address[1]))
#client.send(bytes("Welcome to Battleships! Please type your name and press enter!", "utf8"))
addresses[client] = client_address
Thread(target=handle_client, args=(client,)).start()
def handle_client(client): # Takes client socket as argument.
"""Handles a single client connection."""
name = client.recv(BUFSIZ).decode("utf8")
#welcome = "Welcome {}! type 'quit' to exit".format(name)
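    # Assign the connecting client to the first free player slot; the server assumes
    # at most two concurrent players, so index is always 0 or 1 below.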
if players[0] is None:
index = 0
client.send(bytes("welcome player1 ","utf8"))
print("welcome player1")
players[0] = name
elif players[1] is None:
index = 1
client.send(bytes("welcome player2 ","utf8"))
print("welcome player2")
players[1] = name
broadcast("player{} ({}) has joined the chat!".format(index+1, name), "server:")
#broadcast(bytes(msg, "utf8"))
clients[client] = name
if players[0] is not None and players[1] is not None:
broadcast("may the game begin!", "server:")
while True:
msg = client.recv(BUFSIZ) # msg is in byte format
#create string:
message = "".join([chr(i) for i in msg])
#if msg != bytes("quit", "utf8"):
# broadcast(msg, "player{} ({}): ".format(index+1,name))#, "utf8")
#else:
if message == "quit":
client.send(bytes("quit", "utf8"))
client.close()
del clients[client]
broadcast("player{}({}) has left the chat".format(index+1, name), "server:") # , "utf8"))
break
if message.lower()=="a2" and Game.turn % 2 == index:
broadcast("mfires at A2", "player{}({})".format(index+1, name))
Game.turn += 1
broadcast("turn {}. It is your turn, player{}".format(Game.turn, index+1))
else:
broadcast(message, "player{} ({}):".format(index+1,name))
def broadcast(msg, prefix=""): # prefix tells who is sending the message.
"""Broadcasts a message to all the clients. converts msg to bytes if necessary"""
msg2 = msg if isinstance(msg, bytes) else bytes(msg, 'utf8')
for sock in clients:
#sock.send(bytes(prefix, "utf8") + msg)
#print("message:", msg, type(msg))
#print("prefix:", prefix)
sock.send(bytes(prefix, "utf8") + msg2)
class Game:
turn = 1
players = [None, None]
clients = {}
addresses = {}
HOST = ''
PORT = 33000
BUFSIZ = 1024
ADDR = (HOST, PORT)
SERVER = socket(AF_INET, SOCK_STREAM)
SERVER.bind(ADDR)
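
# A matching client is not part of this file. A minimal sketch of the expected
# handshake (hypothetical, for illustration only): connect, send the player name
# first, then chat/move lines; sending "quit" makes the server echo "quit" back
# and close the connection.
#
#     from socket import socket, AF_INET, SOCK_STREAM
#     s = socket(AF_INET, SOCK_STREAM)
#     s.connect(('localhost', 33000))   # PORT defined above
#     s.send(bytes('alice', 'utf8'))    # the first message is taken as the name
#     s.send(bytes('A2', 'utf8'))       # a move / chat message
#     s.send(bytes('quit', 'utf8'))     # ask the server to disconnect us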
if __name__ == "__main__":
SERVER.listen(5)
print("Waiting for connection...")
ACCEPT_THREAD = Thread(target=accept_incoming_connections)
ACCEPT_THREAD.start()
ACCEPT_THREAD.join()
    SERVER.close()
| horstjens/ThePythonGameBook | en/python/battleship/chat_server.py | Python | gpl-3.0 | 3,173 | 0.007249 |
# -*- coding: utf-8 -*-
"""
The following examples are used to demonstrate how to get/record
analytics
The method signatures are:
Pushbots.get_analytics()
and
Pushbots.record_analytics(platform=None, data=None)
For record_analytics, you must specify either platform or data.
"""
from pushbots import Pushbots
def example_get_analytics():
"""Get analytics by calling Pushbots.get_analytics()"""
# Define app_id and secret
my_app_id = 'my_app_id'
my_secret = 'my_secret'
# Create a Pushbots instance
pushbots = Pushbots(app_id=my_app_id, secret=my_secret)
code, message = pushbots.get_analytics()
print('Returned code: {0}'.format(code))
print('Returned message: {0}'.format(message))
def example_record_analytics1():
"""Record analytics by passing platform directly to
Pushbots.record_analytics()
"""
# Define app_id and secret
my_app_id = 'my_app_id'
my_secret = 'my_secret'
# Create a Pushbots instance
pushbots = Pushbots(app_id=my_app_id, secret=my_secret)
# Define platform
platform = Pushbots.PLATFORM_IOS
code, message = pushbots.record_analytics(platform=platform)
print('Returned code: {0}'.format(code))
print('Returned message: {0}'.format(message))
def example_record_analytics2():
"""Record analytics by passing data defined by you to
Pushbots.record_analytics()
"""
# Define app_id and secret
my_app_id = 'my_app_id'
my_secret = 'my_secret'
# Create a Pushbots instance
pushbots = Pushbots(app_id=my_app_id, secret=my_secret)
# Define data
data = {'platform': '0'} # '0' is Equivalent to Pushbots.PLATFORM_IOS
code, message = pushbots.record_analytics(data=data)
print('Returned code: {0}'.format(code))
print('Returned message: {0}'.format(message))
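
# Minimal driver so the module can be run directly; it only calls the last example.
# Note that 'my_app_id' and 'my_secret' inside the examples are placeholders, so the
# API call will be rejected until real credentials are filled in.
if __name__ == '__main__':
    example_record_analytics2()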
| tchar/pushbots | pushbots/examples/analytics.py | Python | mit | 1,805 | 0 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2015-2015: Alignak team, see AUTHORS.txt file for contributors
#
# This file is part of Alignak.
#
# Alignak is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Alignak is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Alignak. If not, see <http://www.gnu.org/licenses/>.
#
#
# This file incorporates work covered by the following copyright and
# permission notice:
#
# Copyright (C) 2009-2014:
# Hartmut Goebel, h.goebel@goebel-consult.de
# aviau, alexandre.viau@savoirfairelinux.com
# Nicolas Dupeux, nicolas@dupeux.net
# Grégory Starck, g.starck@gmail.com
# Sebastien Coavoux, s.coavoux@free.fr
# Jean Gabes, naparuba@gmail.com
# Zoran Zaric, zz@zoranzaric.de
# Gerhard Lausser, gerhard.lausser@consol.de
# This file is part of Shinken.
#
# Shinken is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Shinken is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Shinken. If not, see <http://www.gnu.org/licenses/>.
import time
from action import Action
from alignak.property import IntegerProp, StringProp, FloatProp, BoolProp
from alignak.autoslots import AutoSlots
""" TODO: Add some comment about this class for the doc"""
class EventHandler(Action):
# AutoSlots create the __slots__ with properties and
# running_properties names
__metaclass__ = AutoSlots
my_type = 'eventhandler'
properties = {
'is_a': StringProp(default='eventhandler'),
'type': StringProp(default=''),
'_in_timeout': StringProp(default=False),
'status': StringProp(default=''),
'exit_status': StringProp(default=3),
'output': StringProp(default=''),
'long_output': StringProp(default=''),
        't_to_go': StringProp(default=0),
        'execution_time': FloatProp(default=0),
'u_time': FloatProp(default=0.0),
's_time': FloatProp(default=0.0),
'env': StringProp(default={}),
'perf_data': StringProp(default=''),
'sched_id': IntegerProp(default=0),
'timeout': IntegerProp(default=10),
'check_time': IntegerProp(default=0),
'command': StringProp(default=''),
'module_type': StringProp(default='fork'),
'worker': StringProp(default='none'),
'reactionner_tag': StringProp(default='None'),
'is_snapshot': BoolProp(default=False),
}
# id = 0 #Is common to Actions
def __init__(self, command, id=None, ref=None, timeout=10, env={},
module_type='fork', reactionner_tag='None', is_snapshot=False):
self.is_a = 'eventhandler'
self.type = ''
self.status = 'scheduled'
if id is None: # id != None is for copy call only
self.id = Action.id
Action.id += 1
self.ref = ref
self._in_timeout = False
self.timeout = timeout
self.exit_status = 3
self.command = command
self.output = ''
self.long_output = ''
self.t_to_go = time.time()
self.check_time = 0
self.execution_time = 0
self.u_time = 0
self.s_time = 0
self.perf_data = ''
self.env = {}
self.module_type = module_type
self.worker = 'none'
self.reactionner_tag = reactionner_tag
self.is_snapshot = is_snapshot
# return a copy of the check but just what is important for execution
# So we remove the ref and all
def copy_shell(self):
# We create a dummy check with nothing in it, just defaults values
return self.copy_shell__(EventHandler('', id=self.id, is_snapshot=self.is_snapshot))
def get_return_from(self, e):
self.exit_status = e.exit_status
self.output = e.output
self.long_output = getattr(e, 'long_output', '')
self.check_time = e.check_time
self.execution_time = getattr(e, 'execution_time', 0.0)
self.perf_data = getattr(e, 'perf_data', '')
def get_outputs(self, out, max_plugins_output_length):
self.output = out
def is_launchable(self, t):
return t >= self.t_to_go
def __str__(self):
return "Check %d status:%s command:%s" % (self.id, self.status, self.command)
def get_id(self):
return self.id
# Call by pickle to dataify the comment
# because we DO NOT WANT REF in this pickleisation!
def __getstate__(self):
cls = self.__class__
# id is not in *_properties
res = {'id': self.id}
for prop in cls.properties:
if hasattr(self, prop):
res[prop] = getattr(self, prop)
return res
# Inverted function of getstate
def __setstate__(self, state):
cls = self.__class__
self.id = state['id']
for prop in cls.properties:
if prop in state:
setattr(self, prop, state[prop])
if not hasattr(self, 'worker'):
self.worker = 'none'
if not getattr(self, 'module_type', None):
self.module_type = 'fork'
# s_time and u_time are added between 1.2 and 1.4
if not hasattr(self, 'u_time'):
self.u_time = 0
self.s_time = 0
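
# Minimal usage sketch (the command string is only an illustration):
#
#     eh = EventHandler('/usr/local/bin/restart_web.sh web01', timeout=30)
#     if eh.is_launchable(time.time()):
#         shipped = eh.copy_shell()  # copy without the 'ref' backlink, ready to ship to a reactionner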
| ddurieux/alignak | alignak/eventhandler.py | Python | agpl-3.0 | 6,318 | 0.002375 |
'''
Audio tests
===========
'''
import unittest
import os
SAMPLE_FILE = os.path.join(os.path.dirname(__file__), 'sample1.ogg')
SAMPLE_LENGTH = 1.402
SAMPLE_LENGTH_MIN = SAMPLE_LENGTH * 0.99
SAMPLE_LENGTH_MAX = SAMPLE_LENGTH * 1.01
class AudioTestCase(unittest.TestCase):
def get_sound(self):
import os
assert os.path.exists(SAMPLE_FILE)
from kivy.core import audio
return audio.SoundLoader.load(SAMPLE_FILE)
def test_length_simple(self):
sound = self.get_sound()
volume = sound.volume = 0.75
length = sound.length
assert SAMPLE_LENGTH_MIN <= length <= SAMPLE_LENGTH_MAX
# ensure that the gstreamer play/stop doesn't mess up the volume
assert volume == sound.volume
def test_length_playing(self):
import time
sound = self.get_sound()
sound.play()
try:
time.sleep(0.1)
length = sound.length
assert SAMPLE_LENGTH_MIN <= length <= SAMPLE_LENGTH_MAX
finally:
sound.stop()
def test_length_stopped(self):
import time
sound = self.get_sound()
sound.play()
try:
time.sleep(0.1)
finally:
sound.stop()
length = sound.length
assert SAMPLE_LENGTH_MIN <= length <= SAMPLE_LENGTH_MAX
class AudioGstreamerTestCase(AudioTestCase):
def make_sound(self, source):
from kivy.core.audio import audio_gstreamer
return audio_gstreamer.SoundGstreamer(source)
class AudioPygameTestCase(AudioTestCase):
def make_sound(self, source):
from kivy.core.audio import audio_pygame
return audio_pygame.SoundPygame(source)
| JulienMcJay/eclock | windows/kivy/kivy/tests/test_audio.py | Python | gpl-2.0 | 1,707 | 0.000586 |
#!/usr/bin/python3
"""Script to determine the Pywikibot version (tag, revision and date).
.. versionchanged:: 7.0
version script was moved to the framework scripts folder
"""
#
# (C) Pywikibot team, 2007-2021
#
# Distributed under the terms of the MIT license.
#
import codecs
import os
import sys
import pywikibot
from pywikibot.version import get_toolforge_hostname, getversion
class DummyModule:
"""Fake module instance."""
__version__ = 'n/a'
try:
import setuptools
except ImportError:
setuptools = DummyModule()
try:
import mwparserfromhell
except ImportError:
mwparserfromhell = DummyModule()
try:
import wikitextparser
except ImportError:
wikitextparser = DummyModule()
try:
import requests
except ImportError:
requests = DummyModule()
WMF_CACERT = 'MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs'
def main(*args: str) -> None:
"""Print pywikibot version and important settings."""
pywikibot.output('Pywikibot: ' + getversion())
pywikibot.output('Release version: ' + pywikibot.__version__)
pywikibot.output('setuptools version: ' + setuptools.__version__)
pywikibot.output('mwparserfromhell version: '
+ mwparserfromhell.__version__)
pywikibot.output('wikitextparser version: ' + wikitextparser.__version__)
pywikibot.output('requests version: ' + requests.__version__)
has_wikimedia_cert = False
if (not hasattr(requests, 'certs')
or not hasattr(requests.certs, 'where')
or not callable(requests.certs.where)):
pywikibot.output(' cacerts: not defined')
elif not os.path.isfile(requests.certs.where()):
pywikibot.output(' cacerts: {} (missing)'.format(
requests.certs.where()))
else:
pywikibot.output(' cacerts: ' + requests.certs.where())
with codecs.open(requests.certs.where(), 'r', 'utf-8') as cert_file:
text = cert_file.read()
if WMF_CACERT in text:
has_wikimedia_cert = True
pywikibot.output(' certificate test: {}'
.format('ok' if has_wikimedia_cert else 'not ok'))
if not has_wikimedia_cert:
pywikibot.output(' Please reinstall requests!')
pywikibot.output('Python: ' + sys.version)
toolforge_env_hostname = get_toolforge_hostname()
if toolforge_env_hostname:
pywikibot.output('Toolforge hostname: ' + toolforge_env_hostname)
# check environment settings
settings = {key for key in os.environ if key.startswith('PYWIKIBOT')}
settings.update(['PYWIKIBOT_DIR', 'PYWIKIBOT_DIR_PWB',
'PYWIKIBOT_NO_USER_CONFIG'])
for environ_name in sorted(settings):
pywikibot.output(
'{}: {}'.format(environ_name,
os.environ.get(environ_name, 'Not set') or "''"))
pywikibot.output('Config base dir: ' + pywikibot.config.base_dir)
for family, usernames in pywikibot.config.usernames.items():
if not usernames:
continue
pywikibot.output('Usernames for family {!r}:'.format(family))
for lang, username in usernames.items():
pywikibot.output('\t{}: {}'.format(lang, username))
if __name__ == '__main__':
main()
| wikimedia/pywikibot-core | pywikibot/scripts/version.py | Python | mit | 3,279 | 0 |
# Copyright (C) 2014 Adam Schubert <adam.schubert@sg1-game.net>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
__author__="Adam Schubert <adam.schubert@sg1-game.net>"
__date__ ="$12.10.2014 2:20:45$"
import tests.DwaTestCase as DwaTestCase
import unittest
import time
class UserTest(DwaTestCase.DwaTestCase):
def setUp(self):
DwaTestCase.DwaTestCase.setUp(self)
self.user = self.d.user()
self.username = self.credential['username'] + 'UserTest' + str(time.time())
def testCreate(self):
params = {}
params['password'] = self.credential['password']
params['username'] = self.username
params['nickname'] = DwaTestCase.generateNickname()
params['email'] = self.username + '@divine-warfare.com'
params['active'] = True
#create
message = self.user.create(params)['message']
#delete
userData = self.user.token({'password': params['password'], 'username': params['username']})
delParams = {}
delParams['user_id'] = userData['id']
delParams['user_token'] = userData['token']
self.user.delete(delParams)
self.assertEqual(message, 'User created')
def testDelete(self):
params = {}
params['password'] = self.credential['password']
params['username'] = self.username
params['nickname'] = DwaTestCase.generateNickname()
params['email'] = self.username + '@divine-warfare.com'
params['active'] = True
#create
self.user.create(params)
userData = self.user.token({'password': params['password'], 'username': params['username']})
delParams = {}
delParams['user_id'] = userData['id']
delParams['user_token'] = userData['token']
#delete
message = self.user.delete(delParams)['message']
self.assertEqual(message, 'User deleted')
def testList(self):
data = self.user.list({'limit': 20, 'page': 0})
self.assertEqual(data['message'], 'OK')
self.assertIsNotNone(data['data'])
self.assertIsNotNone(data['pages'])
def testToken(self):
data = self.user.token(self.credential)
self.assertEqual(data['message'], 'Token created')
self.assertEqual(len(data['token']), 32)
self.assertIsNotNone(data['id'])
self.assertRegexpMatches(data['token_expiration'], '(\d{4})-(\d{2})-(\d{2}) (\d{2}):(\d{2}):(\d{2})')
def testPassword(self):
data_token = self.user.token(self.credential)
data = self.user.password({'old_password': self.credential['password'], 'new_password': self.credential['password'], 'user_token': data_token['token'], 'user_id': data_token['id']})
self.assertEqual(data['message'], 'Password changed')
def testActive(self):
data_token = self.user.token(self.credential)
data = self.user.active({'user_id': data_token['id'], 'active': True, 'user_token': data_token['token']})
self.assertEqual(data['message'], 'User activated')
def testDeactive(self):
data_token = self.user.token(self.credential)
data = self.user.active({'user_id': data_token['id'], 'active': False, 'user_token': data_token['token']})
self.assertEqual(data['message'], 'User deactivated')
    # Will fail because our mail server checks whether the maildir exists...
#@unittest.expectedFailure
def testRequestPasswordReset(self):
email = self.credential['username'] + '@example.com';
        content_fill = 'abc' * 5333  # ~16 KB of filler to exercise a large email body
data = self.user.request_password_reset({'email': email, 'email_content': 'URL: example.com/password/reset/{reset_token}' + content_fill, 'email_subject': 'Password reset unittest', 'email_from': 'unittest@example.com'})
#self.assertEqual(data['message'], 'Email with reset token has been send')
self.assertEqual(data['message'], 'Email not found')
@unittest.expectedFailure
def testDoPasswordReset(self):
        # We use the USER token in place of a password reset token, because a real reset
        # token is only delivered by email and is not available here; the call is therefore
        # expected to fail, which is exactly what this test checks.
data_token = self.user.token(self.credential)
data = self.user.request_password_reset({'reset_token': data_token['token'], 'new_password': 'newPassword'})
self.assertEqual(data['message'], 'Password changed')
| Salamek/DwaPython | tests/UserTest.py | Python | gpl-3.0 | 4,776 | 0.012772 |
from Models.Submission import Submission
from Core.Database import Database
from Core.Scorer import Score
from sqlalchemy import func, desc
class Ranking():
@staticmethod
def get_all():
session = Database.session()
scores = session.query(Score).order_by(desc(Score.score)).all()
return [{"student_id": s.student_id,
"submissions": session.query(func.count(Submission.id))
.filter(Submission.student_id == s.student_id).scalar(),
"score": s.score}
for s in scores]
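
# Example of how this might be consumed (assumes Database.session() is configured
# and the Score/Submission tables are populated):
#
#     for row in Ranking.get_all():
#         print(row["student_id"], row["submissions"], row["score"])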
| brnomendes/grader-edx | Core/Ranking.py | Python | mit | 564 | 0.003546 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# name: settings.py
# author: Harold Bradley III
# email: harold@bradleystudio.net
# created on: 01/23/2016
#
# description: The settings file for ww
#
from __future__ import absolute_import, print_function
import os
import time
TEMPLATE_PATH = os.path.dirname(os.path.realpath(__file__)) + '/../templates/'
## Change these settings to your hearts content ##
# Site Settings
SITE_ADMIN_EMAIL = 'email@mail.com'
SITE_ERROR_LOG = 'error.log'
SITE_ACCESS_LOG = 'access.log'
WWW_DIR = '/var/www/'
WWW_USR = 'www-data'
WWW_ADMIN = 'admin_usr'
GITIGNORE_TEMPLATE = TEMPLATE_PATH + 'gitignore.template'
HTA_5G_TEMPLATE = TEMPLATE_PATH + '5g-htaccess.template'
VHOST_PATH = '/etc/apache2/sites-available/'
VHOST_TEMPLATE = TEMPLATE_PATH + 'vhost.template'
VHOST_SSL_TEMPLATE = TEMPLATE_PATH + 'vhost-ssl.template'
MYSQL = {
'host' : 'localhost',
'user' : 'username',
'password' : 'password123',
}
# WordPress Settings
WP_LATEST = 'http://wordpress.org/latest.tar.gz'
WP_SETUP_URL = '/wp-admin/setup-config.php?step=2'
WP_INSTALL_URL = '/wp-admin/install.php?step=2'
WP_HTA_TEMPLATE = TEMPLATE_PATH + 'wordpress-htaccess.template'
WP_HTA_HARDENED_TEMPLATE = TEMPLATE_PATH + 'hardened-wordpress-htaccess.template'
WP_CONFIG_TEMPLATE = TEMPLATE_PATH + 'wp-config.php.template'
WP_ADMIN_USER = 'admin'
WP_ADMIN_EMAIL = 'admin@wp.com'
WP_ADMIN_PW = 'password123' # Please change this.
WP_SALT_URL = 'https://api.wordpress.org/secret-key/1.1/salt/'
# Apache commands
CMD_RESTART_APACHE = 'sudo service apache2 reload'
CMD_ENABLE_CONFIG = 'sudo a2ensite ' # run as: {command} domain
CMD_DISABLE_CONFIG = 'sudo a2dissite ' # run as: {command} domain
CMD_CHECK_IF_ENABLED = "apache2ctl -S | grep ' namevhost {0} '" # See if apache is serving domain ({})
# Try to import local settings. This is a temporary work-around for now.
try:
from .settings_local import *
except ImportError:
print("Can't find settings_local. Using default settings.")
| hbradleyiii/ww | ww/settings.py | Python | mit | 2,079 | 0.005772 |
#!/usr/bin/env python
"""
fileaction.py 'display' '*.ps' '*.jpg' '*.gif'
creates a GUI with a list of all PostScript, JPEG, and GIF files in
the directory tree with the current working directory as root.
Clicking on one of the filenames in the list launches the display
program, which displays the image file.
As another example,
fileaction.py 'xanim' '*.mpg' '*.mpeg'
gives an overview of all MPEG files in the directory tree and
the possibility to play selected files with the xanim application.
The general interface is
fileactionGUI.py command filetype1 filetype2 filetype3 ...
"""
from Tkinter import *
import Pmw, os, sys
class FileActionGUI:
def __init__(self, parent, file_patterns, command):
self.master = parent
self.top = Frame(parent)
self.top.pack(expand=True, fill='both')
self.file_patterns = file_patterns
self.files = self.find_files()
self.list1 = Pmw.ScrolledListBox(self.top,
listbox_selectmode='single', # or 'multiple'
vscrollmode='dynamic', hscrollmode='dynamic',
listbox_width=min(max([len(f) for f in self.files]),40),
listbox_height=min(len(self.files),20),
label_text='files', labelpos='n',
items=self.files,
selectioncommand=self.select)
self.list1.pack(side='top', padx=10, expand=True, fill='both')
self.command = StringVar(); self.command.set(command)
Pmw.EntryField(self.top,
labelpos='w', label_text='process file with',
entry_width=len(command)+5,
entry_textvariable=self.command).pack(side='top',pady=3)
Button(self.top, text='Quit', width=8, command=self.master.destroy).pack(pady=2)
def select(self):
file = self.list1.getcurselection()[0]
cmd = '%s %s &' % (self.command.get(), file)
os.system(cmd)
def find_files(self):
from scitools.misc import find
def check(filepath, arg):
ext = os.path.splitext(filepath)[1]
import fnmatch # Unix shell-style wildcard matching
for s in self.file_patterns:
if fnmatch.fnmatch(ext, s):
arg.append(filepath)
files = []
find(check, os.curdir, files)
return files
if __name__ == '__main__':
root = Tk()
Pmw.initialise(root)
import scitools.misc; scitools.misc.fontscheme3(root)
try:
command = sys.argv[1]
file_patterns = sys.argv[2:]
except:
print 'Usage: %s file-command filetype1 filetype2 ...' % sys.argv[0]
print "Example: fileactionGUI.py 'display' '*.ps' '*.eps' '*.jpg'"
print 'A GUI with a list of all files matching the specified'
print 'patterns is launched. By clicking on one of the filenames,'
print 'the specified command is run with that file as argument.'
sys.exit(1)
g = FileActionGUI(root, file_patterns, command)
root.mainloop()
| sniemi/SamPy | sandbox/src1/TCSE3-3rd-examples/src/tools/fileaction.py | Python | bsd-2-clause | 3,008 | 0.006981 |
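# Plotting stanza only: `variance` is assumed to be a gwpy SpectralVariance computed
# in an earlier step of this example; this fragment does not define it.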
plot = variance.plot(norm='log', vmin=.5, cmap='plasma')
ax = plot.gca()
ax.grid()
ax.set_xlim(20, 1500)
ax.set_ylim(1e-24, 1e-20)
ax.set_xlabel('Frequency [Hz]')
ax.set_ylabel(r'[strain/\rtHz]')
ax.set_title('LIGO-Livingston sensitivity variance')
plot.show()
| gwpy/gwpy.github.io | docs/v0.5/examples/frequencyseries/variance-3.py | Python | gpl-3.0 | 260 | 0.003846 |
def extractXiaoyuusTranslations(item):
"""
Xiaoyuu's Translations
"""
vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
if not (chp or vol or frag) or 'preview' in item['title'].lower():
return None
return False
| fake-name/ReadableWebProxy | WebMirror/management/rss_parser_funcs/feed_parse_extractXiaoyuusTranslations.py | Python | bsd-3-clause | 244 | 0.028689 |
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""gypd output module
This module produces gyp input as its output. Output files are given the
.gypd extension to avoid overwriting the .gyp files that they are generated
from. Internal references to .gyp files (such as those found in
"dependencies" sections) are not adjusted to point to .gypd files instead;
unlike other paths, which are relative to the .gyp or .gypd file, such paths
are relative to the directory from which gyp was run to create the .gypd file.
This generator module is intended to be a sample and a debugging aid, hence
the "d" for "debug" in .gypd. It is useful to inspect the results of the
various merges, expansions, and conditional evaluations performed by gyp
and to see a representation of what would be fed to a generator module.
It's not advisable to rename .gypd files produced by this module to .gyp,
because they will have all merges, expansions, and evaluations already
performed and the relevant constructs not present in the output; paths to
dependencies may be wrong; and various sections that do not belong in .gyp
files such as such as "included_files" and "*_excluded" will be present.
Output will also be stripped of comments. This is not intended to be a
general-purpose gyp pretty-printer; for that, you probably just want to
run "pprint.pprint(eval(open('source.gyp').read()))", which will still strip
comments but won't do all of the other things done to this module's output.
The specific formatting of the output generated by this module is subject
to change.
"""
import gyp.common
import errno
import os
import pprint
# These variables should just be spit back out as variable references.
_generator_identity_variables = [
'EXECUTABLE_PREFIX',
'EXECUTABLE_SUFFIX',
'INTERMEDIATE_DIR',
'PRODUCT_DIR',
'RULE_INPUT_ROOT',
'RULE_INPUT_DIRNAME',
'RULE_INPUT_EXT',
'RULE_INPUT_NAME',
'RULE_INPUT_PATH',
'SHARED_INTERMEDIATE_DIR',
]
# gypd doesn't define a default value for OS like many other generator
# modules. Specify "-D OS=whatever" on the command line to provide a value.
generator_default_variables = {
}
# gypd supports multiple toolsets
generator_supports_multiple_toolsets = True
# TODO(mark): This always uses <, which isn't right. The input module should
# notify the generator to tell it which phase it is operating in, and this
# module should use < for the early phase and then switch to > for the late
# phase. Bonus points for carrying @ back into the output too.
for v in _generator_identity_variables:
generator_default_variables[v] = '<(%s)' % v
def GenerateOutput(target_list, target_dicts, data, params):
output_files = {}
for qualified_target in target_list:
[input_file, target] = \
gyp.common.ParseQualifiedTarget(qualified_target)[0:2]
if input_file[-4:] != '.gyp':
continue
input_file_stem = input_file[:-4]
output_file = input_file_stem + params['options'].suffix + '.gypd'
if not output_file in output_files:
output_files[output_file] = input_file
for output_file, input_file in output_files.iteritems():
output = open(output_file, 'w')
pprint.pprint(data[input_file], output)
output.close()
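
# Typical invocation of this generator (assuming the gyp driver is on PATH; note that
# OS has no default here, so it must be supplied explicitly):
#
#     gyp --format=gypd -D OS=linux foo.gyp
#     # -> writes foo.gypd next to foo.gyp with all merges/expansions already applied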
| nikste/visualizationDemo | zeppelin-web/node/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/gypd.py | Python | apache-2.0 | 3,325 | 0.002105 |
# Symantec BackupExec
# CVE-2007-6016,CVE-2007-6017
import logging
log = logging.getLogger("Thug")
def Set_DOWText0(self, val):
self.__dict__['_DOWText0'] = val
if len(val) > 255:
log.ThugLogging.log_exploit_event(self._window.url,
"Symantec BackupExec ActiveX",
"Overflow in property _DOWText0",
cve = 'CVE-2007-6016')
log.ThugLogging.log_classifier("exploit", log.ThugLogging.url, "CVE-2007-6016", None)
log.DFT.check_shellcode(val)
def Set_DOWText6(self, val):
self.__dict__['_DOWText6'] = val
if len(val) > 255:
log.ThugLogging.log_exploit_event(self._window.url,
"Symantec BackupExec ActiveX",
"Overflow in property _DOWText6",
cve = 'CVE-2007-6016')
log.ThugLogging.log_classifier("exploit", log.ThugLogging.url, "CVE-2007-6016", None)
log.DFT.check_shellcode(val)
def Set_MonthText0(self, val):
self.__dict__['_MonthText0'] = val
if len(val) > 255:
log.ThugLogging.log_exploit_event(self._window.url,
"Symantec BackupExec ActiveX",
"Overflow in property _MonthText6",
cve = 'CVE-2007-6016')
log.ThugLogging.log_classifier("exploit", log.ThugLogging.url, "CVE-2007-6016", None)
log.DFT.check_shellcode(val)
def Set_MonthText11(self, val):
self.__dict__['_MonthText11'] = val
if len(val) > 255:
log.ThugLogging.log_exploit_event(self._window.url,
"Symantec BackupExec ActiveX",
"Overflow in property _MonthText11",
cve = 'CVE-2007-6016')
log.ThugLogging.log_classifier("exploit", log.ThugLogging.url, "CVE-2007-6016", None)
log.DFT.check_shellcode(val)
def Save(self, a, b):
return
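
# Note (assumption about the surrounding framework): Thug's ActiveX layer is expected
# to bind these module-level functions onto the emulated BackupExec control, so `self`
# refers to that object when a page script assigns e.g. obj._DOWText0 = payload.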
| fedelemantuano/thug | thug/ActiveX/modules/SymantecBackupExec.py | Python | gpl-2.0 | 2,143 | 0.0056 |
"""
**********
Edge Lists
**********
Read and write NetworkX graphs as edge lists.
"""
__author__ = """Aric Hagberg (hagberg@lanl.gov)\nDan Schult (dschult@colgate.edu)"""
# Copyright (C) 2004-2008 by
# Aric Hagberg <hagberg@lanl.gov>
# Dan Schult <dschult@colgate.edu>
# Pieter Swart <swart@lanl.gov>
# Distributed under the terms of the GNU Lesser General Public License
# http://www.gnu.org/copyleft/lesser.html
__all__ = ['read_edgelist', 'write_edgelist']
import codecs
import locale
import string
import sys
import time
from networkx.utils import is_string_like,_get_fh
import networkx
def write_edgelist(G, path, comments="#", delimiter=' '):
"""Write graph as a list of edges.
Parameters
----------
G : graph
A networkx graph
path : file or string
File or filename to write.
Filenames ending in .gz or .bz2 will be compressed.
comments : string, optional
The character used to indicate the start of a comment
delimiter : string, optional
The string uses to separate values. The default is whitespace.
Examples
--------
>>> G=nx.path_graph(4)
>>> nx.write_edgelist(G, "test.edgelist")
>>> fh=open("test.edgelist",'w')
>>> nx.write_edgelist(G,fh)
>>> nx.write_edgelist(G, "test.edgelist.gz")
Notes
-----
The file will use the default text encoding on your system.
It is possible to write files in other encodings by opening
the file with the codecs module. See doc/examples/unicode.py
for hints.
>>> import codecs
>>> fh=codecs.open("test.edgelist",'w',encoding='utf=8') # utf-8 encoding
>>> nx.write_edgelist(G,fh)
See Also
--------
networkx.write_edgelist
"""
fh=_get_fh(path,mode='w')
pargs=comments+" "+string.join(sys.argv,' ')
fh.write("%s\n" % (pargs))
fh.write(comments+" GMT %s\n" % (time.asctime(time.gmtime())))
fh.write(comments+" %s\n" % (G.name))
def make_str(t):
if is_string_like(t): return t
return str(t)
for e in G.edges(data=True):
fh.write(delimiter.join(map(make_str,e))+"\n")
#if G.multigraph:
# u,v,datalist=e
# for d in datalist:
# fh.write(delimiter.join(map(make_str,(u,v,d)))+"\n")
#else:
def read_edgelist(path, comments="#", delimiter=' ',
create_using=None, nodetype=None, edgetype=None):
"""Read a graph from a list of edges.
Parameters
----------
path : file or string
File or filename to write.
Filenames ending in .gz or .bz2 will be uncompressed.
comments : string, optional
The character used to indicate the start of a comment
delimiter : string, optional
The string uses to separate values. The default is whitespace.
create_using : Graph container, optional
Use specified Graph container to build graph. The default is
nx.Graph().
nodetype : int, float, str, Python type, optional
Convert node data from strings to specified type
edgetype : int, float, str, Python type, optional
Convert edge data from strings to specified type
Returns
----------
out : graph
A networkx Graph or other type specified with create_using
Examples
--------
>>> nx.write_edgelist(nx.path_graph(4), "test.edgelist")
>>> G=nx.read_edgelist("test.edgelist")
>>> fh=open("test.edgelist")
>>> G=nx.read_edgelist(fh)
>>> G=nx.read_edgelist("test.edgelist", nodetype=int)
>>> G=nx.read_edgelist("test.edgelist",create_using=nx.DiGraph())
Notes
-----
Since nodes must be hashable, the function nodetype must return hashable
types (e.g. int, float, str, frozenset - or tuples of those, etc.)
Example edgelist file formats
Without edge data::
# source target
a b
a c
d e
    With edge data::
# source target data
a b 1
a c 3.14159
d e apple
"""
if create_using is None:
G=networkx.Graph()
else:
try:
G=create_using
G.clear()
except:
raise TypeError("Input graph is not a networkx graph type")
fh=_get_fh(path)
for line in fh.readlines():
line = line[:line.find(comments)].strip()
if not len(line): continue
# if line.startswith("#") or line.startswith("\n"):
# continue
# line=line.strip() #remove trailing \n
# split line, should have 2 or three items
s=line.split(delimiter)
if len(s)==2:
if nodetype is not None:
try:
(u,v)=map(nodetype,s)
except:
raise TypeError("Failed to convert edge %s to type %s"\
%(s,nodetype))
else:
(u,v)=s
G.add_edge(u,v)
elif len(s)==3:
(u,v,d)=s
if nodetype is not None:
try:
(u,v)=map(nodetype,(u,v))
except:
raise TypeError("Failed to convert edge (%s, %s) to type %s"\
%(u,v,nodetype))
if d is not None and edgetype is not None:
try:
d=edgetype(d)
except:
raise TypeError("Failed to convert edge data (%s) to type %s"\
%(d, edgetype))
G.add_edge(u,v,d) # XGraph or XDiGraph
else:
raise TypeError("Failed to read line: %s"%line)
return G
| emreg00/biana | biana/ext/networkx/readwrite/edgelist.py | Python | gpl-3.0 | 5,622 | 0.011028 |
school_scores = [
[2.2, 1032, 1253, 188, 0],
[1.9, 671, 1622, 418, 12],
[2.1, 3854, 7193, 1184, 16],
[2.2, 457, 437, 57, 0],
[1.8, 25546, 84659, 18839, 240],
[2.2, 2736, 4312, 732, 12],
[2.1, 2646, 17108, 4338, 105],
[1.8, 956, 2718, 731, 19],
[1.8, 316, 1493, 643, 13],
[1.8, 15418, 41559, 9420, 111],
[1.8, 9098, 29098, 5573, 74],
[2, 1012, 3581, 970, 25],
[2, 817, 1167, 257, 2],
[2.1, 3127, 4056, 476, 4],
[2.2, 6003, 18736, 5912, 117],
[2.8, 557, 333, 35, 0],
[2.5, 832, 684, 76, 3],
[2.1, 1390, 1417, 152, 3],
[1.8, 805, 1081, 183, 2],
[2.2, 1617, 4945, 954, 25],
[2.1, 6112, 19826, 6013, 169],
[2, 4536, 28692, 8054, 265],
[2.2, 3032, 3181, 474, 12],
[2.7, 1769, 1908, 172, 6],
[2.3, 344, 232, 30, 0],
[2.6, 1106, 1245, 157, 7],
[2.6, 776, 1233, 238, 5],
[2.5, 457, 419, 56, 4],
[1.8, 1350, 3039, 538, 9],
[2, 1305, 6016, 1376, 20],
[2.1, 10417, 37455, 9008, 214],
[2.3, 607, 843, 127, 4],
[2.2, 19988, 64955, 19989, 1361],
[1.9, 10991, 20933, 4728, 128],
[2.8, 140, 67, 13, 0],
[2.4, 7640, 13102, 2814, 62],
[2.3, 789, 684, 85, 1],
[2, 3846, 7638, 1514, 34],
[2.1, 17969, 44568, 9793, 207],
[1.8, 491, 691, 93, 1],
[2, 721, 4240, 1000, 38],
[1.8, 4354, 10152, 2216, 33],
[2.7, 154, 94, 15, 0],
[2.3, 2027, 2349, 319, 3],
[2, 24009, 58514, 7193, 93],
[2.7, 545, 613, 82, 1],
[2.4, 727, 2551, 495, 12],
[1.2, 62, 503, 167, 0],
[2.1, 1055, 938, 121, 1],
[2.1, 6556, 14116, 2387, 46],
[2.5, 1362, 1047, 153, 6],
[2.5, 204, 179, 33, 2],
[2.2, 1050, 1137, 150, 2],
[1.9, 689, 1553, 459, 8],
[2.1, 3939, 7011, 1303, 26],
[2.3, 435, 355, 64, 1],
[1.8, 28126, 85907, 20619, 283],
[2.2, 2710, 4094, 746, 12],
[2.1, 2971, 17185, 4432, 94],
[1.8, 1102, 2812, 758, 21],
[1.9, 321, 1498, 622, 22],
[1.7, 16541, 40909, 9685, 136],
[1.8, 9458, 27921, 5259, 71],
[2, 1127, 3412, 1037, 22],
[2, 865, 971, 210, 4],
[2.1, 3263, 3787, 415, 7],
[2.2, 6259, 18383, 5835, 139],
[2.9, 509, 313, 25, 1],
[2.6, 776, 649, 83, 1],
[2.1, 1399, 1175, 157, 0],
[1.9, 670, 742, 105, 2],
[2.2, 1771, 4843, 929, 17],
[2.1, 6622, 19945, 5965, 178],
[2, 5119, 29138, 8377, 302],
[2.2, 2989, 2870, 466, 9],
[2.7, 1798, 1674, 178, 4],
[2.3, 342, 252, 30, 0],
[2.6, 1066, 1155, 159, 4],
[2.5, 736, 1055, 219, 5],
[2.4, 448, 332, 43, 1],
[1.7, 1383, 2941, 548, 6],
[2, 1399, 5945, 1488, 21],
[2.1, 11608, 37683, 9117, 209],
[2.3, 621, 808, 140, 4],
[2.3, 22060, 63727, 19016, 1283],
[1.9, 11706, 19807, 4506, 113],
[2.7, 114, 47, 5, 1],
[2.4, 7653, 12582, 2778, 69],
[2.4, 743, 632, 88, 2],
[2.1, 3893, 6910, 1400, 28],
[2, 18867, 42918, 9022, 178],
[1.7, 537, 697, 90, 0],
[2, 804, 4118, 970, 40],
[1.8, 4528, 10189, 1993, 37],
[2.8, 119, 45, 8, 1],
[2.3, 1895, 2097, 276, 5],
[2, 24613, 55066, 6799, 90],
[2.7, 557, 580, 79, 2],
[2.3, 755, 2468, 514, 9],
[1.4, 80, 457, 142, 1],
[2.2, 1076, 960, 163, 4],
[2.1, 6695, 13557, 2476, 44],
[2.6, 1273, 949, 140, 6],
[2.7, 180, 149, 28, 1],
[2.2, 1031, 1126, 160, 4],
[1.9, 715, 1480, 381, 9],
[2.1, 4040, 7413, 1390, 24],
[2.3, 397, 359, 48, 0],
[1.8, 28902, 88322, 21971, 311],
[2.2, 2610, 3810, 681, 20],
[2.1, 3112, 17860, 4817, 122],
[1.8, 1059, 2758, 761, 12],
[1.8, 366, 1487, 710, 31],
[1.8, 17020, 42754, 10229, 151],
[1.8, 9748, 28641, 5690, 67],
[2, 1079, 3558, 1037, 29],
[2, 807, 1059, 212, 3],
[2.1, 2866, 3334, 389, 7],
[2.2, 6484, 18913, 6135, 157],
[2.8, 435, 248, 31, 0],
[2.6, 738, 588, 82, 1],
[2.2, 1274, 964, 137, 3],
[1.9, 774, 805, 124, 2],
[2.1, 1520, 4406, 1182, 84],
[2.1, 6827, 20409, 6259, 200],
[1.9, 5556, 30001, 8656, 329],
[2.2, 2804, 2688, 440, 8],
[2.7, 1551, 1481, 167, 3],
[2.5, 301, 202, 21, 1],
[2.6, 1048, 928, 98, 2],
[2.5, 738, 1000, 212, 4],
[2.6, 431, 287, 37, 2],
[1.8, 1477, 3179, 597, 10],
[2, 1552, 6078, 1469, 23],
[2.1, 12329, 38967, 9520, 280],
[2.3, 581, 778, 109, 2],
[2.3, 23170, 66092, 20647, 1509],
[1.9, 12478, 20458, 4789, 146],
[2.8, 118, 45, 13, 0],
[2.4, 7683, 12192, 2632, 63],
[2.4, 690, 536, 67, 2],
[2.1, 3942, 7256, 1360, 25],
[2, 19746, 43385, 9133, 205],
[1.7, 487, 715, 101, 3],
[2, 814, 4215, 1052, 34],
[1.8, 4795, 10168, 2123, 43],
[2.8, 125, 67, 7, 0],
[2.3, 1847, 1926, 244, 4],
[2, 24679, 56823, 7163, 108],
[2.7, 486, 546, 77, 2],
[2.3, 788, 2530, 423, 14],
[1.4, 81, 445, 120, 0],
[2.1, 9153, 24979, 8388, 260],
[2.1, 7040, 13758, 2678, 50],
[2.2, 1103, 934, 154, 2],
[2.6, 1181, 927, 138, 3],
[2.6, 156, 106, 15, 0],
[2.3, 933, 1039, 145, 7],
[1.9, 696, 1453, 365, 13],
[2.2, 4265, 7666, 1495, 27],
[2.4, 413, 370, 32, 2],
[1.9, 26435, 89292, 21939, 306],
[2.3, 2540, 3443, 579, 13],
[2.1, 2869, 18052, 4484, 139],
[1.7, 976, 2784, 728, 25],
[2, 403, 1603, 836, 30],
[1.8, 16387, 42479, 9671, 163],
[1.9, 9449, 28902, 5687, 72],
[2.1, 1120, 3590, 976, 20],
[2, 800, 1053, 188, 3],
[2.1, 2515, 3064, 439, 24],
[2.3, 6523, 19485, 6428, 173],
[2.8, 414, 252, 31, 2],
[2.7, 725, 585, 54, 1],
[2.2, 1123, 962, 109, 2],
[2.1, 695, 753, 109, 1],
[2.2, 1718, 5430, 1576, 144],
[2.2, 6174, 20443, 6320, 182],
[2, 4925, 29244, 8289, 329],
[2.3, 2197, 1680, 175, 4],
[2.8, 1489, 1338, 164, 4],
[2.4, 295, 184, 30, 1],
[2.6, 936, 826, 97, 7],
[2.6, 675, 875, 179, 6],
[2.6, 335, 247, 30, 1],
[1.8, 1559, 3452, 645, 12],
[2.1, 1506, 5963, 1371, 26],
[2.1, 11183, 38429, 9154, 226],
[2.4, 616, 708, 114, 2],
[2.3, 20267, 67272, 21456, 1639],
[1.9, 11475, 21013, 4575, 104],
[2.7, 109, 41, 3, 0],
[2.4, 7312, 11726, 2338, 70],
[2.4, 627, 485, 57, 3],
[2.1, 3983, 6897, 1312, 31],
[2.1, 18952, 43429, 9174, 247],
[1.9, 493, 707, 73, 3],
[2.1, 794, 4372, 1016, 33],
[1.8, 4323, 9361, 1948, 42],
[2.8, 92, 58, 5, 1],
[2.4, 1575, 1844, 249, 8],
[2.1, 24387, 58981, 7507, 94],
[2.7, 493, 512, 86, 7],
[2.4, 824, 2522, 445, 7],
[1.4, 90, 498, 127, 0],
[2.1, 9083, 24925, 7804, 213],
[2.2, 7071, 13986, 2782, 62],
[2.2, 1007, 971, 146, 4],
[2.5, 1129, 790, 125, 2],
[2.8, 109, 73, 11, 0],
[2.3, 1022, 1069, 126, 2],
[2.3, 1022, 1069, 126, 2],
[2.3, 4704, 8114, 1583, 31],
[2.4, 509, 348, 57, 0],
[2, 32111, 94493, 22337, 337],
[2.3, 2596, 3347, 507, 7],
[2.3, 3342, 18794, 4543, 145],
[1.8, 1131, 3033, 829, 23],
[2.1, 404, 1702, 870, 24],
[1.9, 17485, 45804, 10011, 179],
[1.9, 10676, 30509, 6237, 77],
[2.2, 1260, 3696, 1009, 27],
[2.1, 887, 1012, 189, 8],
[2.2, 2439, 2629, 344, 17],
[2.4, 7147, 19999, 6524, 168],
[3, 423, 201, 37, 0],
[2.8, 699, 495, 75, 2],
[2.2, 1057, 797, 101, 1],
[2.1, 713, 775, 127, 4],
[2.3, 1833, 6753, 2452, 215],
[2.3, 6848, 21354, 6662, 209],
[2.1, 5585, 30307, 8499, 357],
[2.3, 2054, 1393, 185, 5],
[2.9, 1510, 1188, 184, 4],
[2.4, 352, 183, 23, 0],
[2.7, 955, 848, 92, 8],
[2.6, 629, 865, 156, 6],
[2.6, 343, 205, 28, 2],
[1.9, 1759, 3762, 714, 18],
[2.2, 1570, 6077, 1348, 29],
[2.3, 12434, 39524, 8915, 223],
[2.3, 585, 734, 118, 4],
[2.4, 23527, 69233, 22802, 1884],
[2, 13285, 22176, 5035, 104],
[3, 87, 35, 11, 1],
[2.5, 7445, 11108, 2138, 68],
[2.5, 685, 431, 62, 0],
[2.2, 4022, 6802, 1309, 21],
[2.1, 20683, 44131, 9398, 279],
[2.3, 883, 4412, 1014, 29],
[1.8, 5390, 10882, 2104, 55],
[2.8, 107, 63, 9, 0],
[2.4, 1689, 1847, 227, 2],
[2.1, 26279, 64120, 7978, 112],
[2.8, 554, 566, 82, 4],
[2.5, 873, 2442, 462, 6],
[2.2, 10153, 25792, 7837, 194],
[2.3, 7381, 14437, 3060, 81],
[2.2, 1016, 943, 152, 6],
[2.6, 1124, 712, 104, 2],
[2.8, 90, 53, 15, 1],
[2.3, 956, 974, 138, 3],
[2, 751, 1483, 350, 18],
[2.3, 4812, 7754, 1326, 45],
[2.5, 462, 336, 58, 3],
[2, 34416, 94101, 20646, 448],
[2.4, 2435, 3046, 462, 22],
[2.3, 3617, 18341, 4355, 151],
[1.8, 1254, 2945, 760, 22],
[2.2, 409, 1761, 783, 32],
[1.9, 17915, 46419, 9921, 281],
[1.9, 11314, 31447, 6279, 108],
[2.2, 1236, 3559, 912, 24],
[2.1, 943, 1043, 213, 3],
[2.2, 2478, 2407, 396, 33],
[2.3, 7056, 20489, 6748, 204],
[3, 405, 189, 27, 0],
[2.8, 613, 426, 56, 5],
[2.3, 983, 663, 69, 2],
[2.2, 644, 757, 128, 3],
[2.2, 2008, 6982, 2411, 249],
[2.4, 6831, 21095, 6238, 286],
[2.2, 6074, 29860, 8071, 376],
[2.4, 1941, 1247, 162, 7],
[2.9, 1493, 1115, 151, 5],
[2.5, 298, 184, 22, 2],
[2.8, 887, 704, 129, 7],
[2.6, 659, 926, 190, 8],
[2.6, 357, 177, 25, 1],
[2, 1750, 4002, 768, 26],
[2.2, 1704, 5911, 1253, 31],
[2.3, 13022, 38829, 8638, 279],
[2.4, 562, 664, 93, 4],
[2.5, 24892, 68733, 22574, 1972],
[2, 13593, 22105, 4548, 130],
[3, 94, 47, 6, 0],
[2.5, 7077, 9949, 1869, 56],
[2.5, 621, 430, 67, 1],
[2.2, 4298, 6673, 1343, 39],
[2.1, 20724, 43212, 9077, 314],
[2.3, 899, 4327, 1007, 36],
[1.9, 5528, 10390, 1875, 52],
[2.8, 84, 43, 7, 0],
[2.4, 1628, 1569, 185, 2],
[2.1, 27642, 66501, 8071, 222],
[2.9, 541, 500, 76, 4],
[2.6, 863, 2330, 401, 11],
[2.2, 10612, 24959, 6918, 234],
[2.3, 7724, 14595, 2961, 90],
[2.2, 906, 782, 114, 10],
[2.7, 1053, 653, 106, 5],
[2.9, 88, 65, 8, 0],
[2.2, 1004, 1172, 168, 9],
[2, 791, 1456, 375, 19],
[2.2, 5305, 8366, 1570, 44],
[2.5, 429, 296, 47, 1],
[2, 36880, 97800, 21965, 459],
[2.5, 2614, 3016, 430, 20],
[2.3, 3866, 18338, 4299, 161],
[1.8, 1298, 3131, 794, 27],
[2.2, 431, 1772, 817, 35],
[2, 19194, 50703, 12183, 525],
[1.9, 12470, 33997, 6733, 145],
[2.2, 1343, 3830, 959, 28],
[2.2, 971, 1134, 188, 6],
[2.3, 2475, 2077, 211, 9],
[2.3, 7645, 21107, 7099, 238],
[2.8, 471, 230, 33, 0],
[2.8, 712, 522, 65, 6],
[2.3, 958, 620, 61, 2],
[2.2, 758, 862, 134, 2],
[2.4, 2060, 6750, 2067, 218],
[2.4, 7252, 21497, 6191, 302],
[2.2, 6391, 30069, 7781, 369],
[2.5, 1863, 1104, 157, 6],
[3, 1443, 1097, 135, 10],
[2.4, 375, 211, 23, 1],
[2.8, 981, 779, 114, 6],
[2.7, 655, 910, 173, 11],
[2.7, 328, 173, 36, 2],
[1.9, 1968, 4425, 964, 23],
[2.2, 1740, 5698, 1079, 28],
[2.3, 13554, 38901, 8384, 302],
[2.4, 598, 749, 115, 2],
[2.5, 25314, 69226, 22696, 2061],
[1.9, 14884, 23087, 4937, 160],
[3, 93, 31, 3, 1],
[2.5, 6968, 9519, 1692, 73],
[2.5, 699, 458, 67, 3],
[2.2, 4481, 6855, 1306, 29],
[2.2, 21781, 42544, 8831, 342],
[2.1, 755, 881, 85, 0],
[2.3, 874, 4216, 953, 39],
[2.1, 5726, 10859, 2065, 66],
[2.7, 122, 59, 7, 0],
[2.4, 1736, 1551, 153, 9],
[2.1, 29625, 75327, 9547, 243],
[2.9, 609, 534, 90, 1],
[2.6, 840, 2282, 388, 11],
[1.4, 81, 647, 176, 2],
[2.2, 11185, 25276, 6975, 215],
[2.3, 8029, 15055, 3085, 102],
[2.3, 945, 762, 128, 9],
[2.6, 1088, 670, 98, 2],
[3, 110, 59, 4, 0],
[2.3, 1072, 1210, 193, 4],
[2.1, 796, 1520, 427, 4],
[2.3, 5443, 8340, 1519, 57],
[2.4, 445, 275, 47, 1],
[1.9, 39137, 99927, 22306, 490],
[2.5, 2389, 2471, 301, 10],
[2.3, 4011, 17996, 4124, 160],
[1.7, 1521, 4282, 1390, 79],
[2.2, 426, 1668, 727, 35],
[2.1, 19473, 49375, 10534, 363],
[1.9, 12697, 33977, 6413, 148],
[2.2, 1477, 3887, 1042, 34],
[2.2, 990, 1030, 198, 5],
[2.2, 2393, 1756, 182, 8],
[2.3, 8112, 21185, 6346, 188],
[2.8, 423, 201, 29, 0],
[2.8, 681, 413, 44, 2],
[2.3, 897, 530, 70, 5],
[2.3, 785, 911, 131, 4],
[2.4, 2124, 6636, 1849, 121],
[2.4, 7140, 21014, 6189, 291],
[2.2, 6907, 28492, 7065, 359],
[2.6, 1694, 988, 134, 5],
[3, 1311, 986, 118, 3],
[2.4, 319, 192, 26, 3],
[2.7, 987, 680, 92, 7],
[2.7, 740, 965, 179, 17],
[2.7, 361, 197, 37, 1],
[1.9, 2064, 4784, 960, 30],
[2.2, 1688, 5463, 991, 29],
[2.3, 13972, 37249, 7730, 291],
[2.4, 658, 739, 124, 6],
[2.5, 25871, 68258, 21288, 1822],
[2, 15697, 22893, 4736, 166],
[2.9, 73, 22, 11, 0],
[2.5, 6653, 8103, 1418, 53],
[2.5, 713, 416, 47, 3],
[2.3, 4449, 6850, 1207, 61],
[2.2, 22013, 41217, 7939, 304],
[2.1, 834, 932, 103, 5],
[2.4, 954, 4105, 926, 42],
[2.1, 6107, 10765, 1820, 52],
[2.8, 99, 41, 3, 1],
[2.5, 1717, 1471, 170, 8],
[2.2, 31314, 78013, 9724, 269],
[2.8, 587, 445, 66, 5],
[2.5, 949, 2151, 343, 7],
[1.4, 79, 633, 202, 1],
[2.2, 11847, 25197, 6352, 230],
[2.3, 8066, 14745, 3031, 125],
[2.4, 902, 720, 118, 7],
[2.7, 1000, 596, 82, 4],
[2.9, 102, 68, 7, 2],
[2.3, 1008, 1018, 158, 3],
[2, 740, 1447, 349, 10],
[2.3, 5208, 7690, 1382, 35],
[2.4, 403, 215, 24, 2],
[1.9, 39447, 97989, 20580, 405],
[2.5, 2222, 2057, 266, 7],
[2.3, 4253, 17191, 3707, 136],
[1.7, 1462, 4178, 1377, 71],
[2.2, 482, 1612, 700, 25],
[2.1, 19649, 47788, 9766, 259],
[1.9, 13058, 31920, 5608, 106],
[2.3, 1426, 3677, 863, 29],
[1.9, 2982, 5672, 2479, 235],
[2.3, 2237, 1534, 148, 7],
[2.3, 8414, 20103, 5835, 185],
[2.7, 377, 192, 26, 0],
[2.9, 649, 376, 36, 1],
[2.3, 783, 426, 39, 2],
[2.3, 750, 670, 93, 1],
[2.4, 2110, 5699, 1329, 89],
[2.4, 7250, 19677, 6037, 306],
[2.2, 7236, 27799, 6332, 278],
[2.7, 1559, 735, 80, 7],
[2.9, 1226, 838, 93, 1],
[2.6, 351, 163, 22, 0],
[2.7, 868, 609, 64, 3],
[2.6, 601, 744, 109, 8],
[2.7, 269, 162, 18, 0],
[1.9, 2086, 4749, 919, 24],
[2.2, 1820, 5032, 871, 22],
[2.3, 14203, 35003, 6810, 223],
[2.4, 679, 665, 118, 6],
[2.5, 25697, 65184, 19518, 1458],
[2, 15368, 19635, 3540, 106],
[2.9, 68, 21, 2, 0],
[2.5, 6240, 6730, 1080, 37],
[2.5, 665, 389, 46, 1],
[2.2, 4371, 6516, 1069, 24],
[2.2, 22367, 37888, 6945, 245],
[2.2, 865, 950, 86, 2],
[2.3, 955, 3817, 858, 35],
[2.1, 6162, 10193, 1430, 35],
[2.8, 104, 33, 5, 2],
[2.5, 1629, 1341, 140, 1],
[2.2, 31039, 74737, 8989, 162],
[2.8, 511, 427, 37, 0],
[2.5, 910, 1979, 314, 7],
[1.5, 84, 523, 151, 3],
[2.2, 11798, 24322, 5383, 154],
[2.3, 7998, 14972, 3229, 140],
[2.4, 876, 628, 101, 0],
[2.6, 927, 533, 72, 3],
[2.7, 83, 48, 4, 0],
[2.3, 967, 831, 154, 2],
[2, 803, 1474, 361, 13],
[2.3, 5499, 7981, 1523, 22],
[2.5, 422, 239, 30, 0],
[1.9, 41810, 101671, 20705, 268],
[2.5, 2301, 1983, 200, 8],
[2.3, 4554, 17460, 3736, 113],
[1.8, 1639, 4142, 1271, 65],
[2.3, 513, 1910, 931, 39],
[2.1, 21143, 49103, 10047, 229],
[1.9, 0, 0, 0, 0],
[2.3, 1519, 3389, 792, 17],
[1.9, 3145, 5988, 2604, 214],
[2.3, 2283, 1461, 133, 2],
[2.3, 8909, 20486, 5442, 109],
[2.8, 410, 167, 24, 1],
[2.9, 597, 337, 49, 1],
[2.3, 753, 358, 37, 2],
[2.4, 648, 566, 88, 1],
[2.3, 2161, 5776, 1378, 85],
[2.3, 7538, 21082, 6567, 363],
[2.2, 7465, 28257, 6225, 263],
[2.6, 1533, 654, 72, 2],
[2.9, 1178, 810, 94, 2],
[2.4, 291, 141, 17, 0],
[2.7, 880, 612, 74, 1],
[2.6, 494, 445, 71, 3],
[2.8, 284, 149, 15, 1],
[1.9, 2063, 5064, 1019, 20],
[2.2, 1768, 5120, 820, 6],
[2.3, 14831, 35345, 6715, 189],
[2.4, 680, 676, 124, 1],
[2.5, 26785, 64459, 18234, 1363],
[2, 16090, 19790, 3140, 60],
[2.7, 60, 15, 2, 0],
[2.5, 5553, 5651, 801, 15],
[2.4, 592, 321, 49, 3],
[2.2, 4392, 6327, 1033, 24],
[2.1, 22480, 37779, 6305, 152],
[2.2, 1026, 1000, 86, 0],
[2.3, 1056, 3799, 746, 30],
[2.2, 6610, 9804, 1291, 15],
[2.9, 88, 43, 2, 0],
[2.5, 1577, 1183, 120, 2],
[2.2, 33522, 79105, 9520, 102],
[2.9, 529, 361, 51, 2],
[2.5, 922, 1961, 263, 6],
[1.3, 71, 494, 116, 0],
[2.2, 12305, 24098, 5309, 127],
[2.2, 7946, 15575, 3525, 154],
[2.3, 825, 614, 77, 2],
[2.7, 920, 451, 57, 5],
[2.6, 63, 32, 4, 0],
[2.3, 907, 774, 92, 0],
[2, 812, 1367, 339, 12],
[2.3, 5120, 7856, 1371, 16],
[2.5, 429, 236, 25, 0],
[1.9, 42656, 104693, 21065, 295],
[2.5, 2071, 1754, 177, 5],
[2.3, 4712, 17305, 3682, 117],
[1.8, 1651, 4195, 1336, 45],
[2.3, 475, 1844, 915, 56],
[2.1, 21446, 52282, 11318, 391],
[1.9, 14259, 31874, 5207, 49],
[2.3, 1533, 3288, 725, 12],
[2, 2659, 4907, 1764, 114],
[2.2, 1991, 1310, 117, 3],
[2.3, 8927, 20374, 5450, 134],
[2.7, 386, 184, 17, 1],
[2.8, 588, 300, 38, 0],
[2.3, 669, 312, 27, 0],
[2.4, 636, 523, 64, 1],
[2.4, 2181, 5962, 1366, 72],
[2.3, 7525, 20931, 6410, 332],
[2.2, 8036, 28688, 6059, 258],
[2.7, 1432, 689, 53, 1],
[2.9, 1032, 732, 107, 2],
[2.5, 307, 129, 7, 1],
[2.8, 766, 519, 61, 0],
[2.7, 422, 362, 43, 1],
[2.6, 263, 124, 15, 0],
[1.9, 2054, 4993, 1047, 27],
[2.2, 1743, 4860, 820, 9],
[2.2, 15234, 36271, 6455, 171],
[2.3, 683, 691, 106, 3],
[2.5, 27646, 62726, 17446, 1292],
[2, 16234, 19575, 3200, 60],
[3.1, 54, 17, 4, 0],
[2.5, 5179, 5002, 753, 20],
[2.4, 603, 334, 46, 5],
[2.2, 4218, 6296, 1027, 20],
[2.1, 22235, 36331, 5927, 130],
[2.3, 1018, 985, 71, 1],
[2.3, 1079, 3856, 808, 23],
[2.2, 6868, 9629, 1358, 19],
[2.6, 81, 37, 5, 0],
[2.5, 1418, 1077, 106, 0],
[2.2, 33641, 87512, 12348, 190],
[2.9, 502, 312, 33, 0],
[2.5, 950, 1929, 293, 8],
[1.5, 90, 530, 134, 0],
[2.1, 12489, 23912, 5027, 102],
[2.2, 8430, 16364, 4304, 238],
[2.4, 816, 584, 80, 1],
[2.6, 876, 418, 47, 4],
[2.5, 70, 40, 2, 0],
]
school_scores2 = [
[0.4173069147, 0.5066720582, 0.07602102709, 0],
[0.2464193904, 0.5956665443, 0.1535071612, 0.00440690415],
[0.3146893117, 0.5873275088, 0.09667673716, 0.001306442394],
[0.4805467928, 0.4595162986, 0.05993690852, 0],
[0.1975959902, 0.6548296773, 0.1457179543, 0.001856378206],
[0.3511293634, 0.5533880903, 0.09394250513, 0.001540041068],
[0.1093523991, 0.7070297971, 0.1792784229, 0.004339380915],
[0.2160940325, 0.6143761302, 0.1652350814, 0.004294755877],
[0.1281947262, 0.6056795132, 0.260851927, 0.005273833671],
[0.2318217357, 0.6248721958, 0.1416370963, 0.001668972154],
[0.207513172, 0.6636863353, 0.127112652, 0.001687840704],
[0.1811023622, 0.6408375089, 0.1735862563, 0.004473872584],
[0.3642443156, 0.5202853321, 0.1145786893, 0.0008916629514],
[0.4080647266, 0.5292966201, 0.06211666449, 0.0005219887772],
[0.1951053042, 0.6089443578, 0.1921476859, 0.003802652106],
[0.6021621622, 0.36, 0.03783783784, 0],
[0.521630094, 0.4288401254, 0.04764890282, 0.001880877743],
[0.4692775152, 0.4783929777, 0.05131667792, 0.001012829169],
[0.3887011106, 0.5219700628, 0.08836310961, 0.0009657170449],
[0.2144277947, 0.6557485745, 0.1265084206, 0.003315210184],
[0.1902864259, 0.6172478207, 0.1872042341, 0.005261519303],
[0.1091775579, 0.6905913784, 0.1938527451, 0.006378318531],
[0.4526048664, 0.4748469921, 0.07075682938, 0.001791312136],
[0.4588845655, 0.4949416342, 0.04461738003, 0.001556420233],
[0.5676567657, 0.3828382838, 0.0495049505, 0],
[0.4397614314, 0.4950298211, 0.06242544732, 0.002783300199],
[0.3445825933, 0.5475133215, 0.1056838366, 0.002220248668],
[0.4882478632, 0.4476495726, 0.05982905983, 0.004273504274],
[0.2735008104, 0.6156807131, 0.1089951378, 0.001823338736],
[0.1497074682, 0.6901456923, 0.1578524722, 0.002294367328],
[0.1824534977, 0.6560234, 0.1577748975, 0.003748204715],
[0.3839342188, 0.5332068311, 0.08032890576, 0.002530044276],
[0.1880462495, 0.6110938632, 0.1880556575, 0.01280422982],
[0.2988308864, 0.5691408374, 0.128548124, 0.003480152257],
[0.6363636364, 0.3045454545, 0.05909090909, 0],
[0.3234820899, 0.5547463799, 0.1191464138, 0.002625116437],
[0.5060936498, 0.4387427838, 0.05452212957, 0.0006414368185],
[0.2951197053, 0.5860957643, 0.1161755678, 0.002608962554],
[0.2477218523, 0.6144174697, 0.135006962, 0.002853716035],
[0.3847962382, 0.5415360502, 0.07288401254, 0.0007836990596],
[0.1201866978, 0.7067844641, 0.1666944491, 0.006334389065],
[0.2598627275, 0.605908684, 0.1322590272, 0.001969561325],
[0.5855513308, 0.3574144487, 0.05703422053, 0],
[0.4314601958, 0.5, 0.06790123457, 0.0006385696041],
[0.2673340088, 0.6515382645, 0.08009219566, 0.00103553096],
[0.4391619662, 0.4939564867, 0.06607574537, 0.0008058017728],
[0.1920739762, 0.6739762219, 0.1307793923, 0.003170409511],
[0.08469945355, 0.6871584699, 0.2281420765, 0],
[0.4988179669, 0.443498818, 0.05721040189, 0.0004728132388],
[0.2837481065, 0.6109500108, 0.1033109717, 0.001990911058],
[0.5303738318, 0.4077102804, 0.05957943925, 0.002336448598],
[0.4880382775, 0.4282296651, 0.07894736842, 0.004784688995],
[0.4489097905, 0.4861051732, 0.06412997007, 0.0008550662676],
[0.2543373939, 0.5732742709, 0.1694352159, 0.002953119232],
[0.3207915954, 0.5709748351, 0.1061161332, 0.002117436273],
[0.5087719298, 0.4152046784, 0.07485380117, 0.001169590643],
[0.2084411013, 0.6366546856, 0.152806907, 0.00209730611],
[0.3583708014, 0.5413911664, 0.09865115049, 0.001586881777],
[0.1203711207, 0.6962563812, 0.1795640548, 0.0038084434],
[0.2348178138, 0.5991902834, 0.1615171532, 0.004474749627],
[0.1303288672, 0.6082013804, 0.2525375558, 0.008932196508],
[0.245886043, 0.6081223707, 0.1439699127, 0.002021673529],
[0.2214521529, 0.6537497951, 0.1231356389, 0.001662413075],
[0.2013219007, 0.6095033941, 0.1852447303, 0.003929974991],
[0.4219512195, 0.4736585366, 0.1024390244, 0.001951219512],
[0.4366970021, 0.5068254818, 0.05554068522, 0.0009368308351],
[0.2044355892, 0.6004376796, 0.1905866214, 0.004540109747],
[0.6002358491, 0.3691037736, 0.02948113208, 0.001179245283],
[0.5142478463, 0.4300861498, 0.05500331345, 0.0006626905235],
[0.512266569, 0.4302453314, 0.0574880996, 0],
[0.4410796577, 0.4884792627, 0.06912442396, 0.001316655695],
[0.2342592593, 0.6406084656, 0.1228835979, 0.002248677249],
[0.2024457352, 0.6097523693, 0.1823601345, 0.005441760929],
[0.1192239612, 0.6786379728, 0.1951043413, 0.007033724613],
[0.471897695, 0.4531101989, 0.07357120303, 0.001420903063],
[0.4920634921, 0.4581280788, 0.04871373837, 0.00109469075],
[0.5480769231, 0.4038461538, 0.04807692308, 0],
[0.447147651, 0.4844798658, 0.06669463087, 0.001677852349],
[0.3652605459, 0.523573201, 0.1086848635, 0.002481389578],
[0.5436893204, 0.4029126214, 0.05218446602, 0.001213592233],
[0.2835178352, 0.6029110291, 0.1123411234, 0.0012300123],
[0.1580255281, 0.6715237773, 0.1680786174, 0.002372077262],
[0.1980312879, 0.6428681099, 0.1555350837, 0.003565518536],
[0.3947870312, 0.51366815, 0.08900190718, 0.002542911634],
[0.2079444979, 0.6007107441, 0.1792507965, 0.0120939615],
[0.3239787446, 0.5481844348, 0.1247093989, 0.003127421676],
[0.6826347305, 0.2814371257, 0.02994011976, 0.005988023952],
[0.3315570574, 0.545100078, 0.1203535222, 0.002989342345],
[0.5071672355, 0.4313993174, 0.06006825939, 0.001365187713],
[0.318289592, 0.5649578939, 0.1144632491, 0.002289264982],
[0.2657885469, 0.604606607, 0.1270972741, 0.002507572022],
[0.4055891239, 0.5264350453, 0.06797583082, 0],
[0.1355360755, 0.694200944, 0.1635198921, 0.006743088334],
[0.2703767839, 0.608407476, 0.1190063892, 0.002209350929],
[0.6878612717, 0.2601156069, 0.04624277457, 0.005780346821],
[0.4434823309, 0.4907559092, 0.06459162181, 0.001170138076],
[0.284319841, 0.6361010997, 0.0785394141, 0.001039645134],
[0.4573070608, 0.4761904762, 0.06486042693, 0.001642036125],
[0.2015483182, 0.6588360918, 0.1372130272, 0.002402562734],
[0.1176470588, 0.6720588235, 0.2088235294, 0.001470588235],
[0.4884248752, 0.4357694054, 0.07399001362, 0.001815705856],
[0.2940014052, 0.595336378, 0.1087300193, 0.001932197435],
[0.5375844595, 0.4007601351, 0.05912162162, 0.002533783784],
[0.5027932961, 0.4162011173, 0.0782122905, 0.002793296089],
[0.444205084, 0.4851357174, 0.06893580353, 0.001723395088],
[0.2765957447, 0.5725338491, 0.1473887814, 0.003481624758],
[0.3139815031, 0.5761249709, 0.1080282894, 0.001865236652],
[0.4937810945, 0.4465174129, 0.05970149254, 0],
[0.207173885, 0.6331053861, 0.1574914341, 0.002229294797],
[0.366521556, 0.5350372139, 0.09563263587, 0.002808594299],
[0.120103431, 0.6892825441, 0.1859055999, 0.004708424993],
[0.2307189542, 0.6008714597, 0.165795207, 0.002614379085],
[0.1410948342, 0.5732459522, 0.2737085582, 0.01195065536],
[0.2426091171, 0.6094306811, 0.1458077943, 0.002152407561],
[0.2208127577, 0.6487790513, 0.1288904997, 0.001517691297],
[0.1891986674, 0.6238821673, 0.1818341224, 0.00508504296],
[0.3877943296, 0.5088899568, 0.101874099, 0.001441614608],
[0.4345057611, 0.5054578532, 0.05897513645, 0.001061249242],
[0.2046135883, 0.5968317082, 0.1936003029, 0.004954400581],
[0.6092436975, 0.3473389356, 0.04341736695, 0],
[0.5237757275, 0.4173172463, 0.05819730305, 0.0007097232079],
[0.535744323, 0.4053826745, 0.05761143818, 0.00126156434],
[0.4539589443, 0.4721407625, 0.07272727273, 0.001173020528],
[0.2113459399, 0.612625139, 0.164349277, 0.01167964405],
[0.2026116635, 0.6056981748, 0.185754563, 0.005935598754],
[0.124736204, 0.6735440708, 0.1943334381, 0.0073862871],
[0.4720538721, 0.4525252525, 0.07407407407, 0.001346801347],
[0.4843847595, 0.4625234229, 0.05215490319, 0.0009369144285],
[0.5733333333, 0.3847619048, 0.04, 0.001904761905],
[0.5048169557, 0.4470134875, 0.0472061657, 0.0009633911368],
[0.3776867963, 0.5117707267, 0.1084953941, 0.002047082907],
[0.5693527081, 0.3791281374, 0.04887714663, 0.002642007926],
[0.2806384192, 0.6040281208, 0.113433403, 0.001900057002],
[0.1701381276, 0.6663012497, 0.1610392458, 0.002521376891],
[0.2017971717, 0.6377995286, 0.1558203483, 0.004582951421],
[0.3952380952, 0.5292517007, 0.07414965986, 0.001360544218],
[0.2079556266, 0.5931896103, 0.1853111705, 0.0135435926],
[0.3294869425, 0.5402022656, 0.1264555993, 0.003855192628],
[0.6704545455, 0.2556818182, 0.07386363636, 0],
[0.3404076207, 0.5401860877, 0.1166149756, 0.002791315906],
[0.5328185328, 0.4138996139, 0.05173745174, 0.001544401544],
[0.313279822, 0.5766510371, 0.1080823333, 0.001986807598],
[0.2724751273, 0.598669776, 0.1260263009, 0.002828795761],
[0.3728943338, 0.5474732006, 0.07733537519, 0.002297090352],
[0.1331152903, 0.6892886345, 0.1720359771, 0.005560098119],
[0.2799346138, 0.5936131706, 0.123941853, 0.002510362543],
[0.6281407035, 0.3366834171, 0.0351758794, 0],
[0.459338473, 0.478985327, 0.06068142253, 0.0009947774186],
[0.2780011941, 0.6400932716, 0.08068894822, 0.001216586124],
[0.4374437444, 0.4914491449, 0.06930693069, 0.001800180018],
[0.2098535286, 0.6737683089, 0.1126498003, 0.003728362184],
[0.1253869969, 0.6888544892, 0.1857585139, 0],
[0.2139551192, 0.5838943432, 0.1960729313, 0.006077606358],
[0.2992433903, 0.584799796, 0.1138315056, 0.00212530817],
[0.5029639763, 0.4259005928, 0.07022343821, 0.0009119927041],
[0.5251222766, 0.4121831925, 0.06136060471, 0.001333926189],
[0.5631768953, 0.3826714801, 0.05415162455, 0],
[0.4392655367, 0.4891713748, 0.06826741996, 0.00329566855],
[0.2754254056, 0.5749901068, 0.1444400475, 0.005144440047],
[0.3170296588, 0.5698357244, 0.1111276295, 0.002006987289],
[0.5055079559, 0.452876377, 0.03916768666, 0.002447980416],
[0.1915968457, 0.6471747891, 0.1590105239, 0.002217841301],
[0.3863117871, 0.5236501901, 0.0880608365, 0.001977186312],
[0.1123160038, 0.706702161, 0.1755402443, 0.00544159098],
[0.2162641259, 0.6168845557, 0.161311766, 0.005539552404],
[0.1403203343, 0.5581476323, 0.291086351, 0.01044568245],
[0.2385298399, 0.6183260553, 0.1407714702, 0.002372634643],
[0.2142144638, 0.6552255724, 0.1289276808, 0.001632282929],
[0.1962846127, 0.6291622853, 0.1710480196, 0.003505082369],
[0.3913894325, 0.5151663405, 0.09197651663, 0.001467710372],
[0.4162528964, 0.5071168487, 0.07265806024, 0.003972194638],
[0.2000367997, 0.597534423, 0.1971234935, 0.005305283817],
[0.5922746781, 0.3605150215, 0.0443490701, 0.002861230329],
[0.5311355311, 0.4285714286, 0.03956043956, 0.0007326007326],
[0.5113843352, 0.4380692168, 0.04963570128, 0.0009107468124],
[0.446084724, 0.4833119384, 0.06996148909, 0.0006418485237],
[0.1937302661, 0.6123139378, 0.1777176364, 0.01623815968],
[0.1864186721, 0.6172589752, 0.1908270177, 0.005495335004],
[0.1151050553, 0.6834786267, 0.1937270666, 0.007689251408],
[0.5416666667, 0.4142011834, 0.04314595661, 0.0009861932939],
[0.4971619366, 0.4467445743, 0.05475792988, 0.001335559265],
[0.5784313725, 0.3607843137, 0.05882352941, 0.001960784314],
[0.501607717, 0.4426580922, 0.05198285102, 0.003751339764],
[0.3890489914, 0.5043227666, 0.1031700288, 0.003458213256],
[0.5464926591, 0.4029363785, 0.04893964111, 0.00163132137],
[0.2750529287, 0.6090331687, 0.1137967537, 0.002117148906],
[0.1698623957, 0.6725693661, 0.1546356869, 0.00293255132],
[0.189568077, 0.6514273122, 0.1551735829, 0.003831027936],
[0.4277777778, 0.4916666667, 0.07916666667, 0.001388888889],
[0.1831896162, 0.6080590054, 0.1939367645, 0.01481461395],
[0.3087416256, 0.5653671268, 0.1230930664, 0.002798181182],
[0.7124183007, 0.2679738562, 0.01960784314, 0],
[0.3409493612, 0.5467686282, 0.1090179987, 0.003264011937],
[0.5349829352, 0.4138225256, 0.04863481229, 0.002559726962],
[0.3258610816, 0.5642640923, 0.1073386239, 0.002536202242],
[0.2639480794, 0.6048438762, 0.1277680287, 0.003440015598],
[0.3863636364, 0.5540752351, 0.05721003135, 0.002351097179],
[0.1277554304, 0.7034593725, 0.1634754626, 0.005309734513],
[0.275807069, 0.5972310833, 0.1242822509, 0.002679596784],
[0.5897435897, 0.3717948718, 0.03205128205, 0.00641025641],
[0.4284548422, 0.5016322089, 0.06773667029, 0.002176278564],
[0.2680803351, 0.6483637283, 0.0825226176, 0.001033319043],
[0.4489981785, 0.4663023679, 0.07832422587, 0.006375227687],
[0.2169562928, 0.6640337019, 0.11716693, 0.001843075303],
[0.1258741259, 0.6965034965, 0.1776223776, 0],
[0.216133254, 0.5930993456, 0.1856989887, 0.00506841166],
[0.2958453621, 0.5851638007, 0.1163968035, 0.002594033722],
[0.4732142857, 0.4562969925, 0.06860902256, 0.001879699248],
[0.5518084066, 0.3861192571, 0.06109481916, 0.0009775171065],
[0.5647668394, 0.378238342, 0.05699481865, 0],
[0.4605678233, 0.4817485354, 0.05678233438, 0.000901306895],
[0.4605678233, 0.4817485354, 0.05678233438, 0.000901306895],
[0.3259423503, 0.5622228381, 0.1096868071, 0.002148004435],
[0.556892779, 0.3807439825, 0.06236323851, 0],
[0.2151087233, 0.6330001742, 0.1496335696, 0.002257532925],
[0.402044293, 0.5183521759, 0.07851943627, 0.001084094781],
[0.1245899195, 0.7006412168, 0.1693632568, 0.005405606919],
[0.2254784689, 0.6046650718, 0.1652711324, 0.004585326954],
[0.1346666667, 0.5673333333, 0.29, 0.008],
[0.2379591448, 0.6233617768, 0.1362430082, 0.00243607017],
[0.2247626266, 0.6423082591, 0.1313080275, 0.00162108676],
[0.2102803738, 0.6168224299, 0.1683911883, 0.004506008011],
[0.4231870229, 0.4828244275, 0.09017175573, 0.003816793893],
[0.4492540063, 0.4842512433, 0.06336341868, 0.003131331737],
[0.2112122466, 0.591021928, 0.192800993, 0.004964832437],
[0.6399394856, 0.3040847201, 0.05597579425, 0],
[0.5499606609, 0.3894571204, 0.0590086546, 0.001573564123],
[0.5403885481, 0.4074642127, 0.05163599182, 0.0005112474438],
[0.4403953057, 0.4786905497, 0.07844348363, 0.002470660902],
[0.162889896, 0.6001066382, 0.2178974496, 0.01910601617],
[0.1952499073, 0.6088444102, 0.1899466826, 0.0059589998],
[0.1248100474, 0.6772816662, 0.1899302762, 0.00797801019],
[0.5647511685, 0.3830079736, 0.05086609843, 0.001374759417],
[0.5232155232, 0.4116424116, 0.06375606376, 0.001386001386],
[0.6308243728, 0.3279569892, 0.04121863799, 0],
[0.5018392013, 0.4456121913, 0.04834471886, 0.004203888597],
[0.3798309179, 0.5223429952, 0.09420289855, 0.003623188406],
[0.5934256055, 0.3546712803, 0.04844290657, 0.003460207612],
[0.2813049736, 0.601631217, 0.1141851911, 0.002878618263],
[0.1739804965, 0.6734264184, 0.1493794326, 0.003213652482],
[0.2035157784, 0.6469163284, 0.1459178997, 0.003649993453],
[0.4059680777, 0.5093684941, 0.08188757807, 0.002775850104],
[0.20032185, 0.5894879349, 0.1941488003, 0.01604141478],
[0.3272167488, 0.5462068966, 0.1240147783, 0.002561576355],
[0.6492537313, 0.2611940299, 0.08208955224, 0.007462686567],
[0.3586396262, 0.5350932126, 0.1029914736, 0.003275687654],
[0.5814940577, 0.3658743633, 0.05263157895, 0],
[0.3309198618, 0.5596511437, 0.1077011683, 0.00172782623],
[0.2776577036, 0.5924339853, 0.1261628922, 0.00374541891],
[0.139318397, 0.6961186494, 0.1599873777, 0.004575575891],
[0.2924420813, 0.590418317, 0.1141554989, 0.00298410287],
[0.5977653631, 0.3519553073, 0.05027932961, 0],
[0.4486055777, 0.4905710491, 0.06029216467, 0.0005312084993],
[0.2668216755, 0.6510371717, 0.08100396999, 0.001137182833],
[0.4593698176, 0.4693200663, 0.0679933665, 0.003316749585],
[0.2307692308, 0.645519429, 0.1221252974, 0.001586042823],
[0.2308759323, 0.5865017282, 0.1782108423, 0.00441149718],
[0.295724989, 0.5784286229, 0.1226010657, 0.003245322329],
[0.4799244214, 0.4454416627, 0.07179971658, 0.002834199339],
[0.578784758, 0.3666323378, 0.05355303811, 0.001029866117],
[0.5660377358, 0.3333333333, 0.09433962264, 0.006289308176],
[0.4616127475, 0.4703042009, 0.0666344761, 0.001448575567],
[0.2886241353, 0.5699461952, 0.1345119139, 0.006917755573],
[0.3452679917, 0.5563607663, 0.09514242663, 0.003228815384],
[0.5378346915, 0.3911525029, 0.06752037253, 0.003492433062],
[0.2300365615, 0.6289711318, 0.1379978745, 0.002994432228],
[0.4082145851, 0.5106454317, 0.07745180218, 0.003688181056],
[0.1366762394, 0.6930547158, 0.1645631802, 0.005705864571],
[0.2517566754, 0.5912467376, 0.1525798033, 0.004416783778],
[0.1370184255, 0.5899497487, 0.2623115578, 0.01072026801],
[0.2403536546, 0.6227728883, 0.1331034668, 0.00376999034],
[0.2302026532, 0.6398429234, 0.1277569789, 0.002197444453],
[0.2156691677, 0.62100855, 0.1591345315, 0.004187750829],
[0.4282470481, 0.4736603088, 0.09673024523, 0.00136239782],
[0.4663153933, 0.4529544599, 0.07452013549, 0.006210011291],
[0.2045395252, 0.5939357046, 0.1956112126, 0.005913557701],
[0.652173913, 0.3043478261, 0.04347826087, 0],
[0.5572727273, 0.3872727273, 0.05090909091, 0.004545454545],
[0.5725101922, 0.3861386139, 0.04018637158, 0.001164822365],
[0.4203655352, 0.4941253264, 0.08355091384, 0.001958224543],
[0.172360515, 0.5993133047, 0.2069527897, 0.02137339056],
[0.198287373, 0.6123367199, 0.1810740203, 0.008301886792],
[0.1368603682, 0.6728104369, 0.181857101, 0.008472093914],
[0.5781948168, 0.3714626154, 0.04825737265, 0.002085195115],
[0.5401591896, 0.4034008683, 0.05463096961, 0.001808972504],
[0.5889328063, 0.3636363636, 0.04347826087, 0.00395256917],
[0.5136074117, 0.4076433121, 0.07469600463, 0.004053271569],
[0.3696017947, 0.5193494111, 0.1065619742, 0.004486819966],
[0.6375, 0.3160714286, 0.04464285714, 0.001785714286],
[0.2673388329, 0.6113657195, 0.1173235564, 0.003971891231],
[0.191482189, 0.6642319362, 0.1408023373, 0.003483537476],
[0.214290416, 0.638971169, 0.1421471827, 0.004591232227],
[0.4247921391, 0.5018896447, 0.07029478458, 0.003023431595],
[0.2106438974, 0.5816401655, 0.1910282557, 0.01668768141],
[0.3366603923, 0.5474787002, 0.112641173, 0.003219734496],
[0.6394557823, 0.3197278912, 0.04081632653, 0],
[0.373436758, 0.5249854889, 0.09862276397, 0.002954989183],
[0.5549597855, 0.3842716711, 0.05987488829, 0.0008936550492],
[0.3479316765, 0.5401926657, 0.1087185299, 0.003157127823],
[0.2826244085, 0.58930544, 0.1237879635, 0.004282188007],
[0.1434040517, 0.690221726, 0.1606316797, 0.00574254267],
[0.3097786495, 0.5822359204, 0.1050714486, 0.002913981507],
[0.6268656716, 0.3208955224, 0.05223880597, 0],
[0.4810874704, 0.4636524823, 0.05466903073, 0.0005910165485],
[0.2698465383, 0.6491955953, 0.07879065953, 0.002167206841],
[0.4826048171, 0.4460303301, 0.06779661017, 0.00356824264],
[0.2393897365, 0.6463245492, 0.1112343967, 0.003051317614],
[0.2483907965, 0.584205229, 0.161926831, 0.005477143459],
[0.3044540796, 0.5752857706, 0.1167126527, 0.003547497044],
[0.5, 0.4315673289, 0.06291390728, 0.005518763797],
[0.5795266924, 0.3593835993, 0.05833791965, 0.002751788663],
[0.5465838509, 0.4037267081, 0.04968944099, 0],
[0.4266893328, 0.4980875478, 0.07139821504, 0.003824904377],
[0.2995077622, 0.5513063234, 0.1419916698, 0.007194244604],
[0.3470722931, 0.5473339876, 0.1027150801, 0.002878639189],
[0.5549805951, 0.382923674, 0.06080206986, 0.001293661061],
[0.2347489561, 0.622517568, 0.1398118444, 0.002921631531],
[0.4299342105, 0.4960526316, 0.07072368421, 0.003289473684],
[0.1449894989, 0.6877437744, 0.1612286229, 0.00603810381],
[0.2472380952, 0.5963809524, 0.1512380952, 0.005142857143],
[0.1410801964, 0.5800327332, 0.2674304419, 0.01145662848],
[0.2323588161, 0.6138006174, 0.1474850191, 0.006355547485],
[0.2337613647, 0.6373043397, 0.1262161402, 0.002718155404],
[0.2180194805, 0.6217532468, 0.1556818182, 0.004545454545],
[0.4223575468, 0.4932579382, 0.08177468465, 0.002609830361],
[0.518650461, 0.4352472758, 0.04421626153, 0.001886001676],
[0.211837402, 0.5848596525, 0.1967081382, 0.006594807282],
[0.6416893733, 0.3133514986, 0.04495912807, 0],
[0.5455938697, 0.4, 0.04980842912, 0.004597701149],
[0.5837903717, 0.3778184034, 0.03717245582, 0.001218769043],
[0.4316628702, 0.4908883827, 0.07630979499, 0.001138952164],
[0.1856692204, 0.6083821541, 0.1863001352, 0.01964849031],
[0.2057771977, 0.6099824074, 0.1756710743, 0.008569320697],
[0.1432638422, 0.6740416947, 0.1744227752, 0.008271687962],
[0.5952076677, 0.352715655, 0.05015974441, 0.001916932907],
[0.5374301676, 0.408566108, 0.05027932961, 0.003724394786],
[0.6147540984, 0.3459016393, 0.03770491803, 0.001639344262],
[0.5218085106, 0.4143617021, 0.06063829787, 0.003191489362],
[0.3744997141, 0.5202973128, 0.09891366495, 0.006289308176],
[0.6085343228, 0.3209647495, 0.0667903525, 0.003710575139],
[0.2666666667, 0.5995934959, 0.1306233062, 0.003116531165],
[0.2036278525, 0.6668227033, 0.1262726741, 0.003276770041],
[0.2216843035, 0.6362506338, 0.1371256604, 0.004939402365],
[0.4084699454, 0.5116120219, 0.07855191257, 0.001366120219],
[0.2121930979, 0.5802828235, 0.1902478688, 0.0172762098],
[0.3455930157, 0.5360592551, 0.1146326739, 0.003715055261],
[0.7265625, 0.2421875, 0.0234375, 0.0078125],
[0.3817663818, 0.5215318869, 0.09270216963, 0.003999561692],
[0.5696821516, 0.3732681337, 0.05460472698, 0.002444987775],
[0.353642175, 0.5409991319, 0.1030700024, 0.002288690711],
[0.2963482, 0.5788456829, 0.1201529293, 0.004653187842],
[0.4386984311, 0.5119116793, 0.0493898896, 0],
[0.1437027294, 0.6931930286, 0.1566918777, 0.006412364354],
[0.3059414405, 0.5801987604, 0.1103334046, 0.003526394529],
[0.6489361702, 0.3138297872, 0.03723404255, 0],
[0.5033342998, 0.4496955639, 0.04436068426, 0.002609452015],
[0.2581879347, 0.6564902128, 0.0832040578, 0.002117794705],
[0.4935170178, 0.43273906, 0.07293354943, 0.0008103727715],
[0.2385685885, 0.648111332, 0.1101959671, 0.003124112468],
[0.08940397351, 0.7141280353, 0.1942604857, 0.002207505519],
[0.2562369705, 0.5790474445, 0.1597901537, 0.004925431262],
[0.3056221689, 0.5730653572, 0.1174298656, 0.003882608199],
[0.512472885, 0.4132321041, 0.0694143167, 0.004880694143],
[0.5855758881, 0.3606027987, 0.05274488698, 0.001076426265],
[0.6358381503, 0.3410404624, 0.02312138728, 0],
[0.4324324324, 0.4881000403, 0.07785397338, 0.001613553852],
[0.2897706589, 0.5533309064, 0.1554423007, 0.001456133964],
[0.3543850511, 0.5430041018, 0.09889966795, 0.003711179113],
[0.5794270833, 0.3580729167, 0.06119791667, 0.001302083333],
[0.2417953787, 0.6173668602, 0.1378104535, 0.00302730755],
[0.4619996132, 0.477857281, 0.05820924386, 0.001933861922],
[0.1525617131, 0.6844927922, 0.1568597619, 0.00608573276],
[0.2091584158, 0.5888338834, 0.1911441144, 0.01086358636],
[0.1491596639, 0.5840336134, 0.2545518207, 0.01225490196],
[0.2441908584, 0.6191610759, 0.1320960562, 0.00455200953],
[0.2385085, 0.6382455152, 0.1204658589, 0.002780125857],
[0.2293478261, 0.6035714286, 0.1618012422, 0.005279503106],
[0.4453441296, 0.4633378318, 0.08906882591, 0.002249212776],
[0.5515095644, 0.4047015441, 0.04194514865, 0.001843742798],
[0.2263961374, 0.5912478022, 0.1771092071, 0.005246853283],
[0.6477794793, 0.3078101072, 0.04441041348, 0],
[0.5973684211, 0.3622807018, 0.03859649123, 0.001754385965],
[0.5972037284, 0.3528628495, 0.0466045273, 0.003328894807],
[0.4287274713, 0.4975423266, 0.0715456035, 0.00218459858],
[0.1979496738, 0.6184529357, 0.1723205965, 0.01127679404],
[0.2061558007, 0.6067448172, 0.1786972339, 0.008402148178],
[0.1612918292, 0.6653433902, 0.1649814352, 0.008383345399],
[0.6004962779, 0.3502304147, 0.04750088621, 0.001772421127],
[0.5421836228, 0.4077750207, 0.0488006617, 0.001240694789],
[0.5907407407, 0.3555555556, 0.04814814815, 0.005555555556],
[0.5588901472, 0.3850509626, 0.05209513024, 0.003963759909],
[0.3892688059, 0.5076275644, 0.09416096791, 0.008942661757],
[0.605704698, 0.3305369128, 0.06208053691, 0.001677852349],
[0.2633324828, 0.6103597857, 0.1224802245, 0.003827507017],
[0.2065842614, 0.6685840166, 0.1212825848, 0.003549137193],
[0.2358461902, 0.6287600014, 0.1304817528, 0.004912055636],
[0.4309102816, 0.4839554682, 0.08120497708, 0.003929273084],
[0.22066889, 0.5822124037, 0.1815778026, 0.01554090362],
[0.3609169502, 0.5263726662, 0.1088935896, 0.003816793893],
[0.6886792453, 0.2075471698, 0.1037735849, 0],
[0.4099956862, 0.4993529303, 0.08738522216, 0.003266161336],
[0.604749788, 0.352841391, 0.03986429177, 0.002544529262],
[0.3540224397, 0.5450783799, 0.09604519774, 0.004853982653],
[0.30799043, 0.5766793055, 0.1110769102, 0.004253354414],
[0.4450373533, 0.4973319104, 0.05496264674, 0.002668089648],
[0.1582877053, 0.681101709, 0.1536419446, 0.006968641115],
[0.3258109262, 0.5743171148, 0.09709773794, 0.002774221084],
[0.6875, 0.2847222222, 0.02083333333, 0.006944444444],
[0.5101010101, 0.4370172311, 0.05050505051, 0.002376708259],
[0.2624371438, 0.6538132752, 0.08149513912, 0.002254441837],
[0.5321849501, 0.4034451496, 0.0598368087, 0.004533091568],
[0.2750724638, 0.6234782609, 0.09942028986, 0.002028985507],
[0.08633879781, 0.6918032787, 0.2207650273, 0.001092896175],
[0.2715582451, 0.5775684225, 0.145601247, 0.005272085454],
[0.3106250241, 0.5678360997, 0.1167250741, 0.004813802133],
[0.5163136806, 0.4121350887, 0.06754436176, 0.004006868918],
[0.594530321, 0.3543400713, 0.04875148633, 0.002378121284],
[0.5698324022, 0.3798882682, 0.03910614525, 0.01117318436],
[0.4609053498, 0.4654778235, 0.07224508459, 0.001371742112],
[0.2906520031, 0.568342498, 0.137077769, 0.003927729772],
[0.3638141809, 0.5371987426, 0.09654208872, 0.002444987775],
[0.6257763975, 0.3338509317, 0.03726708075, 0.003105590062],
[0.2490010794, 0.6185354214, 0.1299070199, 0.002556479255],
[0.4881370826, 0.4518892794, 0.05843585237, 0.001537785589],
[0.1681891881, 0.6798354886, 0.1465970657, 0.005378257603],
[0.2062641084, 0.5894469526, 0.194272009, 0.01001693002],
[0.1709826179, 0.5718339837, 0.2483150053, 0.008868393047],
[0.253659859, 0.6169218456, 0.1260747205, 0.003343574914],
[0.2575948868, 0.6296851574, 0.1106288961, 0.002091059733],
[0.2378648874, 0.6133444537, 0.1439532944, 0.00483736447],
[0.2623152709, 0.4989444053, 0.2180682618, 0.02067206193],
[0.569791136, 0.3907284768, 0.03769740194, 0.001782985227],
[0.2436227814, 0.5820714017, 0.1689492428, 0.005356574109],
[0.6336134454, 0.3226890756, 0.04369747899, 0],
[0.6111111111, 0.3540489642, 0.03389830508, 0.0009416195857],
[0.6264, 0.3408, 0.0312, 0.0016],
[0.4953764861, 0.4425363276, 0.06142668428, 0.0006605019815],
[0.2286767097, 0.6176438712, 0.1440338138, 0.009645605289],
[0.2179140367, 0.5914337241, 0.1814547641, 0.009197475203],
[0.1737543523, 0.667523112, 0.1520470645, 0.006675471245],
[0.6547669047, 0.3086938261, 0.03359932801, 0.002939941201],
[0.5681186284, 0.3883225209, 0.04309545876, 0.0004633920297],
[0.6548507463, 0.3041044776, 0.04104477612, 0],
[0.5621761658, 0.3944300518, 0.0414507772, 0.001943005181],
[0.4110807114, 0.5088919289, 0.07455540356, 0.005471956224],
[0.5991091314, 0.3608017817, 0.04008908686, 0],
[0.2681923374, 0.6105682695, 0.118153767, 0.003085626125],
[0.2349903163, 0.64970949, 0.1124596514, 0.002840542285],
[0.2525471648, 0.6223972688, 0.1210903466, 0.003965219865],
[0.4625340599, 0.4529972752, 0.08038147139, 0.00408719346],
[0.229730817, 0.5827440393, 0.1744906443, 0.01303449941],
[0.3976299516, 0.508033843, 0.09159357293, 0.00274263241],
[0.7472527473, 0.2307692308, 0.02197802198, 0],
[0.4429615958, 0.4777454391, 0.07666643004, 0.002626535103],
[0.6039963669, 0.353315168, 0.04178019982, 0.0009082652134],
[0.3648580968, 0.5439065109, 0.08923205342, 0.002003338898],
[0.3316331826, 0.5617614352, 0.1029727926, 0.003632589517],
[0.4545454545, 0.4992117709, 0.04519180242, 0.001050972149],
[0.1685789938, 0.6737864078, 0.1514563107, 0.006178287732],
[0.3457912458, 0.5719977553, 0.08024691358, 0.001964085297],
[0.7222222222, 0.2291666667, 0.03472222222, 0.01388888889],
[0.5236258438, 0.431051109, 0.0450016072, 0.0003214400514],
[0.2700757872, 0.6502997555, 0.07821486683, 0.001409590436],
[0.5241025641, 0.4379487179, 0.03794871795, 0],
[0.2834890966, 0.6165109034, 0.09781931464, 0.002180685358],
[0.1103810775, 0.6872536137, 0.1984231275, 0.00394218134],
[0.2832177065, 0.5838634563, 0.1292219795, 0.003696857671],
[0.3036561753, 0.5684346406, 0.1225938722, 0.005315311895],
[0.5457943925, 0.3912772586, 0.06292834891, 0],
[0.6039087948, 0.3472312704, 0.04690553746, 0.001954397394],
[0.6148148148, 0.3555555556, 0.02962962963, 0],
[0.4948822927, 0.4252814739, 0.07881269191, 0.001023541453],
[0.3029045643, 0.5560165975, 0.1361750283, 0.004903809883],
[0.3659900166, 0.5311813644, 0.1013643927, 0.00146422629],
[0.6107091172, 0.3458755427, 0.04341534009, 0],
[0.2542352269, 0.6182336702, 0.1259014679, 0.001629635035],
[0.5122439893, 0.4414514693, 0.04452359751, 0.0017809439],
[0.1760816611, 0.6750956966, 0.1444534663, 0.004369176043],
[0.2302936631, 0.5819867922, 0.1785864831, 0.009133061683],
[0.151193634, 0.5629236664, 0.2743884468, 0.01149425287],
[0.2625742033, 0.6098084995, 0.1247733539, 0.00284394327],
    # DIV/0! placeholder: this row was invalid in the source data and is skipped
[0.2656987931, 0.5927934231, 0.1385341963, 0.002973587546],
[0.2631578947, 0.5010459376, 0.2178897163, 0.01790645134],
[0.588553751, 0.3766434648, 0.03428718742, 0.0005155968033],
[0.2549361873, 0.5862187375, 0.1557259772, 0.003119098037],
[0.6810631229, 0.2774086379, 0.03986710963, 0.001661129568],
[0.6067073171, 0.3424796748, 0.04979674797, 0.001016260163],
[0.6547826087, 0.3113043478, 0.03217391304, 0.001739130435],
[0.497313891, 0.4343821949, 0.06753645434, 0.0007674597084],
[0.229893617, 0.6144680851, 0.1465957447, 0.009042553191],
[0.2120393812, 0.59302391, 0.1847257384, 0.01021097046],
[0.1768538261, 0.6694385217, 0.1474769012, 0.006230751007],
[0.6780185759, 0.2892525431, 0.03184431667, 0.0008845643521],
[0.5652591171, 0.3886756238, 0.04510556622, 0.0009596928983],
[0.6481069042, 0.3140311804, 0.03786191537, 0],
[0.561582642, 0.390555201, 0.04722399489, 0.0006381620932],
[0.4876604146, 0.4392892399, 0.07008884501, 0.002961500494],
[0.6325167038, 0.3318485523, 0.03340757238, 0.002227171492],
[0.252632868, 0.6201322557, 0.1247856968, 0.002449179525],
[0.2291936738, 0.6637282862, 0.1063002333, 0.0007778065854],
[0.2598283111, 0.6192186405, 0.1176419061, 0.003311142256],
[0.4591492235, 0.4564483457, 0.08372721134, 0.0006752194463],
[0.2416524571, 0.5815447353, 0.1645059139, 0.01229689375],
[0.4117195496, 0.5063971341, 0.08034800409, 0.00153531218],
[0.7792207792, 0.1948051948, 0.02597402597, 0],
[0.4619800333, 0.4701331115, 0.06663893511, 0.001247920133],
[0.6134715026, 0.332642487, 0.05077720207, 0.00310880829],
[0.3729619565, 0.537279212, 0.08772078804, 0.002038043478],
[0.3369506565, 0.5662659632, 0.09450506625, 0.002278314048],
[0.4857954545, 0.4734848485, 0.04071969697, 0],
[0.1875332978, 0.6746581424, 0.1324809093, 0.005327650506],
[0.3730248307, 0.5532731377, 0.07285553047, 0.0008465011287],
[0.6616541353, 0.3233082707, 0.01503759398, 0],
[0.5471894518, 0.4104788341, 0.04163775156, 0.000693962526],
[0.274210832, 0.6470809577, 0.07787384764, 0.0008343626533],
[0.5609756098, 0.3828207847, 0.05408271474, 0.002120890774],
[0.2925126904, 0.6221446701, 0.08343908629, 0.001903553299],
[0.1042584435, 0.7254038179, 0.1703377386, 0],
[0.2941035876, 0.575969789, 0.1268911781, 0.003035445398],
[0.2921323529, 0.5726102941, 0.1295955882, 0.005661764706],
[0.5434782609, 0.4044795784, 0.05072463768, 0.001317523057],
[0.6420097697, 0.3147243545, 0.03977669225, 0.003489183531],
[0.6363636364, 0.3232323232, 0.0404040404, 0],
[0.5115623237, 0.4365482234, 0.0518894529, 0],
[0.3209486166, 0.5403162055, 0.1339920949, 0.004743083004],
[0.3564714892, 0.5469609413, 0.09545359605, 0.001113973404],
[0.6217391304, 0.3420289855, 0.03623188406, 0],
[0.2528377265, 0.6205537345, 0.124859966, 0.001748572987],
[0.5168455203, 0.4377339656, 0.04417269778, 0.001247816321],
[0.1825224667, 0.6703207313, 0.1426247289, 0.004532073133],
[0.2284488723, 0.5804621558, 0.1848623218, 0.006226650062],
[0.1443768997, 0.5604863222, 0.2781155015, 0.0170212766],
[0.251015368, 0.6119362805, 0.1324718799, 0.004576471552],
[0.2774718325, 0.6202494697, 0.1013251863, 0.0009535114519],
[0.2758186398, 0.5915797049, 0.1304426053, 0.002159050018],
[0.2815544261, 0.5195891571, 0.1867852605, 0.01207115629],
[0.5819935691, 0.3829289681, 0.03420052616, 0.0008769365683],
[0.2558979504, 0.5840332521, 0.156227605, 0.00384119249],
[0.656462585, 0.3129251701, 0.02891156463, 0.001700680272],
[0.6349892009, 0.3239740821, 0.04103671706, 0],
[0.6636904762, 0.3095238095, 0.02678571429, 0],
[0.5196078431, 0.4272875817, 0.0522875817, 0.0008169934641],
[0.2276380336, 0.6222732491, 0.1425738441, 0.007514873187],
[0.2137905563, 0.5946644696, 0.18211262, 0.009432354111],
[0.1867056992, 0.6665272647, 0.1407727516, 0.005994284519],
[0.6583908046, 0.3167816092, 0.02436781609, 0.0004597701149],
[0.5509877202, 0.3908168713, 0.05712760278, 0.001067805659],
[0.6914414414, 0.2905405405, 0.01576576577, 0.002252252252],
[0.5690936107, 0.3855869242, 0.04531946508, 0],
[0.5096618357, 0.4371980676, 0.05193236715, 0.001207729469],
[0.6542288557, 0.3084577114, 0.03731343284, 0],
[0.2529245167, 0.6148257604, 0.1289250092, 0.003324713705],
[0.2345263724, 0.6539289559, 0.1103336921, 0.001210979548],
[0.2620632709, 0.6239527963, 0.111042301, 0.002941631832],
[0.4605529332, 0.4659474039, 0.07147673635, 0.0020229265],
[0.2533773256, 0.574887728, 0.1598936853, 0.01184126111],
[0.4155212573, 0.5010366275, 0.08190637078, 0.001535744452],
[0.72, 0.2266666667, 0.05333333333, 0],
[0.4727953259, 0.456636845, 0.06874201205, 0.001825817053],
[0.6103238866, 0.3380566802, 0.04655870445, 0.005060728745],
[0.3648473315, 0.5445895684, 0.08883314592, 0.001729954156],
[0.3440725438, 0.562199217, 0.0917165715, 0.002011667673],
[0.4906024096, 0.4746987952, 0.03421686747, 0.0004819277108],
[0.1871314603, 0.6687478321, 0.1401318071, 0.003988900451],
[0.3842452725, 0.5387154526, 0.07597627839, 0.001062996531],
[0.6585365854, 0.3008130081, 0.0406504065, 0],
[0.5451749327, 0.414071511, 0.04075355632, 0],
[0.2516324958, 0.6545840782, 0.0923622383, 0.001421187664],
[0.5926800472, 0.3683589138, 0.03896103896, 0],
[0.2987421384, 0.6066037736, 0.09213836478, 0.00251572327],
[0.1193633952, 0.7029177719, 0.1777188329, 0],
[0.3007223694, 0.5757765471, 0.1210450277, 0.002456055863],
[0.28736024, 0.5578129261, 0.1467139351, 0.008112898827],
[0.5509790682, 0.3943281567, 0.05401755571, 0.0006752194463],
[0.6513011152, 0.3107806691, 0.03494423792, 0.002973977695],
[0.625, 0.3571428571, 0.01785714286, 0],
]
def plot():
    # Placeholder: no plotting is implemented for this submission.
    pass


Examples = {
    'SchoolScores2': {
        'data': school_scores2,
        'k': [3, 2, 4],
    },
}
| WmHHooper/aima-python | submissions/Flanagin/myKMeans.py | Python | mit | 54,288 | 0.000037 |
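The Examples mapping above pairs a data set with candidate cluster counts. As a rough illustration only (it is not part of the original submission), the sketch below shows one way such an entry could be consumed, assuming scikit-learn and NumPy are available; `run_example` is a hypothetical helper introduced here, not a function from the repository::

    import numpy as np
    from sklearn.cluster import KMeans

    def run_example(name, examples):
        # Fetch the raw rows and the candidate cluster counts for one example.
        spec = examples[name]
        data = np.asarray(spec['data'], dtype=float)
        for k in spec['k']:
            # Fit a plain k-means model with k clusters on the rows.
            model = KMeans(n_clusters=k, n_init=10, random_state=0).fit(data)
            print(name, 'k=%d' % k, 'inertia=%.3f' % model.inertia_)

    # Example usage against the dictionary defined above:
    # run_example('SchoolScores2', Examples)

Each fit reports its inertia (the within-cluster sum of squares), which is one common way to compare the candidate k values.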
from synergine.synergy.event.Action import Action
from tests.src.event.LonelinessSuicideEvent import LonelinessSuicideEvent
from tests.src.event.TooMuchBeansAction import TooMuchBeansAction


class LonelinessSuicideAction(Action):
    # Event class this action listens for.
    _listen = LonelinessSuicideEvent
    # Actions this one depends on (assumed to control execution order).
    _depend = [TooMuchBeansAction]

    def run(self, obj, context, synergy_manager):
        # Remove the affected object from its collection.
        obj.get_collection().remove_object(obj) | buxx/synergine | tests/src/event/LonelinessSuicideAction.py | Python | apache-2.0 | 402 | 0.002488 |