text
stringlengths 6
947k
| repo_name
stringlengths 5
100
| path
stringlengths 4
231
| language
stringclasses 1
value | license
stringclasses 15
values | size
int64 6
947k
| score
float64 0
0.34
|
---|---|---|---|---|---|---|
import numpy as np
import numpy.random as rng
import theano
import theano.tensor as T
from theano.tensor.nnet import conv2d
# Minimal working example of a 2-D convolution in Theano: convolve a random
# minibatch of 28x28 single-channel "images" with one random 3x3 filter.
minibatch = 3
image_height,image_width = 28,28
filter_height,filter_width = 3,3
n_filters = 1
n_channels = 1
# Weight-initialisation scale, ~1/sqrt(number of input pixels).
n = 1/(np.sqrt(image_height*image_width))
X = T.tensor4(name='X')  # symbolic input: (batch, channel, height, width)
X_shape = (minibatch,n_channels,image_height,image_width)
W_shape = (n_filters,n_channels,filter_height,filter_width)
W = theano.shared(n*rng.randn(*W_shape),name='W')  # float64 filter weights
conv_out = conv2d(X,
W,
input_shape=X_shape,
filter_shape=W_shape,
border_mode='valid')  # 'valid': output spatial dims are 28-3+1 = 26
f = theano.function([X],[conv_out])
# NOTE(review): randint yields an int array while X/W are float64 --
# presumably Theano casts or rejects this; confirm the intended dtype.
X_data = np.array(rng.randint(low=0,high=256,size=X_shape))
conv_out = f(X_data)
|
nzufelt/theano_nn
|
min_work_ex.py
|
Python
|
mit
| 757 | 0.029062 |
from django.core.urlresolvers import get_resolver
from django.http import HttpResponse
class HandlerRegistry(dict):
    """Maps view handlers to the URL pattern they are mounted at.

    Also tracks the longest stringified stored value so callers can build
    aligned, fixed-width text output.
    """

    def __init__(self):
        self.maxlength = 0

    def __setitem__(self, name, value):
        # Keep a running maximum of stringified value lengths.
        self.maxlength = max(self.maxlength, len(str(value)))
        super(HandlerRegistry, self).__setitem__(name, value)

    def register(self, handler):
        """Record *handler* with no URL yet; sync_urls() fills it in later."""
        self[handler] = None

    def sync_urls(self):
        """Resolve every registered handler to its URL pattern string."""
        reverse_map = get_resolver(None).reverse_dict
        for handler in self:
            if self[handler]:
                continue
            # tied to current django url storage
            entry = reverse_map[handler][0][0]
            placeholders = dict((name, '<%s>' % name) for name in entry[1])
            self[handler] = entry[0] % placeholders
# Module-wide registry instance shared by the docs view below.
registry = HandlerRegistry()


def docs(request):
    """Render a plain-text index of every registered handler and its URL."""
    registry.sync_urls()
    row_template = '%%-%ss\n%%s\n\n' % registry.maxlength
    param_template_src = '\t%%-%ss - %%s\n'
    lines = []
    for handler, url in registry.items():
        try:
            # max() raises ValueError when the handler declares no params.
            widest = max(map(len, handler.params.keys()))
            param_template = param_template_src % widest
            params = ''.join(param_template % (k, v)
                             for k, v in handler.params.items())
        except ValueError:
            params = ''
        if handler.__doc__:
            doc = '\t%s\n\n%s' % (handler.__doc__.strip(), params)
        else:
            doc = params
        lines.append(row_template % (url, doc))
    return HttpResponse(''.join(lines), mimetype='text/plain')
|
j2a/django-simprest
|
simprest/docs.py
|
Python
|
bsd-3-clause
| 1,557 | 0.001927 |
# ns-3 Python-binding callback signatures used by the energy module bindings.
# Each row is [return-type, arg1 .. arg9]; unused slots are 'ns3::empty'.
callback_classes = [
    ['void', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
    ['void', 'double', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
    ['void', 'int', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
    ['void', 'ns3::Ptr<ns3::NetDevice>', 'ns3::Ptr<ns3::Packet const>', 'unsigned short', 'ns3::Address const&', 'ns3::Address const&', 'ns3::NetDevice::PacketType', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
]
|
letiangit/802.11ah-ns3
|
src/energy/bindings/callbacks_list.py
|
Python
|
gpl-2.0
| 641 | 0.00624 |
#!/usr/bin/python
import requests
from bs4 import BeautifulSoup
class CoverGrabber:
    """Fetches a CD cover image URL by scraping an Amazon search page."""

    def __init__(self, url=None):
        # Default to an Amazon CD search; callers may supply their own
        # search endpoint instead.
        if url is None:
            self.url = 'http://www.amazon.com/s/ref=nb_sb_noss_2?url=search-alias=aps&field-keywords=cd'
        else:
            self.url = url

    def request_service(self, keyword):
        """Download the search page for *keyword* and return parsed soup."""
        # NOTE(review): the keyword is appended after a literal space and is
        # not URL-encoded -- presumably the server tolerates this; confirm.
        complete_url = "%s %s" % (self.url, keyword)
        html = requests.get(complete_url)
        soup = BeautifulSoup(html.text)
        return soup

    def grab(self, keyword):
        """Return the first result's image URL, or None on any failure."""
        try:
            soup = self.request_service(keyword)
            image = soup.find_all("img",
                                  {"class": "s-access-image"})[0].get('src')
            return image
        except Exception:
            # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # still propagate; any network/parse failure maps to None.
            return None
if __name__ == "__main__":
print "Grab CD Cover from Amazon"
cover_grabber = CoverGrabber()
cover = cover_grabber.grab('Black ice')
if cover is None:
print "Error"
else:
print "Cover : %s" % cover
|
Ganapati/DjangoZik
|
infos_grabber/coverGrabber.py
|
Python
|
gpl-2.0
| 998 | 0.002004 |
# -*- coding: utf-8 -*-
from functools import partial
from types import NoneType
from navmazing import NavigateToSibling, NavigateToAttribute
from cfme.exceptions import DestinationNotFound
from cfme.fixtures import pytest_selenium as sel
from cfme.provisioning import provisioning_form as request_form
from cfme.web_ui import (
Form, Select, Table, accordion, fill, paginator,
flash, form_buttons, tabstrip, DHTMLSelect, Input, Tree, AngularSelect,
BootstrapTreeview, toolbar as tb, match_location, CheckboxTable)
from utils import version, fakeobject_or_object
from utils.appliance import Navigatable
from utils.appliance.implementations.ui import CFMENavigateStep, navigate_to, navigator
from utils.update import Updateable
from utils.pretty import Pretty
from utils.version import current_version
# Toolbar / accordion action shortcuts.
cfg_btn = partial(tb.select, "Configuration")
policy_btn = partial(tb.select, "Policy")
accordion_tree = partial(accordion.tree, "Catalog Items")
# Entry-point pickers: dynatree on appliances < 5.7, bootstrap tree on 5.7+.
dynamic_tree = Tree("//div[@id='basic_info_div']//ul[@class='dynatree-container']")
entry_tree = BootstrapTreeview('automate_treebox')
listview_table = CheckboxTable(table_locator='//div[@id="list_grid"]/table')
# Catalog-template chooser shown on the "Request Info" tab.
template_select_form = Form(
    fields=[
        ('template_table', Table('//div[@id="prov_vm_div"]/table')),
        ('add_button', form_buttons.add),
        ('cancel_button', form_buttons.cancel)
    ]
)
# Forms
basic_info_form = Form(
    fields=[
        ('name_text', Input("name")),
        ('description_text', Input("description")),
        ('display_checkbox', Input("display")),
        ('select_catalog', AngularSelect('catalog_id')),
        ('select_dialog', AngularSelect('dialog_id')),
        ('select_orch_template', AngularSelect('template_id')),
        ('select_provider', AngularSelect('manager_id')),
        ('select_config_template', AngularSelect('template_id')),
        ('field_entry_point', Input("fqname")),
        ('retirement_entry_point', Input("retire_fqname")),
        ('edit_button', form_buttons.save),
        ('apply_btn', '//a[normalize-space(.)="Apply"]')
    ])
# TODO: Replace with Taggable
edit_tags_form = Form(
    fields=[
        ("select_tag", AngularSelect('tag_cat')),
        ("select_value", AngularSelect('tag_add'))
    ])
detail_form = Form(
    fields=[
        ('long_desc', Input('long_description')),
    ])
resources_form = Form(
    fields=[
        ('choose_resource', Select("//select[@id='resource_id']")),
        ('add_button', form_buttons.add),
        ('save_button', form_buttons.save)
    ])
button_group_form = Form(
    fields=[
        ('btn_group_text', Input("name")),
        ('btn_group_hvr_text', Input("description")),
        ('add_button', form_buttons.add)
    ])
button_form = Form(
    fields=[
        ('btn_text', Input("name")),
        ('btn_hvr_text', Input("description")),
        ('select_dialog', Select("//select[@id='dialog_id']")),
        ('system_process', Select("//select[@id='instance_name']")),
        ('request', Input("object_request")),
        ('add_button', form_buttons.add)
    ])
# Page-identity matcher for the Catalogs section.
match_page = partial(match_location, title='Catalogs', controller='catalog')
class CatalogItem(Updateable, Pretty, Navigatable):
    """UI model of one Service Catalog Item: create/update/delete plus
    custom buttons, button groups and tag editing."""
    pretty_attrs = ['name', 'item_type', 'catalog', 'catalog_name', 'provider', 'domain']

    def __init__(self, item_type=None, vm_name=None, name=None, description=None,
                 display_in=False, catalog=None, dialog=None,
                 catalog_name=None, orch_template=None, provider_type=None,
                 provider=None, config_template=None, prov_data=None, domain="ManageIQ (Locked)",
                 appliance=None):
        self.item_type = item_type
        self.vm_name = vm_name
        self.name = name
        self.description = description
        self.display_in = display_in
        self.catalog = catalog
        self.dialog = dialog
        self.catalog_name = catalog_name
        self.orch_template = orch_template
        self.provider = provider
        self.config_template = config_template
        self.provider_type = provider_type
        self.provisioning_data = prov_data
        self.domain = domain
        Navigatable.__init__(self, appliance=appliance)

    def __str__(self):
        return self.name

    def create(self):
        """Fill and submit the multi-step 'Add Catalog Item' UI flow."""
        # Create has sequential forms, the first is only the provider type
        navigate_to(self, 'Add')
        # For element not found exception (To be removed)
        sel.sleep(5)
        sel.select("//select[@id='st_prov_type']",
                   self.provider_type or self.item_type or 'Generic')
        sel.wait_for_element(basic_info_form.name_text)
        catalog = fakeobject_or_object(self.catalog, "name", "Unassigned")
        dialog = fakeobject_or_object(self.dialog, "name", "No Dialog")
        # Need to provide the (optional) provider name to the form, not the object
        provider_name = None
        provider_required_types = ['AnsibleTower', 'Orchestration']
        if self.item_type in provider_required_types \
                or self.provider_type in provider_required_types:
            provider_name = self.provider.name
        # For tests where orchestration template is None
        orch_template = None
        if self.orch_template:
            orch_template = self.orch_template.template_name
        fill(basic_info_form, {'name_text': self.name,
                               'description_text': self.description,
                               'display_checkbox': self.display_in,
                               'select_catalog': catalog.name,
                               'select_dialog': dialog.name,
                               'select_orch_template': orch_template,
                               'select_provider': provider_name,
                               'select_config_template': self.config_template})
        # Non provider-backed types need the provisioning entry point picked
        # manually from the automate tree (widget differs before/after 5.7).
        if not (self.item_type in provider_required_types):
            sel.click(basic_info_form.field_entry_point)
            if version.current_version() < "5.7":
                dynamic_tree.click_path("Datastore", self.domain, "Service", "Provisioning",
                                        "StateMachines", "ServiceProvision_Template", "default")
            else:
                entry_tree.click_path("Datastore", self.domain, "Service", "Provisioning",
                                      "StateMachines", "ServiceProvision_Template", "default")
            sel.click(basic_info_form.apply_btn)
        # AnsibleTower items on 5.7+ additionally get a retirement entry point.
        if version.current_version() >= "5.7" and self.item_type == "AnsibleTower":
            sel.click(basic_info_form.retirement_entry_point)
            entry_tree.click_path("Datastore", self.domain, "Service", "Retirement",
                                  "StateMachines", "ServiceRetirement", "Generic")
            sel.click(basic_info_form.apply_btn)
        # Optional "Request Info" tab: pick a template row and fill prov data.
        if self.catalog_name is not None \
                and self.provisioning_data is not None \
                and not isinstance(self.provider, NoneType):
            tabstrip.select_tab("Request Info")
            tabstrip.select_tab("Catalog")
            template = template_select_form.template_table.find_row_by_cells({
                'Name': self.catalog_name,
                'Provider': self.provider.name
            })
            sel.click(template)
            request_form.fill(self.provisioning_data)
        sel.click(template_select_form.add_button)

    def update(self, updates):
        """Edit name/description via the item's Edit form and save."""
        navigate_to(self, 'Edit')
        fill(basic_info_form, {'name_text': updates.get('name', None),
                               'description_text':
                               updates.get('description', None)},
             action=basic_info_form.edit_button)
        # NOTE: self.name still holds the pre-update name at this point.
        flash.assert_success_message('Service Catalog Item "{}" was saved'.format(self.name))

    def delete(self, from_dest='All'):
        """Delete the item from the 'All' grid or its 'Details' page.

        Raises DestinationNotFound for any other destination name.
        """
        if from_dest in navigator.list_destinations(self):
            navigate_to(self, from_dest)
        else:
            msg = 'cfme.services.catalogs.catalog_item does not have destination {}'\
                .format(from_dest)
            raise DestinationNotFound(msg)
        if from_dest == 'All':
            # select the row for deletion
            listview_table.select_row_by_cells({'Name': self.name,
                                                'Description': self.description})
            cfg_btn(version.pick({version.LOWEST: 'Remove Items from the VMDB',
                                  '5.7': 'Remove Catalog Items'}), invokes_alert=True)
        if from_dest == 'Details':
            cfg_btn(version.pick({version.LOWEST: 'Remove Item from the VMDB',
                                  '5.7': 'Remove Catalog Item'}), invokes_alert=True)
        sel.handle_alert()
        flash.assert_success_message(version.pick(
            {version.LOWEST: 'The selected 1 Catalog Item were deleted',
             '5.7': 'The selected 1 Catalog Item was deleted'}))

    def add_button_group(self):
        """Attach a button group (fixed name "group_text") to this item."""
        navigate_to(self, 'Details')
        cfg_btn("Add a new Button Group", invokes_alert=True)
        sel.wait_for_element(button_group_form.btn_group_text)
        fill(button_group_form, {'btn_group_text': "group_text",
                                 'btn_group_hvr_text': "descr"})
        # Image picker widget differs across appliance versions.
        if current_version() > "5.5":
            select = AngularSelect("button_image")
            select.select_by_visible_text("Button Image 1")
        else:
            select = DHTMLSelect("div#button_div")
            select.select_by_value(1)
        sel.click(button_group_form.add_button)
        flash.assert_success_message('Buttons Group "descr" was added')

    def add_button(self):
        """Attach a button wired to the Request/InspectMe automate call."""
        navigate_to(self, 'Details')
        cfg_btn('Add a new Button', invokes_alert=True)
        sel.wait_for_element(button_form.btn_text)
        fill(button_form, {'btn_text': "btn_text",
                           'btn_hvr_text': "btn_descr"})
        # Image picker widget differs across appliance versions.
        if current_version() > "5.5":
            select = AngularSelect("button_image")
            select.select_by_visible_text("Button Image 1")
        else:
            select = DHTMLSelect("div#button_div")
            select.select_by_value(2)
        fill(button_form, {'select_dialog': self.dialog,
                           'system_process': "Request",
                           'request': "InspectMe"})
        sel.click(button_form.add_button)
        flash.assert_success_message('Button "btn_descr" was added')

    def edit_tags(self, tag, value):
        """Assign one tag/value pair via the Policy > Edit Tags form."""
        navigate_to(self, 'Details')
        policy_btn('Edit Tags', invokes_alert=True)
        fill(edit_tags_form, {'select_tag': tag,
                              'select_value': value},
             action=form_buttons.save)
        flash.assert_success_message('Tag edits were successfully saved')
class CatalogBundle(Updateable, Pretty, Navigatable):
    """UI model of a Catalog Bundle (a composite of catalog items)."""
    pretty_attrs = ['name', 'catalog', 'dialog']

    def __init__(self, name=None, description=None, display_in=None, catalog=None, dialog=None,
                 appliance=None):
        self.name = name
        self.description = description
        self.display_in = display_in
        self.catalog = catalog
        self.dialog = dialog
        Navigatable.__init__(self, appliance=appliance)

    def __str__(self):
        return self.name

    def create(self, cat_items):
        """Create the bundle and attach each catalog item in *cat_items*."""
        navigate_to(self, 'Add')
        domain = "ManageIQ (Locked)"
        fill(basic_info_form, {'name_text': self.name,
                               'description_text': self.description,
                               'display_checkbox': self.display_in,
                               'select_catalog': str(self.catalog),
                               'select_dialog': str(self.dialog)})
        sel.click(basic_info_form.field_entry_point)
        # Only pick the default provisioning entry point if none is set yet;
        # tree widget differs before/after appliance version 5.7.
        if sel.text(basic_info_form.field_entry_point) == "":
            if version.current_version() < "5.7":
                dynamic_tree.click_path("Datastore", domain, "Service", "Provisioning",
                                        "StateMachines", "ServiceProvision_Template", "default")
            else:
                entry_tree.click_path("Datastore", domain, "Service", "Provisioning",
                                      "StateMachines", "ServiceProvision_Template", "default")
            sel.click(basic_info_form.apply_btn)
        tabstrip.select_tab("Resources")
        for cat_item in cat_items:
            fill(resources_form, {'choose_resource': cat_item})
        sel.click(resources_form.add_button)
        flash.assert_success_message('Catalog Bundle "{}" was added'.format(self.name))

    def update(self, updates):
        """Edit name/description and optionally swap in a new resource."""
        navigate_to(self, 'Edit')
        fill(basic_info_form, {'name_text': updates.get('name', None),
                               'description_text':
                               updates.get('description', None)})
        tabstrip.select_tab("Resources")
        fill(resources_form, {'choose_resource':
                              updates.get('cat_item', None)},
             action=resources_form.save_button)
        # NOTE: self.name still holds the pre-update name at this point.
        flash.assert_success_message('Catalog Bundle "{}" was saved'.format(self.name))
@navigator.register(CatalogItem, 'All')
class ItemAll(CFMENavigateStep):
    """Navigate to the list of all service catalog items."""
    prerequisite = NavigateToAttribute('appliance.server', 'LoggedIn')

    def am_i_here(self):
        return match_page(summary='All Service Catalog Items')

    def step(self):
        self.prerequisite_view.navigation.select('Services', 'Catalogs')
        accordion.tree('Catalog Items').click_path('All Catalog Items')

    def resetter(self):
        tb.refresh()
        tb.select('List View')
        # Ensure no rows are checked
        if paginator.page_controls_exist():
            sel.check(paginator.check_all())
            sel.uncheck(paginator.check_all())
@navigator.register(CatalogItem, 'Details')
class ItemDetails(CFMENavigateStep):
    """Open one catalog item's details from the list view."""
    prerequisite = NavigateToSibling('All')

    # No am_i_here() due to summary duplication between item and bundle

    def step(self):
        row_spec = {'Name': self.obj.name,
                    'Description': self.obj.description,
                    'Type': 'Item'}
        listview_table.click_row_by_cells(row_spec)

    def resetter(self):
        tb.refresh()
@navigator.register(CatalogItem, 'Add')
class ItemAdd(CFMENavigateStep):
    """Open the 'add a new catalog item' form."""
    prerequisite = NavigateToSibling('All')

    def am_i_here(self):
        expected = 'Adding a new Service Catalog Item'
        return match_page(summary=expected)

    def step(self):
        cfg_btn('Add a New Catalog Item')
@navigator.register(CatalogItem, 'Edit')
class ItemEdit(CFMENavigateStep):
    """Open the edit form for an existing catalog item."""
    prerequisite = NavigateToSibling('Details')

    def am_i_here(self):
        expected = 'Editing Service Catalog Item "{}"'.format(self.obj.name)
        return match_page(summary=expected)

    def step(self):
        cfg_btn('Edit this Item')
@navigator.register(CatalogBundle, 'All')
class BundleAll(CFMENavigateStep):
    """Navigate to the catalog items list (bundles share it with items)."""
    prerequisite = NavigateToAttribute('appliance.server', 'LoggedIn')

    def am_i_here(self):
        return match_page(summary='All Service Catalog Items')

    def step(self):
        self.prerequisite_view.navigation.select('Services', 'Catalogs')
        accordion.tree('Catalog Items').click_path('All Catalog Items')

    def resetter(self):
        tb.refresh()
        tb.select('List View')
        # Ensure no rows are checked
        if paginator.page_controls_exist():
            sel.check(paginator.check_all())
            sel.uncheck(paginator.check_all())
@navigator.register(CatalogBundle, 'Details')
class BundleDetails(CFMENavigateStep):
    """Open one catalog bundle's details from the list view."""
    prerequisite = NavigateToSibling('All')

    # No am_i_here() due to summary duplication between item and bundle

    def step(self):
        row_spec = {'Name': self.obj.name,
                    'Description': self.obj.description,
                    'Type': 'Bundle'}
        listview_table.click_row_by_cells(row_spec)

    def resetter(self):
        tb.refresh()
@navigator.register(CatalogBundle, 'Add')
class BundleAdd(CFMENavigateStep):
    """Open the 'add a new catalog bundle' form."""
    prerequisite = NavigateToSibling('All')

    def am_i_here(self):
        expected = 'Adding a new Catalog Bundle'
        return match_page(summary=expected)

    def step(self):
        cfg_btn('Add a New Catalog Bundle')
@navigator.register(CatalogBundle, 'Edit')
class BundleEdit(CFMENavigateStep):
    """Open the edit form for an existing catalog bundle."""
    prerequisite = NavigateToSibling('Details')

    def am_i_here(self):
        expected = 'Editing Catalog Bundle "{}"'.format(self.obj.name)
        return match_page(summary=expected)

    def step(self):
        cfg_btn('Edit this Item')
|
rananda/cfme_tests
|
cfme/services/catalogs/catalog_item.py
|
Python
|
gpl-2.0
| 16,459 | 0.001701 |
# Taken from here: https://stackoverflow.com/questions/50566934/why-is-this-singleton-implementation-not-thread-safe
import functools
import threading
# Module-level lock serialising singleton construction.
lock = threading.Lock()


def synchronized(lock):
    """ Synchronization decorator """
    def decorate(func):
        @functools.wraps(func)
        def locked(*args, **kw):
            with lock:
                return func(*args, **kw)
        return locked
    return decorate


class SingletonOptimized(type):
    """Metaclass implementing a thread-safe double-checked singleton.

    The unlocked membership test keeps the common (already-constructed)
    path lock-free; _locked_call re-checks under the lock before building.
    """
    _instances = {}

    def __call__(cls, *args, **kwargs):
        # Fast path: once the instance exists, never touch the lock again.
        if cls not in cls._instances:
            cls._locked_call(*args, **kwargs)
        return cls._instances[cls]

    @synchronized(lock)
    def _locked_call(cls, *args, **kwargs):
        # Second check under the lock: another thread may have won the race.
        if cls not in cls._instances:
            cls._instances[cls] = super(SingletonOptimized, cls).__call__(*args, **kwargs)
|
cinepost/Copperfield_FX
|
copper/core/utils/singleton.py
|
Python
|
unlicense
| 849 | 0.003534 |
from django.conf.urls.defaults import *
# Account-related URL routes (legacy Django string-view patterns() syntax).
urlpatterns = patterns('',
    url(r'^$', 'boar.accounts.views.accounts', name='accounts'),
    url(r'^signout/$', 'boar.accounts.views.signout', name='accounts_signout'),
    url(r'^settings/$', 'boar.accounts.views.settings_form', name='accounts_settings'),
    # Token-protected one-click unsubscribe link used in mailing-list emails.
    url(r'^mailing-lists/unsubscribe/(?P<user_id>\d+)-(?P<mailing_list_id>\d+)-(?P<token>.+)/$',
        'boar.mailing_lists.views.unsubscribe',
        name='accounts_mailing_lists_unsubscribe'),
    url(r'^user-data/$', 'boar.accounts.views.user_data'),
)
|
boar/boar
|
boar/accounts/urls.py
|
Python
|
bsd-3-clause
| 562 | 0.010676 |
#!/usr/bin/env python
# encoding: utf-8
from conftests import *
from rurouni.exceptions import *
from rurouni.types import *
from rurouni import Database, Column, Table
def test_insert_errors(db):
    # insert() with an undeclared column must raise before binding anything.
    class Client(Table):
        __db__ = db
        name = Column(String)
        birthdate = Column(Date)
    with pytest.raises(UnknownField):
        id = Client.insert(name="John", last_name="Doe")
    # The exception must fire before assignment, so `id` never enters locals().
    assert 'id' not in locals()
    db.destroy()
def test_insert(db):
    """Each insert() gets the next auto-increment id and keeps its fields."""
    class Client(Table):
        __db__ = db
        name = Column(String)
        birthdate = Column(Date)

    rows = [("John", randomDate()), ("Jack", randomDate()), ("Bob", randomDate())]
    records = [Client.insert(name=n, birthdate=b) for n, b in rows]
    for expected_id, (record, (n, b)) in enumerate(zip(records, rows), 1):
        assert record.id == expected_id
        assert record.name == n
        assert record.birthdate == b
    db.destroy()
def test_insert_many_errors(db):
    """insert_many() rejects empty/None/unknown-field input, accepts dicts."""
    class Client(Table):
        __db__ = db
        name = Column(String)
        birthdate = Column(Date)

    for bad_args in ((), (None, None), ({},)):
        with pytest.raises(InvalidData):
            Client.insert_many(*bad_args)
    with pytest.raises(UnknownField):
        Client.insert_many({'n': 'err'})
    people = [
        {'name': 'John', 'birthdate': randomDate()},
        {'name': 'Jack', 'birthdate': randomDate()},
        {'name': 'Bob', 'birthdate': randomDate()},
    ]
    Client.insert_many(*people)
    for rowid, expected in enumerate(people, 1):
        record = Client(rowid)
        assert record.name == expected['name']
        assert record.birthdate == expected['birthdate']
    db.destroy()
def test_hasId(db):
    """has() and the `in` operator agree before and after an insert."""
    class Client(Table):
        __db__ = db
        name = Column(String)
        birthdate = Column(Date)

    assert not Client.has(1)
    assert not (1 in Client)
    record = Client.insert(name='John', birthdate=randomDate())
    assert record.id == 1
    assert Client.has(1)
    assert 1 in Client
    db.destroy()
def test_delete(db):
    """delete() removes one row; delete_all() empties the table."""
    class Client(Table):
        __db__ = db
        name = Column(String)
        birthdate = Column(Date)

    Client.insert_many(
        {'name': 'John', 'birthdate': randomDate()},
        {'name': 'Jack', 'birthdate': randomDate()},
        {'name': 'Bob', 'birthdate': randomDate()},
    )
    for rowid in (1, 2, 3):
        assert Client.has(rowid) == True
    Client.delete(2)
    for rowid, present in ((1, True), (2, False), (3, True)):
        assert Client.has(rowid) == present
    with pytest.raises(InvalidId):
        Client.delete(None)
    Client.delete_all()
    for rowid in (1, 2, 3):
        assert Client.has(rowid) == False
    assert Client.isEmpty()
    db.destroy()
def test_delete_many(db):
    """delete_many() removes exactly the listed ids and nothing else."""
    class Client(Table):
        __db__ = db
        name = Column(String)
        birthdate = Column(Date)

    names = ('John', 'Jack', 'Bob', 'John', 'Jack', 'Bob')
    Client.insert_many(*[{'name': n, 'birthdate': randomDate()} for n in names])
    Client.delete_many([1, 3, 5])
    for rowid, present in enumerate((False, True, False, True, False, True), 1):
        assert Client.has(rowid) == present
    assert len(Client) == 3
    db.destroy()
def test_iter(db):
    """Client.all() and iter(Client) both yield every row in id order."""
    class Client(Table):
        __db__ = db
        name = Column(String)
        birthdate = Column(Date)

    people = [
        {'name': 'John', 'birthdate': randomDate()},
        {'name': 'Jack', 'birthdate': randomDate()},
        {'name': 'Bob', 'birthdate': randomDate()},
    ]
    Client.insert_many(*people)

    def check_iteration(iterable):
        seen = 0
        for idx, record in enumerate(iterable):
            expected = people[idx]
            assert record.id == idx + 1
            assert record.name == expected['name']
            assert record.birthdate == expected['birthdate']
            seen += 1
        assert seen == 3

    # Iterate using Client.all()
    check_iteration(Client.all())
    # Iterate using Client.__iter__
    check_iteration(Client)
    db.destroy()
def test_count(db):
    """count() and len() agree as rows are added one by one."""
    class Client(Table):
        __db__ = db
        name = Column(String)
        birthdate = Column(Date)

    assert Client.count() == 0
    assert len(Client) == 0
    for expected, name in enumerate(('John', 'Jack', 'Bob'), 1):
        Client.insert(name=name, birthdate=randomDate())
        assert Client.count() == expected
        assert len(Client) == expected
    db.destroy()
def test_empty(db):
    """isEmpty() flips to False after the first insert."""
    class Client(Table):
        __db__ = db
        name = Column(String)
        birthdate = Column(Date)

    assert Client.isEmpty()
    Client.insert(name='John', birthdate=randomDate())
    assert not Client.isEmpty()
    db.destroy()
|
magnunleno/Rurouni
|
tests/test_basic_operations.py
|
Python
|
gpl-3.0
| 5,681 | 0.010033 |
# -*- coding: utf-8 -*-
"""
Clement Michard (c) 2015
"""
import os
import sys
import nltk
from emotion import Emotion
from nltk.corpus import WordNetCorpusReader
import xml.etree.ElementTree as ET
class WNAffect:
    """WordNet-Affect ressource.

    Maps words/synsets from WordNet 1.6 to the WordNet-Affect emotion
    hierarchy shipped with the wn-domains distribution.
    """

    def __init__(self, wordnet16_dir, wn_domains_dir):
        """Initializes the WordNet-Affect object."""
        try:
            cwd = os.getcwd()
            nltk.data.path.append(cwd)
            wn16_path = "{0}/dict".format(wordnet16_dir)
            self.wn16 = WordNetCorpusReader(os.path.abspath("{0}/{1}".format(cwd, wn16_path)), nltk.data.find(wn16_path))
            # Collapse fine-grained Penn POS tags to NN/JJ/RB/VB.
            self.flat_pos = {'NN':'NN', 'NNS':'NN', 'JJ':'JJ', 'JJR':'JJ', 'JJS':'JJ', 'RB':'RB', 'RBR':'RB', 'RBS':'RB', 'VB':'VB', 'VBD':'VB', 'VGB':'VB', 'VBN':'VB', 'VBP':'VB', 'VBZ':'VB'}
            self.wn_pos = {'NN':self.wn16.NOUN, 'JJ':self.wn16.ADJ, 'VB':self.wn16.VERB, 'RB':self.wn16.ADV}
            self._load_emotions(wn_domains_dir)
            self.synsets = self._load_synsets(wn_domains_dir)
        except Exception:
            # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
            # are not swallowed; exit with a non-zero status on setup failure.
            print("Please download the dependencies and re-run the script after installing them successfully. Exiting !")
            sys.exit(1)

    def _load_synsets(self, wn_domains_dir):
        """Returns a dictionary POS tag -> synset offset -> emotion (str -> int -> str)."""
        tree = ET.parse("{0}/wn-affect-1.1/a-synsets.xml".format(wn_domains_dir))
        root = tree.getroot()
        pos_map = { "noun": "NN", "adj": "JJ", "verb": "VB", "adv": "RB" }
        synsets = {}
        for pos in ["noun", "adj", "verb", "adv"]:
            tag = pos_map[pos]
            synsets[tag] = {}
            # Both {0} slots take the same POS name (redundant second
            # format argument removed).
            for elem in root.findall(".//{0}-syn-list//{0}-syn".format(pos)):
                offset = int(elem.get("id")[2:])
                if not offset: continue
                # Direct emotion category, when it is a known one.
                if elem.get("categ"):
                    synsets[tag][offset] = Emotion.emotions[elem.get("categ")] if elem.get("categ") in Emotion.emotions else None
                # Otherwise inherit the emotion from the linked noun synset.
                elif elem.get("noun-id"):
                    synsets[tag][offset] = synsets[pos_map["noun"]][int(elem.get("noun-id")[2:])]
        return synsets

    def _load_emotions(self, wn_domains_dir):
        """Loads the hierarchy of emotions from the WordNet-Affect xml."""
        tree = ET.parse("{0}/wn-affect-1.1/a-hierarchy.xml".format(wn_domains_dir))
        root = tree.getroot()
        for elem in root.findall("categ"):
            name = elem.get("name")
            if name == "root":
                Emotion.emotions["root"] = Emotion("root")
            else:
                Emotion.emotions[name] = Emotion(name, elem.get("isa"))

    def get_emotion(self, word, pos):
        """Returns the emotion of the word.

        word -- the word (str)
        pos -- part-of-speech (str)

        Returns an (emotion, offset) pair for the word's first synset,
        or None when nothing matches.
        """
        if pos in self.flat_pos:
            pos = self.flat_pos[pos]
            synsets = self.wn16.synsets(word, self.wn_pos[pos])
            if synsets:
                # NOTE(review): `.offset` is an attribute in older nltk;
                # newer releases expose it as a method -- confirm the pinned
                # nltk version if this stops working.
                offset = synsets[0].offset
                if offset in self.synsets[pos]:
                    return self.synsets[pos][offset], offset
        return None

    def get_emotion_synset(self, offset):
        """Returns the emotion of the synset.

        offset -- synset offset (int)
        """
        for pos in self.flat_pos.values():
            if offset in self.synsets[pos]:
                return self.synsets[pos][offset]
        return None
|
Arnukk/TDS
|
wnaffect.py
|
Python
|
mit
| 3,540 | 0.012994 |
from _pydev_imps._pydev_saved_modules import threading
def wrapper(fun):
    """Wrap *fun* so the pydev post-run hook fires after every call.

    The wrapped function's return value is now passed through to the
    caller (the original implementation silently discarded it).
    """
    def pydev_after_run_call():
        pass

    def inner(*args, **kwargs):
        result = fun(*args, **kwargs)
        pydev_after_run_call()
        return result
    return inner
def wrap_attr(obj, attr):
    """Replace obj.<attr> with a pydev-wrapped version and mark the object."""
    original = getattr(obj, attr)
    setattr(obj, attr, wrapper(original))
    obj._pydev_run_patched = True
class ObjectWrapper(object):
    """Proxy that invokes call_begin/call_end around every method call.

    Methods that return the wrapped object return the wrapper instead,
    so fluent APIs stay wrapped.
    """
    def __init__(self, obj):
        self.wrapped_object = obj
        try:
            import functools
            functools.update_wrapper(self, obj)
        except Exception:
            # update_wrapper fails on plain instances (no __name__ etc.);
            # narrowed from a bare `except:`.
            pass

    def __getattr__(self, attr):
        orig_attr = getattr(self.wrapped_object, attr)  # .__getattribute__(attr)
        if callable(orig_attr):
            def patched_attr(*args, **kwargs):
                self.call_begin(attr)
                result = orig_attr(*args, **kwargs)
                self.call_end(attr)
                # NOTE: uses == (not `is`) by design of the original code;
                # objects overriding __eq__ may be substituted unexpectedly.
                if result == self.wrapped_object:
                    return self
                return result
            return patched_attr
        else:
            return orig_attr

    def call_begin(self, attr):
        pass

    def call_end(self, attr):
        pass

    def __enter__(self):
        self.call_begin("__enter__")
        # BUGFIX: propagate the wrapped object's __enter__ result; the
        # original returned None, breaking `with wrapper as x:` bindings.
        result = self.wrapped_object.__enter__()
        self.call_end("__enter__")
        return result

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.call_begin("__exit__")
        # BUGFIX: propagate the wrapped __exit__ result so exception
        # suppression by the wrapped context manager still works.
        return self.wrapped_object.__exit__(exc_type, exc_val, exc_tb)
def factory_wrapper(fun):
    """Make factory *fun* return ObjectWrapper-proxied objects."""
    def inner(*args, **kwargs):
        return ObjectWrapper(fun(*args, **kwargs))
    return inner
def wrap_threads():
    """Monkey-patch threading/queue factories to return wrapped objects."""
    # TODO: add wrappers for thread and _thread
    # import _thread as mod
    # print("Thread imported")
    # mod.start_new_thread = wrapper(mod.start_new_thread)
    import threading
    threading.Lock = factory_wrapper(threading.Lock)
    threading.RLock = factory_wrapper(threading.RLock)
    # queue patching: Python 3 'queue' first, Python 2 'Queue' fallback.
    try:
        import queue  # @UnresolvedImport
        queue.Queue = factory_wrapper(queue.Queue)
    except ImportError:
        # Narrowed from a bare `except:` -- only a missing module should
        # trigger the Python 2 fallback.
        import Queue
        Queue.Queue = factory_wrapper(Queue.Queue)
|
SlicerRt/SlicerDebuggingTools
|
PyDevRemoteDebug/ptvsd-4.1.3/ptvsd/_vendored/pydevd/pydevd_concurrency_analyser/pydevd_thread_wrappers.py
|
Python
|
bsd-3-clause
| 2,233 | 0.002239 |
# -*- coding: utf-8 -*-
__author__ = 'hal9000'
__all__ = ['Cache', 'CacheServer']
import socket
import time
import json
import hashlib
from sqlite3 import dbapi2 as sqlite
import xbmc
import log
import gui
import system
SOCKET = '127.0.0.1', 59999
CLEAR = 60*60*24 # 1 day
class SQL:
    # SQLite-backed key/value store with per-entry expiry. Values are
    # JSON-encoded; keys are md5 digests of the caller-supplied token.

    def __init__(self, name, version):
        self.fs = system.FS('cache')
        if self.fs.exists('sandbox://' + name + '.sqlite'):
            self.con = sqlite.connect(self.fs('sandbox://' + name + '.sqlite'))
        else:
            # First run: create the schema and seed the meta row with the
            # current version and the next scheduled expiry sweep.
            self.con = sqlite.connect(self.fs('sandbox://' + name + '.sqlite'))
            self.sql_set('pragma auto_vacuum=1')
            self.sql_set('create table meta(data text)')
            self.sql_set('insert into meta(data) values(?)', (json.dumps({'version': version, 'timeout': int(time.time()) + CLEAR}),))
            self.sql_set('create table cache(token varchar(32) unique, expire integer, data text)')
            self.sql_set('create index dataindex on cache(expire)')
        self.meta_load()

    def health(self, version):
        # Wipe everything on a version change; otherwise purge expired
        # rows once per CLEAR interval (1 day).
        if self.meta['version'] != version:
            self.meta_save('version', version)
            self.clear()
        elif self.meta['timeout'] < int(time.time()):
            self.sql_set('delete from cache where expire<?', (int(time.time()), ))
            self.meta_save('timeout', int(time.time()) + CLEAR)

    def get(self, token):
        # Returns the decoded value, or None if missing/expired.
        return self.sql_get('select data from cache where token=? and expire>? limit 1', (hashlib.md5(str(token)).hexdigest(), int(time.time())))

    def set(self, token, expire, data):
        # Silently skips values that cannot be JSON-encoded.
        try:
            jsdata = json.dumps(data)
        except:
            pass
        else:
            self.sql_set('replace into cache(token,expire,data) values(?,?,?)', (hashlib.md5(str(token)).hexdigest(), int(time.time()) + expire, jsdata))

    def clear(self):
        self.sql_set('delete from cache')
        self.meta_save('timeout', int(time.time()) + CLEAR)

    # Private
    def sql_get(self, sql, *args):
        # Run a query and JSON-decode the first column of the first row;
        # any failure (no rows, bad JSON) yields None.
        cur = self.con.cursor()
        cur.execute(sql, *args)
        rows = cur.fetchall()
        cur.close()
        try:
            return json.loads(rows[0][0])
        except:
            return None

    def sql_set(self, sql, *args):
        # Run a statement and commit immediately.
        cur = self.con.cursor()
        cur.execute(sql, *args)
        self.con.commit()
        cur.close()

    def meta_load(self):
        self.meta = self.sql_get('select data from meta')
        if not self.meta:
            self.meta = {'version': '', 'timeout': 0}

    def meta_save(self, key, value):
        self.meta[key] = value
        self.sql_set('update meta set data=?', (json.dumps(self.meta),))
class Base:
    # Shared length-prefixed JSON framing over a non-blocking socket.
    # Wire format: "<decimal byte length>.<json payload>".

    def recv(self, sock):
        # Read one framed JSON message; returns the decoded object or None.
        data = ''
        length = ''  # accumulates length digits until the '.' separator
        idle = time.time()
        while True:
            try:
                if isinstance(length, basestring):
                    # Still reading the length prefix, one byte at a time.
                    c = sock.recv(1)
                    if c == '.':
                        length = int(length)
                    else:
                        length += c
                else:
                    # NOTE(review): this *overwrites* data instead of
                    # appending, so a payload split across recv() calls is
                    # lost -- presumably messages arrive whole on loopback;
                    # confirm before reusing this class elsewhere.
                    data = sock.recv(length - len(data))
            except socket.error, e:
                # errno 10035/35 = EWOULDBLOCK (non-blocking socket not ready).
                if not e.errno in (10035, 35):
                    self.log('Recive', repr(e))
                if e.errno in (22,):
                    self.log('Socket error 22')
                    return None
                if idle + 10 < time.time():
                    self.log('Timeout')
                    return None
            else:
                if not isinstance(length, basestring) and len(data) == length:
                    try:
                        return json.loads(data)
                    except Exception, e:
                        self.log('JSON', repr(e))
                        return None

    def send(self, sock, data):
        # Serialise and frame; unserialisable payloads degrade to 'null'.
        try:
            jsdata = json.dumps(data)
        except:
            jsdata = 'null'
        sock.send(str(len(jsdata)) + '.' + jsdata)

    def log(self, *args):
        # Prefix every log entry with the concrete class name.
        log.error(str(self.__class__.__name__), *args)
class Cache(Base):
def __init__(self, name, version=None):
self.name = str(name).strip()
self.version = str(version).strip()
def call(self, token, fun, *args, **kwargs):
cache = self._call([1, token])
if cache is not None:
return cache
res = fun(*args, **kwargs)
if res is None:
return None
else:
if isinstance(res, tuple) and len(res) == 2 and isinstance(res[0], int):
self._call([2, token, res[0], res[1]])
return res[1]
else:
return res
def clear(self):
self._call('clear')
def _call(self, data):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
sock.connect(SOCKET)
except socket.error, e:
if e.errno in (111,):
self.log("CacheServer isn't running")
else:
self.log('Connect', repr(e))
return None
except:
return None
else:
self.send(sock, [self.name, self.version] + data)
r = self.recv(sock)
sock.close()
return r
class CacheServer(Base):
    # Single-threaded cache server: accepts local connections in a
    # non-blocking loop until Kodi (xbmc) requests shutdown.

    def __init__(self):
        self.sql = {}  # cache name -> SQL store

    def run(self):
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
            sock.bind(SOCKET)
        except Exception, e:
            self.log('Bind', repr(e))
            gui.message('Failed to start CacheServer. Check log.')
        else:
            sock.listen(1)
            sock.setblocking(0)
            idle = time.time()
            while not xbmc.abortRequested:
                try:
                    (client, address) = sock.accept()
                except socket.error, e:
                    # 11/10035/35 = EAGAIN/EWOULDBLOCK: no pending client.
                    # Back off only after 3 idle seconds to stay responsive.
                    if e.errno == 11 or e.errno == 10035 or e.errno == 35:
                        if idle + 3 < time.time():
                            time.sleep(0.5)
                        continue
                    self.log('Accept', repr(e))
                    continue
                except:
                    continue
                else:
                    # Serve exactly one request per connection.
                    self.send(client, self.command(self.recv(client)))
                    idle = time.time()
            sock.close()

    def command(self, data):
        # Dispatch one request frame: [name, version, cmd, args...].
        # cmd 1 = get(token), 2 = set(token, expire, value), 3 = clear.
        if not data or not isinstance(data, list) or len(data) < 3 or data[2] not in (1, 2, 3):
            return None
        sql = self.open(data[0], data[1])
        if not sql:
            return None
        if data[2] == 1 and len(data) == 4 and isinstance(data[3], basestring):
            return sql.get(data[3])
        elif data[2] == 2 and len(data) == 6 and isinstance(data[3], basestring) and isinstance(data[4], int):
            sql.set(data[3], data[4], data[5])
            return 1
        elif data[2] == 3:
            sql.clear()
            return 1
        return None

    def open(self, db, version):
        # Lazily open (and health-check) the SQL store for cache *db*.
        name = str(db).strip()
        if not name:
            return None
        ver = str(version).strip()
        if db not in self.sql:
            self.sql[db] = SQL(db, ver)
        self.sql[db].health(ver)
        return self.sql[db]
|
chimkentec/KodiMODo_rep
|
script.module.xbmcup/lib/xbmcup/cache.py
|
Python
|
gpl-3.0
| 7,268 | 0.004403 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    # Auto-generated migration: adds help_text to User.public (no schema
    # change beyond field metadata; default stays True).

    dependencies = [
        ('core', '0013_merge'),
    ]

    operations = [
        migrations.AlterField(
            model_name='user',
            name='public',
            field=models.BooleanField(default=True, help_text=b'Determines whether or not your profile is open to the public'),
            preserve_default=True,
        ),
    ]
|
joshsamara/game-website
|
core/migrations/0014_auto_20150413_1639.py
|
Python
|
mit
| 496 | 0.002016 |
#!/usr/bin/env python
"""
@file costFunctionChecker.py
@author Michael Behrisch
@author Daniel Krajzewicz
@author Jakob Erdmann
@date 2009-08-31
@version $Id: costFunctionChecker.py 13811 2013-05-01 20:31:43Z behrisch $
Run duarouter repeatedly and simulate weight changes via a cost function.
SUMO, Simulation of Urban MObility; see http://sumo.sourceforge.net/
Copyright (C) 2009-2013 DLR (http://www.dlr.de/) and contributors
All rights reserved
"""
import os, sys, subprocess, types
from datetime import datetime
from optparse import OptionParser
from xml.sax import make_parser, handler
def call(command, log):
    """Run *command*, teeing stdout/stderr to *log*; exit the script on failure.

    *command* may be a string or any sequence; sequence elements are
    stringified before being handed to subprocess.
    """
    # BUGFIX: the original tested the unrelated global "args" (which only
    # exists after option parsing) instead of the "command" parameter.
    if not isinstance(command, types.StringTypes):
        command = [str(c) for c in command]
    print >> log, "-" * 79
    print >> log, command
    log.flush()
    retCode = subprocess.call(command, stdout=log, stderr=log)
    if retCode != 0:
        print >> sys.stderr, "Execution of %s failed. Look into %s for details." % (command, log.name)
        sys.exit(retCode)
def writeRouteConf(step, options, file, output):
    """Write the duarouter config for iteration *step*.

    Step 0 routes from trips/flows; later steps route from the previous
    step's alternatives plus the weight dump of step-1.
    """
    fd = open("iteration_" + str(step) + ".duarcfg", "w")
    print >> fd, """<configuration>
    <input>
        <net-file value="%s"/>""" % options.net
    if step==0:
        if options.flows:
            print >> fd, '        <flow-definition value="%s"/>' % file
        else:
            print >> fd, '        <trip-defs value="%s"/>' % file
    else:
        # Re-route from the alternatives of the previous iteration using
        # the edge weights dumped by generateWeights().
        print >> fd, '        <alternatives value="%s"/>' % file
        print >> fd, '        <weights value="dump_%s_%s.xml"/>' % (step-1, options.aggregation)
    print >> fd, """    </input>
    <output>
        <output-file value="%s"/>
        <exit-times value="True"/>
    </output>""" % output
    print >> fd, """    <processing>
        <continue-on-unbuild value="%s"/>
        <expand-weights value="True"/>
        <gBeta value="%s"/>
        <gA value="%s"/>
    </processing>""" % (options.continueOnUnbuild, options.gBeta, options.gA)
    print >> fd, '    <random_number><abs-rand value="%s"/></random_number>' % options.absrand
    print >> fd, '    <time><begin value="%s"/>' % options.begin,
    if options.end:
        print >> fd, '<end value="%s"/>' % options.end,
    print >> fd, """</time>
    <report>
        <verbose value="%s"/>
        <suppress-warnings value="%s"/>
    </report>
</configuration>""" % (options.verbose, options.noWarnings)
    fd.close()
class RouteReader(handler.ContentHandler):
    """SAX handler that counts edge usage across routes and tracks the
    latest vehicle departure time."""

    def __init__(self):
        self._edgeWeights = {}
        self._maxDepart = 0

    def startElement(self, name, attrs):
        """Count every edge of a <route>; track max depart of <vehicle>."""
        if name == 'route':
            for edge in attrs['edges'].split():
                self._edgeWeights[edge] = self._edgeWeights.get(edge, 0) + 1
        elif name == 'vehicle':
            depart = float(attrs['depart'])
            if depart > self._maxDepart:
                self._maxDepart = depart

    def getWeight(self, edge):
        """Return how many routes used *edge* (0 if never seen)."""
        return self._edgeWeights.get(edge, 0)

    def getMaxDepart(self):
        """Return the largest departure time seen so far."""
        return self._maxDepart
class NetReader(handler.ContentHandler):
    """SAX handler collecting the ids of all "normal" edges of a SUMO net."""

    def __init__(self):
        self._edges = []

    def startElement(self, name, attrs):
        # BUGFIX/modernization: dict.has_key() was removed in Python 3;
        # the "in" operator is equivalent and works on Python 2 as well.
        if name == 'edge':
            if 'function' not in attrs or attrs['function'] == 'normal':
                self._edges.append(attrs['id'])

    def getEdges(self):
        """Return the collected edge ids in document order."""
        return self._edges
def identity(edge, weight):
    """Default cost function: return the measured weight unchanged.

    The *edge* argument is accepted (and ignored) so all cost functions
    share the same (edge, weight) signature.
    """
    return weight
def generateWeights(step, options, edges, weights, costFunction):
    """Write the weight dump file for *step*.

    For every aggregation interval and every edge, the travel time is
    costFunction(edge, usage-count); edges whose cost is None are omitted.
    """
    fd = open("dump_%s_%s.xml" % (step, options.aggregation), "w")
    print >> fd, '<?xml version="1.0"?>\n<netstats>'
    # BUGFIX: use the "weights" parameter, not the module-level global
    # "reader", which only happened to alias the same object in the main
    # loop and would break any other caller.
    for time in range(0, int(weights.getMaxDepart()+1), options.aggregation):
        print >> fd, '    <interval begin="%s" end="%s" id="dump_%s">' % (time, time + options.aggregation, options.aggregation)
        for edge in edges:
            cost = costFunction(edge, weights.getWeight(edge))
            if cost != None:
                print >> fd, '        <edge id="%s" traveltime="%s"/>' % (edge, cost)
        print >> fd, '    </interval>'
    print >> fd, '</netstats>'
    fd.close()
# ---------------------------------------------------------------------------
# Command-line interface.
# ---------------------------------------------------------------------------
optParser = OptionParser()
optParser.add_option("-v", "--verbose", action="store_true", dest="verbose",
                     default=False, help="tell me what you are doing")
optParser.add_option("-C", "--continue-on-unbuild", action="store_true", dest="continueOnUnbuild",
                     default=False, help="continues on unbuild routes")
optParser.add_option("-w", "--disable-warnings", action="store_true", dest="noWarnings",
                     default=False, help="disables warnings")
optParser.add_option("-n", "--net-file", dest="net",
                     help="SUMO network (mandatory)", metavar="FILE")
optParser.add_option("-t", "--trips", dest="trips",
                     help="trips in step 0 (this or flows is mandatory)", metavar="FILE")
optParser.add_option("-F", "--flows",
                     help="flows in step 0 (this or trips is mandatory)", metavar="FILE")
optParser.add_option("-+", "--additional", dest="additional",
                     default="", help="Additional files")
optParser.add_option("-b", "--begin", dest="begin",
                     type="int", default=0, help="Set simulation/routing begin [default: %default]")
optParser.add_option("-e", "--end", dest="end",
                     type="int", help="Set simulation/routing end [default: %default]")
optParser.add_option("-R", "--route-steps", dest="routeSteps",
                     type="int", default=200, help="Set simulation route steps [default: %default]")
optParser.add_option("-a", "--aggregation", dest="aggregation",
                     type="int", default=900, help="Set main weights aggregation period [default: %default]")
optParser.add_option("-A", "--gA", dest="gA",
                     type="float", default=.5, help="Sets Gawron's Alpha [default: %default]")
optParser.add_option("-B", "--gBeta", dest="gBeta",
                     type="float", default=.9, help="Sets Gawron's Beta [default: %default]")
optParser.add_option("-f", "--first-step", dest="firstStep",
                     type="int", default=0, help="First DUA step [default: %default]")
optParser.add_option("-l", "--last-step", dest="lastStep",
                     type="int", default=50, help="Last DUA step [default: %default]")
optParser.add_option("-p", "--path", dest="path",
                     default=os.environ.get("SUMO_BINDIR", ""), help="Path to binaries [default: %default]")
optParser.add_option("-y", "--absrand", dest="absrand", action="store_true",
                     default=False, help="use current time to generate random number")
optParser.add_option("-c", "--cost-function", dest="costfunc",
                     default="identity", help="(python) function to use as cost function")
(options, args) = optParser.parse_args()
if not options.net or not (options.trips or options.flows):
    optParser.error("At least --net-file and --trips or --flows have to be given!")

# The duarouter binary is looked up via environment, then --path.
duaBinary = os.environ.get("DUAROUTER_BINARY", os.path.join(options.path, "duarouter"))
log = open("dua-log.txt", "w+")

# Parse the network once to collect the ids of all normal edges.
parser = make_parser()
reader = NetReader()
parser.setContentHandler(reader)
parser.parse(options.net)
edges = reader.getEdges()

# Resolve the cost function: either "module.function" (imported) or the
# name of a function defined in this file.  NOTE(review): exec on a
# user-supplied string -- acceptable for a developer tool, but do not
# expose this to untrusted input.
if "." in options.costfunc:
    idx = options.costfunc.rfind(".")
    module = options.costfunc[:idx]
    func = options.costfunc[idx+1:]
    exec("from %s import %s as costFunction" % (module, func))
else:
    exec("costFunction = %s" % options.costfunc)

if options.flows:
    tripFiles = options.flows.split(",")
else:
    tripFiles = options.trips.split(",")
starttime = datetime.now()
# Main DUA loop: route -> collect edge usage -> write weights -> repeat.
for step in range(options.firstStep, options.lastStep):
    btimeA = datetime.now()
    print "> Executing step " + str(step)

    # router: step 0 consumes trips/flows, later steps the previous
    # step's alternatives file.
    files = []
    for tripFile in tripFiles:
        file = tripFile
        tripFile = os.path.basename(tripFile)
        if step>0:
            file = tripFile[:tripFile.find(".")] + "_%s.rou.alt.xml" % (step-1)
        output = tripFile[:tripFile.find(".")] + "_%s.rou.xml" % step
        print ">> Running router with " + file
        btime = datetime.now()
        print ">>> Begin time: %s" % btime
        writeRouteConf(step, options, file, output)
        retCode = call([duaBinary, "-c", "iteration_%s.duarcfg" % step], log)
        etime = datetime.now()
        print ">>> End time: %s" % etime
        print ">>> Duration: %s" % (etime-btime)
        print "<<"
        files.append(output)
    # generating weights file from the routes produced in this step
    print ">> Generating weights"
    reader = RouteReader()
    parser.setContentHandler(reader)
    for f in files:
        parser.parse(f)
    generateWeights(step, options, edges, reader, costFunction)
    print "<<"
    print "< Step %s ended (duration: %s)" % (step, datetime.now() - btimeA)
    print "------------------\n"
    sys.stdout.flush()
print "dua-iterate ended (duration: %s)" % (datetime.now() - starttime)
log.close()
|
rudhir-upretee/Sumo17_With_Netsim
|
tools/assign/costFunctionChecker.py
|
Python
|
gpl-3.0
| 8,986 | 0.004118 |
import datetime
import logging
import time
from django.conf import settings
from django.contrib.sites.models import Site
from django.core.mail.message import EmailMultiAlternatives
from django.core.management.base import BaseCommand
from django.db.models import Q
from django.template.loader import render_to_string
from django.utils.timezone import utc
from custom.models import Profile, FriendJoinedEmailLog
# Get an instance of a logger
logger = logging.getLogger(__name__)
class Command(BaseCommand):
    """Daemon-style management command: every 10 minutes, notify referrers
    whose friends joined within the last hour (at most once per pair,
    deduplicated via FriendJoinedEmailLog)."""

    help = 'Friend joined email daemon'
    args = ''

    def __init__(self, *args, **kwargs):
        super(Command, self).__init__(*args, **kwargs)
        # Cached once at startup; used to build absolute links in templates.
        self.site = Site.objects.get_current()

    def close_db_connection(self):
        # Close the connection between sleeps so long-lived daemons don't
        # hold (and eventually lose) a stale DB connection.
        from django import db
        db.close_connection()

    def handle(self, *args, **options):
        # Intentionally runs forever; meant to be supervised externally.
        while True:
            last_hour = datetime.datetime.utcnow().replace(tzinfo=utc) - datetime.timedelta(hours=1)
            # Users who joined in the last hour and were referred by an
            # active user who opted into email updates.
            profiles = Profile.objects.select_related().filter(
                user__date_joined__gte=last_hour,
                user_referrer__profile__enable_email_updates=True,
                user_referrer__is_active=True,
            )

            for profile in profiles:
                if not profile.user_referrer.email:
                    continue
                try:
                    FriendJoinedEmailLog.objects.get(user=profile.user_referrer, user_referred=profile.user)
                except FriendJoinedEmailLog.DoesNotExist:
                    # Not yet notified for this (referrer, referred) pair.
                    dict_context = {
                        'site': self.site,
                        'referred_profile': profile,
                        'referring_profile': profile.user_referrer.get_profile(),
                    }
                    email_subject = render_to_string('emails/friend-joined/subject.txt', dict_context).strip()
                    email_txt = render_to_string('emails/friend-joined/message.txt', dict_context)
                    email_html = render_to_string('emails/friend-joined/message.html', dict_context)
                    email = EmailMultiAlternatives(
                        email_subject, email_txt, settings.DEFAULT_FROM_EMAIL, [profile.user_referrer.email,]
                    )
                    email.attach_alternative(email_html, 'text/html')
                    email.send()
                    # Record the notification so it is never re-sent.
                    FriendJoinedEmailLog.objects.create(user=profile.user_referrer, user_referred=profile.user)

            self.close_db_connection()
            time.sleep(600)
|
waterdotorg/power.Water
|
project/custom/management/commands/friend_joined_email.py
|
Python
|
gpl-3.0
| 2,527 | 0.004353 |
# ----------------------------------------------------------------------------
# Copyright 2015-2016 Nervana Systems Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ----------------------------------------------------------------------------
'''
Test BLEUScore metric against reference
'''
from neon.transforms.cost import BLEUScore
def test_bleuscore():
    """Check BLEU-1..4 of two candidate sentences against known scores."""
    # dataset with two sentences
    sentences = ["a quick brown fox jumped",
                 "the rain in spain falls mainly on the plains"]
    references = [["a fast brown fox jumped",
                   "a quick brown fox vaulted",
                   "a rapid fox of brown color jumped",
                   "the dog is running on the grass"],
                  ["the precipitation in spain falls on the plains",
                   "spanish rain falls for the most part on the plains",
                   "the rain in spain falls in the plains most of the time",
                   "it is raining today"]]

    # expected values, one per n-gram order: bleu1, bleu2, bleu3, bleu4
    expected_scores = [92.9, 88.0, 81.5, 67.1]

    metric = BLEUScore()
    metric(sentences, references)

    # each computed score must match its reference to one decimal place
    for computed, expected in zip(metric.bleu_n, expected_scores):
        assert round(computed, 1) == expected


if __name__ == '__main__':
    test_bleuscore()
|
matthijsvk/multimodalSR
|
code/Experiments/neon-master/tests/test_bleuscore.py
|
Python
|
mit
| 1,914 | 0.001045 |
"""
A Pillow loader for .ftc and .ftu files (FTEX)
Jerome Leclanche <jerome@leclan.ch>
The contents of this file are hereby released in the public domain (CC0)
Full text of the CC0 license:
https://creativecommons.org/publicdomain/zero/1.0/
Independence War 2: Edge Of Chaos - Texture File Format - 16 October 2001
The textures used for 3D objects in Independence War 2: Edge Of Chaos are in a
packed custom format called FTEX. This file format uses file extensions FTC
and FTU.
* FTC files are compressed textures (using standard texture compression).
* FTU files are not compressed.
Texture File Format
The FTC and FTU texture files both use the same format. This
has the following structure:
{header}
{format_directory}
{data}
Where:
{header} = {
u32:magic,
u32:version,
u32:width,
u32:height,
u32:mipmap_count,
u32:format_count
}
* The "magic" number is "FTEX".
* "width" and "height" are the dimensions of the texture.
* "mipmap_count" is the number of mipmaps in the texture.
* "format_count" is the number of texture formats (different versions of the
same texture) in this file.
{format_directory} = format_count * { u32:format, u32:where }
The format value is 0 for DXT1 compressed textures and 1 for 24-bit RGB
uncompressed textures.
The texture data for a format starts at the position "where" in the file.
Each set of texture data in the file has the following structure:
{data} = format_count * { u32:mipmap_size, mipmap_size * { u8 } }
* "mipmap_size" is the number of bytes in that mip level. For compressed
textures this is the size of the texture data compressed with DXT1. For 24 bit
uncompressed textures, this is 3 * width * height. Following this are the image
bytes for that mipmap level.
Note: All data is stored in little-Endian (Intel) byte order.
"""
import struct
from io import BytesIO
from . import Image, ImageFile
MAGIC = b"FTEX"
FORMAT_DXT1 = 0
FORMAT_UNCOMPRESSED = 1
class FtexImageFile(ImageFile.ImageFile):
    """PIL image plugin for the FTEX texture format (see module docstring
    for the on-disk layout)."""

    format = "FTEX"
    format_description = "Texture File Format (IW2:EOC)"

    def _open(self):
        # Header: magic, version, width/height, mipmap and format counts.
        # Magic/version values are read to advance the stream but not
        # validated here (magic is checked by _validate at registration).
        struct.unpack("<I", self.fp.read(4))  # magic
        struct.unpack("<i", self.fp.read(4))  # version
        self._size = struct.unpack("<2i", self.fp.read(8))
        mipmap_count, format_count = struct.unpack("<2i", self.fp.read(8))

        self.mode = "RGB"

        # Only support single-format files.
        # I don't know of any multi-format file.
        assert format_count == 1

        # Format directory entry: format id and offset of its data.
        format, where = struct.unpack("<2i", self.fp.read(8))
        self.fp.seek(where)
        # Only the first (largest) mipmap level is decoded.
        mipmap_size, = struct.unpack("<i", self.fp.read(4))

        data = self.fp.read(mipmap_size)

        if format == FORMAT_DXT1:
            # DXT1 carries 1-bit alpha, hence RGBA; decoded by the "bcn"
            # decoder with variant argument 1 (BC1/DXT1).
            self.mode = "RGBA"
            self.tile = [("bcn", (0, 0) + self.size, 0, (1))]
        elif format == FORMAT_UNCOMPRESSED:
            self.tile = [("raw", (0, 0) + self.size, 0, ('RGB', 0, 1))]
        else:
            raise ValueError(
                "Invalid texture compression format: %r" % (format))

        # Replace the file pointer with an in-memory copy of just the
        # pixel data so the decoder reads from offset 0.
        self.fp.close()
        self.fp = BytesIO(data)

    def load_seek(self, pos):
        # All data already sits in the BytesIO buffer; nothing to seek.
        pass
def _validate(prefix):
    """Accept only files whose first four bytes are the FTEX magic."""
    return prefix.startswith(MAGIC)
# Register the loader and its file extensions with PIL's plugin machinery.
Image.register_open(FtexImageFile.format, FtexImageFile, _validate)
Image.register_extensions(FtexImageFile.format, [".ftc", ".ftu"])
|
ryfeus/lambda-packs
|
pytorch/source/PIL/FtexImagePlugin.py
|
Python
|
mit
| 3,322 | 0 |
"""
Provides functionality to interact with thermostats.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/thermostat/
"""
import logging
import os
import voluptuous as vol
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.config import load_yaml_config_file
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.temperature import convert
from homeassistant.helpers.config_validation import PLATFORM_SCHEMA # noqa
from homeassistant.components import (ecobee, zwave)
import homeassistant.helpers.config_validation as cv
from homeassistant.const import (
ATTR_ENTITY_ID, ATTR_TEMPERATURE, STATE_ON, STATE_OFF, STATE_UNKNOWN,
TEMP_CELSIUS)
DOMAIN = "thermostat"
ENTITY_ID_FORMAT = DOMAIN + ".{}"
SCAN_INTERVAL = 60
SERVICE_SET_AWAY_MODE = "set_away_mode"
SERVICE_SET_TEMPERATURE = "set_temperature"
SERVICE_SET_FAN_MODE = "set_fan_mode"
STATE_HEAT = "heat"
STATE_COOL = "cool"
STATE_IDLE = "idle"
ATTR_CURRENT_TEMPERATURE = "current_temperature"
ATTR_AWAY_MODE = "away_mode"
ATTR_FAN = "fan"
ATTR_MAX_TEMP = "max_temp"
ATTR_MIN_TEMP = "min_temp"
ATTR_TEMPERATURE_LOW = "target_temp_low"
ATTR_TEMPERATURE_HIGH = "target_temp_high"
ATTR_OPERATION = "current_operation"
_LOGGER = logging.getLogger(__name__)
DISCOVERY_PLATFORMS = {
ecobee.DISCOVER_THERMOSTAT: 'ecobee',
zwave.DISCOVER_THERMOSTATS: 'zwave'
}
SET_AWAY_MODE_SCHEMA = vol.Schema({
vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
vol.Required(ATTR_AWAY_MODE): cv.boolean,
})
SET_TEMPERATURE_SCHEMA = vol.Schema({
vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
vol.Required(ATTR_TEMPERATURE): vol.Coerce(float),
})
SET_FAN_MODE_SCHEMA = vol.Schema({
vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
vol.Required(ATTR_FAN): cv.boolean,
})
def set_away_mode(hass, away_mode, entity_id=None):
    """Turn all or specified thermostat away mode on."""
    payload = {ATTR_AWAY_MODE: away_mode}
    # Without an entity_id the service targets every thermostat.
    if entity_id:
        payload[ATTR_ENTITY_ID] = entity_id
    hass.services.call(DOMAIN, SERVICE_SET_AWAY_MODE, payload)
def set_temperature(hass, temperature, entity_id=None):
    """Set new target temperature."""
    payload = {ATTR_TEMPERATURE: temperature}
    # Without an entity_id the service targets every thermostat.
    if entity_id is not None:
        payload[ATTR_ENTITY_ID] = entity_id
    hass.services.call(DOMAIN, SERVICE_SET_TEMPERATURE, payload)
def set_fan_mode(hass, fan_mode, entity_id=None):
    """Turn all or specified thermostat fan mode on."""
    payload = {ATTR_FAN: fan_mode}
    # Without an entity_id the service targets every thermostat.
    if entity_id:
        payload[ATTR_ENTITY_ID] = entity_id
    hass.services.call(DOMAIN, SERVICE_SET_FAN_MODE, payload)
# pylint: disable=too-many-branches
def setup(hass, config):
    """Setup thermostats.

    Creates the entity component and registers the set_away_mode,
    set_temperature and set_fan_mode services, each fanning out to the
    thermostat entities targeted by the service call.
    """
    component = EntityComponent(_LOGGER, DOMAIN, hass,
                                SCAN_INTERVAL, DISCOVERY_PLATFORMS)
    component.setup(config)

    # Human-readable service descriptions live in services.yaml next to
    # this module.
    descriptions = load_yaml_config_file(
        os.path.join(os.path.dirname(__file__), 'services.yaml'))

    def away_mode_set_service(service):
        """Set away mode on target thermostats."""
        target_thermostats = component.extract_from_service(service)
        away_mode = service.data[ATTR_AWAY_MODE]
        for thermostat in target_thermostats:
            if away_mode:
                thermostat.turn_away_mode_on()
            else:
                thermostat.turn_away_mode_off()
            # Push the new state now instead of waiting for the next poll.
            thermostat.update_ha_state(True)

    hass.services.register(
        DOMAIN, SERVICE_SET_AWAY_MODE, away_mode_set_service,
        descriptions.get(SERVICE_SET_AWAY_MODE),
        schema=SET_AWAY_MODE_SCHEMA)

    def temperature_set_service(service):
        """Set temperature on the target thermostats."""
        target_thermostats = component.extract_from_service(service)
        temperature = service.data[ATTR_TEMPERATURE]
        for thermostat in target_thermostats:
            # Convert from the configured display unit into each
            # device's native unit before handing the value over.
            thermostat.set_temperature(convert(
                temperature, hass.config.temperature_unit,
                thermostat.unit_of_measurement))
            thermostat.update_ha_state(True)

    hass.services.register(
        DOMAIN, SERVICE_SET_TEMPERATURE, temperature_set_service,
        descriptions.get(SERVICE_SET_TEMPERATURE),
        schema=SET_TEMPERATURE_SCHEMA)

    def fan_mode_set_service(service):
        """Set fan mode on target thermostats."""
        target_thermostats = component.extract_from_service(service)
        fan_mode = service.data[ATTR_FAN]
        for thermostat in target_thermostats:
            if fan_mode:
                thermostat.turn_fan_on()
            else:
                thermostat.turn_fan_off()
            thermostat.update_ha_state(True)

    hass.services.register(
        DOMAIN, SERVICE_SET_FAN_MODE, fan_mode_set_service,
        descriptions.get(SERVICE_SET_FAN_MODE),
        schema=SET_FAN_MODE_SCHEMA)

    return True
class ThermostatDevice(Entity):
    """Representation of a thermostat.

    Platform implementations subclass this and override the properties
    and the set_*/turn_* methods they support.
    """

    # pylint: disable=no-self-use

    @property
    def state(self):
        """Return the current state."""
        return self.target_temperature or STATE_UNKNOWN

    @property
    def state_attributes(self):
        """Return the optional state attributes."""
        # Temperatures are converted into the user's display unit.
        data = {
            ATTR_CURRENT_TEMPERATURE:
            self._convert_for_display(self.current_temperature),
            ATTR_MIN_TEMP: self._convert_for_display(self.min_temp),
            ATTR_MAX_TEMP: self._convert_for_display(self.max_temp),
            ATTR_TEMPERATURE:
            self._convert_for_display(self.target_temperature),
            ATTR_TEMPERATURE_LOW:
            self._convert_for_display(self.target_temperature_low),
            ATTR_TEMPERATURE_HIGH:
            self._convert_for_display(self.target_temperature_high),
        }

        # Optional capabilities are only reported when the platform
        # implements them (i.e. returns something other than None).
        operation = self.operation
        if operation is not None:
            data[ATTR_OPERATION] = operation

        is_away = self.is_away_mode_on
        if is_away is not None:
            data[ATTR_AWAY_MODE] = STATE_ON if is_away else STATE_OFF

        is_fan_on = self.is_fan_on
        if is_fan_on is not None:
            data[ATTR_FAN] = STATE_ON if is_fan_on else STATE_OFF

        return data

    @property
    def unit_of_measurement(self):
        """Return the unit of measurement."""
        raise NotImplementedError

    @property
    def current_temperature(self):
        """Return the current temperature."""
        raise NotImplementedError

    @property
    def operation(self):
        """Return current operation ie. heat, cool, idle."""
        return None

    @property
    def target_temperature(self):
        """Return the temperature we try to reach."""
        raise NotImplementedError

    @property
    def target_temperature_low(self):
        """Return the lower bound temperature we try to reach."""
        return self.target_temperature

    @property
    def target_temperature_high(self):
        """Return the upper bound temperature we try to reach."""
        return self.target_temperature

    @property
    def is_away_mode_on(self):
        """Return true if away mode is on."""
        return None

    @property
    def is_fan_on(self):
        """Return true if the fan is on."""
        return None

    def set_temperature(self, temperature):
        """Set new target temperature."""
        pass

    # BUGFIX: this stub was historically misspelled "set_temperate" while
    # the set_temperature service handler calls "set_temperature", so the
    # stub was never reachable.  Keep the old name as an alias in case any
    # external code still references it.
    set_temperate = set_temperature

    def turn_away_mode_on(self):
        """Turn away mode on."""
        pass

    def turn_away_mode_off(self):
        """Turn away mode off."""
        pass

    def turn_fan_on(self):
        """Turn fan on."""
        pass

    def turn_fan_off(self):
        """Turn fan off."""
        pass

    @property
    def min_temp(self):
        """Return the minimum temperature."""
        return convert(7, TEMP_CELSIUS, self.unit_of_measurement)

    @property
    def max_temp(self):
        """Return the maximum temperature."""
        return convert(35, TEMP_CELSIUS, self.unit_of_measurement)

    def _convert_for_display(self, temp):
        """Convert temperature into preferred units for display purposes."""
        if temp is None:
            return None

        value = convert(temp, self.unit_of_measurement,
                        self.hass.config.temperature_unit)

        if self.hass.config.temperature_unit is TEMP_CELSIUS:
            decimal_count = 1
        else:
            # Users of fahrenheit generally expect integer units.
            decimal_count = 0

        return round(value, decimal_count)
|
Zyell/home-assistant
|
homeassistant/components/thermostat/__init__.py
|
Python
|
mit
| 8,539 | 0 |
#!/usr/bin/env python3
import sys
import os
import path_utils
import git_lib
import svn_lib
REPO_TYPE_GIT_BARE="git/bare"
REPO_TYPE_GIT_STD="git/std"
REPO_TYPE_GIT_SUB="git/sub"
REPO_TYPE_SVN="svn"
REPO_TYPES = [REPO_TYPE_GIT_BARE, REPO_TYPE_GIT_STD, REPO_TYPE_GIT_SUB, REPO_TYPE_SVN]
def detect_repo_type(path):
    """Classify *path* as one of the known repository types.

    Returns (False, error-message) when the path does not exist,
    (True, type-string) on a match, or (True, None) when no probe
    recognized the path.
    """
    if not os.path.exists(path):
        return False, "Path %s doesn't exist." % path
    # Probes are tried in priority order; the first positive wins.
    probes = (
        (git_lib.is_repo_bare, REPO_TYPE_GIT_BARE),
        (git_lib.is_repo_standard, REPO_TYPE_GIT_STD),
        (git_lib.is_repo_submodule, REPO_TYPE_GIT_SUB),
        (svn_lib.is_svn_repo, REPO_TYPE_SVN),
    )
    for probe, repo_type in probes:
        v, r = probe(path)
        if v and r:
            return True, repo_type
    return True, None
def is_any_repo_type(repo_type):
    """Tell whether *repo_type* is one of the recognized repository types."""
    return any(repo_type == known for known in REPO_TYPES)
def puaq():
    """Print usage and quit."""
    script_name = path_utils.basename_filtered(__file__)
    print("Usage: %s path" % script_name)
    sys.exit(1)
if __name__ == "__main__":
    if len(sys.argv) < 2:
        puaq()
    path = sys.argv[1]
    # On success r is the detected type (or None); on hard failure
    # (nonexistent path) r is an error message and we exit non-zero.
    v, r = detect_repo_type(path)
    print(r)
    if not v:
        sys.exit(1)
|
mvendra/mvtools
|
detect_repo_type.py
|
Python
|
mit
| 1,152 | 0.007813 |
"""Compute the sound field generated by a sound source.
The Green's function describes the spatial sound propagation over time.
.. include:: math-definitions.rst
"""
from __future__ import division
import numpy as np
from .. import util
from .. import defs
def point(xs, signal, observation_time, grid, c=None):
    r"""Source model for a point source: 3D Green's function.

    Calculates the scalar sound pressure field for a given point in
    time, evoked by source excitation signal.

    Parameters
    ----------
    xs : (3,) array_like
        Position of source in cartesian coordinates.
    signal : (N,) array_like + float
        Excitation signal consisting of (mono) audio data and a sampling
        rate (in Hertz).  A `DelayedSignal` object can also be used.
    observation_time : float
        Observed point in time.
    grid : triple of array_like
        The grid that is used for the sound field calculations.
        See `sfs.util.xyz_grid()`.
    c : float, optional
        Speed of sound.

    Returns
    -------
    numpy.ndarray
        Scalar sound pressure field, evaluated at positions given by
        *grid*.

    Notes
    -----
    .. math::

        g(x-x_s,t) = \frac{1}{4 \pi |x - x_s|} \dirac{t - \frac{|x -
        x_s|}{c}}

    """
    xs = util.asarray_1d(xs)
    data, samplerate, signal_offset = util.as_delayed_signal(signal)
    data = util.asarray_1d(data)
    grid = util.as_xyz_components(grid)
    if c is None:
        c = defs.c
    # Distance from the source to every grid point.  NOTE(review): grid is
    # a util.XyzComponents triple, so this norm is presumably taken over
    # the x/y/z components elementwise -- confirm against sfs.util.
    r = np.linalg.norm(grid - xs)
    # evaluate g over grid
    # 1/(4*pi*r) spherical spreading; r/c propagation delay per point.
    weights = 1 / (4 * np.pi * r)
    delays = r / c
    base_time = observation_time - signal_offset
    # Sample the excitation signal at the retarded time t - r/c,
    # zero outside the recorded signal (left=0, right=0).
    return weights * np.interp(base_time - delays,
                               np.arange(len(data)) / samplerate,
                               data, left=0, right=0)
def point_image_sources(x0, signal, observation_time, grid, L, max_order,
                        coeffs=None, c=None):
    """Point source in a rectangular room using the mirror image source model.

    Parameters
    ----------
    x0 : (3,) array_like
        Position of source in cartesian coordinates.
    signal : (N,) array_like + float
        Excitation signal consisting of (mono) audio data and a sampling
        rate (in Hertz).  A `DelayedSignal` object can also be used.
    observation_time : float
        Observed point in time.
    grid : triple of array_like
        The grid that is used for the sound field calculations.
        See `sfs.util.xyz_grid()`.
    L : (3,) array_like
        Dimensions of the rectangular room.
    max_order : int
        Maximum number of reflections for each image source.
    coeffs : (6,) array_like, optional
        Reflection coeffecients of the walls.
        If not given, the reflection coefficients are set to one.
    c : float, optional
        Speed of sound.

    Returns
    -------
    numpy.ndarray
        Scalar sound pressure field, evaluated at positions given by
        *grid*.

    """
    # Perfectly reflecting walls unless told otherwise.
    if coeffs is None:
        coeffs = np.ones(6)

    img_positions, img_orders = util.image_sources_for_box(x0, L, max_order)
    # Strength of each image source: product of the wall coefficients
    # raised to the number of reflections at each wall.
    strengths = np.prod(coeffs**img_orders, axis=1)

    field = 0
    for position, strength in zip(img_positions, strengths):
        if strength == 0:
            continue  # fully absorbed image sources contribute nothing
        field += strength * point(position, signal, observation_time, grid, c)
    return field
|
chris-hld/sfs-python
|
sfs/time/source.py
|
Python
|
mit
| 3,359 | 0 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Dummy conftest.py for graph_stix.
If you don't know what this is for, just leave it empty.
Read more about conftest.py under:
https://pytest.org/latest/plugins.html
"""
from __future__ import print_function, absolute_import, division
import pytest
|
arangaraju/graph-stix
|
tests/conftest.py
|
Python
|
mit
| 316 | 0 |
#!/usr/bin/env python
import os.path
import random
import subprocess as sp
from runtest import TestBase
TDIR = 'xxx'
class TestCase(TestBase):
    """uftrace test: record to a remote 'recv' instance twice, then replay
    the second recording from the receive directory."""

    def __init__(self):
        TestBase.__init__(self, 'abc', """
# DURATION    TID     FUNCTION
  62.202 us [28141] | __cxa_atexit();
            [28141] | main() {
            [28141] |   a() {
            [28141] |     b() {
            [28141] |       c() {
   0.753 us [28141] |         getpid();
   1.430 us [28141] |       } /* c */
   1.915 us [28141] |     } /* b */
   2.405 us [28141] |   } /* a */
   3.005 us [28141] | } /* main */
""")

    def prerun(self, timeout):
        # Start a background "uftrace recv" listening on a fresh port.
        self.gen_port()
        self.subcmd = 'recv'
        self.option = '-d %s --port %s' % (TDIR, self.port)
        self.exearg = ''
        recv_cmd = self.runcmd()
        self.pr_debug('prerun command: ' + recv_cmd)
        self.recv_p = sp.Popen(recv_cmd.split())

        # recorded but not used
        self.subcmd = 'record'
        self.option = '--host %s --port %s' % ('localhost', self.port)
        self.exearg = 't-' + self.name
        record_cmd = self.runcmd()
        self.pr_debug('prerun command: ' + record_cmd)
        sp.call(record_cmd.split())

        # use this: a second record into a unique directory name so the
        # replay in setup() targets exactly this recording.
        self.pr_debug('run another record')
        self.dirname = 'dir-' + str(random.randint(100000, 999999))
        self.pr_debug('after randint')
        self.option += ' -d ' + self.dirname
        record_cmd = self.runcmd()
        self.pr_debug('prerun command: ' + record_cmd)
        sp.call(record_cmd.split())

        return TestBase.TEST_SUCCESS

    def setup(self):
        # Replay from the directory the recv instance wrote into.
        self.subcmd = 'replay'
        self.option = '-d %s' % os.path.join(TDIR, self.dirname)

    def postrun(self, ret):
        # Stop the background recv process regardless of test outcome.
        self.recv_p.terminate()
        return ret
|
namhyung/uftrace
|
tests/t142_recv_multi.py
|
Python
|
gpl-2.0
| 1,783 | 0.001122 |
"""
Commands that are available from the connect screen.
"""
import re
import traceback
from django.conf import settings
from src.players.models import PlayerDB
from src.objects.models import ObjectDB
from src.server.models import ServerConfig
from src.comms.models import Channel
from src.utils import create, logger, utils, ansi
from src.commands.default.muxcommand import MuxCommand
from src.commands.cmdhandler import CMD_LOGINSTART
# limit symbol import for API
__all__ = ("CmdUnconnectedConnect", "CmdUnconnectedCreate", "CmdUnconnectedQuit", "CmdUnconnectedLook", "CmdUnconnectedHelp", "Magic")
# Load the connection screen text from the configured module at import
# time; any failure is deliberately swallowed and replaced by a fallback
# error message below so login never crashes on a bad screen module.
CONNECTION_SCREEN_MODULE = settings.CONNECTION_SCREEN_MODULE
CONNECTION_SCREEN = ""
try:
    CONNECTION_SCREEN = ansi.parse_ansi(utils.string_from_module(CONNECTION_SCREEN_MODULE))
except Exception:
    pass
if not CONNECTION_SCREEN:
    CONNECTION_SCREEN = "\nEvennia: Error in CONNECTION_SCREEN MODULE (randomly picked connection screen variable is not a string). \nEnter 'help' for aid."
class Magic(MuxCommand):
    """
    Hidden command for the web client's magic cookie authenticator.
    """
    key = "magic"

    def func(self):
        # self.caller is the Session here: unconnected commands run
        # before any player object exists.
        session = self.caller
        # lhs = player name, rhs = magic cookie value.
        player = PlayerDB.objects.player_search(self.lhs)
        if len(player) != 1:
            player = None
        else:
            player = player[0]
            if player.name.lower() != self.lhs.lower():
                player=None
        pswd = None
        if player:
            pswd = self.rhs == player.db.magic_cookie
        if not (player and pswd):
            # No playername or password match
            session.msg("Could not verify Magic Cookie. Please email the server administrator for assistance.")
            return

        # Check IP and/or name bans; ban tuples are (name, ?, ip-regexp).
        bans = ServerConfig.objects.conf("server_bans")
        if bans and (any(tup[0]==player.name for tup in bans)
                     or
                     any(tup[2].match(session.address[0]) for tup in bans if tup[2])):
            # this is a banned IP or name!
            string = "{rYou have been banned and cannot continue from here."
            string += "\nIf you feel this ban is in error, please email an admin.{x"
            session.msg(string)
            session.execute_cmd("quit")
            return

        # All checks passed: attach the session to the player account.
        session.sessionhandler.login(session, player)
class Connect(MuxCommand):
    """
    Connect to the game.
    Usage (at login screen):
      connect playername password
      connect "player name" "pass word"
    Use the create command to first create an account before logging in.
    If you have spaces in your name, enclose it in quotes.
    """
    key = "connect"
    aliases = ["conn", "con", "co"]
    locks = "cmd:all()" # not really needed
    def func(self):
        """
        Uses the Django admin api. Note that unlogged-in commands
        have a unique position in that their func() receives
        a session object instead of a source_object like all
        other types of logged-in commands (this is because
        there is no object yet before the player has logged in)
        """
        session = self.caller
        args = self.args
        # extract quoted parts
        parts = [part.strip() for part in re.split(r"\"|\'", args) if part.strip()]
        if len(parts) == 1:
            # this was (hopefully) due to no quotes being found
            parts = parts[0].split(None, 1)
        if len(parts) != 2:
            session.msg("\n\r Usage (without <>): connect <name> <password>")
            return
        playername, password = parts
        # Match account name and check password
        player = PlayerDB.objects.player_search(playername)
        if len(player) != 1:
            # no match, or ambiguous match - fail the login below
            player = None
        else:
            player = player[0]
            if player.name.lower() != playername.lower():
                # reject partial-name matches returned by player_search
                player=None
        pswd = None
        if player:
            pswd = player.check_password(password)
        if not (player and pswd):
            # No playername or password match
            string = "Wrong login information given.\nIf you have spaces in your name or "
            string += "password, don't forget to enclose it in quotes. Also capitalization matters."
            string += "\nIf you are new you should first create a new account "
            string += "using the 'create' command."
            session.msg(string)
            return
        # Check IP and/or name bans
        bans = ServerConfig.objects.conf("server_bans")
        if bans and (any(tup[0]==player.name for tup in bans)
                     or
                     any(tup[2].match(session.address[0]) for tup in bans if tup[2])):
            # this is a banned IP or name!
            string = "{rYou have been banned and cannot continue from here."
            string += "\nIf you feel this ban is in error, please email an admin.{x"
            session.msg(string)
            session.execute_cmd("quit")
            return
        # actually do the login. This will call all other hooks:
        # session.at_init()
        # if character:
        #   at_first_login()  # only once
        #   at_pre_login()
        # player.at_post_login() - calls look if no character is set
        # character.at_post_login() - this calls look command by default
        session.sessionhandler.login(session, player)
class Create(MuxCommand):
    """
    Create a new account.
    Usage (at login screen):
      create <playername> <password>
      create "player name" "pass word"
    This creates a new player account.
    If you have spaces in your name, enclose it in quotes.
    """
    key = "create"
    aliases = ["cre", "cr"]
    locks = "cmd:all()"

    def func(self):
        """
        Do checks and create the account.

        self.caller is the Session (no character exists before login).
        On success, a new Player + default Character are created and the
        user is told how to connect.
        """
        session = self.caller
        args = self.args.strip()
        # extract quoted parts
        parts = [part.strip() for part in re.split(r"\"|\'", args) if part.strip()]
        if len(parts) == 1:
            # this was (hopefully) due to no quotes being found
            parts = parts[0].split(None, 1)
        if len(parts) != 2:
            string = "\n Usage (without <>): create <name> <password>"
            string += "\nIf <name> or <password> contains spaces, enclose it in quotes."
            session.msg(string)
            return
        playername, password = parts
        # SECURITY: never echo credentials to the server log. The previous
        # version printed the plaintext password here - removed.
        # sanity checks
        if not re.findall(r'^[\w. @+-]+$', playername) or not (0 < len(playername) <= 30):
            # this echoes the restrictions made by django's auth module (except not
            # allowing spaces, for convenience of logging in).
            string = "\n\r Playername can max be 30 characters or fewer. Letters, spaces, digits and @/./+/-/_ only."
            session.msg(string)
            return
        # strip excessive spaces in playername
        playername = re.sub(r"\s+", " ", playername).strip()
        if PlayerDB.objects.filter(user__username__iexact=playername) or PlayerDB.objects.filter(username__iexact=playername):
            # player already exists (we also ignore capitalization here)
            session.msg("Sorry, there is already a player with the name '%s'." % playername)
            return
        if not re.findall(r'^[\w. @+-]+$', password) or not (3 < len(password)):
            string = "\n\r Password should be longer than 3 characters. Letters, spaces, digits and @\.\+\-\_ only."
            string += "\nFor best security, make it longer than 8 characters. You can also use a phrase of"
            string += "\nmany words if you enclose the password in quotes."
            session.msg(string)
            return
        # everything's ok. Create the new player account.
        try:
            default_home = ObjectDB.objects.get_id(settings.CHARACTER_DEFAULT_HOME)
            typeclass = settings.BASE_CHARACTER_TYPECLASS
            permissions = settings.PERMISSION_PLAYER_DEFAULT
            try:
                new_character = create.create_player(playername, None, password,
                                                     permissions=permissions,
                                                     character_typeclass=typeclass,
                                                     character_location=default_home,
                                                     character_home=default_home)
            except Exception:
                # include the actual traceback in the message - the old
                # message contained an unfilled %s placeholder.
                session.msg("There was an error creating the default Character/Player:\n%s\n If this problem persists, contact an admin." % traceback.format_exc())
                return
            new_player = new_character.player
            # This needs to be called so the engine knows this player is logging in for the first time.
            # (so it knows to call the right hooks during login later)
            utils.init_new_player(new_player)
            # join the new player to the public channel
            pchanneldef = settings.CHANNEL_PUBLIC
            if pchanneldef:
                pchannel = Channel.objects.get_channel(pchanneldef[0])
                if not pchannel.connect_to(new_player):
                    string = "New player '%s' could not connect to public channel!" % new_player.key
                    logger.log_errmsg(string)
            # allow only the character itself and the player to puppet this character (and Immortals).
            new_character.locks.add("puppet:id(%i) or pid(%i) or perm(Immortals) or pperm(Immortals)" %
                                    (new_character.id, new_player.id))
            # If no description is set, set a default description
            if not new_character.db.desc:
                new_character.db.desc = "This is a Player."
            # tell the caller everything went well.
            string = "A new account '%s' was created. Welcome!"
            if " " in playername:
                string += "\n\nYou can now log in with the command 'connect \"%s\" <your password>'."
            else:
                string += "\n\nYou can now log in with the command 'connect %s <your password>'."
            session.msg(string % (playername, playername))
        except Exception:
            # We are in the middle between logged in and -not, so we have to handle tracebacks
            # ourselves at this point. If we don't, we won't see any errors at all.
            string = "%s\nThis is a bug. Please e-mail an admin if the problem persists."
            session.msg(string % (traceback.format_exc()))
            logger.log_errmsg(traceback.format_exc())
class CmdUnconnectedQuit(MuxCommand):
    """
    We maintain a different version of the quit command
    here for unconnected players for the sake of simplicity. The logged in
    version is a bit more complicated.
    """
    key = "quit"
    aliases = ["q", "qu"]
    locks = "cmd:all()"

    def func(self):
        """Say goodbye and close the connection."""
        # self.caller is the not-yet-logged-in Session.
        caller_session = self.caller
        caller_session.msg("Good bye! Disconnecting ...")
        caller_session.session_disconnect()
class CmdUnconnectedLook(MuxCommand):
    """
    This is an unconnected version of the look command for simplicity.
    This is called by the server and kicks everything in gear.
    All it does is display the connect screen.
    """
    key = CMD_LOGINSTART
    aliases = ["look", "l"]
    locks = "cmd:all()"

    def func(self):
        """Display the connect screen to the session."""
        # self.caller is the not-yet-logged-in Session.
        session = self.caller
        session.msg(CONNECTION_SCREEN)
class CmdUnconnectedHelp(MuxCommand):
    """
    This is an unconnected version of the help command,
    for simplicity. It shows a pane of info.
    """
    key = "help"
    aliases = ["h", "?"]
    locks = "cmd:all()"
    def func(self):
        "Shows help"
        # The {w...{n markers below are runtime color codes parsed by the
        # client; this text is sent to the session verbatim.
        string = \
            """
You are not yet logged into the game. Commands available at this point:
  {wcreate, connect, look, help, quit{n
To login to the system, you need to do one of the following:
{w1){n If you have no previous account, you need to use the 'create'
   command.
     {wcreate Anna c67jHL8p{n
Note that if you use spaces in your name, you have to enclose in quotes.
     {wcreate "Anna the Barbarian" c67jHL8p{n
It's always a good idea (not only here, but everywhere on the net)
to not use a regular word for your password. Make it longer than
6 characters or write a passphrase.
{w2){n If you have an account already, either because you just created
   one in {w1){n above or you are returning, use the 'connect' command:
     {wconnect Anna c67jHL8p{n
(Again, if there are spaces in the name you have to enclose it in quotes).
This should log you in. Run {whelp{n again once you're logged in
to get more aid. Hope you enjoy your stay!
You can use the {wlook{n command if you want to see the connect screen again.
"""
        self.caller.msg(string)
|
TaliesinSkye/evennia
|
wintersoasis-master/commands/unloggedin.py
|
Python
|
bsd-3-clause
| 12,835 | 0.004441 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    # Adds the optional 'magnetic' field to the RoboUser model as a
    # nullable CharField of up to 9 characters.
    # NOTE(review): presumably a magnetic card/stripe id - confirm
    # against the RoboUser model definition.
    dependencies = [
        ('robocrm', '0006_auto_20141005_1800'),
    ]
    operations = [
        migrations.AddField(
            model_name='robouser',
            name='magnetic',
            field=models.CharField(max_length=9, null=True, blank=True),
            preserve_default=True,
        ),
    ]
|
CMU-Robotics-Club/roboticsclub.org
|
robocrm/migrations/0007_robouser_magnetic.py
|
Python
|
mit
| 461 | 0 |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Helper CGI for Apiserver in the development app server.
This is a fake apiserver proxy that does simple transforms on requests that
come in to /_ah/api and then re-dispatches them to /_ah/spi. It does not do
any authentication, quota checking, DoS checking, etc.
In addition, the proxy loads api configs from
/_ah/spi/BackendService.getApiConfigs prior to making the first call to the
backend at /_ah/spi and afterwards if app.yaml is changed.
"""
from __future__ import with_statement
import base64
import cgi
import cStringIO
import httplib
try:
import json
except ImportError:
import simplejson as json
import logging
import mimetools
import re
# URL pattern covering requests routed to this fake API proxy.
API_SERVING_PATTERN = '/_ah/api/.*'
# Template for re-dispatching calls to the local SPI backend;
# filled with (server port, remainder of the SPI path).
SPI_ROOT_FORMAT = 'http://127.0.0.1:%s/_ah/spi/%s'
# Rest paths are prefixed with api name and version; the leading '!' marks
# reserved variables matched positionally but not exposed as parameters.
_API_REST_PATH_FORMAT = '{!name}/{!version}/%s'
# Identifier syntax for a path template variable, e.g. the 'id' in '{id}'.
_PATH_VARIABLE_PATTERN = r'[a-zA-Z_][a-zA-Z_.\d]*'
# Reserved variables use the same identifier syntax prefixed with '!'.
_RESERVED_PATH_VARIABLE_PATTERN = r'!' + _PATH_VARIABLE_PATTERN
# Characters allowed in a path variable's value (no URL separators/brackets).
_PATH_VALUE_PATTERN = r'[^:/?#\[\]{}]*'
# Incoming CORS headers (compared lowercased) ...
_CORS_HEADER_ORIGIN = 'Origin'.lower()
_CORS_HEADER_REQUEST_METHOD = 'Access-Control-Request-Method'.lower()
_CORS_HEADER_REQUEST_HEADERS = 'Access-Control-Request-Headers'.lower()
# ... and the response headers set when answering CORS requests.
_CORS_HEADER_ALLOW_ORIGIN = 'Access-Control-Allow-Origin'
_CORS_HEADER_ALLOW_METHODS = 'Access-Control-Allow-Methods'
_CORS_HEADER_ALLOW_HEADERS = 'Access-Control-Allow-Headers'
_CORS_ALLOWED_METHODS = frozenset(('DELETE', 'GET', 'PATCH', 'POST', 'PUT'))
# Error message template for enum parameter values that fail validation.
_INVALID_ENUM_TEMPLATE = 'Invalid string value: %r. Allowed values: %r'
class RequestRejectionError(Exception):
  """Base class for rejected requests.
  To be raised when parsing the request values and comparing them against the
  generated discovery document.
  """

  def Message(self):
    # Subclasses must supply a human-readable error description.
    raise NotImplementedError

  def Errors(self):
    # Subclasses must supply a list of structured error dicts.
    raise NotImplementedError

  def ToJson(self):
    """JSON string representing the rejected value.
    Calling this will fail on the base class since it relies on Message and
    Errors being implemented on the class. It is up to a subclass to implement
    these methods.
    Returns:
      JSON string representing the rejected value.
    """
    payload = {
        'errors': self.Errors(),
        'code': 400,
        'message': self.Message(),
    }
    return json.dumps({'error': payload})
class EnumRejectionError(RequestRejectionError):
  """Custom request rejection exception for enum values."""

  def __init__(self, parameter_name, value, allowed_values):
    """Constructor for EnumRejectionError.
    Args:
      parameter_name: String; the name of the enum parameter which had a value
        rejected.
      value: The actual value passed in for the enum. Usually string.
      allowed_values: List of strings allowed for the enum.
    """
    self.parameter_name = parameter_name
    self.value = value
    self.allowed_values = allowed_values

  def Message(self):
    """A descriptive message describing the error."""
    return _INVALID_ENUM_TEMPLATE % (self.value, self.allowed_values)

  def Errors(self):
    """A list containing the errors associated with the rejection.
    Intended to mimic those returned from an API in production in Google's API
    infrastructure.
    Returns:
      A list with a single element that is a dictionary containing the error
      information.
    """
    error_entry = {
        'domain': 'global',
        'reason': 'invalidParameter',
        'message': self.Message(),
        'locationType': 'parameter',
        'location': self.parameter_name,
    }
    return [error_entry]
class ApiRequest(object):
  """Simple data object representing an API request.
  Takes an app_server CGI request and environment in the constructor.
  Parses the request into convenient pieces and stores them as members.
  """
  API_PREFIX = '/_ah/api/'

  def __init__(self, base_env_dict, old_dev_appserver, request=None):
    """Constructor.
    Args:
      base_env_dict: Dictionary of CGI environment parameters.
      old_dev_appserver: used to call standard SplitURL method.
      request: AppServerRequest. Can be None.
    """
    self.cgi_env = base_env_dict
    self.headers = {}
    self.http_method = base_env_dict['REQUEST_METHOD']
    self.port = base_env_dict['SERVER_PORT']
    if request:
      self.path, self.query = old_dev_appserver.SplitURL(request.relative_url)
      self.body = request.infile.read()
      # Header lines arrive as raw 'Name: value' strings; split on the
      # first colon only so values containing ':' survive intact.
      for raw_header in request.headers.headers:
        header_name, header_value = raw_header.split(':', 1)
        self.headers[header_name.strip()] = header_value.strip()
    else:
      self.body = ''
      self.path = self.API_PREFIX
      self.query = ''
    # All API requests must live under the /_ah/api/ prefix; store the
    # path relative to it.
    assert self.path.startswith(self.API_PREFIX)
    self.path = self.path[len(self.API_PREFIX):]
    self.parameters = cgi.parse_qs(self.query, keep_blank_values=True)
    self.body_obj = json.loads(self.body) if self.body else {}
    self.request_id = None

  def _IsRpc(self):
    """True when this request targets the JSON-RPC endpoint ('rpc')."""
    return self.path == 'rpc'
class DiscoveryApiProxy(object):
  """Proxies discovery service requests to a known cloud endpoint."""

  # Remote hosts used for doc generation and for static discovery assets.
  _DISCOVERY_PROXY_HOST = 'webapis-discovery.appspot.com'
  _STATIC_PROXY_HOST = 'webapis-discovery.appspot.com'
  _DISCOVERY_API_PATH_PREFIX = '/_ah/api/discovery/v1/'

  def _DispatchRequest(self, path, body):
    """Proxies GET request to discovery service API.
    Args:
      path: URL path relative to discovery service.
      body: HTTP POST request body.
    Returns:
      HTTP response body or None if it failed.
    """
    full_path = self._DISCOVERY_API_PATH_PREFIX + path
    connection = httplib.HTTPSConnection(self._DISCOVERY_PROXY_HOST)
    try:
      connection.request('POST', full_path, body,
                         {'Content-type': 'application/json'})
      response = connection.getresponse()
      response_body = response.read()
    finally:
      connection.close()
    if response.status != 200:
      logging.error('Discovery API proxy failed on %s with %d.\r\n'
                    'Request: %s\r\nResponse: %s',
                    full_path, response.status, body, response_body)
      return None
    return response_body

  def GenerateDiscoveryDoc(self, api_config, api_format):
    """Generates a discovery document from an API file.
    Args:
      api_config: .api file contents as string.
      api_format: 'rest' or 'rpc' depending on the which kind of discvoery doc.
    Returns:
      Discovery doc as JSON string.
    Raises:
      ValueError: When api_format is invalid.
    """
    if api_format not in ('rest', 'rpc'):
      raise ValueError('Invalid API format')
    # The remote service expects the config as a JSON string nested
    # inside a JSON request body.
    request_body = json.dumps({'config': json.dumps(api_config)})
    return self._DispatchRequest('apis/generate/' + api_format, request_body)

  def GenerateDirectory(self, api_configs):
    """Generates an API directory from a list of API files.
    Args:
      api_configs: list of strings which are the .api file contents.
    Returns:
      API directory as JSON string.
    """
    request_body = json.dumps({'configs': api_configs})
    return self._DispatchRequest('apis/generate/directory', request_body)

  def GetStaticFile(self, path):
    """Returns static content via a GET request.
    Args:
      path: URL path after the domain.
    Returns:
      Tuple of (response, response_body):
        response: HTTPResponse object.
        response_body: Response body as string.
    """
    connection = httplib.HTTPSConnection(self._STATIC_PROXY_HOST)
    try:
      connection.request('GET', path, None, {})
      response = connection.getresponse()
      response_body = response.read()
    finally:
      connection.close()
    return response, response_body
class DiscoveryService(object):
  """Implements the local devserver discovery service.
  This has a static minimal version of the discoverable part of the
  discovery .api file.
  It only handles returning the discovery doc and directory, and ignores
  directory parameters to filter the results.
  The discovery docs/directory are created by calling a cloud endpoint
  discovery service to generate the discovery docs/directory from an .api
  file/set of .api files.
  """
  # rosyMethod names this service answers in HandleDiscoveryRequest.
  _GET_REST_API = 'apisdev.getRest'
  _GET_RPC_API = 'apisdev.getRpc'
  _LIST_API = 'apisdev.list'
  # Static config for the discovery API itself, registered with
  # ApiConfigManager so it is routable like any user-supplied API.
  API_CONFIG = {
      'name': 'discovery',
      'version': 'v1',
      'methods': {
          'discovery.apis.getRest': {
              'path': 'apis/{api}/{version}/rest',
              'httpMethod': 'GET',
              'rosyMethod': _GET_REST_API,
          },
          'discovery.apis.getRpc': {
              'path': 'apis/{api}/{version}/rpc',
              'httpMethod': 'GET',
              'rosyMethod': _GET_RPC_API,
          },
          'discovery.apis.list': {
              'path': 'apis',
              'httpMethod': 'GET',
              'rosyMethod': _LIST_API,
          },
      }
  }
  def __init__(self, config_manager, api_request, outfile):
    """Initializes an instance of the DiscoveryService.
    Args:
      config_manager: an instance of ApiConfigManager.
      api_request: an instance of ApiRequest.
      outfile: the CGI file object to write the response to.
    """
    self._config_manager = config_manager
    # The request body is a JSON object of request parameters (api/version).
    self._params = json.loads(api_request.body or '{}')
    self._outfile = outfile
    self._discovery_proxy = DiscoveryApiProxy()
  def _SendSuccessResponse(self, response):
    """Sends an HTTP 200 json success response.
    Args:
      response: Response body as string to return.
    Returns:
      Sends back an HTTP 200 json success response.
    """
    headers = {'Content-Type': 'application/json; charset=UTF-8'}
    return SendCGIResponse('200', headers, response, self._outfile)
  def _GetRpcOrRest(self, api_format):
    """Sends back HTTP response with API directory.
    Args:
      api_format: Either 'rest' or 'rpc'. Sends CGI response containing
        the discovery doc for the api/version.
    Returns:
      None.
    """
    api = self._params['api']
    version = self._params['version']
    lookup_key = (api, version)
    api_config = self._config_manager.configs.get(lookup_key)
    if not api_config:
      logging.warn('No discovery doc for version %s of api %s', version, api)
      SendCGINotFoundResponse(self._outfile)
      return
    # Delegate the .api -> discovery-doc conversion to the cloud proxy.
    doc = self._discovery_proxy.GenerateDiscoveryDoc(api_config, api_format)
    if not doc:
      error_msg = ('Failed to convert .api to discovery doc for '
                   'version %s of api %s') % (version, api)
      logging.error('%s', error_msg)
      SendCGIErrorResponse(error_msg, self._outfile)
      return
    self._SendSuccessResponse(doc)
  def _GetRest(self):
    # Convenience wrapper: discovery doc in REST format.
    return self._GetRpcOrRest('rest')
  def _GetRpc(self):
    # Convenience wrapper: discovery doc in RPC format.
    return self._GetRpcOrRest('rpc')
  def _List(self):
    """Sends HTTP response containing the API directory."""
    api_configs = []
    for api_config in self._config_manager.configs.itervalues():
      # Exclude the discovery service's own config from the directory.
      if not api_config == self.API_CONFIG:
        api_configs.append(json.dumps(api_config))
    directory = self._discovery_proxy.GenerateDirectory(api_configs)
    if not directory:
      logging.error('Failed to get API directory')
      SendCGINotFoundResponse(self._outfile)
      return
    self._SendSuccessResponse(directory)
  def HandleDiscoveryRequest(self, path):
    """Returns the result of a discovery service request.
    Args:
      path: the SPI API path
    Returns:
      JSON string with result of discovery service API request.
    """
    if path == self._GET_REST_API:
      self._GetRest()
    elif path == self._GET_RPC_API:
      self._GetRpc()
    elif path == self._LIST_API:
      self._List()
    else:
      # Not a discovery method; the caller should dispatch elsewhere.
      return False
    return True
class ApiConfigManager(object):
  """Manages loading api configs and method lookup."""
  def __init__(self):
    # (method_name, version) -> method descriptor, for JSON-RPC lookup.
    self._rpc_method_dict = {}
    # List of (compiled path regex, original path pattern string,
    # {(http_method, version): (method_name, method)}) for REST lookup.
    self._rest_methods = []
    # (api name, api version) -> parsed api config dict.
    self.configs = {}
  @staticmethod
  def HasSpiEndpoint(config):
    """Checks if an SPI is registered with this App.
    Args:
      config: Parsed app.yaml as an appinfo proto.
    Returns:
      True if any handler is registered for (/_ah/spi/.*).
    """
    return any(h.url.startswith('/_ah/spi/') for h in config.handlers)
  def _AddDiscoveryConfig(self):
    # Registers the built-in discovery API alongside user-supplied configs.
    lookup_key = (DiscoveryService.API_CONFIG['name'],
                  DiscoveryService.API_CONFIG['version'])
    self.configs[lookup_key] = DiscoveryService.API_CONFIG
  def ParseApiConfigResponse(self, body):
    """Parses a json api config and registers methods for dispatch.
    Side effects:
      Parses method name, etc for all methods and updates the indexing
      datastructures with the information.
    Args:
      body: body of getApiConfigs response
    """
    try:
      response_obj = json.loads(body)
    except ValueError, unused_err:
      logging.error('Cannot parse BackendService.getApiConfigs response: %s',
                    body)
    else:
      self._AddDiscoveryConfig()
      for api_config_json in response_obj.get('items', []):
        # Each item is itself a JSON string containing one API's config.
        try:
          config = json.loads(api_config_json)
        except ValueError, unused_err:
          logging.error('Can not parse API config: %s',
                        api_config_json)
        else:
          lookup_key = config.get('name', ''), config.get('version', '')
          self.configs[lookup_key] = config
      for config in self.configs.itervalues():
        version = config.get('version', '')
        # Register methods in the server's matching-priority order so the
        # first regex hit in LookupRestMethod is the correct one.
        sorted_methods = self._GetSortedMethods(config.get('methods', {}))
        for method_name, method in sorted_methods:
          self.SaveRpcMethod(method_name, version, method)
          self.SaveRestMethod(method_name, version, method)
  def _GetSortedMethods(self, methods):
    """Get a copy of 'methods' sorted the same way AppEngine sorts them.
    Args:
      methods: Json configuration of an API's methods.
    Returns:
      The same configuration with the methods sorted based on what order
      they'll be checked by the server.
    """
    if not methods:
      return methods
    def _SortMethodsComparison(method_info1, method_info2):
      """Sort method info by path and http_method.
      Args:
        method_info1: Method name and info for the first method to compare.
        method_info2: Method name and info for the method to compare to.
      Returns:
        Negative if the first method should come first, positive if the
        first method should come after the second. Zero if they're
        equivalent.
      """
      def _ScorePath(path):
        """Calculate the score for this path, used for comparisons.
        Higher scores have priority, and if scores are equal, the path text
        is sorted alphabetically. Scores are based on the number and location
        of the constant parts of the path. The server has some special handling
        for variables with regexes, which we don't handle here.
        Args:
          path: The request path that we're calculating a score for.
        Returns:
          The score for the given path.
        """
        score = 0
        parts = path.split('/')
        # Build a bitstring left-to-right: 1 for a constant segment,
        # 0 for a '{variable}' segment, so constants earlier in the
        # path outrank variables.
        for part in parts:
          score <<= 1
          if not part or part[0] != '{':
            score += 1
        # Shift by the number of unused positions (max 31 segments) so
        # shorter paths don't unfairly outrank longer ones.
        score <<= 31 - len(parts)
        return score
      path_score1 = _ScorePath(method_info1[1].get('path', ''))
      path_score2 = _ScorePath(method_info2[1].get('path', ''))
      if path_score1 != path_score2:
        return path_score2 - path_score1
      # Tie-break: lexicographic path, then HTTP method (Python 2 cmp).
      path_result = cmp(method_info1[1].get('path', ''),
                        method_info2[1].get('path', ''))
      if path_result != 0:
        return path_result
      method_result = cmp(method_info1[1].get('httpMethod', ''),
                          method_info2[1].get('httpMethod', ''))
      return method_result
    return sorted(methods.items(), _SortMethodsComparison)
  @staticmethod
  def _ToSafePathParamName(matched_parameter):
    """Creates a safe string to be used as a regex group name.
    Only alphanumeric characters and underscore are allowed in variable name
    tokens, and numeric are not allowed as the first character.
    We cast the matched_parameter to base32 (since the alphabet is safe),
    strip the padding (= not safe) and prepend with _, since we know a token
    can begin with underscore.
    Args:
      matched_parameter: String; parameter matched from URL template.
    Returns:
      String, safe to be used as a regex group name.
    """
    return '_' + base64.b32encode(matched_parameter).rstrip('=')
  @staticmethod
  def _FromSafePathParamName(safe_parameter):
    """Takes a safe regex group name and converts it back to the original value.
    Only alphanumeric characters and underscore are allowed in variable name
    tokens, and numeric are not allowed as the first character.
    The safe_parameter is a base32 representation of the actual value.
    Args:
      safe_parameter: String, safe regex group name.
    Returns:
      String; parameter matched from URL template.
    """
    assert safe_parameter.startswith('_')
    safe_parameter_as_base32 = safe_parameter[1:]
    # Restore the '=' padding stripped by _ToSafePathParamName (base32
    # input length must be a multiple of 8).
    padding_length = - len(safe_parameter_as_base32) % 8
    padding = '=' * padding_length
    return base64.b32decode(safe_parameter_as_base32 + padding)
  @staticmethod
  def CompilePathPattern(pattern):
    """Generates a compiled regex pattern for a path pattern.
    e.g. '/{!name}/{!version}/notes/{id}'
    returns re.compile(r'/([^:/?#\[\]{}]*)'
                       r'/([^:/?#\[\]{}]*)'
                       r'/notes/(?P<id>[^:/?#\[\]{}]*)')
    Note in this example that !name and !version are reserved variable names
    used to match the API name and version that should not be migrated into the
    method argument namespace.  As such they are not named in the regex, so
    groupdict() excludes them.
    Args:
      pattern: parameterized path pattern to be checked
    Returns:
      compiled regex to match this path pattern
    """
    def ReplaceReservedVariable(match):
      """Replaces a {!variable} with a regex to match it not by name.
      Args:
        match: The matching regex group as sent by re.sub()
      Returns:
        Regex to match the variable by name, if the full pattern was matched.
      """
      if match.lastindex > 1:
        return '%s(%s)' % (match.group(1), _PATH_VALUE_PATTERN)
      return match.group(0)
    def ReplaceVariable(match):
      """Replaces a {variable} with a regex to match it by name.
      Changes the string corresponding to the variable name to the base32
      representation of the string, prepended by an underscore. This is
      necessary because we can have message variable names in URL patterns
      (e.g. via {x.y}) but the character '.' can't be in a regex group name.
      Args:
        match: The matching regex group as sent by re.sub()
      Returns:
        Regex to match the variable by name, if the full pattern was matched.
      """
      if match.lastindex > 1:
        var_name = ApiConfigManager._ToSafePathParamName(match.group(2))
        return '%s(?P<%s>%s)' % (match.group(1), var_name,
                                 _PATH_VALUE_PATTERN)
      return match.group(0)
    # count=2: only the leading {!name}/{!version} pair is reserved; any
    # further variables are real parameters handled by ReplaceVariable.
    pattern = re.sub('(/|^){(%s)}(?=/|$)' % _RESERVED_PATH_VARIABLE_PATTERN,
                     ReplaceReservedVariable, pattern, 2)
    pattern = re.sub('(/|^){(%s)}(?=/|$)' % _PATH_VARIABLE_PATTERN,
                     ReplaceVariable, pattern)
    return re.compile(pattern + '/?$')
  def SaveRpcMethod(self, method_name, version, method):
    """Store JsonRpc api methods in a map for lookup at call time.
    (rpcMethodName, apiVersion) => method.
    Args:
      method_name: Name of the API method
      version: Version of the API
      method: method descriptor (as in the api config file).
    """
    self._rpc_method_dict[(method_name, version)] = method
  def LookupRpcMethod(self, method_name, version):
    """Lookup the JsonRPC method at call time.
    The method is looked up in self._rpc_method_dict, the dictionary that
    it is saved in for SaveRpcMethod().
    Args:
      method_name: String name of the method
      version: String version of the API
    Returns:
      Method descriptor as specified in the API configuration.
    """
    method = self._rpc_method_dict.get((method_name, version))
    return method
  def SaveRestMethod(self, method_name, version, method):
    """Store Rest api methods in a list for lookup at call time.
    The list is self._rest_methods, a list of tuples:
      [(<compiled_path>, <path_pattern>, <method_dict>), ...]
    where:
      <compiled_path> is a compiled regex to match against the incoming URL
      <path_pattern> is a string representing the original path pattern,
        checked on insertion to prevent duplicates.     -and-
      <method_dict> is a dict (httpMethod, apiVersion) => (method_name, method)
    This structure is a bit complex, it supports use in two contexts:
      Creation time:
        - SaveRestMethod is called repeatedly, each method will have a path,
          which we want to be compiled for fast lookup at call time
        - We want to prevent duplicate incoming path patterns, so store the
          un-compiled path, not counting on a compiled regex being a stable
          comparison as it is not documented as being stable for this use.
        - Need to store the method that will be mapped at calltime.
        - Different methods may have the same path but different http method.
          and/or API versions.
      Call time:
        - Quickly scan through the list attempting .match(path) on each
          compiled regex to find the path that matches.
        - When a path is matched, look up the API version and method from the
          request and get the method name and method config for the matching
          API method and method name.
    Args:
      method_name: Name of the API method
      version: Version of the API
      method: method descriptor (as in the api config file).
    """
    path_pattern = _API_REST_PATH_FORMAT % method.get('path', '')
    http_method = method.get('httpMethod', '').lower()
    for _, path, methods in self._rest_methods:
      if path == path_pattern:
        # Same path already registered: just add this (method, version).
        methods[(http_method, version)] = method_name, method
        break
    else:
      # for/else: no existing entry matched, so register a new pattern.
      self._rest_methods.append(
          (self.CompilePathPattern(path_pattern),
           path_pattern,
           {(http_method, version): (method_name, method)}))
  @staticmethod
  def _GetPathParams(match):
    """Gets path parameters from a regular expression match.
    Args:
      match: _sre.SRE_Match object for a path.
    Returns:
      A dictionary containing the variable names converted from base64
    """
    result = {}
    for var_name, value in match.groupdict().iteritems():
      actual_var_name = ApiConfigManager._FromSafePathParamName(var_name)
      result[actual_var_name] = value
    return result
  def LookupRestMethod(self, path, http_method):
    """Look up the rest method at call time.
    The method is looked up in self._rest_methods, the list it is saved
    in for SaveRestMethod.
    Args:
      path: Path from the URL of the request.
      http_method: HTTP method of the request.
    Returns:
      Tuple of (<method name>, <method>, <params>)
      Where:
        <method name> is the string name of the method that was matched.
        <method> is the descriptor as specified in the API configuration. -and-
        <params> is a dict of path parameters matched in the rest request.
    """
    for compiled_path_pattern, unused_path, methods in self._rest_methods:
      match = compiled_path_pattern.match(path)
      if match:
        params = self._GetPathParams(match)
        # group(2) is the second unnamed group, i.e. the {!version}
        # part of the reserved '{!name}/{!version}/...' prefix.
        version = match.group(2)
        method_key = (http_method.lower(), version)
        method_name, method = methods.get(method_key, (None, None))
        if method is not None:
          break
    else:
      logging.warn('No endpoint found for path: %s', path)
      method_name = None
      method = None
      params = None
    return method_name, method, params
def CreateApiserverDispatcher(config_manager=None):
  """Function to create Apiserver dispatcher.
  Args:
    config_manager: Allow setting of ApiConfigManager for testing.
  Returns:
    New dispatcher capable of handling requests to the built-in apiserver
    handlers.
  """
  # Imported lazily (inside the factory) so the dispatcher class can subclass
  # old_dev_appserver.URLDispatcher without a module-level import cycle.
  from google.appengine.tools import old_dev_appserver
  class ApiserverDispatcher(old_dev_appserver.URLDispatcher):
    """Dispatcher that handles requests to the built-in apiserver handlers."""
    _API_EXPLORER_URL = 'http://apis-explorer.appspot.com/apis-explorer/?base='
    class RequestState(object):
      """Enum tracking request state."""
      INIT = 0
      GET_API_CONFIGS = 1
      SPI_CALL = 2
      END = 3
    def __init__(self, config_manager=None, *args, **kwargs):
      self._is_rpc = None
      self.request = None
      self._request_stage = self.RequestState.INIT
      self._is_batch = False
      if config_manager is None:
        config_manager = ApiConfigManager()
      self.config_manager = config_manager
      # Reserved non-API paths (API explorer, static assets) are served by
      # local handlers instead of being forwarded to an SPI backend.
      self._dispatchers = []
      self._AddDispatcher('/_ah/api/explorer/?$',
                          self.HandleApiExplorerRequest)
      self._AddDispatcher('/_ah/api/static/.*$',
                          self.HandleApiStaticRequest)
      old_dev_appserver.URLDispatcher.__init__(self, *args, **kwargs)
    def _AddDispatcher(self, path_regex, dispatch_function):
      """Add a request path and dispatch handler.
      Args:
        path_regex: Regex path to match against incoming requests.
        dispatch_function: Function to call for these requests. The function
          should take (request, outfile, base_env_dict) as arguments and
          return True.
      """
      self._dispatchers.append((re.compile(path_regex), dispatch_function))
    def _EndRequest(self):
      """End the request and clean up.
      Sets the request state to END and cleans up any variables that
      need it.
      """
      self._request_stage = self.RequestState.END
      self._is_batch = False
    def IsRpc(self):
      """Check if the current request is an RPC request.
      This should only be used after Dispatch, where this info is cached.
      Returns:
        True if the current request is an RPC. False if not.
      """
      # Dispatch() must have run first; it caches the RPC-vs-REST decision.
      assert self._is_rpc is not None
      return self._is_rpc
    def DispatchNonApiRequests(self, request, outfile, base_env_dict):
      """Dispatch this request if this is a request to a reserved URL.
      Args:
        request: AppServerRequest.
        outfile: The response file.
        base_env_dict: Dictionary of CGI environment parameters if available.
          Defaults to None.
      Returns:
        False if the request doesn't match one of the reserved URLs this
        handles. True if it is handled.
      """
      for path_regex, dispatch_function in self._dispatchers:
        if path_regex.match(request.relative_url):
          return dispatch_function(request, outfile, base_env_dict)
      return False
    def Dispatch(self,
                 request,
                 outfile,
                 base_env_dict=None):
      """Handles dispatch to apiserver handlers.
      base_env_dict should contain at least:
        REQUEST_METHOD, REMOTE_ADDR, SERVER_SOFTWARE, SERVER_NAME,
        SERVER_PROTOCOL, SERVER_PORT
      Args:
        request: AppServerRequest.
        outfile: The response file.
        base_env_dict: Dictionary of CGI environment parameters if available.
          Defaults to None.
      Returns:
        AppServerRequest internal redirect for normal API calls or
        None for error conditions (e.g. method not found -> 404) and
        other calls not requiring the GetApiConfigs redirect.
      """
      if self._request_stage != self.RequestState.INIT:
        return self.FailRequest('Dispatch in unexpected state', outfile)
      if not base_env_dict:
        return self.FailRequest('CGI Environment Not Available', outfile)
      if self.DispatchNonApiRequests(request, outfile, base_env_dict):
        return None
      # Cache the parsed request; the RPC-vs-REST decision is made once here
      # and reused via IsRpc() for the rest of the request lifecycle.
      self.request = ApiRequest(base_env_dict, old_dev_appserver, request)
      self._is_rpc = self.request._IsRpc()
      self._request_stage = self.RequestState.GET_API_CONFIGS
      return self.GetApiConfigs(base_env_dict, old_dev_appserver)
    def HandleApiExplorerRequest(self, unused_request, outfile, base_env_dict):
      """Handler for requests to _ah/api/explorer.
      Args:
        unused_request: AppServerRequest.
        outfile: The response file.
        base_env_dict: Dictionary of CGI environment parameters
          if available. Defaults to None.
      Returns:
        True
      We will redirect these requests to the google apis explorer.
      """
      base_url = 'http://%s:%s/_ah/api' % (base_env_dict['SERVER_NAME'],
                                           base_env_dict['SERVER_PORT'])
      redirect_url = self._API_EXPLORER_URL + base_url
      SendCGIRedirectResponse(redirect_url, outfile)
      return True
    def HandleApiStaticRequest(self, request, outfile, unused_base_env_dict):
      """Handler for requests to _ah/api/static/.*.
      Args:
        request: AppServerRequest.
        outfile: The response file.
        unused_base_env_dict: Dictionary of CGI environment parameters
          if available. Defaults to None.
      Returns:
        True
      We will redirect these requests to an endpoint proxy.
      """
      discovery_api_proxy = DiscoveryApiProxy()
      response, body = discovery_api_proxy.GetStaticFile(request.relative_url)
      if response.status == 200:
        SendCGIResponse('200',
                        {'Content-Type': response.getheader('Content-Type')},
                        body, outfile)
      else:
        # Pass the proxy's failure through verbatim so the client sees the
        # upstream status and body.
        logging.error('Discovery API proxy failed on %s with %d. Details: %s',
                      request.relative_url, response.status, body)
        SendCGIResponse(response.status, dict(response.getheaders()), body,
                        outfile)
      return True
    def EndRedirect(self, dispatched_output, outfile):
      """Handle the end of getApiConfigs and SPI complete notification.
      This EndRedirect is called twice.
      The first time is upon completion of the BackendService.getApiConfigs()
      call. After this call, the set of all available methods and their
      parameters / paths / config is contained in dispatched_output. This is
      parsed and used to dispatch the request to the SPI backend itself.
      In order to cause a second dispatch and EndRedirect, this EndRedirect
      will return an AppServerRequest filled out with the SPI backend request.
      The second time it is called is upon completion of the call to the SPI
      backend. After this call, if the initial request (sent in Dispatch, prior
      to getApiConfigs) is used to reformat the response as needed. This
      currently only results in changes for JsonRPC requests, where the response
      body is moved into {'result': response_body_goes_here} and the request id
      is copied back into the response.
      Args:
        dispatched_output: resulting output from the SPI
        outfile: final output file for this handler
      Returns:
        An AppServerRequest for redirect or None for an immediate response.
      """
      if self._request_stage == self.RequestState.GET_API_CONFIGS:
        # On failure HandleGetApiConfigsResponse has already written an error
        # response, so falling through to the implicit None return is correct.
        if self.HandleGetApiConfigsResponse(dispatched_output, outfile):
          return self.CallSpi(outfile)
      elif self._request_stage == self.RequestState.SPI_CALL:
        return self.HandleSpiResponse(dispatched_output, outfile)
      else:
        return self.FailRequest('EndRedirect in unexpected state', outfile)
    def GetApiConfigs(self, cgi_env, old_dev_appserver):
      """Makes a call to BackendService.getApiConfigs and parses result.
      Args:
        cgi_env: CGI environment dictionary as passed in by the framework
        old_dev_appserver:
          old_dev_appserver instance used to generate AppServerRequest.
      Returns:
        AppServerRequest to be returned as an internal redirect to getApiConfigs
      """
      request = ApiRequest(cgi_env, old_dev_appserver)
      request.path = 'BackendService.getApiConfigs'
      request.body = '{}'
      return BuildCGIRequest(cgi_env, request, old_dev_appserver)
    @staticmethod
    def VerifyResponse(response, status_code, content_type=None):
      """Verifies that a response has the expected status and content type.
      Args:
        response: Response to be checked.
        status_code: HTTP status code to be compared with response status.
        content_type: acceptable Content-Type: header value, None allows any.
      Returns:
        True if both status_code and content_type match, else False.
      """
      if response.status_code != status_code:
        return False
      if content_type is None:
        return True
      for header in response.headers:
        if header.lower() == 'content-type':
          return response.headers[header].lower() == content_type
      else:
        # for/else: reached only when no Content-Type header was present.
        return False
    @staticmethod
    def ParseCgiResponse(response):
      """Parses a CGI response, returning a headers dict and body.
      Args:
        response: a CGI response
      Returns:
        tuple of ({header: header_value, ...}, body)
      """
      header_dict = {}
      # response.headers.headers is the list of raw 'Name: value' header
      # lines; split each one only on the first colon.
      for header in response.headers.headers:
        header_name, header_value = header.split(':', 1)
        header_dict[header_name.strip()] = header_value.strip()
      if response.body:
        body = response.body.read()
      else:
        body = ''
      return header_dict, body
    def HandleGetApiConfigsResponse(self, dispatched_output, outfile):
      """Parses the result of getApiConfigs, returning True on success.
      Args:
        dispatched_output: Output from the getApiConfigs call handler.
        outfile: CGI output handle, used for error conditions.
      Returns:
        True on success, False on failure
      """
      response = old_dev_appserver.RewriteResponse(dispatched_output)
      if self.VerifyResponse(response, 200, 'application/json'):
        self.config_manager.ParseApiConfigResponse(response.body.read())
        return True
      else:
        self.FailRequest('BackendService.getApiConfigs Error', outfile)
        return False
    def CallSpi(self, outfile):
      """Generate SPI call (from earlier-saved request).
      Side effects:
        self.request is modified from Rest/JsonRPC format to apiserving format.
      Args:
        outfile: File to write out CGI-style response in case of error.
      Returns:
        AppServerRequest for redirect or None to send immediate CGI response.
      """
      if self.IsRpc():
        method_config = self.LookupRpcMethod()
        params = None
      else:
        method_config, params = self.LookupRestMethod()
      if method_config:
        try:
          self.TransformRequest(params, method_config)
          # Discovery requests are answered locally; everything else is
          # forwarded to the SPI backend via an internal redirect.
          discovery_service = DiscoveryService(self.config_manager,
                                               self.request, outfile)
          if not discovery_service.HandleDiscoveryRequest(self.request.path):
            self._request_stage = self.RequestState.SPI_CALL
            return BuildCGIRequest(self.request.cgi_env, self.request,
                                   old_dev_appserver)
        except RequestRejectionError, rejection_error:
          self._EndRequest()
          return SendCGIRejectedResponse(rejection_error, outfile)
      else:
        self._EndRequest()
        cors_handler = ApiserverDispatcher.__CheckCorsHeaders(self.request)
        return SendCGINotFoundResponse(outfile, cors_handler=cors_handler)
    class __CheckCorsHeaders(object):
      """Track information about CORS headers and our response to them."""
      def __init__(self, request):
        self.allow_cors_request = False
        self.origin = None
        self.cors_request_method = None
        self.cors_request_headers = None
        self.__CheckCorsRequest(request)
      def __CheckCorsRequest(self, request):
        """Check for a CORS request, and see if it gets a CORS response."""
        for orig_header, orig_value in request.headers.iteritems():
          if orig_header.lower() == _CORS_HEADER_ORIGIN:
            self.origin = orig_value
          if orig_header.lower() == _CORS_HEADER_REQUEST_METHOD:
            self.cors_request_method = orig_value
          if orig_header.lower() == _CORS_HEADER_REQUEST_HEADERS:
            self.cors_request_headers = orig_value
        # A CORS response is allowed when an Origin was sent and any
        # preflighted method is in the allowed set.
        if (self.origin and
            ((self.cors_request_method is None) or
             (self.cors_request_method.upper() in _CORS_ALLOWED_METHODS))):
          self.allow_cors_request = True
      def UpdateHeaders(self, headers):
        """Add CORS headers to the response, if needed."""
        if not self.allow_cors_request:
          return
        headers[_CORS_HEADER_ALLOW_ORIGIN] = self.origin
        headers[_CORS_HEADER_ALLOW_METHODS] = ','.join(
            tuple(_CORS_ALLOWED_METHODS))
        if self.cors_request_headers is not None:
          headers[_CORS_HEADER_ALLOW_HEADERS] = self.cors_request_headers
    def HandleSpiResponse(self, dispatched_output, outfile):
      """Handle SPI response, transforming output as needed.
      Args:
        dispatched_output: Response returned by SPI backend.
        outfile: File-like object to write transformed result.
      Returns:
        None
      """
      response = old_dev_appserver.AppServerResponse(
          response_file=dispatched_output)
      response_headers, body = self.ParseCgiResponse(response)
      headers = {}
      for header, value in response_headers.items():
        if (header.lower() == 'content-type' and
            not value.lower().startswith('application/json')):
          return self.FailRequest('Non-JSON reply: %s' % body, outfile)
        elif header.lower() not in ('content-length', 'content-type'):
          # Content-Length / Content-Type are recomputed for the (possibly
          # rewritten) body; all other headers pass through unchanged.
          headers[header] = value
      if self.IsRpc():
        body = self.TransformJsonrpcResponse(body)
      self._EndRequest()
      cors_handler = ApiserverDispatcher.__CheckCorsHeaders(self.request)
      return SendCGIResponse(response.status_code, headers, body, outfile,
                             cors_handler=cors_handler)
    def FailRequest(self, message, outfile):
      """Write an immediate failure response to outfile, no redirect.
      Args:
        message: Error message to be displayed to user (plain text).
        outfile: File-like object to write CGI response to.
      Returns:
        None
      """
      self._EndRequest()
      # self.request may still be None if Dispatch failed before parsing.
      if self.request:
        cors_handler = ApiserverDispatcher.__CheckCorsHeaders(self.request)
      else:
        cors_handler = None
      return SendCGIErrorResponse(message, outfile, cors_handler=cors_handler)
    def LookupRestMethod(self):
      """Looks up and returns rest method for the currently-pending request.
      This method uses self.request as the currently-pending request.
      Returns:
        tuple of (method, parameters)
      """
      method_name, method, params = self.config_manager.LookupRestMethod(
          self.request.path, self.request.http_method)
      self.request.method_name = method_name
      return method, params
    def LookupRpcMethod(self):
      """Looks up and returns RPC method for the currently-pending request.
      This method uses self.request as the currently-pending request.
      Returns:
        RPC method that was found for the current request.
      """
      if not self.request.body_obj:
        return None
      try:
        method_name = self.request.body_obj.get('method', '')
      except AttributeError:
        # body_obj is a list, i.e. a JSON-RPC batch request. Only
        # single-element batches are supported; unwrap the element and
        # remember (via _is_batch) to re-wrap the response in a list.
        if len(self.request.body_obj) != 1:
          raise NotImplementedError('Batch requests with more than 1 element '
                                    'not supported in dev_appserver. Found '
                                    '%d elements.' % len(self.request.body_obj))
        logging.info('Converting batch request to single request.')
        self.request.body_obj = self.request.body_obj[0]
        method_name = self.request.body_obj.get('method', '')
        self._is_batch = True
      version = self.request.body_obj.get('apiVersion', '')
      self.request.method_name = method_name
      return self.config_manager.LookupRpcMethod(method_name, version)
    def TransformRequest(self, params, method_config):
      """Transforms self.request to apiserving request.
      This method uses self.request to determine the currently-pending request.
      This method accepts a rest-style or RPC-style request.
      Side effects:
        Updates self.request to apiserving format. (e.g. updating path to be the
        method name, and moving request parameters to the body.)
      Args:
        params: Path parameters dictionary for rest request
        method_config: API config of the method to be called
      """
      if self.IsRpc():
        self.TransformJsonrpcRequest()
      else:
        method_params = method_config.get('request', {}).get('parameters', {})
        self.TransformRestRequest(params, method_params)
      # The SPI is addressed by its backend ("rosy") method name.
      self.request.path = method_config.get('rosyMethod', '')
    def _CheckEnum(self, parameter_name, value, field_parameter):
      """Checks if the parameter value is valid if an enum.
      If the parameter is not an enum, does nothing. If it is, verifies that
      its value is valid.
      Args:
        parameter_name: String; The name of the parameter, which is either just
          a variable name or the name with the index appended. For example 'var'
          or 'var[2]'.
        value: String or list of Strings; The value(s) to be used as enum(s) for
          the parameter.
        field_parameter: The dictionary containing information specific to the
          field in question. This is retrieved from request.parameters in the
          method config.
      Raises:
        EnumRejectionError: If the given value is not among the accepted
          enum values in the field parameter.
      """
      if 'enum' not in field_parameter:
        return
      enum_values = [enum['backendValue']
                     for enum in field_parameter['enum'].values()
                     if 'backendValue' in enum]
      if value not in enum_values:
        raise EnumRejectionError(parameter_name, value, enum_values)
    def _CheckParameter(self, parameter_name, value, field_parameter):
      """Checks if the parameter value is valid against all parameter rules.
      First checks if the value is a list and recursively calls _CheckParameter
      on the values in the list. Otherwise, checks all parameter rules for the
      the current value.
      In the list case, '[index-of-value]' is appended to the parameter name for
      error reporting purposes.
      Currently only checks if value adheres to enum rule, but more can be
      added.
      Args:
        parameter_name: String; The name of the parameter, which is either just
          a variable name or the name with the index appended in the recursive
          case. For example 'var' or 'var[2]'.
        value: String or List of values; The value(s) to be used for the
          parameter.
        field_parameter: The dictionary containing information specific to the
          field in question. This is retrieved from request.parameters in the
          method config.
      """
      if isinstance(value, list):
        for index, element in enumerate(value):
          parameter_name_index = '%s[%d]' % (parameter_name, index)
          self._CheckParameter(parameter_name_index, element, field_parameter)
        return
      self._CheckEnum(parameter_name, value, field_parameter)
    def _AddMessageField(self, field_name, value, params):
      """Converts a . delimitied field name to a message field in parameters.
      For example:
        {'a.b.c': ['foo']}
      becomes:
        {'a': {'b': {'c': ['foo']}}}
      Args:
        field_name: String; the . delimitied name to be converted into a
          dictionary.
        value: The value to be set.
        params: The dictionary holding all the parameters, where the value is
          eventually set.
      """
      if '.' not in field_name:
        params[field_name] = value
        return
      root, remaining = field_name.split('.', 1)
      sub_params = params.setdefault(root, {})
      self._AddMessageField(remaining, value, sub_params)
    def _UpdateFromBody(self, destination, source):
      """Updates the dictionary for an API payload with the request body.
      The values from the body should override those already in the payload, but
      for nested fields (message objects), the values can be combined
      recursively.
      Args:
        destination: A dictionary containing an API payload parsed from the
          path and query parameters in a request.
        source: The object parsed from the body of the request.
      """
      for key, value in source.iteritems():
        destination_value = destination.get(key)
        if isinstance(value, dict) and isinstance(destination_value, dict):
          self._UpdateFromBody(destination_value, value)
        else:
          destination[key] = value
    def TransformRestRequest(self, params, method_parameters):
      """Translates a Rest request/response into an apiserving request/response.
      The request can receive values from the path, query and body and combine
      them before sending them along to the SPI server. In cases of collision,
      objects from the body take precedence over those from the query, which in
      turn take precedence over those from the path.
      In the case that a repeated value occurs in both the query and the path,
      those values can be combined, but if that value also occurred in the body,
      it would override any other values.
      In the case of nested values from message fields, non-colliding values
      from subfields can be combined. For example, if '?a.c=10' occurs in the
      query string and "{'a': {'b': 11}}" occurs in the body, then they will be
      combined as
      {
        'a': {
          'b': 11,
          'c': 10,
        }
      }
      before being sent to the SPI server.
      Side effects:
        Updates self.request to apiserving format. (e.g. updating path to be the
        method name, and moving request parameters to the body.)
      Args:
        params: URL path parameter dict extracted by config_manager lookup.
        method_parameters: Dictionary; The parameters for the request from the
          API config of the method.
      """
      body_obj = {}
      for key, value in params.iteritems():
        body_obj[key] = [value]
      if self.request.parameters:
        # Query values are prepended so, for repeated fields, they precede
        # path values in the combined list.
        for key, value in self.request.parameters.iteritems():
          if key in body_obj:
            body_obj[key] = value + body_obj[key]
          else:
            body_obj[key] = value
      for key, value in body_obj.items():
        current_parameter = method_parameters.get(key, {})
        repeated = current_parameter.get('repeated', False)
        if not repeated:
          body_obj[key] = body_obj[key][0]
        self._CheckParameter(key, body_obj[key], current_parameter)
        # Re-insert dotted keys ('a.b.c') as nested message dicts.
        message_value = body_obj.pop(key)
        self._AddMessageField(key, message_value, body_obj)
      if self.request.body_obj:
        self._UpdateFromBody(body_obj, self.request.body_obj)
      self.request.body_obj = body_obj
      self.request.body = json.dumps(body_obj)
    def TransformJsonrpcRequest(self):
      """Translates a JsonRpc request/response into apiserving request/response.
      Side effects:
        Updates self.request to apiserving format. (e.g. updating path to be the
        method name, and moving request parameters to the body.)
      """
      body_obj = json.loads(self.request.body) if self.request.body else {}
      try:
        self.request.request_id = body_obj.get('id')
      except AttributeError:
        # body_obj is a list: a single-element JSON-RPC batch that
        # LookupRpcMethod has already flagged via self._is_batch.
        assert self._is_batch
        if len(body_obj) != 1:
          raise NotImplementedError('Batch requests with more than 1 element '
                                    'not supported in dev_appserver. Found '
                                    '%d elements.' % len(self.request.body_obj))
        body_obj = body_obj[0]
        self.request.request_id = body_obj.get('id')
      body_obj = body_obj.get('params', {})
      self.request.body = json.dumps(body_obj)
    def TransformJsonrpcResponse(self, response_body):
      """Translates a apiserving response to a JsonRpc response.
      Side effects:
        Updates self.request to JsonRpc format. (e.g. restoring request id
        and moving body object into {'result': body_obj}
      Args:
        response_body: Backend response to transform back to JsonRPC
      Returns:
        Updated, JsonRPC-formatted request body
      """
      body_obj = {'result': json.loads(response_body)}
      if self.request.request_id is not None:
        body_obj['id'] = self.request.request_id
      if self._is_batch:
        # The request arrived as a one-element batch; respond in kind.
        body_obj = [body_obj]
      return json.dumps(body_obj)
  return ApiserverDispatcher(config_manager)
def BuildCGIRequest(base_env_dict, request, old_dev_appserver):
  """Convert an ApiRequest into an internal-redirect CGI request.
  The request is always POSTed to the SPI backend as JSON.
  Args:
    base_env_dict: CGI environment dict; REQUEST_METHOD is forced to POST.
    request: ApiRequest to be converted to a CGI request.
    old_dev_appserver: Handle to old_dev_appserver to generate CGI request.
  Returns:
    old_dev_appserver.AppServerRequest internal redirect object
  """
  if request.headers is None:
    request.headers = {}
  # The SPI speaks JSON regardless of what the original client sent.
  request.headers['Content-Type'] = 'application/json'
  spi_url = SPI_ROOT_FORMAT % (request.port, request.path)
  base_env_dict['REQUEST_METHOD'] = 'POST'
  header_buffer = cStringIO.StringIO()
  WriteHeaders(request.headers, header_buffer, len(request.body))
  header_buffer.seek(0)
  body_buffer = cStringIO.StringIO()
  body_buffer.write(request.body)
  body_buffer.seek(0)
  return old_dev_appserver.AppServerRequest(
      spi_url, None, mimetools.Message(header_buffer), body_buffer)
def WriteHeaders(headers, outfile, content_len=None):
  """Write headers to the output file, updating content length if needed.
  Args:
    headers: Header dict to be written.
    outfile: File-like object to send headers to.
    content_len: Optional updated content length to update content-length with.
  """
  wrote_content_length = False
  for header, value in headers.items():
    if header.lower() == 'content-length' and content_len is not None:
      # Replace any caller-supplied length with the authoritative one.
      value = content_len
      wrote_content_length = True
    outfile.write('%s: %s\r\n' % (header, value))
  # Bug fix: compare against None rather than truthiness so a legitimate
  # zero-length body still gets an explicit 'Content-Length: 0' header.
  if not wrote_content_length and content_len is not None:
    outfile.write('Content-Length: %s\r\n' % content_len)
def SendCGINotFoundResponse(outfile, cors_handler=None):
  """Write a canned plain-text 404 response to outfile."""
  not_found_headers = {'Content-Type': 'text/plain'}
  SendCGIResponse('404', not_found_headers, 'Not Found', outfile,
                  cors_handler=cors_handler)
def SendCGIErrorResponse(message, outfile, cors_handler=None):
  """Serialize message into a JSON error payload and send it with status 500."""
  error_body = json.dumps({'error': {'message': message}})
  SendCGIResponse('500', {'Content-Type': 'application/json'}, error_body,
                  outfile, cors_handler=cors_handler)
def SendCGIRejectedResponse(rejection_error, outfile, cors_handler=None):
  """Send a 400 response whose body is the rejection error's JSON form."""
  rejection_body = rejection_error.ToJson()
  SendCGIResponse('400', {'Content-Type': 'application/json'}, rejection_body,
                  outfile, cors_handler=cors_handler)
def SendCGIRedirectResponse(redirect_location, outfile, cors_handler=None):
  """Send a bodyless 302 redirect pointing at redirect_location."""
  redirect_headers = {'Location': redirect_location}
  SendCGIResponse('302', redirect_headers, None, outfile,
                  cors_handler=cors_handler)
def SendCGIResponse(status, headers, content, outfile, cors_handler=None):
  """Dump reformatted response to CGI outfile.
  Args:
    status: HTTP status code to send.
    headers: Headers dictionary {header_name: header_value, ...}; may be
      augmented in place with CORS headers.
    content: Body content to write, or None/empty for a bodyless response.
    outfile: File-like object where response will be written.
    cors_handler: A handler to process CORS request headers and update the
      headers in the response. Or this can be None, to bypass CORS checks.
  Returns:
    None
  """
  if cors_handler:
    cors_handler.UpdateHeaders(headers)
  body_length = len(content) if content else None
  outfile.write('Status: %s\r\n' % status)
  WriteHeaders(headers, outfile, body_length)
  if content:
    outfile.write('\r\n' + content)
  else:
    outfile.write('\r\n')
  # Rewind so the caller can stream the response from the beginning.
  outfile.seek(0)
|
levibostian/myBlanky
|
googleAppEngine/google/appengine/tools/dev_appserver_apiserver.py
|
Python
|
mit
| 53,339 | 0.00718 |
"""
XML tag writer utilities.
"""
from __future__ import absolute_import
#Init has to be imported first because it has code to workaround the python bug where relative imports don't work if the module is imported as a main module.
import __init__
import cStringIO
__author__ = 'Enrique Perez (perez_enrique@yahoo.com)'
__credits__ = 'Nophead <http://hydraraptor.blogspot.com/>\nArt of Illusion <http://www.artofillusion.org/>'
__date__ = '$Date: 2008/21/04 $'
__license__ = 'GNU Affero General Public License http://www.gnu.org/licenses/agpl.html'
def addBeginEndInnerXMLTag(attributes, depth, innerText, localName, output, text=''):
	'Write either a begin/inner/end tag triple or a single closed tag.'
	if len(innerText) == 0:
		# Nothing between the tags, so one self-closed tag is enough.
		addClosedXMLTag(attributes, depth, localName, output, text)
		return
	addBeginXMLTag(attributes, depth, localName, output, text)
	output.write(innerText)
	addEndXMLTag(depth, localName, output)
def addBeginXMLTag(attributes, depth, localName, output, text=''):
	'Write an opening xml tag, indented by depth tabs.'
	output.write('%s<%s%s>%s\n' % ('\t' * depth, localName, getAttributesString(attributes), text))
def addClosedXMLTag(attributes, depth, localName, output, text=''):
	'Write a self-terminated xml tag, with inline text when given.'
	tagStart = '%s<%s%s' % ('\t' * depth, localName, getAttributesString(attributes))
	if len(text) > 0:
		output.write('%s >%s</%s>\n' % (tagStart, text, localName))
	else:
		output.write('%s />\n' % tagStart)
def addEndXMLTag(depth, localName, output):
	'Write a closing xml tag, indented by depth tabs.'
	output.write('%s</%s>\n' % ('\t' * depth, localName))
def addXMLFromLoopComplexZ(attributes, depth, loop, output, z):
	'Write a path element whose vertexes come from a loop of complex points.'
	addBeginXMLTag(attributes, depth, 'path', output)
	for pointIndex, point in enumerate(loop):
		# The complex real/imag parts are the x/y coordinates; z is shared.
		addXMLFromXYZ(depth + 1, pointIndex, output, point.real, point.imag, z)
	addEndXMLTag(depth, 'path', output)
def addXMLFromObjects(depth, objects, output):
	'Ask each object in turn to serialize itself to the output.'
	for xmlObject in objects:
		xmlObject.addXML(depth, output)
def addXMLFromVertexes(depth, output, vertexes):
	'Write one indexed vertex element for each vertex.'
	for vertexIndex, vertex in enumerate(vertexes):
		addXMLFromXYZ(depth + 1, vertexIndex, output, vertex.x, vertex.y, vertex.z)
def addXMLFromXYZ(depth, index, output, x, y, z):
	'Write a vertex element; zero coordinates are omitted to keep the xml small.'
	attributes = {'index': str(index)}
	for axisName, axisValue in (('x', x), ('y', y), ('z', z)):
		if axisValue != 0.0:
			attributes[axisName] = str(axisValue)
	addClosedXMLTag(attributes, depth, 'vertex', output)
def compareAttributeKeyAscending(key, otherKey):
	'Get comparison in order to sort attribute keys in ascending order, with the id key first and name second.'
	# 'id' sorts before everything, 'name' second, then plain ascending order.
	for priorityKey in ('id', 'name'):
		if key == priorityKey:
			return -1
		if otherKey == priorityKey:
			return 1
	if key < otherKey:
		return -1
	return int(key > otherKey)
def getAttributesString(attributes):
	'Return the attributes formatted as xml attribute text, sorted with id and name first.'
	sortedKeys = attributes.keys()
	sortedKeys.sort(compareAttributeKeyAscending)
	chunks = []
	for attributeKey in sortedKeys:
		valueString = str(attributes[attributeKey])
		# Use double quotes only when the value itself contains a single quote.
		if "'" in valueString:
			chunks.append(' %s="%s"' % (attributeKey, valueString))
		else:
			chunks.append(" %s='%s'" % (attributeKey, valueString))
	return ''.join(chunks)
def getBeginGeometryXMLOutput(elementNode=None):
	'Start a geometry document: xml declaration plus an opened fabmetheus root tag.'
	output = getBeginXMLOutput()
	if elementNode is None:
		attributes = {}
	else:
		# Reuse the document element attributes of the source document.
		attributes = elementNode.getDocumentElement().attributes
	addBeginXMLTag(attributes, 0, 'fabmetheus', output)
	return output
def getBeginXMLOutput():
	'Return a fresh output buffer primed with the xml version declaration.'
	xmlOutput = cStringIO.StringIO()
	xmlOutput.write("<?xml version='1.0' ?>\n")
	return xmlOutput
def getDictionaryWithoutList(dictionary, withoutList):
	'Return a copy of the dictionary with the listed keys removed.'
	return dict((key, value) for key, value in dictionary.items() if key not in withoutList)
def getEndGeometryXMLString(output):
	'Close the root fabmetheus element and return the finished xml text.'
	addEndXMLTag(0, 'fabmetheus', output)
	return output.getvalue()
|
dob71/x2swn
|
skeinforge/fabmetheus_utilities/xml_simple_writer.py
|
Python
|
gpl-3.0
| 4,447 | 0.02586 |
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests for volume init host method cases."""
import mock
from oslo_config import cfg
from cinder import context
from cinder import objects
from cinder.tests.unit import utils as tests_utils
from cinder.tests.unit import volume as base
from cinder.volume import driver
from cinder.volume import utils as volutils
CONF = cfg.CONF
class VolumeInitHostTestCase(base.BaseVolumeTestCase):
    def setUp(self):
        """Set up the base volume fixture and the service id passed to init_host."""
        super(VolumeInitHostTestCase, self).setUp()
        # Passed as init_host(service_id=...) by every test in this class.
        self.service_id = 1
@mock.patch('cinder.manager.CleanableManager.init_host')
def test_init_host_count_allocated_capacity(self, init_host_mock):
vol0 = tests_utils.create_volume(
self.context, size=100, host=CONF.host)
vol1 = tests_utils.create_volume(
self.context, size=128,
host=volutils.append_host(CONF.host, 'pool0'))
vol2 = tests_utils.create_volume(
self.context, size=256,
host=volutils.append_host(CONF.host, 'pool0'))
vol3 = tests_utils.create_volume(
self.context, size=512,
host=volutils.append_host(CONF.host, 'pool1'))
vol4 = tests_utils.create_volume(
self.context, size=1024,
host=volutils.append_host(CONF.host, 'pool2'))
self.volume.init_host(service_id=self.service_id)
init_host_mock.assert_called_once_with(
service_id=self.service_id, added_to_cluster=None)
stats = self.volume.stats
self.assertEqual(2020, stats['allocated_capacity_gb'])
self.assertEqual(
384, stats['pools']['pool0']['allocated_capacity_gb'])
self.assertEqual(
512, stats['pools']['pool1']['allocated_capacity_gb'])
self.assertEqual(
1024, stats['pools']['pool2']['allocated_capacity_gb'])
# NOTE(jdg): On the create we have host='xyz', BUT
# here we do a db.volume_get, and now the host has
# been updated to xyz#pool-name. Note this is
# done via the managers init, which calls the drivers
# get_pool method, which in the legacy case is going
# to be volume_backend_name or None
vol0.refresh()
expected_host = volutils.append_host(CONF.host, 'fake')
self.assertEqual(expected_host, vol0.host)
self.volume.delete_volume(self.context, vol0)
self.volume.delete_volume(self.context, vol1)
self.volume.delete_volume(self.context, vol2)
self.volume.delete_volume(self.context, vol3)
self.volume.delete_volume(self.context, vol4)
@mock.patch('cinder.manager.CleanableManager.init_host')
def test_init_host_count_allocated_capacity_cluster(self, init_host_mock):
cluster_name = 'mycluster'
self.volume.cluster = cluster_name
# All these volumes belong to the same cluster, so we will calculate
# the capacity of them all because we query the DB by cluster_name.
tests_utils.create_volume(self.context, size=100, host=CONF.host,
cluster_name=cluster_name)
tests_utils.create_volume(
self.context, size=128, cluster_name=cluster_name,
host=volutils.append_host(CONF.host, 'pool0'))
tests_utils.create_volume(
self.context, size=256, cluster_name=cluster_name,
host=volutils.append_host(CONF.host + '2', 'pool0'))
tests_utils.create_volume(
self.context, size=512, cluster_name=cluster_name,
host=volutils.append_host(CONF.host + '2', 'pool1'))
tests_utils.create_volume(
self.context, size=1024, cluster_name=cluster_name,
host=volutils.append_host(CONF.host + '3', 'pool2'))
# These don't belong to the cluster so they will be ignored
tests_utils.create_volume(
self.context, size=1024,
host=volutils.append_host(CONF.host, 'pool2'))
tests_utils.create_volume(
self.context, size=1024, cluster_name=cluster_name + '1',
host=volutils.append_host(CONF.host + '3', 'pool2'))
self.volume.init_host(service_id=self.service_id)
init_host_mock.assert_called_once_with(
service_id=self.service_id, added_to_cluster=None)
stats = self.volume.stats
self.assertEqual(2020, stats['allocated_capacity_gb'])
self.assertEqual(
384, stats['pools']['pool0']['allocated_capacity_gb'])
self.assertEqual(
512, stats['pools']['pool1']['allocated_capacity_gb'])
self.assertEqual(
1024, stats['pools']['pool2']['allocated_capacity_gb'])
    @mock.patch.object(driver.BaseVD, "update_provider_info")
    def test_init_host_sync_provider_info(self, mock_update):
        """Provider ids returned by the driver are persisted on init_host.

        Only the volumes/snapshots the (mocked) driver reports back get
        their provider_id updated; vol2 is not in the driver's response,
        so its provider_id stays None.
        """
        vol0 = tests_utils.create_volume(
            self.context, size=1, host=CONF.host)
        vol1 = tests_utils.create_volume(
            self.context, size=1, host=CONF.host)
        vol2 = tests_utils.create_volume(
            self.context, size=1, host=CONF.host, status='creating')
        snap0 = tests_utils.create_snapshot(self.context, vol0.id)
        snap1 = tests_utils.create_snapshot(self.context, vol1.id)
        # Return values for update_provider_info
        volumes = [{'id': vol0.id, 'provider_id': '1 2 xxxx'},
                   {'id': vol1.id, 'provider_id': '3 4 yyyy'}]
        snapshots = [{'id': snap0.id, 'provider_id': '5 6 xxxx'},
                     {'id': snap1.id, 'provider_id': '7 8 yyyy'}]
        mock_update.return_value = (volumes, snapshots)
        # initialize
        self.volume.init_host(service_id=self.service_id)
        # Grab volume and snapshot objects
        vol0_obj = objects.Volume.get_by_id(context.get_admin_context(),
                                            vol0.id)
        vol1_obj = objects.Volume.get_by_id(context.get_admin_context(),
                                            vol1.id)
        vol2_obj = objects.Volume.get_by_id(context.get_admin_context(),
                                            vol2.id)
        snap0_obj = objects.Snapshot.get_by_id(self.context, snap0.id)
        snap1_obj = objects.Snapshot.get_by_id(self.context, snap1.id)
        # Check updated provider ids
        self.assertEqual('1 2 xxxx', vol0_obj.provider_id)
        self.assertEqual('3 4 yyyy', vol1_obj.provider_id)
        self.assertIsNone(vol2_obj.provider_id)
        self.assertEqual('5 6 xxxx', snap0_obj.provider_id)
        self.assertEqual('7 8 yyyy', snap1_obj.provider_id)
        # Clean up
        self.volume.delete_snapshot(self.context, snap0_obj)
        self.volume.delete_snapshot(self.context, snap1_obj)
        self.volume.delete_volume(self.context, vol0)
        self.volume.delete_volume(self.context, vol1)
    @mock.patch.object(driver.BaseVD, "update_provider_info")
    def test_init_host_sync_provider_info_no_update(self, mock_update):
        """If the driver reports no provider info, nothing is modified.

        update_provider_info returning empty lists must leave every
        volume's and snapshot's provider_id untouched (None).
        """
        vol0 = tests_utils.create_volume(
            self.context, size=1, host=CONF.host)
        vol1 = tests_utils.create_volume(
            self.context, size=1, host=CONF.host)
        snap0 = tests_utils.create_snapshot(self.context, vol0.id)
        snap1 = tests_utils.create_snapshot(self.context, vol1.id)
        mock_update.return_value = ([], [])
        # initialize
        self.volume.init_host(service_id=self.service_id)
        # Grab volume and snapshot objects
        vol0_obj = objects.Volume.get_by_id(context.get_admin_context(),
                                            vol0.id)
        vol1_obj = objects.Volume.get_by_id(context.get_admin_context(),
                                            vol1.id)
        snap0_obj = objects.Snapshot.get_by_id(self.context, snap0.id)
        snap1_obj = objects.Snapshot.get_by_id(self.context, snap1.id)
        # Check provider ids are not changed
        self.assertIsNone(vol0_obj.provider_id)
        self.assertIsNone(vol1_obj.provider_id)
        self.assertIsNone(snap0_obj.provider_id)
        self.assertIsNone(snap1_obj.provider_id)
        # Clean up
        self.volume.delete_snapshot(self.context, snap0_obj)
        self.volume.delete_snapshot(self.context, snap1_obj)
        self.volume.delete_volume(self.context, vol0)
        self.volume.delete_volume(self.context, vol1)
    @mock.patch.object(driver.BaseVD, "update_provider_info")
    def test_init_host_sync_provider_info_no_update_cluster(self, mock_update):
        """In a cluster, only cluster resources are passed to the driver.

        update_provider_info must receive the volumes/snapshots that belong
        to this manager's cluster (vol0, vol1) and not those on other hosts
        without a cluster (vol2) or in another cluster (vol3).
        """
        cluster_name = 'mycluster'
        self.volume.cluster = cluster_name
        vol0 = tests_utils.create_volume(
            self.context, size=1, host=CONF.host, cluster_name=cluster_name)
        vol1 = tests_utils.create_volume(
            self.context, size=1, host=CONF.host + '2',
            cluster_name=cluster_name)
        vol2 = tests_utils.create_volume(
            self.context, size=1, host=CONF.host)
        vol3 = tests_utils.create_volume(
            self.context, size=1, host=CONF.host,
            cluster_name=cluster_name + '2')
        snap0 = tests_utils.create_snapshot(self.context, vol0.id)
        snap1 = tests_utils.create_snapshot(self.context, vol1.id)
        # Snapshots of the out-of-cluster volumes; they must be filtered out.
        tests_utils.create_snapshot(self.context, vol2.id)
        tests_utils.create_snapshot(self.context, vol3.id)
        mock_update.return_value = ([], [])
        # initialize
        self.volume.init_host(service_id=self.service_id)
        # Grab volume and snapshot objects
        vol0_obj = objects.Volume.get_by_id(context.get_admin_context(),
                                            vol0.id)
        vol1_obj = objects.Volume.get_by_id(context.get_admin_context(),
                                            vol1.id)
        snap0_obj = objects.Snapshot.get_by_id(self.context, snap0.id)
        snap1_obj = objects.Snapshot.get_by_id(self.context, snap1.id)
        # Only the in-cluster resources were handed to the driver.
        self.assertSetEqual({vol0.id, vol1.id},
                            {vol.id for vol in mock_update.call_args[0][0]})
        self.assertSetEqual({snap0.id, snap1.id},
                            {snap.id for snap in mock_update.call_args[0][1]})
        # Check provider ids are not changed
        self.assertIsNone(vol0_obj.provider_id)
        self.assertIsNone(vol1_obj.provider_id)
        self.assertIsNone(snap0_obj.provider_id)
        self.assertIsNone(snap1_obj.provider_id)
        # Clean up
        self.volume.delete_snapshot(self.context, snap0_obj)
        self.volume.delete_snapshot(self.context, snap1_obj)
        self.volume.delete_volume(self.context, vol0)
        self.volume.delete_volume(self.context, vol1)
    @mock.patch('cinder.volume.manager.VolumeManager.'
                '_include_resources_in_cluster')
    def test_init_host_cluster_not_changed(self, include_in_cluster_mock):
        """No cluster migration work happens when added_to_cluster=False."""
        self.volume.init_host(added_to_cluster=False,
                              service_id=self.service_id)
        include_in_cluster_mock.assert_not_called()
    @mock.patch('cinder.objects.group.GroupList.include_in_cluster')
    @mock.patch('cinder.objects.snapshot.SnapshotList.get_all',
                return_value=[])
    @mock.patch('cinder.objects.volume.VolumeList.get_all', return_value=[])
    @mock.patch('cinder.objects.volume.VolumeList.include_in_cluster')
    @mock.patch('cinder.objects.consistencygroup.ConsistencyGroupList.'
                'include_in_cluster')
    @mock.patch('cinder.db.image_volume_cache_include_in_cluster')
    def test_init_host_added_to_cluster(self, image_cache_include_mock,
                                        cg_include_mock,
                                        vol_include_mock, vol_get_all_mock,
                                        snap_get_all_mock, group_include_mock):
        """When joining a cluster, all resource types are moved into it.

        Volumes, consistency groups, groups, and the image-volume cache for
        this host must each be included in the cluster, and volumes and
        snapshots are then listed by the new cluster_name.
        """
        cluster = str(mock.sentinel.cluster)
        self.mock_object(self.volume, 'cluster', cluster)
        self.volume.init_host(added_to_cluster=True,
                              service_id=self.service_id)
        vol_include_mock.assert_called_once_with(mock.ANY, cluster,
                                                 host=self.volume.host)
        cg_include_mock.assert_called_once_with(mock.ANY, cluster,
                                                host=self.volume.host)
        image_cache_include_mock.assert_called_once_with(mock.ANY, cluster,
                                                         host=self.volume.host)
        group_include_mock.assert_called_once_with(mock.ANY, cluster,
                                                   host=self.volume.host)
        vol_get_all_mock.assert_called_once_with(
            mock.ANY, filters={'cluster_name': cluster})
        snap_get_all_mock.assert_called_once_with(
            mock.ANY, filters={'cluster_name': cluster})
    @mock.patch('cinder.keymgr.migration.migrate_fixed_key')
    @mock.patch('cinder.volume.manager.VolumeManager._get_my_volumes')
    @mock.patch('cinder.manager.ThreadPoolManager._add_to_threadpool')
    def test_init_host_key_migration(self,
                                     mock_add_threadpool,
                                     mock_get_my_volumes,
                                     mock_migrate_fixed_key):
        """init_host schedules fixed-key migration on the thread pool.

        The migration must run asynchronously (via _add_to_threadpool)
        with this host's volumes, not inline in init_host.
        """
        self.volume.init_host(service_id=self.service_id)
        mock_add_threadpool.assert_called_once_with(
            mock_migrate_fixed_key,
            volumes=mock_get_my_volumes())
|
phenoxim/cinder
|
cinder/tests/unit/volume/test_init_host.py
|
Python
|
apache-2.0
| 14,005 | 0 |
###############################################################################
# Name: style_editor.py #
# Purpose: Syntax Highlighting configuration dialog #
# Author: Cody Precord <cprecord@editra.org> #
# Copyright: (c) 2008 Cody Precord <staff@editra.org> #
# License: wxWindows License #
###############################################################################
"""
Provides an editor dialog for graphically editing how the text is presented in
the editor when syntax highlighting is turned on. It does this by taking the
data from the controls and formating it into an Editra Style Sheet that the
editor can load to configure the styles of the text.
@summary: Gui for creating custom Editra Style Sheets
"""
__author__ = "Cody Precord <cprecord@editra.org>"
__svnid__ = "$Id: style_editor.py 63520 2010-02-19 03:27:26Z CJP $"
__revision__ = "$Revision: 63520 $"
#--------------------------------------------------------------------------#
# Imports
import os
import glob
import wx
# Editra Imports
import ed_glob
from profiler import Profile_Get, Profile_Set
import ed_basestc
from ed_style import StyleItem
import util
import syntax.syntax as syntax
import eclib
# Function Aliases
#_ = wx.GetTranslation
from gettext import gettext as _
# Global Values
# Widget ids for the dialog's controls. SETTINGS_IDS collects the ids of
# every control in the settings pane so they can be iterated when loading
# or reading back a style tag's attributes.
ID_STYLES = wx.NewId()
ID_FORE_COLOR = wx.NewId()
ID_BACK_COLOR = wx.NewId()
ID_BOLD = wx.NewId()
ID_ITALIC = wx.NewId()
ID_EOL = wx.NewId()
ID_ULINE = wx.NewId()
ID_FONT = wx.NewId()
ID_FONT_SIZE = wx.NewId()
SETTINGS_IDS = [ ID_FORE_COLOR, ID_BACK_COLOR, ID_BOLD, ID_ITALIC,
                 ID_EOL, ID_ULINE, ID_FONT, ID_FONT_SIZE ]
#--------------------------------------------------------------------------#
class StyleEditor(wx.Dialog):
    """This class creates the window that contains the controls
    for editing/configuring the syntax highlighting styles it acts
    as a graphical way to interact with the L{ed_style.StyleMgr}.
    @see: ed_style.StyleMgr
    """
    def __init__(self, parent, id_=wx.ID_ANY, title=_("Style Editor"),
                 style=wx.DEFAULT_DIALOG_STYLE | wx.RAISED_BORDER):
        """Initializes the Dialog
        @todo: rework the layout
        """
        wx.Dialog.__init__(self, parent, id_, title, style=style)
        # Attributes
        self.LOG = wx.GetApp().GetLog()
        self.preview = ed_basestc.EditraBaseStc(self, wx.ID_ANY, size=(-1, 200),
                                                style=wx.SUNKEN_BORDER)
        # Baseline styles come from the preview buffer; edits happen on a
        # deep copy so the baseline remains available for change detection.
        self.styles_orig = self.preview.GetStyleSet()
        self.preview.SetCaretLineVisible(True)
        self.styles_new = DuplicateStyleDict(self.styles_orig)
        self.preview.SetStyles('preview', self.styles_new, True)
        self.OpenPreviewFile('cpp')
        # XXX On Windows the settings pane must be made before the
        # sizer it is to be put in or it becomes unable to recieve
        # focus. But is the exact opposite on mac/gtk. This is really
        # a pain or possibly a bug?
        if wx.Platform == '__WXMSW__':
            self._settings = SettingsPanel(self)
        # Main Sizer
        sizer = wx.BoxSizer(wx.VERTICAL)
        # Control Panel
        self.ctrl_pane = wx.Panel(self, wx.ID_ANY)
        ctrl_sizer = wx.BoxSizer(wx.HORIZONTAL) # Main Control Sizer
        left_colum = wx.BoxSizer(wx.VERTICAL) # Left Column
        right_colum = wx.BoxSizer(wx.VERTICAL) # Right Column
        # XXX On Mac/GTK if panel is created before sizer all controls in
        #     it become unable to recieve focus from clicks, but it is the
        #     exact opposite on windows!
        if wx.Platform != '__WXMSW__':
            self._settings = SettingsPanel(self)
        # Control Panel Left Column
        left_colum.AddMany([((10, 10), 0),
                            (self.__StyleSheets(), 0, wx.ALIGN_LEFT),
                            ((10, 10), 0),
                            (self.__LexerChoice(), 0, wx.ALIGN_LEFT),
                            ((10, 10), 0),
                            (self.__StyleTags(), 1, wx.ALIGN_LEFT|wx.EXPAND),
                            ((10, 10), 0)])
        ctrl_sizer.Add(left_colum, 0, wx.ALIGN_LEFT)
        # Divider
        ctrl_sizer.Add(wx.StaticLine(self.ctrl_pane, size=(-1, 2),
                                     style=wx.LI_VERTICAL),
                       0, wx.ALIGN_CENTER_HORIZONTAL | wx.EXPAND)
        ctrl_sizer.Add((5, 5), 0)
        # Control Panel Right Column
        right_colum.Add(self._settings, 1, wx.ALIGN_LEFT | wx.EXPAND)
        ctrl_sizer.AddMany([(right_colum, 1, wx.ALIGN_RIGHT | wx.EXPAND),
                            ((5, 5), 0)])
        # Finish Control Panel Setup
        self.ctrl_pane.SetSizer(ctrl_sizer)
        sizer.AddMany([((10, 10)), (self.ctrl_pane, 0, wx.ALIGN_CENTER)])
        # Preview Area
        pre_sizer = wx.BoxSizer(wx.HORIZONTAL)
        pre_sizer.AddMany([((10, 10), 0),
                           (wx.StaticText(self, label=_("Preview") + u": "),
                            0, wx.ALIGN_LEFT)])
        sizer.AddMany([((10, 10), 0), (pre_sizer, 0, wx.ALIGN_LEFT),
                       (self.preview, 0, wx.EXPAND | wx.BOTTOM)])
        # Create Buttons
        b_sizer = wx.BoxSizer(wx.HORIZONTAL)
        ok_b = wx.Button(self, wx.ID_OK, _("OK"))
        ok_b.SetDefault()
        b_sizer.AddMany([(wx.Button(self, wx.ID_CANCEL, _("Cancel")), 0),
                         ((5, 5), 0),
                         (wx.Button(self, wx.ID_SAVE, _("Export")), 0),
                         ((5, 5), 0), (ok_b, 0)])
        sizer.Add(b_sizer, 0, wx.ALIGN_RIGHT |
                  wx.ALIGN_CENTER_VERTICAL | wx.ALL, 5)
        # Finish the Layout
        self.SetSizer(sizer)
        self.SetAutoLayout(True)
        sizer.Fit(self)
        # Settings stay disabled until a style tag is selected.
        self.EnableSettings(False)
        # Event Handlers
        self.Bind(wx.EVT_BUTTON, self.OnCancel, id=wx.ID_CANCEL)
        self.Bind(wx.EVT_BUTTON, self.OnOk, id=wx.ID_OK)
        self.Bind(wx.EVT_BUTTON, self.OnExport, id=wx.ID_SAVE)
        self.Bind(wx.EVT_CHOICE, self.OnChoice)
        self.Bind(wx.EVT_CHECKBOX, self.OnCheck)
        self.Bind(wx.EVT_CLOSE, self.OnClose)
        self.Bind(wx.EVT_LISTBOX, self.OnListBox)
        self.Bind(eclib.EVT_COLORSETTER, self.OnColor)
        self.preview.Bind(wx.EVT_LEFT_UP, self.OnTextRegion)
        self.preview.Bind(wx.EVT_KEY_UP, self.OnTextRegion)
    #--- End Init ---#
    def __LexerChoice(self):
        """Returns a sizer object containing a choice control with all
        available lexers listed in it.
        @return: sizer item containing a choice control with all available
                 syntax test files available
        """
        lex_sizer = wx.BoxSizer(wx.HORIZONTAL)
        lexer_lbl = wx.StaticText(self.ctrl_pane, wx.ID_ANY,
                                  _("Syntax Files") + u": ")
        lexer_lst = wx.Choice(self.ctrl_pane, ed_glob.ID_LEXER,
                              choices=syntax.GetLexerList())
        lexer_lst.SetToolTip(wx.ToolTip(_("Set the preview file type")))
        lexer_lst.SetStringSelection(u"CPP")
        lex_sizer.AddMany([((10, 10)), (lexer_lbl, 0, wx.ALIGN_CENTER_VERTICAL),
                           ((5, 0)), (lexer_lst, 1, wx.ALIGN_CENTER_VERTICAL),
                           ((10, 10))])
        return lex_sizer
    def __StyleSheets(self):
        """Returns a sizer item that contains a choice control with
        all the available style sheets listed in it.
        @return: sizer item holding all installed style sheets
        """
        ss_sizer = wx.BoxSizer(wx.HORIZONTAL)
        ss_lbl = wx.StaticText(self.ctrl_pane, wx.ID_ANY,
                               _("Style Theme") + u": ")
        ss_lst = util.GetResourceFiles(u'styles', get_all=True)
        ss_choice = wx.Choice(self.ctrl_pane, ed_glob.ID_PREF_SYNTHEME,
                              choices=sorted(ss_lst))
        ss_choice.SetToolTip(wx.ToolTip(_("Base new theme on existing one")))
        ss_choice.SetStringSelection(Profile_Get('SYNTHEME', 'str'))
        ss_new = wx.CheckBox(self.ctrl_pane, wx.ID_NEW, _("New"))
        ss_new.SetToolTip(wx.ToolTip(_("Start a blank new style")))
        ss_sizer.AddMany([((10, 10)), (ss_lbl, 0, wx.ALIGN_CENTER_VERTICAL),
                          ((5, 0)),
                          (ss_choice, 0, wx.ALIGN_CENTER_VERTICAL), ((10, 0)),
                          (ss_new, 0, wx.ALIGN_CENTER_VERTICAL), ((10, 10))])
        return ss_sizer
    def __StyleTags(self):
        """Returns a sizer object containing a choice control with all
        current style tags in it.
        @return: sizer item containing list of all available style tags
        """
        style_sizer = wx.BoxSizer(wx.HORIZONTAL)
        style_sizer2 = wx.BoxSizer(wx.VERTICAL)
        style_lbl = wx.StaticText(self.ctrl_pane, wx.ID_ANY,
                                  _("Style Tags") + u": ")
        style_tags = self.styles_orig.keys()
        lsize = (-1, 100)
        if wx.Platform == '__WXMAC__':
            # Slightly taller list control needed on OS X
            lsize = (-1, 120)
        style_lst = wx.ListBox(self.ctrl_pane, ID_STYLES, size=lsize,
                               choices=sorted(style_tags), style=wx.LB_SINGLE)
        style_sizer2.AddMany([(style_lbl, 0, wx.ALIGN_CENTER_VERTICAL),
                              (style_lst, 1, wx.EXPAND)])
        style_sizer.AddMany([((10, 10), 0),
                             (style_sizer2, 1,
                              wx.ALIGN_CENTER_HORIZONTAL | wx.EXPAND),
                             ((10, 10), 0)])
        return style_sizer
    def DiffStyles(self):
        """Checks if the current style set is different from the
        original set. If changes are found the user is prompted to
        save them.
        @return: wx.ID_NO when nothing changed or the user declined saving,
                 otherwise the id returned by the save prompt
                 (wx.ID_YES / wx.ID_NO / wx.ID_CANCEL)
        """
        diff = False
        for key in self.styles_orig:
            if unicode(self.styles_orig[key]) != unicode(self.styles_new[key]):
                diff = True
                break
        result = wx.ID_NO
        if diff:
            dlg = wx.MessageDialog(self,
                                    _("Some styles have been changed. Would "
                                      "you like to save before exiting?"),
                                   _("Save Styles"),
                                   style=wx.YES_NO | wx.YES_DEFAULT | \
                                         wx.CANCEL | wx.ICON_INFORMATION)
            dlg.CenterOnParent()
            result = dlg.ShowModal()
            dlg.Destroy()
        return result
    def EnableSettings(self, enable=True):
        """Enables/Disables all settings controls
        @keyword enable: whether to enable/disable settings controls
        """
        for child in self._settings.GetChildren():
            child.Enable(enable)
    def ExportStyleSheet(self):
        """Writes the style sheet data out to a style sheet
        @return: wx.ID_OK on successful export, otherwise wx.ID_CANCEL
        """
        # If we are pointed at the system style directory, redirect writes
        # to a per-user styles directory, creating it on first use.
        if ed_glob.CONFIG['STYLES_DIR'] == ed_glob.CONFIG['SYS_STYLES_DIR']:
            stdpath = wx.StandardPaths_Get()
            user_config = os.path.join(stdpath.GetUserDataDir(), 'styles')
            if not os.path.exists(user_config):
                try:
                    os.mkdir(user_config)
                except (OSError, IOError), msg:
                    self.LOG("[style_editor][err] %s" % msg)
                else:
                    ed_glob.CONFIG['STYLES_DIR'] = user_config
        result = wx.ID_CANCEL
        ss_c = self.FindWindowById(ed_glob.ID_PREF_SYNTHEME)
        new_cb = self.FindWindowById(wx.ID_NEW)
        if new_cb.GetValue():
            name = ''
        else:
            name = ss_c.GetStringSelection()
        fname = wx.GetTextFromUser(_("Enter style sheet name"),
                                   _("Export Style Sheet"),
                                   name, self)
        if len(fname):
            sheet_path = os.path.join(ed_glob.CONFIG['STYLES_DIR'], fname)
            if sheet_path.split(u'.')[-1] != u'ess':
                sheet_path += u".ess"
            try:
                writer = util.GetFileWriter(sheet_path)
                writer.write(self.GenerateStyleSheet())
                writer.close()
            except (AttributeError, IOError), msg:
                self.LOG('[style_editor][err] Failed to export style sheet')
                self.LOG('[style_editor][err] %s' % str(msg))
            else:
                # Update Style Sheet Control
                sheet = ".".join(os.path.basename(sheet_path).split(u'.')[:-1])
                ss_c.SetItems(util.GetResourceFiles(u'styles', get_all=True))
                ss_c.SetStringSelection(sheet)
                ss_c.Enable()
                self.FindWindowById(wx.ID_NEW).SetValue(False)
                # The exported set becomes the new baseline for DiffStyles.
                self.styles_orig = self.styles_new
                self.styles_new = DuplicateStyleDict(self.styles_orig)
                result = wx.ID_OK
            if sheet_path.startswith(ed_glob.CONFIG['STYLES_DIR']) or \
               sheet_path.startswith(ed_glob.CONFIG['SYS_STYLES_DIR']):
                # Update editor windows/buffer to use new style sheet
                UpdateBufferStyles(sheet)
        return result
    def GenerateStyleSheet(self):
        """Generates a style sheet from the dialogs style data
        @return: The dictionary of L{StyleItem} in self.styles_new transformed
                 into a string that is in Editra Style Sheet format.
        """
        sty_sheet = list()
        # default_style is emitted first and in full; all other tags only
        # list the attributes that differ from the default.
        ditem = self.styles_new.get('default_style', StyleItem())
        dvals = ';\n\t\t'.join([item.replace(',', ' ')
                                for item in ditem.GetAsList() ]) + ';'
        sty_sheet.append(''.join(['default_style {\n\t\t', dvals, '\n\n}\n\n']))
        tags = sorted(self.styles_new.keys())
        for tag in tags:
            item = self.styles_new[tag]
            if item.IsNull() or tag == 'default_style':
                continue
            stage1 = wx.EmptyString
            for attr in ('fore', 'back', 'face', 'size'):
                ival = item.GetNamedAttr(attr)
                if attr in ('fore', 'back'):
                    ival = ival.upper()
                if ival is None or ival == ditem.GetNamedAttr(attr):
                    continue
                stage1 = ''.join((stage1, attr, u':',
                                  ival.replace(',', ' '), u';'))
            # Add any modifiers to the modifier tag
            modifiers = item.GetModifiers()
            if len(modifiers):
                stage1 += (u"modifiers:" + modifiers + u";").replace(',', ' ')
            # If the StyleItem had any set attributes add it to the stylesheet
            if len(stage1):
                sty_sheet.append(tag + u" {\n")
                stage2 = u"\t\t" + stage1[0:-1].replace(u";", u";\n\t\t") + u";"
                sty_sheet.append(stage2)
                sty_sheet.append(u"\n}\n\n")
        return u"".join(sty_sheet)
    def OnCancel(self, evt):
        """Catches the cancel button clicks and checks if anything
        needs to be done before closing the window.
        @param evt: event that called this handler
        """
        self.LOG('[style_editor][evt] Cancel Clicked Closing Window')
        evt.Skip()
    def OnCheck(self, evt):
        """Handles Checkbox events
        @param evt: event that called this handler
        """
        e_id = evt.GetId()
        e_obj = evt.GetEventObject()
        if e_id == wx.ID_NEW:
            # "New" checkbox: toggle between a blank style set and the
            # currently selected theme.
            val = e_obj.GetValue()
            choice = self.ctrl_pane.FindWindowById(ed_glob.ID_PREF_SYNTHEME)
            choice.Enable(not val)
            if val:
                self.styles_orig = self.preview.BlankStyleDictionary()
                self.styles_new = DuplicateStyleDict(self.styles_orig)
                self.preview.SetStyles('preview', self.styles_new, nomerge=True)
                self.preview.UpdateAllStyles('preview')
                # For some reason this causes the text display to refresh
                # properly when nothing else would work.
                self.OnTextRegion()
            else:
                scheme = choice.GetStringSelection().lower()
                self.preview.UpdateAllStyles(scheme)
                self.styles_orig = self.preview.GetStyleSet()
                self.styles_new = DuplicateStyleDict(self.styles_orig)
        elif e_id in [ID_BOLD, ID_EOL, ID_ULINE, ID_ITALIC]:
            self.UpdateStyleSet(e_id)
        else:
            evt.Skip()
    def OnChoice(self, evt):
        """Handles the events generated from the choice controls
        @param evt: event that called this handler
        """
        e_id = evt.GetId()
        e_obj = evt.GetEventObject()
        val = e_obj.GetStringSelection()
        if e_id == ed_glob.ID_LEXER:
            self.OpenPreviewFile(val)
        elif e_id == ed_glob.ID_PREF_SYNTHEME:
            # TODO Need to check for style changes before switching this
            self.preview.UpdateAllStyles(val)
            self.styles_new = self.preview.GetStyleSet()
            self.styles_orig = DuplicateStyleDict(self.styles_new)
            ctrl = self.FindWindowById(ID_STYLES)
            tag = ctrl.GetStringSelection()
            if tag != wx.EmptyString:
                self.UpdateSettingsPane(self.styles_new[tag])
        elif e_id in [ID_FONT, ID_FONT_SIZE]:
            self.UpdateStyleSet(e_id)
        else:
            evt.Skip()
    def OnClose(self, evt):
        """Handles the window closer event
        @param evt: event that called this handler
        """
        self.LOG("[style_editor][evt] Dialog closing...")
        self.OnOk(evt)
    def OnColor(self, evt):
        """Handles color selection events
        @param evt: event that called this handler
        """
        # Update The Style data for current tag
        self.UpdateStyleSet(evt.GetId())
    def OnTextRegion(self, evt=None):
        """Processes clicks in the preview control and sets the style
        selection in the style tags list to the style tag of the area
        the cursor has moved into.
        @param evt: event that called this handler
        """
        if evt is not None:
            evt.Skip()
        style_id = self.preview.GetStyleAt(self.preview.GetCurrentPos())
        tag_lst = self.FindWindowById(ID_STYLES)
        data = self.preview.FindTagById(style_id)
        if data != wx.EmptyString and data in self.styles_new:
            tag_lst.SetStringSelection(data)
            if wx.Platform == '__WXGTK__':
                # Scroll the selection into view on GTK
                tag_lst.SetFirstItemStr(data)
            self.UpdateSettingsPane(self.styles_new[data])
            self.EnableSettings()
    def OnListBox(self, evt):
        """Catches the selection of a style tag in the listbox
        and updates the style window appropriately.
        @param evt: event that called this handler
        """
        tag = evt.GetEventObject().GetStringSelection()
        if tag != wx.EmptyString and tag in self.styles_new:
            self.UpdateSettingsPane(self.styles_new[tag])
            self.EnableSettings()
        else:
            self.EnableSettings(False)
    def OnOk(self, evt):
        """Catches the OK button click and checks if any changes need to be
        saved before the window closes.
        @param evt: event that called this handler
        """
        self.LOG('[style_editor][evt] Ok Clicked Closing Window')
        result = self.DiffStyles()
        if result == wx.ID_NO:
            # Get Current Selection to update buffers
            csheet = self.FindWindowById(ed_glob.ID_PREF_SYNTHEME).GetStringSelection()
            UpdateBufferStyles(csheet)
            evt.Skip()
        elif result == wx.ID_CANCEL:
            self.LOG('[style_editor][info] canceled closing')
        else:
            # User chose to save: only close the dialog if the export
            # was not canceled.
            result = self.ExportStyleSheet()
            if result != wx.ID_CANCEL:
                evt.Skip()
    def OnExport(self, evt):
        """Catches save button event
        @param evt: event that called this handler
        """
        self.LOG('[style_editor][evt] Export Clicked')
        self.ExportStyleSheet()
    def OpenPreviewFile(self, file_lbl):
        """Opens a file using the names in the Syntax Files choice
        control as a search query.
        @param file_lbl: name of file to open in test data directory
        @return: True if a matching sample file was loaded, else False
        """
        fname = file_lbl.replace(u" ", u"_").replace(u"/", u"_").lower()
        fname = fname.replace('#', 'sharp')
        try:
            fname = glob.glob(ed_glob.CONFIG['TEST_DIR'] + fname + ".*")[0]
        except IndexError:
            self.LOG('[style_editor][err] File %s Does not exist' % fname)
            return False
        self.preview.SetFileName(fname)
        self.preview.ClearAll()
        self.preview.LoadFile(fname)
        self.preview.FindLexer()
        self.preview.EmptyUndoBuffer()
        return True
    def UpdateSettingsPane(self, syntax_data):
        """Updates all the settings controls to hold the
        values of the selected tag.
        @param syntax_data: syntax data set (L{StyleItem}) to configure
                            the panel from
        """
        val_str = unicode(syntax_data)
        val_map = { ID_FORE_COLOR : syntax_data.GetFore(),
                    ID_BACK_COLOR : syntax_data.GetBack(),
                    ID_BOLD       : "bold" in val_str,
                    ID_ITALIC     : "italic" in val_str,
                    ID_EOL        : "eol" in val_str,
                    ID_ULINE      : "underline" in val_str,
                    ID_FONT       : syntax_data.GetFace(),
                    ID_FONT_SIZE  : syntax_data.GetSize()
                  }
        # Fall back to defaults for color values
        # that we may not be able to understand
        if u"#" not in val_map[ID_FORE_COLOR]:
            val_map[ID_FORE_COLOR] = self.preview.GetDefaultForeColour(as_hex=True)
        if u"#" not in val_map[ID_BACK_COLOR]:
            val_map[ID_BACK_COLOR] = self.preview.GetDefaultBackColour(as_hex=True)
        # Dispatch each value to its control based on the control's class.
        for sid in SETTINGS_IDS:
            ctrl = self.FindWindowById(sid)
            c_type = ctrl.GetClassName()
            if c_type == 'wxCheckBox':
                ctrl.SetValue(val_map[sid])
            elif c_type == "wxChoice":
                ctrl.SetStringSelection(val_map[sid])
            elif isinstance(ctrl, eclib.ColorSetter):
                ctrl.SetLabel(val_map[sid][:7])
        return True
    def UpdateStyleSet(self, id_):
        """Updates the value of the style tag to reflect any changes
        made in the settings controls.
        @param id_: identifier of the settings control that changed
        @return: False when no tag is selected or the control/id is
                 not recognized
        """
        # Get the tag that has been modified
        tag = self.FindWindowById(ID_STYLES)
        tag = tag.GetStringSelection()
        # NOTE(review): 'tag is None' would be the idiomatic comparison here
        if tag == None or tag == wx.EmptyString:
            return False
        # Get the modified value
        ctrl = self.FindWindowById(id_)
        ctrl_t = ctrl.GetClassName()
        if ctrl_t == 'wxCheckBox':
            val = ctrl.GetValue()
        elif ctrl_t == 'wxChoice':
            val = ctrl.GetStringSelection()
        elif isinstance(ctrl, eclib.ColorSetter):
            val = ctrl.GetLabel()
        else:
            return False
        # Update the value of the modified tag
        val_map = { ID_FONT       : u'face',
                    ID_FONT_SIZE  : u'size',
                    ID_BOLD       : u"bold",
                    ID_EOL        : u"eol",
                    ID_ITALIC     : u"italic",
                    ID_ULINE      : u"underline",
                    ID_FORE_COLOR : u"fore",
                    ID_BACK_COLOR : u"back"
                  }
        if id_ in [ ID_FONT, ID_FONT_SIZE, ID_FORE_COLOR, ID_BACK_COLOR ]:
            self.styles_new[tag].SetNamedAttr(val_map[id_], val)
        elif id_ in [ ID_BOLD, ID_ITALIC, ID_ULINE, ID_EOL ]:
            self.styles_new[tag].SetExAttr(val_map[id_], val)
        else:
            return False
        # Update the Preview Area
        self.preview.SetStyleTag(tag, self.styles_new[tag])
        self.preview.RefreshStyles()
#-----------------------------------------------------------------------------#
class SettingsPanel(wx.Panel):
    """Panel holding all settings controls for changing the font,
    colors, styles, ect.. in the style set.
    """
    def __init__(self, parent):
        """Create the settings panel"""
        wx.Panel.__init__(self, parent)
        # Attributes
        # Layout
        self.__DoLayout()
    def __DoLayout(self):
        """Layout the controls in the panel

        Builds three groups: a color box (foreground/background pickers),
        an attributes box (bold/eol/italic/underline checkboxes) and a font
        box (face name and size choices). Control ids come from the module
        level ID_* constants so the dialog can find them later.
        """
        setting_sizer = wx.BoxSizer(wx.VERTICAL)
        setting_top = wx.BoxSizer(wx.HORIZONTAL)
        # Settings top
        setting_sizer.Add((10, 10))
        cbox_sizer = wx.StaticBoxSizer(wx.StaticBox(self,
                                       label=_("Color") + u":"), wx.VERTICAL)
        # Foreground
        fground_sizer = wx.BoxSizer(wx.HORIZONTAL)
        fground_lbl = wx.StaticText(self, label=_("Foreground") + u": ")
        fground_sel = eclib.ColorSetter(self, ID_FORE_COLOR, wx.BLACK)
        fground_sizer.AddMany([((5, 5)),
                               (fground_lbl, 0, wx.ALIGN_CENTER_VERTICAL),
                               ((2, 2), 1, wx.EXPAND),
                               (fground_sel, 0, wx.ALIGN_CENTER_VERTICAL),
                               ((5, 5))])
        cbox_sizer.AddMany([(fground_sizer, 0, wx.ALIGN_LEFT | wx.EXPAND),
                            ((10, 10))])
        # Background
        bground_sizer = wx.BoxSizer(wx.HORIZONTAL)
        bground_lbl = wx.StaticText(self, label=_("Background") + u": ")
        bground_sel = eclib.ColorSetter(self, ID_BACK_COLOR, wx.WHITE)
        bground_sizer.AddMany([((5, 5)),
                               (bground_lbl, 0, wx.ALIGN_CENTER_VERTICAL),
                               ((2, 2), 1, wx.EXPAND),
                               (bground_sel, 0, wx.ALIGN_CENTER_VERTICAL),
                               ((5, 5))])
        cbox_sizer.Add(bground_sizer, 0, wx.ALIGN_LEFT | wx.EXPAND)
        setting_top.AddMany([(cbox_sizer, 0, wx.ALIGN_TOP), ((10, 10))])
        # Attrib Box
        attrib_box = wx.StaticBox(self, label=_("Attributes") + u":")
        abox_sizer = wx.StaticBoxSizer(attrib_box, wx.VERTICAL)
        # Attributes
        bold_cb = wx.CheckBox(self, ID_BOLD, _("bold"))
        eol_cb = wx.CheckBox(self, ID_EOL, _("eol"))
        ital_cb = wx.CheckBox(self, ID_ITALIC, _("italic"))
        uline_cb = wx.CheckBox(self, ID_ULINE, _("underline"))
        abox_sizer.AddMany([(bold_cb, 0, wx.ALIGN_CENTER_VERTICAL),
                            (eol_cb, 0, wx.ALIGN_CENTER_VERTICAL),
                            (ital_cb, 0, wx.ALIGN_CENTER_VERTICAL),
                            (uline_cb, 0, wx.ALIGN_CENTER_VERTICAL)])
        setting_top.Add(abox_sizer, 0, wx.ALIGN_TOP)
        # Font
        fh_sizer = wx.BoxSizer(wx.HORIZONTAL)
        font_box = wx.StaticBox(self, label=_("Font Settings") + u":")
        fbox_sizer = wx.StaticBoxSizer(font_box, wx.VERTICAL)
        # Font Face Name
        fsizer = wx.BoxSizer(wx.HORIZONTAL)
        flbl = wx.StaticText(self, label=_("Font") + u": ")
        fontenum = wx.FontEnumerator()
        if wx.Platform == '__WXMAC__':
            # FixedWidthOnly Asserts on wxMac
            fontenum.EnumerateFacenames(fixedWidthOnly=False)
        else:
            fontenum.EnumerateFacenames(fixedWidthOnly=True)
        # %(primary)s / %(secondary)s are placeholders resolved by the
        # style sheet machinery, offered ahead of the real face names.
        font_lst = ["%(primary)s", "%(secondary)s"]
        font_lst.extend(sorted(fontenum.GetFacenames()))
        fchoice = wx.Choice(self, ID_FONT, choices=font_lst)
        fsizer.AddMany([((5, 5), 0), (flbl, 0, wx.ALIGN_CENTER_VERTICAL),
                        (fchoice, 0, wx.ALIGN_CENTER_VERTICAL), ((5, 5))])
        fbox_sizer.Add(fsizer, 0, wx.ALIGN_LEFT)
        # Font Size
        fsize_sizer = wx.BoxSizer(wx.HORIZONTAL)
        fsize_lbl = wx.StaticText(self, label=_("Size") + u": ")
        fsizes = ['%(size)d', '%(size2)d']
        fsizes.extend([ str(x) for x in xrange(4, 21) ])
        fs_choice = wx.Choice(self, ID_FONT_SIZE, choices=fsizes)
        fsize_sizer.AddMany([((5, 5), 0),
                             (fsize_lbl, 0, wx.ALIGN_CENTER_VERTICAL),
                             (fs_choice, 1, wx.EXPAND | wx.ALIGN_RIGHT),
                             ((5, 5), 0)])
        fbox_sizer.AddMany([((5, 5)),
                            (fsize_sizer, 0, wx.ALIGN_LEFT | wx.EXPAND)])
        fh_sizer.AddMany([(fbox_sizer, 0, wx.ALIGN_CENTER_HORIZONTAL),
                          ((10, 10))])
        # Build Section
        setting_sizer.AddMany([(setting_top, 0, wx.ALIGN_CENTER_HORIZONTAL),
                               ((10, 10), 1, wx.EXPAND),
                               (fh_sizer, 0, wx.ALIGN_CENTER_HORIZONTAL)])
        self.SetSizer(setting_sizer)
        self.SetAutoLayout(True)
#-----------------------------------------------------------------------------#
# Utility funtcions
def DuplicateStyleDict(style_dict):
    """Make an independent copy of a tag->StyleItem dictionary.

    Plain assignment would only copy a reference, so each StyleItem is
    rebuilt from its string representation. Items whose string form cannot
    be parsed back are marked as null.
    @param style_dict: dictionary of tags->StyleItems
    @return: a copy of the given styleitem dictionary
    """
    copied = dict()
    for tag, item in style_dict.items():
        fresh = StyleItem()
        if not fresh.SetAttrFromStr(unicode(item)):
            fresh.null = True
        copied[tag] = fresh
    return copied
def UpdateBufferStyles(sheet):
    """Switch all buffers to the given style sheet.

    The profile is only touched when a real sheet name is given and it
    differs from the currently active one.
    @param sheet: Style sheet to use
    """
    if sheet is not None and sheet != Profile_Get('SYNTHEME'):
        Profile_Set('SYNTHEME', sheet)
|
ezequielpereira/Time-Line
|
libs/wx/tools/Editra/src/style_editor.py
|
Python
|
gpl-3.0
| 30,179 | 0.001922 |
#!/usr/bin/env python
# THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT
import setuptools
# All packaging metadata is delegated to pbr, which reads it from setup.cfg
# (per the OpenStack global-requirements convention noted above).
setuptools.setup(
    setup_requires=['pbr'],
    pbr=True)
|
alvarolopez/pyocci
|
setup.py
|
Python
|
apache-2.0
| 171 | 0 |
# -*- coding: utf-8 -*-
# ocitysmap, city map and street index generator from OpenStreetMap data
# Copyright (C) 2010 David Decotigny
# Copyright (C) 2010 Frédéric Lehobey
# Copyright (C) 2010 Pierre Mauduit
# Copyright (C) 2010 David Mentré
# Copyright (C) 2010 Maxime Petazzoni
# Copyright (C) 2010 Thomas Petazzoni
# Copyright (C) 2010 Gaël Utard
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# PT/metrics conversion routines
# Points per inch (PostScript convention).
PT_PER_INCH = 72.0

def convert_pt_to_dots(pt, dpi = PT_PER_INCH):
    """Convert a length in points into device dots at the given resolution.

    At the default dpi (72, i.e. one dot per point) the value is unchanged.
    """
    scaled = float(pt * dpi)
    return scaled / PT_PER_INCH
def convert_mm_to_pt(mm):
    """Convert millimetres to PostScript points (1 in = 25.4 mm = 72 pt)."""
    centimeters = mm / 10.0
    inches = centimeters / 2.54
    return inches * 72
def convert_pt_to_mm(pt):
    """Convert PostScript points to millimetres (72 pt = 1 in = 25.4 mm)."""
    tenth_mm_factor = float(pt) * 10.0
    return tenth_mm_factor * 2.54 / 72
|
dittaeva/ocitysmap
|
ocitysmap2/layoutlib/commons.py
|
Python
|
agpl-3.0
| 1,272 | 0.003943 |
"""
AWGs
"""
from atom.api import Atom, List, Int, Float, Range, Enum, Bool, Constant, Str
from Instrument import Instrument
import enaml
from enaml.qt.qt_application import QtApplication
from instruments.AWGBase import AWGChannel, AWG, AWGDriver
from plugins import find_plugins
# Discover all AWG plugin classes and expose each one as a module-level
# name so client code can refer to e.g. AWGs.APS directly.
AWGList = []
# local plugin registration to enable access by AWGs.plugin
plugins = find_plugins(AWG, verbose=False)
for plugin in plugins:
    if plugin not in AWGList:
        AWGList.append(plugin)
    if plugin.__name__ not in globals().keys():
        globals().update({plugin.__name__: plugin})
        # Python 2 print statement; announces each newly registered plugin
        print 'Registered Plugin {0}'.format(plugin.__name__)
if __name__ == "__main__":
    # Demo entry point: show the AWG configuration view for a single APS.
    # The enaml view module must be imported inside enaml.imports() so the
    # .enaml file is compiled.
    with enaml.imports():
        from AWGsViews import AWGView
    # APS is presumably provided by the plugin registration loop above --
    # it is not imported explicitly in this module.
    awg = APS(label='BBNAPS1')
    app = QtApplication()
    view = AWGView(awg=awg)
    view.show()
    app.start()
|
rmcgurrin/PyQLab
|
instruments/AWGs.py
|
Python
|
apache-2.0
| 872 | 0 |
# Copyright (c) 2018 Cisco and/or its affiliates.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import absolute_import
import pytest
from units.modules.utils import set_module_args, exit_json, fail_json, AnsibleFailJson, AnsibleExitJson
from ansible.module_utils import basic
from ansible.module_utils.network.ftd.common import FtdConfigurationError, FtdServerError, FtdUnexpectedResponse
from ansible.module_utils.network.ftd.configuration import FtdInvalidOperationNameError, CheckModeException
from ansible.module_utils.network.ftd.fdm_swagger_client import ValidationError
from ansible.modules.network.ftd import ftd_configuration
class TestFtdConfiguration(object):
    """Unit tests for the ftd_configuration module.

    Each test stubs out BaseConfigurationResource, makes it raise (or
    return) a specific outcome, and asserts the module maps it to the
    expected Ansible result (failed/changed/msg/response).
    """
    module = ftd_configuration
    @pytest.fixture(autouse=True)
    def module_mock(self, mocker):
        """Patch AnsibleModule exit/fail hooks so results surface as exceptions."""
        return mocker.patch.multiple(basic.AnsibleModule, exit_json=exit_json, fail_json=fail_json)
    @pytest.fixture(autouse=True)
    def connection_mock(self, mocker):
        """Replace the httpapi Connection used by the module with a mock."""
        connection_class_mock = mocker.patch('ansible.modules.network.ftd.ftd_configuration.Connection')
        return connection_class_mock.return_value
    @pytest.fixture
    def resource_mock(self, mocker):
        """Return the mocked execute_operation entry point of the resource."""
        resource_class_mock = mocker.patch('ansible.modules.network.ftd.ftd_configuration.BaseConfigurationResource')
        resource_instance = resource_class_mock.return_value
        return resource_instance.execute_operation
    def test_module_should_fail_when_ftd_invalid_operation_name_error(self, resource_mock):
        """Unknown operation names must fail with a descriptive message."""
        operation_name = 'test name'
        resource_mock.side_effect = FtdInvalidOperationNameError(operation_name)
        result = self._run_module_with_fail_json({'operation': operation_name})
        assert result['failed']
        assert 'Invalid operation name provided: %s' % operation_name == result['msg']
    def test_module_should_fail_when_ftd_configuration_error(self, resource_mock):
        """Configuration errors are reported with the operation name and cause."""
        operation_name = 'test name'
        msg = 'Foo error.'
        resource_mock.side_effect = FtdConfigurationError(msg)
        result = self._run_module_with_fail_json({'operation': operation_name})
        assert result['failed']
        assert 'Failed to execute %s operation because of the configuration error: %s' % (operation_name, msg) == \
               result['msg']
    def test_module_should_fail_when_ftd_server_error(self, resource_mock):
        """HTTP-level server errors include status code and raw response."""
        operation_name = 'test name'
        code = 500
        response = {'error': 'foo'}
        resource_mock.side_effect = FtdServerError(response, code)
        result = self._run_module_with_fail_json({'operation': operation_name})
        assert result['failed']
        assert 'Server returned an error trying to execute %s operation. Status code: %s. ' \
               'Server response: %s' % (operation_name, code, response) == \
               result['msg']
    def test_module_should_fail_when_validation_error(self, resource_mock):
        """Swagger validation errors propagate their message verbatim."""
        operation_name = 'test name'
        msg = 'Foo error.'
        resource_mock.side_effect = ValidationError(msg)
        result = self._run_module_with_fail_json({'operation': operation_name})
        assert result['failed']
        assert msg == result['msg']
    def test_module_should_fail_when_unexpected_server_response(self, resource_mock):
        """Malformed server responses propagate their message verbatim."""
        operation_name = 'test name'
        msg = 'Foo error.'
        resource_mock.side_effect = FtdUnexpectedResponse(msg)
        result = self._run_module_with_fail_json({'operation': operation_name})
        assert result['failed']
        assert msg == result['msg']
    def test_module_should_fail_when_check_mode_exception(self, resource_mock):
        """Check-mode aborts exit cleanly without reporting a change."""
        operation_name = 'test name'
        msg = 'Foo error.'
        resource_mock.side_effect = CheckModeException(msg)
        result = self._run_module({'operation': operation_name})
        assert not result['changed']
    def test_module_should_run_successful(self, resource_mock):
        """Successful operations return the resource's response payload."""
        operation_name = 'test name'
        resource_mock.return_value = 'ok'
        result = self._run_module({'operation': operation_name})
        assert result['response'] == 'ok'
    def _run_module(self, module_args):
        # Run main() expecting a clean exit; return the exit_json payload.
        set_module_args(module_args)
        with pytest.raises(AnsibleExitJson) as ex:
            self.module.main()
        return ex.value.args[0]
    def _run_module_with_fail_json(self, module_args):
        # Run main() expecting a failure; return the fail_json payload.
        set_module_args(module_args)
        with pytest.raises(AnsibleFailJson) as exc:
            self.module.main()
        result = exc.value.args[0]
        return result
|
Jorge-Rodriguez/ansible
|
test/units/modules/network/ftd/test_ftd_configuration.py
|
Python
|
gpl-3.0
| 5,145 | 0.002527 |
# Copyright 2010 Hakan Kjellerstrand hakank@bonetmail.com
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Nontransitive dice in Google CP Solver.
From
http://en.wikipedia.org/wiki/Nontransitive_dice
'''
A set of nontransitive dice is a set of dice for which the relation
'is more likely to roll a higher number' is not transitive. See also
intransitivity.
This situation is similar to that in the game Rock, Paper, Scissors,
in which each element has an advantage over one choice and a
disadvantage to the other.
'''
I start with the 3 dice version
'''
* die A has sides {2,2,4,4,9,9},
* die B has sides {1,1,6,6,8,8}, and
* die C has sides {3,3,5,5,7,7}.
'''
3 dice:
Maximum winning: 27
comp: [19, 27, 19]
dice:
[[0, 0, 3, 6, 6, 6],
[2, 5, 5, 5, 5, 5],
[1, 1, 4, 4, 4, 7]]
max_win: 27
Number of solutions: 1
Nodes: 1649873 Time: 25.94
getFailures: 1649853
getBacktracks: 1649873
getPropags: 98105090
Max winnings where they are the same: 21
comp: [21, 21, 21]
dice:
[[0, 0, 3, 3, 3, 6],
[2, 2, 2, 2, 2, 5],
[1, 1, 1, 4, 4, 4]]
max_win: 21
Compare with these models:
* MiniZinc: http://hakank.org/minizinc/nontransitive_dice.mzn
* Comet: http://hakank.org/comet/nontransitive_dice.co
This model was created by Hakan Kjellerstrand (hakank@bonetmail.com)
Also see my other Google CP Solver models:
http://www.hakank.org/google_or_tools/
"""
import sys
import string
from ortools.constraint_solver import pywrapcp
def main(m=3, n=6, minimize_val=0):
# Create the solver.
solver = pywrapcp.Solver("Nontransitive dice")
#
# data
#
print "number of dice:", m
print "number of sides:", n
#
# declare variables
#
dice = {}
for i in range(m):
for j in range(n):
dice[(i, j)] = solver.IntVar(1, n * 2, "dice(%i,%i)" % (i, j))
dice_flat = [dice[(i, j)] for i in range(m) for j in range(n)]
comp = {}
for i in range(m):
for j in range(2):
comp[(i, j)] = solver.IntVar(0, n * n, "comp(%i,%i)" % (i, j))
comp_flat = [comp[(i, j)] for i in range(m) for j in range(2)]
# The following variables are for summaries or objectives
gap = [solver.IntVar(0, n * n, "gap(%i)" % i) for i in range(m)]
gap_sum = solver.IntVar(0, m * n * n, "gap_sum")
max_val = solver.IntVar(0, n * 2, "max_val")
max_win = solver.IntVar(0, n * n, "max_win")
# number of occurrences of each value of the dice
counts = [solver.IntVar(0, n * m, "counts(%i)" % i) for i in range(n * 2 + 1)]
#
# constraints
#
# number of occurrences for each number
solver.Add(solver.Distribute(dice_flat, range(n * 2 + 1), counts))
solver.Add(max_win == solver.Max(comp_flat))
solver.Add(max_val == solver.Max(dice_flat))
# order of the number of each die, lowest first
[solver.Add(dice[(i, j)] <= dice[(i, j + 1)])
for i in range(m) for j in range(n - 1)]
# nontransitivity
[comp[i, 0] > comp[i, 1] for i in range(m)],
# probability gap
[solver.Add(gap[i] == comp[i, 0] - comp[i, 1]) for i in range(m)]
[solver.Add(gap[i] > 0) for i in range(m)]
solver.Add(gap_sum == solver.Sum(gap))
# and now we roll...
# Number of wins for [A vs B, B vs A]
for d in range(m):
b1 = [solver.IsGreaterVar(dice[d % m, r1], dice[(d + 1) % m, r2])
for r1 in range(n) for r2 in range(n)]
solver.Add(comp[d % m, 0] == solver.Sum(b1))
b2 = [solver.IsGreaterVar(dice[(d + 1) % m, r1], dice[d % m, r2])
for r1 in range(n) for r2 in range(n)]
solver.Add(comp[d % m, 1] == solver.Sum(b2))
# objective
if minimize_val != 0:
print "Minimizing max_val"
objective = solver.Minimize(max_val, 1)
# other experiments
# objective = solver.Maximize(max_win, 1)
# objective = solver.Maximize(gap_sum, 1)
#
# solution and search
#
db = solver.Phase(dice_flat + comp_flat,
solver.INT_VAR_DEFAULT,
solver.ASSIGN_MIN_VALUE)
if minimize_val:
solver.NewSearch(db, [objective])
else:
solver.NewSearch(db)
num_solutions = 0
while solver.NextSolution():
print "gap_sum:", gap_sum.Value()
print "gap:", [gap[i].Value() for i in range(m)]
print "max_val:", max_val.Value()
print "max_win:", max_win.Value()
print "dice:"
for i in range(m):
for j in range(n):
print dice[(i, j)].Value(),
print
print "comp:"
for i in range(m):
for j in range(2):
print comp[(i, j)].Value(),
print
print "counts:", [counts[i].Value() for i in range(n * 2 + 1)]
print
num_solutions += 1
solver.EndSearch()
print
print "num_solutions:", num_solutions
print "failures:", solver.Failures()
print "branches:", solver.Branches()
print "WallTime:", solver.WallTime()
# Default problem parameters; overridable on the command line as
#   nontransitive_dice.py [m] [n] [minimize_val]
m = 3  # number of dice
n = 6  # number of sides of each die
minimize_val = 0  # Minimizing max value (0: no, 1: yes)
if __name__ == "__main__":
  # int() replaces the long-deprecated string.atoi(); behavior is
  # identical for base-10 integer arguments.
  if len(sys.argv) > 1:
    m = int(sys.argv[1])
  if len(sys.argv) > 2:
    n = int(sys.argv[2])
  if len(sys.argv) > 3:
    minimize_val = int(sys.argv[3])

  main(m, n, minimize_val)
|
capturePointer/or-tools
|
examples/python/nontransitive_dice.py
|
Python
|
apache-2.0
| 5,680 | 0.011972 |
# -*- coding: utf-8 -*-
'''
This module (mostly) uses the XenAPI to manage Xen virtual machines.
Big fat warning: the XenAPI used in this file is the one bundled with
Xen Source, NOT XenServer nor Xen Cloud Platform. As a matter of fact it
*will* fail under those platforms. From what I've read, little work is needed
to adapt this code to XS/XCP, mostly playing with XenAPI version, but as
XCP is not taking precedence on Xen Source on many platforms, please keep
compatibility in mind.
Useful documentation:
. http://downloads.xen.org/Wiki/XenAPI/xenapi-1.0.6.pdf
. http://docs.vmd.citrix.com/XenServer/6.0.0/1.0/en_gb/api/
. https://github.com/xen-org/xen-api/tree/master/scripts/examples/python
. http://xenbits.xen.org/gitweb/?p=xen.git;a=tree;f=tools/python/xen/xm;hb=HEAD
'''
# Import python libs
import sys
import contextlib
import os
try:
import importlib
HAS_IMPORTLIB = True
except ImportError:
# Python < 2.7 does not have importlib
HAS_IMPORTLIB = False
# Import salt libs
from salt.exceptions import CommandExecutionError
import salt.utils
# Define the module's virtual name
__virtualname__ = 'virt'
# This module has only been tested on Debian GNU/Linux and NetBSD, it
# probably needs more path appending for other distributions.
# The path to append is the path to python Xen libraries, where resides
# XenAPI.
def _check_xenapi():
    '''
    Locate and import the Xen Source XenAPI python bindings.
    Returns the imported module on success, or ``False`` when the
    bindings are unavailable. On Debian the Xen python libraries live
    under a versioned prefix, so the path is discovered at runtime and
    appended to ``sys.path`` (a module-level side effect).
    '''
    if __grains__['os'] == 'Debian':
        debian_xen_version = '/usr/lib/xen-common/bin/xen-version'
        if os.path.isfile(debian_xen_version):
            # __salt__ is not available in __virtual__
            xenversion = salt.modules.cmdmod._run_quiet(debian_xen_version)
            xapipath = '/usr/lib/xen-{0}/lib/python'.format(xenversion)
            if os.path.isdir(xapipath):
                sys.path.append(xapipath)
    try:
        if HAS_IMPORTLIB:
            return importlib.import_module('xen.xm.XenAPI')
        # NOTE(review): __import__('xen.xm.XenAPI') returns the top-level
        # 'xen' package rather than the XenAPI submodule -- confirm this
        # Python < 2.7 fallback actually works as intended.
        return __import__('xen.xm.XenAPI')
    except ImportError:
        return False
def __virtual__():
    '''
    Only load this module when the XenAPI bindings can be imported.
    '''
    if _check_xenapi() is False:
        return False
    return __virtualname__
@contextlib.contextmanager
def _get_xapi_session():
    '''
    Context manager yielding a logged-in XenAPI session.
    By default, use the local UNIX socket; URI and credentials may be
    overridden via the ``xapi.uri``, ``xapi.login`` and ``xapi.password``
    minion config options. Raises CommandExecutionError when the
    connection or login fails; always logs out on exit.
    '''
    _xenapi = _check_xenapi()
    xapi_uri = __salt__['config.option']('xapi.uri')
    xapi_login = __salt__['config.option']('xapi.login')
    xapi_password = __salt__['config.option']('xapi.password')
    if not xapi_uri:
        # xend local UNIX socket
        xapi_uri = 'httpu:///var/run/xend/xen-api.sock'
    if not xapi_login:
        xapi_login = ''
    if not xapi_password:
        xapi_password = ''
    # BUG FIX: initialize before the try block -- if Session() itself
    # raised, the finally clause dereferenced an unbound 'session' and
    # masked the real error with a NameError.
    session = None
    try:
        session = _xenapi.Session(xapi_uri)
        session.xenapi.login_with_password(xapi_login, xapi_password)
        yield session.xenapi
    except Exception:
        raise CommandExecutionError('Failed to connect to XenAPI socket.')
    finally:
        if session is not None:
            session.xenapi.session.logout()
# Used rectypes (Record types):
#
# host
# host_cpu
# VM
# VIF
# VBD
def _get_xtool():
    '''
    Internal, returns the path of the xl or xm command line tool,
    whichever is found first (None when neither exists).
    '''
    for candidate in ('xl', 'xm'):
        location = salt.utils.which(candidate)
        if location is not None:
            return location
def _get_all(xapi, rectype):
    '''
    Internal, returns all members (opaque refs) of rectype,
    e.g. every 'VM' or 'host' object known to the XenAPI session.
    '''
    return getattr(xapi, rectype).get_all()
def _get_label_uuid(xapi, rectype, label):
    '''
    Internal, returns the uuid of the first rectype object whose
    name label matches, or False when no such object exists.
    '''
    try:
        return getattr(xapi, rectype).get_by_name_label(label)[0]
    # Broad on purpose: both an empty result (IndexError) and XenAPI
    # faults collapse to the module-wide "not found" value of False.
    except Exception:
        return False
def _get_record(xapi, rectype, uuid):
    '''
    Internal, returns the full record (a dict of fields) for the
    rectype object identified by uuid.
    '''
    return getattr(xapi, rectype).get_record(uuid)
def _get_record_by_label(xapi, rectype, label):
    '''
    Internal, returns the full record for the rectype object whose
    name label matches, or False when the label cannot be resolved.
    '''
    uuid = _get_label_uuid(xapi, rectype, label)
    if uuid is False:
        return False
    record_api = getattr(xapi, rectype)
    return record_api.get_record(uuid)
def _get_metrics_record(xapi, rectype, record):
    '''
    Internal, returns the metrics record associated with ``record``
    (looked up through the matching ``<rectype>_metrics`` class).
    '''
    metrics_class = '{0}_metrics'.format(rectype)
    return getattr(xapi, metrics_class).get_record(record['metrics'])
def _get_val(record, keys):
'''
Internal, get value from record
'''
data = record
for key in keys:
if key in data:
data = data[key]
else:
return None
return data
def list_vms():
    '''
    Return a list of virtual machine names on the minion
    (the control domain itself is excluded).
    CLI Example:
    .. code-block:: bash
        salt '*' virt.list_vms
    '''
    with _get_xapi_session() as xapi:
        names = []
        for ref in xapi.VM.get_all():
            record = xapi.VM.get_record(ref)
            if record['is_control_domain'] is False:
                names.append(xapi.VM.get_name_label(ref))
        return names
def vm_info(vm_=None):
    '''
    Return detailed information about the vms.
    If you pass a VM name in as an argument then it will return info
    for just the named VM, otherwise it will return all VMs.
    CLI Example:
    .. code-block:: bash
        salt '*' virt.vm_info
    '''
    with _get_xapi_session() as xapi:
        def _info(vm_):
            # Build the per-VM stats dict; False when the VM is unknown.
            vm_rec = _get_record_by_label(xapi, 'VM', vm_)
            if vm_rec is False:
                return False
            vm_metrics_rec = _get_metrics_record(xapi, 'VM', vm_rec)
            return {'cpu': vm_metrics_rec['VCPUs_number'],
                    'maxCPU': _get_val(vm_rec, ['VCPUs_max']),
                    'cputime': vm_metrics_rec['VCPUs_utilisation'],
                    'disks': get_disks(vm_),
                    'nics': get_nics(vm_),
                    'maxMem': int(_get_val(vm_rec, ['memory_dynamic_max'])),
                    'mem': int(vm_metrics_rec['memory_actual']),
                    'state': _get_val(vm_rec, ['power_state'])
                    }
        info = {}
        if vm_:
            ret = _info(vm_)
            if ret is not None:
                info[vm_] = ret
        else:
            for vm_ in list_vms():
                ret = _info(vm_)
                if ret is not None:
                    # BUG FIX: reuse the result already computed above; the
                    # original called _info() a second time per VM, doubling
                    # the XenAPI round trips.
                    info[vm_] = ret
        return info
def vm_state(vm_=None):
    '''
    Return list of all the vms and their state.
    If you pass a VM name in as an argument then it will return info
    for just the named VM, otherwise it will return all VMs.
    CLI Example:
    .. code-block:: bash
        salt '*' virt.vm_state <vm name>
    '''
    with _get_xapi_session() as xapi:
        targets = [vm_] if vm_ else list_vms()
        info = {}
        for name in targets:
            record = _get_record_by_label(xapi, 'VM', name)
            info[name] = record['power_state']
        return info
def node_info():
    '''
    Return a dict with information about this node: CPU topology and
    speed, memory totals, Xen version/build details and scheduler info.
    CLI Example:
    .. code-block:: bash
        salt '*' virt.node_info
    '''
    with _get_xapi_session() as xapi:
        # get node uuid
        host_rec = _get_record(xapi, 'host', _get_all(xapi, 'host')[0])
        # get first CPU (likely to be a core) uuid
        host_cpu_rec = _get_record(xapi, 'host_cpu', host_rec['host_CPUs'][0])
        # get related metrics
        host_metrics_rec = _get_metrics_record(xapi, 'host', host_rec)
        # adapted / cleaned up from Xen's xm
        def getCpuMhz():
            # NOTE(review): host_cpu_rec is a single record (a dict), so this
            # iterates over its *keys* and re-reads the same "speed" field on
            # every pass; presumably it was meant to iterate a list of CPU
            # records. Confirm against the XenAPI host_cpu schema.
            cpu_speeds = [int(host_cpu_rec["speed"])
                          for host_cpu_it in host_cpu_rec
                          if "speed" in host_cpu_it]
            if cpu_speeds:
                return sum(cpu_speeds) / len(cpu_speeds)
            else:
                return 0
        def getCpuFeatures():
            # Returns None implicitly when the record is empty/falsy.
            if host_cpu_rec:
                return host_cpu_rec['features']
        def getFreeCpuCount():
            cnt = 0
            # NOTE(review): same keys-iteration issue as getCpuMhz(); the
            # condition never varies across iterations -- verify intent.
            for host_cpu_it in host_cpu_rec:
                if len(host_cpu_rec['cpu_pool']) == 0:
                    cnt += 1
            return cnt
        info = {
            'cpucores': _get_val(host_rec,
                                 ["cpu_configuration", "nr_cpus"]),
            'cpufeatures': getCpuFeatures(),
            'cpumhz': getCpuMhz(),
            'cpuarch': _get_val(host_rec,
                                ["software_version", "machine"]),
            'cputhreads': _get_val(host_rec,
                                   ["cpu_configuration", "threads_per_core"]),
            'phymemory': int(host_metrics_rec["memory_total"]) / 1024 / 1024,
            'cores_per_sockets': _get_val(host_rec,
                                          ["cpu_configuration", "cores_per_socket"]),
            'free_cpus': getFreeCpuCount(),
            'free_memory': int(host_metrics_rec["memory_free"]) / 1024 / 1024,
            'xen_major': _get_val(host_rec,
                                  ["software_version", "xen_major"]),
            'xen_minor': _get_val(host_rec,
                                  ["software_version", "xen_minor"]),
            'xen_extra': _get_val(host_rec,
                                  ["software_version", "xen_extra"]),
            'xen_caps': " ".join(_get_val(host_rec, ["capabilities"])),
            'xen_scheduler': _get_val(host_rec,
                                      ["sched_policy"]),
            'xen_pagesize': _get_val(host_rec,
                                     ["other_config", "xen_pagesize"]),
            'platform_params': _get_val(host_rec,
                                        ["other_config", "platform_params"]),
            'xen_commandline': _get_val(host_rec,
                                        ["other_config", "xen_commandline"]),
            'xen_changeset': _get_val(host_rec,
                                      ["software_version", "xen_changeset"]),
            'cc_compiler': _get_val(host_rec,
                                    ["software_version", "cc_compiler"]),
            'cc_compile_by': _get_val(host_rec,
                                      ["software_version", "cc_compile_by"]),
            'cc_compile_domain': _get_val(host_rec,
                                          ["software_version", "cc_compile_domain"]),
            'cc_compile_date': _get_val(host_rec,
                                        ["software_version", "cc_compile_date"]),
            'xend_config_format': _get_val(host_rec,
                                           ["software_version", "xend_config_format"])
        }
        return info
def get_nics(vm_):
    '''
    Return info about the network interfaces of a named vm, keyed by
    MAC address (False when the VM is unknown).
    CLI Example:
    .. code-block:: bash
        salt '*' virt.get_nics <vm name>
    '''
    with _get_xapi_session() as xapi:
        vm_rec = _get_record_by_label(xapi, 'VM', vm_)
        if vm_rec is False:
            return False
        nics = {}
        for vif in vm_rec['VIFs']:
            vif_rec = _get_record(xapi, 'VIF', vif)
            nics[vif_rec['MAC']] = {
                'mac': vif_rec['MAC'],
                'device': vif_rec['device'],
                'mtu': vif_rec['MTU']
            }
        return nics
def get_macs(vm_):
    '''
    Return a list of MAC addresses from the named vm, or None when the
    VM has no NIC info.
    CLI Example:
    .. code-block:: bash
        salt '*' virt.get_macs <vm name>
    '''
    nics = get_nics(vm_)
    # BUG FIX: get_nics() returns False (not None) for an unknown VM, so
    # the original None check never fired and iterating False raised a
    # TypeError.
    if nics is None or nics is False:
        return None
    # get_nics() keys its result dict by MAC address.
    return [mac for mac in nics]
def get_disks(vm_):
    '''
    Return the disks of a named vm, keyed by device name,
    or False when the VM is unknown.
    CLI Example:
    .. code-block:: bash
        salt '*' virt.get_disks <vm name>
    '''
    with _get_xapi_session() as xapi:
        disk = {}
        vm_uuid = _get_label_uuid(xapi, 'VM', vm_)
        if vm_uuid is False:
            return False
        for vbd in xapi.VM.get_VBDs(vm_uuid):
            dev = xapi.VBD.get_device(vbd)
            # Skip VBDs without a device node (e.g. empty CD drives)
            if not dev:
                continue
            prop = xapi.VBD.get_runtime_properties(vbd)
            disk[dev] = {
                'backend': prop['backend'],
                'type': prop['device-type'],
                'protocol': prop['protocol']
            }
        return disk
def setmem(vm_, memory):
    '''
    Changes the amount of memory allocated to VM.
    Memory is to be specified in MB
    CLI Example:
    .. code-block:: bash
        salt '*' virt.setmem myvm 768
    '''
    with _get_xapi_session() as xapi:
        # XenAPI expects bytes; the CLI takes MB.
        target = int(memory) * 1024 * 1024
        uuid = _get_label_uuid(xapi, 'VM', vm_)
        if uuid is False:
            return False
        try:
            xapi.VM.set_memory_dynamic_max_live(uuid, target)
            xapi.VM.set_memory_dynamic_min_live(uuid, target)
        except Exception:
            return False
        return True
def setvcpus(vm_, vcpus):
    '''
    Changes the amount of vcpus allocated to VM.
    vcpus is an int representing the number to be assigned
    CLI Example:
    .. code-block:: bash
        salt '*' virt.setvcpus myvm 2
    '''
    with _get_xapi_session() as xapi:
        uuid = _get_label_uuid(xapi, 'VM', vm_)
        if uuid is False:
            return False
        try:
            xapi.VM.set_VCPUs_number_live(uuid, vcpus)
        except Exception:
            return False
        return True
def vcpu_pin(vm_, vcpu, cpus):
    '''
    Set which physical CPUs a VCPU can use.
    ``cpus`` accepts 'all', a single id, a comma list, ranges ('2-6')
    and exclusions ('^3'), as understood by xm/xl.
    CLI Example:
    .. code-block:: bash
        salt 'foo' virt.vcpu_pin domU-id 2 1
        salt 'foo' virt.vcpu_pin domU-id 2 2-6
    '''
    with _get_xapi_session() as xapi:
        vm_uuid = _get_label_uuid(xapi, 'VM', vm_)
        if vm_uuid is False:
            return False
        # from xm's main
        def cpu_make_map(cpulist):
            # Expand the xm-style cpu list syntax into a sorted,
            # comma-separated list of cpu ids.
            cpus = []
            for c in cpulist.split(','):
                if c == '':
                    continue
                if '-' in c:
                    # inclusive range, e.g. '2-6'
                    (x, y) = c.split('-')
                    for i in range(int(x), int(y) + 1):
                        cpus.append(int(i))
                else:
                    # remove this element from the list
                    if c[0] == '^':
                        cpus = [x for x in cpus if x != int(c[1:])]
                    else:
                        cpus.append(int(c))
            cpus.sort()
            return ','.join(map(str, cpus))
        if cpus == 'all':
            cpumap = cpu_make_map('0-63')
        else:
            cpumap = cpu_make_map('{0}'.format(cpus))
        try:
            xapi.VM.add_to_VCPUs_params_live(vm_uuid,
                                             'cpumap{0}'.format(vcpu), cpumap)
            return True
        # VM.add_to_VCPUs_params_live() implementation in xend 4.1+ has
        # a bug which makes the client call fail.
        # That code is accurate for all others XenAPI implementations, but
        # for that particular one, fallback to xm / xl instead.
        except Exception:
            return __salt__['cmd.run']('{0} vcpu-pin {1} {2} {3}'.format(
                _get_xtool(), vm_, vcpu, cpus))
def freemem():
    '''
    Return an int representing the amount of memory (in MB) that has
    not been given to virtual machines on this node
    CLI Example:
    .. code-block:: bash
        salt '*' virt.freemem
    '''
    stats = node_info()
    return stats['free_memory']
def freecpu():
    '''
    Return an int representing the number of unallocated cpus on this
    hypervisor
    CLI Example:
    .. code-block:: bash
        salt '*' virt.freecpu
    '''
    stats = node_info()
    return stats['free_cpus']
def full_info():
    '''
    Return both the node_info and the vm_info in a single dict
    CLI Example:
    .. code-block:: bash
        salt '*' virt.full_info
    '''
    info = {}
    info['node_info'] = node_info()
    info['vm_info'] = vm_info()
    return info
def shutdown(vm_):
    '''
    Send a soft shutdown signal to the named vm; returns True on
    success, False when the VM is unknown or the call fails.
    CLI Example:
    .. code-block:: bash
        salt '*' virt.shutdown <vm name>
    '''
    with _get_xapi_session() as xapi:
        uuid = _get_label_uuid(xapi, 'VM', vm_)
        if uuid is False:
            return False
        try:
            xapi.VM.clean_shutdown(uuid)
        except Exception:
            return False
        return True
def pause(vm_):
    '''
    Pause the named vm; returns True on success, False when the VM is
    unknown or the call fails.
    CLI Example:
    .. code-block:: bash
        salt '*' virt.pause <vm name>
    '''
    with _get_xapi_session() as xapi:
        uuid = _get_label_uuid(xapi, 'VM', vm_)
        if uuid is False:
            return False
        try:
            xapi.VM.pause(uuid)
        except Exception:
            return False
        return True
def resume(vm_):
    '''
    Resume (unpause) the named vm; returns True on success, False when
    the VM is unknown or the call fails.
    CLI Example:
    .. code-block:: bash
        salt '*' virt.resume <vm name>
    '''
    with _get_xapi_session() as xapi:
        uuid = _get_label_uuid(xapi, 'VM', vm_)
        if uuid is False:
            return False
        try:
            xapi.VM.unpause(uuid)
        except Exception:
            return False
        return True
# FIXME / TODO
# This function does NOT use the XenAPI. Instead, it use good old xm / xl.
# On Xen Source, creating a virtual machine using XenAPI is really painful.
# XCP / XS make it really easy using xapi.Async.VM.start, but I don't use
# those on any of my networks.
def create(config_):
    '''
    Start a defined domain from a Xen configuration file
    CLI Example:
    .. code-block:: bash
        salt '*' virt.create <path to Xen cfg file>
    '''
    cmd = '{0} create {1}'.format(_get_xtool(), config_)
    return __salt__['cmd.run'](cmd)
def start(config_):
    '''
    Alias for the obscurely named 'create' function
    CLI Example:
    .. code-block:: bash
        salt '*' virt.start <path to Xen cfg file>
    '''
    # Pure pass-through so callers can use the conventional 'start' name.
    return create(config_)
def reboot(vm_):
    '''
    Reboot a domain via ACPI request; returns True on success, False
    when the VM is unknown or the call fails.
    CLI Example:
    .. code-block:: bash
        salt '*' virt.reboot <vm name>
    '''
    with _get_xapi_session() as xapi:
        uuid = _get_label_uuid(xapi, 'VM', vm_)
        if uuid is False:
            return False
        try:
            xapi.VM.clean_reboot(uuid)
        except Exception:
            return False
        return True
def reset(vm_):
    '''
    Reset a VM by emulating the reset button on a physical machine;
    returns True on success, False when the VM is unknown or the call
    fails.
    CLI Example:
    .. code-block:: bash
        salt '*' virt.reset <vm name>
    '''
    with _get_xapi_session() as xapi:
        uuid = _get_label_uuid(xapi, 'VM', vm_)
        if uuid is False:
            return False
        try:
            xapi.VM.hard_reboot(uuid)
        except Exception:
            return False
        return True
def migrate(vm_, target,
            live=1, port=0, node=-1, ssl=None, change_home_server=0):
    '''
    Migrates the virtual machine to another hypervisor
    CLI Example:
    .. code-block:: bash
        salt '*' virt.migrate <vm name> <target hypervisor> [live] [port] [node] [ssl] [change_home_server]
    Optional values:
    live
        Use live migration
    port
        Use a specified port
    node
        Use specified NUMA node on target
    ssl
        use ssl connection for migration
    change_home_server
        change home server for managed domains
    '''
    with _get_xapi_session() as xapi:
        vm_uuid = _get_label_uuid(xapi, 'VM', vm_)
        if vm_uuid is False:
            return False
        # Tuning options forwarded verbatim to the XenAPI migrate call
        other_config = {
            'port': port,
            'node': node,
            'ssl': ssl,
            'change_home_server': change_home_server
        }
        try:
            # 'live' arrives as 0/1 from the CLI; XenAPI expects a bool
            xapi.VM.migrate(vm_uuid, target, bool(live), other_config)
            return True
        except Exception:
            return False
def destroy(vm_):
    '''
    Hard power down the virtual machine, this is equivalent to pulling
    the power; returns True on success, False when the VM is unknown or
    the call fails.
    CLI Example:
    .. code-block:: bash
        salt '*' virt.destroy <vm name>
    '''
    with _get_xapi_session() as xapi:
        uuid = _get_label_uuid(xapi, 'VM', vm_)
        if uuid is False:
            return False
        try:
            xapi.VM.hard_shutdown(uuid)
        except Exception:
            return False
        return True
def is_hyper():
    '''
    Returns a bool whether or not this node is a hypervisor of any kind
    (i.e. a Xen Dom0 with the xen modules loaded and xenstore running).
    CLI Example:
    .. code-block:: bash
        salt '*' virt.is_hyper
    '''
    try:
        if __grains__['virtual_subtype'] != 'Xen Dom0':
            return False
    except KeyError:
        # virtual_subtype isn't set everywhere.
        return False
    try:
        # BUG FIX: use a context manager so the /proc/modules handle is
        # closed instead of leaked on every call.
        with salt.utils.fopen('/proc/modules') as fp_:
            if 'xen_' not in fp_.read():
                return False
    except IOError:
        return False
    # there must be a smarter way...
    return 'xenstore' in __salt__['cmd.run'](__grains__['ps'])
def vm_cputime(vm_=None):
    '''
    Return cputime used by the vms on this hyper in a
    list of dicts:
    .. code-block:: python
        [
            'your-vm': {
                'cputime' <int>
                'cputime_percent' <int>
                },
            ...
            ]
    If you pass a VM name in as an argument then it will return info
    for just the named VM, otherwise it will return all VMs.
    CLI Example:
    .. code-block:: bash
        salt '*' virt.vm_cputime
    '''
    with _get_xapi_session() as xapi:
        def _info(vm_):
            host_rec = _get_record_by_label(xapi, 'VM', vm_)
            # BUG FIX: check for an unknown VM *before* dereferencing the
            # record; the original indexed host_rec first and raised a
            # TypeError instead of returning False.
            if host_rec is False:
                return False
            host_cpus = len(host_rec['host_CPUs'])
            host_metrics = _get_metrics_record(xapi, 'VM', host_rec)
            vcpus = int(host_metrics['VCPUs_number'])
            cputime = int(host_metrics['VCPUs_utilisation']['0'])
            cputime_percent = 0
            if cputime:
                # Divide by vcpus to always return a number between 0 and 100
                cputime_percent = (1.0e-7 * cputime / host_cpus) / vcpus
            return {'cputime': int(cputime),
                    'cputime_percent': int('%.0f' % cputime_percent)}
        info = {}
        if vm_:
            info[vm_] = _info(vm_)
            return info
        for vm_ in list_vms():
            info[vm_] = _info(vm_)
        return info
def vm_netstats(vm_=None):
    '''
    Return combined network counters used by the vms on this hyper in a
    list of dicts:
    .. code-block:: python
        [
            'your-vm': {
                'io_read_kbs'           : 0,
                'io_total_read_kbs'     : 0,
                'io_total_write_kbs'    : 0,
                'io_write_kbs'          : 0
                },
            ...
            ]
    If you pass a VM name in as an argument then it will return info
    for just the named VM, otherwise it will return all VMs.
    CLI Example:
    .. code-block:: bash
        salt '*' virt.vm_netstats
    '''
    with _get_xapi_session() as xapi:
        def _gather(name):
            # Per-VIF metrics keyed by device name; False for unknown VMs.
            vm_rec = _get_record_by_label(xapi, 'VM', name)
            if vm_rec is False:
                return False
            stats = {}
            for vif in vm_rec['VIFs']:
                vif_rec = _get_record(xapi, 'VIF', vif)
                metrics = _get_metrics_record(xapi, 'VIF', vif_rec)
                del metrics['last_updated']
                stats[vif_rec['device']] = metrics
            return stats
        if vm_:
            return {vm_: _gather(vm_)}
        info = {}
        for name in list_vms():
            info[name] = _gather(name)
        return info
def vm_diskstats(vm_=None):
    '''
    Return disk usage counters used by the vms on this hyper in a
    list of dicts:
    .. code-block:: python
        [
            'your-vm': {
                'io_read_kbs'   : 0,
                'io_write_kbs'  : 0
                },
            ...
            ]
    If you pass a VM name in as an argument then it will return info
    for just the named VM, otherwise it will return all VMs.
    CLI Example:
    .. code-block:: bash
        salt '*' virt.vm_diskstats
    '''
    with _get_xapi_session() as xapi:
        def _gather(name):
            # Per-VBD metrics keyed by device name; False for unknown VMs.
            uuid = _get_label_uuid(xapi, 'VM', name)
            if uuid is False:
                return False
            stats = {}
            for vbd in xapi.VM.get_VBDs(uuid):
                vbd_rec = _get_record(xapi, 'VBD', vbd)
                metrics = _get_metrics_record(xapi, 'VBD', vbd_rec)
                del metrics['last_updated']
                stats[vbd_rec['device']] = metrics
            return stats
        if vm_:
            return {vm_: _gather(vm_)}
        info = {}
        for name in list_vms():
            info[name] = _gather(name)
        return info
|
victorywang80/Maintenance
|
saltstack/src/salt/modules/xapi.py
|
Python
|
apache-2.0
| 24,954 | 0.000761 |
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ======================================
"""Library of TPU helper functions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import enum
import typing
from typing import Any, Callable, Iterable, List, Optional, Text, Tuple, Union
from absl import logging
import numpy as np
from six.moves import xrange # pylint: disable=redefined-builtin
from tensorflow.compiler.tf2xla.python import xla as tf2xla
from tensorflow.core.framework import attr_value_pb2
from tensorflow.core.protobuf.tpu import dynamic_padding_pb2 as dynamic_padding
from tensorflow.core.protobuf.tpu import tpu_embedding_configuration_pb2 as embedding_pb2
from tensorflow.python.compiler.xla import xla
from tensorflow.python.distribute import device_util
from tensorflow.python.distribute import distribution_strategy_context
from tensorflow.python.framework import auto_control_deps
from tensorflow.python.framework import c_api_util
from tensorflow.python.framework import composite_tensor
from tensorflow.python.framework import config
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import device as pydev
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import func_graph
from tensorflow.python.framework import function
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops import variables
from tensorflow.python.tpu import device_assignment as device_assignment_lib
from tensorflow.python.tpu import tpu_feed
from tensorflow.python.tpu import tpu_function
from tensorflow.python.tpu import tpu_name_util
from tensorflow.python.tpu.ops import tpu_ops
from tensorflow.python.types import core as core_types
from tensorflow.python.util import compat
from tensorflow.python.util import nest
from tensorflow.python.util import object_identity
from tensorflow.python.util.tf_export import tf_export
# TPUReplicatedInput fans replicated values into the cluster; no gradient is
# defined for it.
ops.NotDifferentiable("TPUReplicatedInput")

# Operations that indicate some error in the users graph, e.g. a placeholder
# that's introduced outside of the infeed.
_DENYLISTED_OPS = set([
    "Placeholder",
])

# XLA doesn't currently support reading of intermediate tensors, thus some ops
# are not supported.
_UNSUPPORTED_OPS = set([
    "AudioSummary",
    "AudioSummaryV2",
    "HistogramSummary",
    "ImageSummary",
    "MergeSummary",
    "Print",
    "ScalarSummary",
    "TensorSummary",
    "TensorSummaryV2",
])

# Ops which can be safely pruned from XLA compile if they have no consumers.
# These ops should also have no inputs.
_UNCONNECTED_OPS_TO_PRUNE = set(["Placeholder", "VarHandleOp"])

# Cap on the number of per-op lines emitted by
# TPUReplicateContext.report_unsupported_operations before truncating.
_MAX_WARNING_LINES = 5

# Node attribute names used by the TPU graph-rewrite passes to identify the
# replicated cluster, post-rewrite ops, compilation status, outside-compiled
# ops, and the per-cluster control pivot.
_TPU_REPLICATE_ATTR = "_tpu_replicate"
_POST_DEVICE_REWRITE_ATTR = "_post_device_rewrite"
_TPU_COMPILATION_STATUS_ATTR = "_tpu_compilation_status"
_OUTSIDE_COMPILATION_ATTR = "_xla_outside_compilation"
_PIVOT_FOR_CLUSTER = "_pivot_for_cluster"

# Re-exported alias of `tpu_name_util.core` for backward compatibility.
core = tpu_name_util.core
def _tpu_system_device_name(job: Optional[Text]) -> Text:
"""Returns the device name for the TPU_SYSTEM device of `job`."""
if job is None:
return "/device:TPU_SYSTEM:0"
else:
return "/job:%s/device:TPU_SYSTEM:0" % job
@tf_export(v1=["tpu.initialize_system"])
def initialize_system(
    embedding_config: Optional[embedding_pb2.TPUEmbeddingConfiguration] = None,
    job: Optional[Text] = None,
    compilation_failure_closes_chips: bool = True
) -> core_types.Tensor:
  """Initializes a distributed TPU system for use with TensorFlow.

  Args:
    embedding_config: If not None, a `TPUEmbeddingConfiguration` proto
      describing the desired configuration of the hardware embedding lookup
      tables. If embedding_config is None, no hardware embeddings can be used.
    job: The job (the XXX in TensorFlow device specification /job:XXX) that
      contains the TPU devices that will be initialized. If job=None it is
      assumed there is only one job in the TensorFlow flock, and an error will
      be returned if this assumption does not hold.
    compilation_failure_closes_chips: Set the configuration whether
      we want to close TPU chips when there is a compilation failure.

  Returns:
    A serialized `TopologyProto` that describes the TPU system. Note:
      the topology must be evaluated using `Session.run` before it can be used.
  """
  with ops.device(_tpu_system_device_name(job)):
    topology = tpu_ops.configure_distributed_tpu(
        compilation_failure_closes_chips=compilation_failure_closes_chips)

    if embedding_config is None:
      return topology

    # Sequence the embedding setup strictly between TPU configuration and the
    # returned topology tensor: the function's result must not be consumable
    # until the embedding tables have been configured.
    with ops.control_dependencies([topology]):
      embedding_init = tpu_ops.configure_tpu_embedding(
          config=embedding_config.SerializeToString())
    with ops.control_dependencies([embedding_init]):
      return array_ops.identity(topology, name="tpu_init_identity")
def initialize_system_for_tpu_embedding(
    embedding_config: embedding_pb2.TPUEmbeddingConfiguration,
    job: Optional[Text] = None,
) -> ops.Operation:
  """Initializes a distributed TPU Embedding system for use with TensorFlow.

  The following two are equivalent:
  1. initialize_system() with embedding_config.
  2. initialize_system() without embedding_config, then
     initialize_system_for_tpu_embedding().
  initialize_system() should not be called with embedding_config if
  initialize_system_for_tpu_embedding() is meant to be called later.

  Args:
    embedding_config: a `TPUEmbeddingConfiguration` proto describing the desired
      configuration of the hardware embedding lookup tables.
    job: The job (the XXX in TensorFlow device specification /job:XXX) that
      contains the TPU devices that will be initialized. If job=None it is
      assumed there is only one job in the TensorFlow flock, and an error will
      be returned if this assumption does not hold.

  Returns:
    A no-op.
  """
  with ops.device(_tpu_system_device_name(job)):
    return tpu_ops.configure_tpu_embedding(
        config=embedding_config.SerializeToString())
@tf_export(v1=["tpu.shutdown_system"])
def shutdown_system(job: Optional[Text] = None) -> ops.Operation:
  """Shuts down a running a distributed TPU system.

  Args:
    job: The job (the XXX in TensorFlow device specification /job:XXX) that
      contains the TPU devices that will be shutdown. If job=None it is
      assumed there is only one job in the TensorFlow flock, and an error will
      be returned if this assumption does not hold.

  Returns:
    An op that, when run, shuts down the distributed TPU system.
  """
  with ops.device(_tpu_system_device_name(job)):
    return tpu_ops.shutdown_distributed_tpu()
def _enclosing_tpu_context_and_graph() -> Tuple[Any, Any]:
  """Finds the innermost `TPUReplicateContext` and the graph that owns it.

  Walks outward through the chain of function graphs, and within each graph
  walks the control-flow-context chain, until a `TPUReplicateContext` is
  found.

  Raises:
    ValueError: if no enclosing `TPUReplicateContext` exists.
  """
  graph = ops.get_default_graph()
  while graph is not None:
    ctx = graph._get_control_flow_context()  # pylint: disable=protected-access
    while ctx is not None:
      if isinstance(ctx, TPUReplicateContext):
        return ctx, graph
      ctx = ctx.outer_context
    graph = getattr(graph, "outer_graph", None)
  raise ValueError("get_replicated_var_handle() called without "
                   "TPUReplicateContext. This shouldn't happen. Please file "
                   "a bug.")
def is_tpu_strategy(strategy: Any) -> bool:
  """Returns True if `strategy`'s class or a direct base is a TPUStrategy."""
  candidates = (strategy.__class__,) + strategy.__class__.__bases__
  return any(c.__name__.startswith("TPUStrategy") for c in candidates)
def _enclosing_tpu_device_assignment(
) -> Optional[device_assignment_lib.DeviceAssignment]:
  """Returns the current TPUStrategy's device assignment, or None."""
  if not distribution_strategy_context.has_strategy():
    return None
  strategy = distribution_strategy_context.get_strategy()
  if is_tpu_strategy(strategy):
    return strategy.extended._device_assignment  # pylint: disable=protected-access
  return None
@auto_control_deps.register_acd_resource_resolver
def tpu_replicated_input_resolver(
    op: ops.Operation,
    resource_reads: object_identity.ObjectIdentitySet,
    resource_writes: object_identity.ObjectIdentitySet) -> bool:
  """Replaces TPUReplicatedInput outputs with its inputs in resource_inputs.

  Args:
    op: the op whose resource accesses are being resolved.
    resource_reads: resource tensors read by `op`; mutated in place.
    resource_writes: resource tensors written by `op`; mutated in place.

  Returns:
    True if either set was modified, False otherwise.
  """
  # Ignore TPUReplicatedInput for ACD purposes since we will be directly adding
  # control deps on the replicated inputs.
  if op.type == "TPUReplicatedInput":
    if resource_reads or resource_writes:
      resource_reads.clear()
      resource_writes.clear()
      return True
    else:
      return False
  # Replace tensors in `resource_inputs` which are outputs of TPUReplicatedInput
  # with the actual replicated inputs. This allows ACD to correct add control
  # deps when there are multiple calls to `run` in a
  # `tf.function`.
  def replace_with_unreplicated_resources(resource_inputs):
    """Replaces handles in `resource_inputs` with their unreplicated inputs."""
    to_remove = []
    to_add = []
    for resource in resource_inputs:
      if resource.op.type == "TPUReplicatedInput":
        to_remove.append(resource)
        to_add.extend(resource.op.inputs)
    for t in to_remove:
      resource_inputs.discard(t)
    resource_inputs.update(to_add)
    return to_add or to_remove

  # Evaluate both replacements unconditionally. The previous short-circuiting
  # `a or b` skipped rewriting `resource_writes` whenever `resource_reads` was
  # modified, leaving stale replicated handles in the writes set.
  reads_replaced = replace_with_unreplicated_resources(resource_reads)
  writes_replaced = replace_with_unreplicated_resources(resource_writes)
  return bool(reads_replaced) or bool(writes_replaced)
class TPUReplicateContext(control_flow_ops.XLAControlFlowContext):
  """A `ControlFlowContext` for nodes inside a TPU computation.

  The primary role of `TPUReplicateContext` is to mark operators inside a
  tpu.replicate() computation with the attribute "_tpu_replicate=XYZ", where XYZ
  is a unique name.

  We use a `ControlFlowContext` to perform the annotation since it integrates
  with Tensorflow constructs like ResourceVariables. For example, if a
  `ResourceVariable` is constructed inside a tpu.replicate() block, the
  `ResourceVariable` implementation can use
  `with ops.control_dependencies(None)` to build the variable's definition
  outside the replicated computation.
  """

  def __init__(self, name: Text, num_replicas: int, pivot: ops.Operation):
    """Builds a new TPUReplicateContext.

    Args:
      name: a unique name for the context, used to populate the `_tpu_replicate`
        attribute.
      num_replicas: an integer that gives the number of replicas for the
        computation.
      pivot: a pivot node. Nodes in the TPUReplicateContext that do not have any
        inputs will have a control dependency on the pivot node. This ensures
        that nodes are correctly included in any enclosing control flow
        contexts.
    """
    super(TPUReplicateContext, self).__init__()
    self._num_replicas = num_replicas
    # Device-function stack captured on first Enter(); restored while inside
    # an outside_compilation scope so ops get outer (non-TPU) placement.
    self._outer_device_function_stack = None
    # Saved copy of the TPU-scope device stack while outside compilation
    # temporarily replaces it.
    self._oc_dev_fn_stack = None
    self._outside_compilation_cluster = None
    self._outside_compilation_v2_context = None
    self._outside_compilation_counter = 0
    self._in_gradient_colocation = None
    self._gradient_colocation_stack = []
    # "cluster:core" strings recorded when an outside-compiled cluster is
    # entered on a TPU_REPLICATED_CORE device; see _EnterOutsideCompilationScope.
    self._host_compute_core = []
    self._name = name
    self._name_as_bytes = compat.as_bytes(name)
    # Pre-serialized AttrValue of the cluster name; attached to every op in
    # AddOp via _set_attr_with_buf to avoid re-serializing per op.
    self._tpu_relicate_attr_buf = c_api_util.ScopedTFBuffer(
        attr_value_pb2.AttrValue(s=self._name_as_bytes).SerializeToString())
    self._unsupported_ops = []
    self._pivot = pivot
    # Cache of TPUReplicatedInput handles keyed by variable name.
    self._replicated_vars = {}

  def get_replicated_var_handle(self,
                                name: Text,
                                vars_: Union[List[core_types.Tensor],
                                             List[variables.Variable]],
                                is_mirrored: bool = False,
                                is_packed: bool = False) -> core_types.Tensor:
    """Returns a variable handle for replicated TPU variable 'var'.

    This is a method used by an experimental replicated variable implementation
    and is not intended as a public API.

    Args:
      name: The common name of the variable.
      vars_: The replicated TPU variables or handles.
      is_mirrored: Whether the variables are mirrored, which guarantees the
        values in each replica are always the same.
      is_packed: Whether the replicated variables are packed into one variable.

    Returns:
      The handle of the TPU replicated input node.
    """
    device_assignment = _enclosing_tpu_device_assignment()
    # We don't need to put device assignment as part of the replicated_vars key
    # because each TPUReplicateContext will only have one device assignment.
    handle = self._replicated_vars.get(name)
    if handle is not None:
      return handle

    if device_assignment is not None and not is_packed:
      # Find a variable copy for each replica in the device assignment.
      # Note that the order of devices for replicas for the variable and the
      # device assignment might not match.
      job_name = pydev.DeviceSpec.from_string(vars_[0].device).job
      devices_to_vars = {device_util.canonicalize(v.device): v for v in vars_}
      replicated_vars = []
      for replica_id in range(device_assignment.num_replicas):
        for logical_core in range(device_assignment.num_cores_per_replica):
          device = device_util.canonicalize(
              device_assignment.tpu_device(
                  replica=replica_id, logical_core=logical_core, job=job_name))
          if device in devices_to_vars:
            replicated_vars.append(devices_to_vars[device])
            break
        else:
          # for-else: no logical core of this replica matched any variable.
          raise ValueError(
              "Failed to find a variable on any device in replica {} for "
              "current device assignment".format(replica_id))
    else:
      replicated_vars = vars_

    # Builds a TPUReplicatedInput node for the variable, if one does not already
    # exist. The TPUReplicatedInput node must belong to the enclosing
    # control-flow scope of the TPUReplicateContext.
    # TODO(phawkins): consider changing the contract of the TPU encapsulation
    # so the TPUReplicatedInput nodes go inside the TPUReplicateContext scope
    # instead.
    _, graph = _enclosing_tpu_context_and_graph()
    with graph.as_default():
      # If replicated_vars are variables, get the handles. Note that this can be
      # done inside TPUReplicateContext because replicated_vars.handle may
      # create new ops.
      if isinstance(replicated_vars[0], variables.Variable):
        replicated_vars = [v.handle for v in replicated_vars]
      # pylint: disable=protected-access
      saved_context = graph._get_control_flow_context()
      graph._set_control_flow_context(self.outer_context)
      handle = tpu_ops.tpu_replicated_input(replicated_vars,
                                            name=name + "/handle",
                                            is_mirrored_variable=is_mirrored,
                                            is_packed=is_packed)
      graph._set_control_flow_context(saved_context)
      # pylint: enable=protected-access
    self._replicated_vars[name] = handle
    return handle

  def report_unsupported_operations(self) -> None:
    """Logs warnings for ops collected in AddOp that XLA cannot compile."""
    if self._unsupported_ops:
      # Only the first _MAX_WARNING_LINES offending ops are listed in detail.
      op_str = "\n".join(" %s (%s)" % (op.type, op.name)
                         for op in self._unsupported_ops[:_MAX_WARNING_LINES])
      logging.warning("%d unsupported operations found: \n%s",
                      len(self._unsupported_ops), op_str)
      if len(self._unsupported_ops) > _MAX_WARNING_LINES:
        logging.warning("... and %d more" %
                        (len(self._unsupported_ops) - _MAX_WARNING_LINES))

  def EnterGradientColocation(self, op: ops.Operation, gradient_uid: Text):
    """Starts colocating gradient ops with `op`'s outside-compilation cluster.

    If `op` carries the `_xla_outside_compilation` attribute, ops built for its
    gradient are placed in a derived cluster `<root_cluster>.<gradient_uid>`
    (see the long comment below for the rationale). No-op when `op` is None or
    is not outside-compiled.
    """
    if op is not None:
      if ops.get_default_graph()._control_flow_context is None:  # pylint: disable=protected-access
        # If we are in TF 2 functions (control flow V2 functions, or
        # tf.function()), we need to attach _xla_outside_compilation attribute
        # directly because we are not in TPUReplicateContext.
        try:
          outside_attr = op.get_attr(_OUTSIDE_COMPILATION_ATTR).decode("ascii")
        except ValueError:
          # The attr was not present: do nothing.
          return
        parts = outside_attr.split(".")
        cluster = parts[0] + "." + gradient_uid
        self._outside_compilation_v2_context = OutsideCompilationV2Context(
            cluster)
        self._outside_compilation_v2_context.Enter()
        return
      self._gradient_colocation_stack.append(op)
      if not self._outside_compilation_cluster:
        try:
          outside_attr = op.get_attr(_OUTSIDE_COMPILATION_ATTR).decode("ascii")
          if self._in_gradient_colocation:
            raise NotImplementedError(
                "Cannot nest gradient colocation operations outside compilation"
            )
          if gradient_uid == "__unsupported__":
            raise NotImplementedError(
                "No gradient_uid calling gradient within outside_compilation")
          # When we take the gradient of an op X in an outside_compilation
          # cluster C in a forward computation we would like to put the ops
          # corresponding to the gradient of X into a new outside_compilation
          # cluster C'. However, if we take the gradient of X twice, the second
          # one should get yet another new outside_compilation cluster C''.
          #
          # The mechanism we adopt is to use a 'root_cluster' which is the
          # cluster that X was in before we took gradients, and a 'gradient_uid'
          # which is different for every invocation of gradients, and put the
          # gradient of X in cluster 'root_cluster.gradient_uid'.
          #
          # When taking a gradient of a gradient, some ops will be colocated
          # with Op in the forward pass (e.g., cluster root_cluster) and some in
          # the backward pass (e.g., cluster root_cluster.initial_gradient_uid).
          # We need all of the grad-of-grad ops to be in the same cluster to
          # avoid cyclic dependencies between clusters. We adopt a heuristic
          # that puts any op clustered with root_cluster.<xxx> in
          # root_cluster.gradient_uid, even if xxx was initial_gradient_uid.
          self._in_gradient_colocation = op
          parts = outside_attr.split(".")
          cluster = parts[0] + "." + gradient_uid
          self._EnterOutsideCompilationScope(cluster=cluster)
        except ValueError:
          # The attr was not present: do nothing.
          pass

  def ExitGradientColocation(self, op: ops.Operation, gradient_uid: Text):
    """Ends the gradient colocation opened by `EnterGradientColocation`.

    Pops `op` from the colocation stack and, when `op` is the op that opened
    the scope, exits the outside-compilation scope. Raises `InternalError` on
    mismatched nesting.
    """
    if op is not None:
      if ops.get_default_graph()._control_flow_context is None:  # pylint: disable=protected-access
        # Inside a TF2 tf.function or control flow graph and `op` was not
        # marked to be outside compiled.
        assert self._outside_compilation_v2_context is None
        return
      if self._outside_compilation_v2_context is not None:
        # Inside a TF2 tf.function or control flow graph and `op` was
        # marked to be outside compiled.
        self._outside_compilation_v2_context.Exit()
        self._outside_compilation_v2_context = None
        return
      if not self._gradient_colocation_stack:
        raise errors.InternalError(
            op.node_def, op,
            f"Badly nested gradient colocation: empty stack when popping Op {op.name}"
        )
      last_op = self._gradient_colocation_stack.pop()
      if op is last_op:
        if op is self._in_gradient_colocation:
          self._in_gradient_colocation = None
          self._ExitOutsideCompilationScope()
      else:
        raise errors.InternalError(
            op.node_def, op,
            f"Badly nested gradient colocation, expected {last_op}, got {op.name}"
        )

  def _EnterOutsideCompilationScope(self, cluster: Optional[Text] = None):
    """Opens an outside-compilation scope named `cluster` (or a fresh counter).

    Swaps the graph's device-function stack for the one captured at `Enter()`
    time so that ops created inside the scope receive the outer (host-side)
    device placement. Nesting scopes is not supported.
    """

    class FakeOp(object):
      """A helper class to determine the current device.

      Supports only the type and device set/get methods needed to run the
      graph's _apply_device_function method.
      """

      def __init__(self):
        self._device = ""

      @property
      def type(self):
        return "FakeOp"

      @property
      def device(self):
        return self._device

      def _set_device(self, device):
        if isinstance(device, pydev.DeviceSpec):
          self._device = device.to_string()
        else:
          self._device = device

      def _set_device_from_string(self, device_str):
        self._device = device_str

    if self._outside_compilation_cluster:
      raise NotImplementedError("Cannot nest outside_compilation clusters")
    if cluster:
      self._outside_compilation_cluster = cluster
    else:
      self._outside_compilation_cluster = str(self._outside_compilation_counter)
      self._outside_compilation_counter += 1
    graph = ops.get_default_graph()
    # Probe the device functions with a fake op to learn where an op created
    # right now would be placed.
    fake_op = FakeOp()
    graph._apply_device_functions(fake_op)  # pylint: disable=protected-access
    device = pydev.DeviceSpec.from_string(fake_op.device)
    if (device.device_type == "TPU_REPLICATED_CORE" and
        device.device_index is not None):
      self._host_compute_core.append(self._outside_compilation_cluster + ":" +
                                     str(device.device_index))
    # Save the TPU-scope stack and install the outer one for the duration of
    # the outside-compilation scope.
    self._oc_dev_fn_stack = graph._device_function_stack  # pylint: disable=protected-access
    graph._device_function_stack = self._outer_device_function_stack  # pylint: disable=protected-access

  def _ExitOutsideCompilationScope(self):
    """Closes the current outside-compilation scope and restores devices."""
    if not self._outside_compilation_cluster:
      raise ValueError(
          "Attempted to exit outside_compilation scope when not in scope")
    self._outside_compilation_cluster = None
    graph = ops.get_default_graph()
    graph._device_function_stack = self._oc_dev_fn_stack  # pylint: disable=protected-access

  def Enter(self) -> None:
    """Enters the context, snapshotting the device stack on first entry."""
    if not self._outer_device_function_stack:
      # Capture the device function stack at the time of first entry
      # since that is the stack that will be used outside_compilation.
      graph = ops.get_default_graph()
      # pylint: disable=protected-access
      self._outer_device_function_stack = graph._device_function_stack.copy()
      # pylint: enable=protected-access
    super(TPUReplicateContext, self).Enter()

  def HostComputeCore(self) -> List[Text]:
    """Returns the recorded "cluster:core" strings for host compute."""
    return self._host_compute_core

  def _RemoveExternalControlEdges(
      self, op: ops.Operation
  ) -> Tuple[List[ops.Operation], List[ops.Operation]]:
    """Remove any external control dependency on this op.

    Args:
      op: the op whose control inputs are partitioned and rewritten in place.

    Returns:
      A pair `(internal, external)` of control-input ops: those whose
      control-flow context chain includes this context (kept on `op`) and
      those outside it (stripped from `op`).
    """
    internal_control_inputs = []
    external_control_inputs = []
    for x in op.control_inputs:
      # pylint: disable=protected-access
      is_internal_op = False
      ctxt = x._get_control_flow_context()
      while ctxt is not None:
        if ctxt == self:
          is_internal_op = True
          break
        ctxt = ctxt._outer_context
      if is_internal_op:
        internal_control_inputs.append(x)
      else:
        external_control_inputs.append(x)
      # pylint: enable=protected-access
    # pylint: disable=protected-access
    op._remove_all_control_inputs()
    op._add_control_inputs(internal_control_inputs)
    # pylint: enable=protected-access
    return internal_control_inputs, external_control_inputs

  def AddOp(self, op: ops.Operation) -> None:
    """Marks `op` with this cluster's attributes and fixes up its edges."""
    # pylint: disable=protected-access
    if op.type in _DENYLISTED_OPS:
      logging.error("Operation of type %s (%s) is not supported on the TPU. "
                    "Execution will fail if this op is used in the graph. ",
                    op.type, op.name)

    if op.type in _UNSUPPORTED_OPS:
      self._unsupported_ops.append(op)

    if any(x.dtype._is_ref_dtype for x in op.inputs):
      raise NotImplementedError(
          f"Non-resource Variables are not supported inside TPU computations "
          f"(operator name: {op.name})")

    # TensorFlowOpLayer may clone nodes that are in tpu.rewrite()s. It'll add
    # the "_cloned" attribute and we should continue in that case.
    if (_TPU_REPLICATE_ATTR in op.node_def.attr and
        "_cloned" not in op.node_def.attr):
      raise ValueError(f"TPU computations cannot be nested on op ({op})")
    op._set_attr_with_buf(_TPU_REPLICATE_ATTR,
                          self._tpu_relicate_attr_buf.buffer)
    if self._outside_compilation_cluster:
      op._set_attr(
          _OUTSIDE_COMPILATION_ATTR,
          attr_value_pb2.AttrValue(
              s=compat.as_bytes(self._outside_compilation_cluster)))
    if self._num_replicas > 1 or not self._outside_compilation_cluster:
      # Prevent feeding or fetching anything that is being compiled,
      # and any replicated outside_compilation Op.
      op.graph.prevent_feeding(op)
      op.graph.prevent_fetching(op)

    # Remove any control edges from outer control flow contexts. These may cause
    # mismatched frame errors.
    (internal_control_inputs,
     external_control_inputs) = self._RemoveExternalControlEdges(op)

    if not op.inputs:
      # Add a control edge from the control pivot to this op.
      if not internal_control_inputs:
        # pylint: disable=protected-access
        op._add_control_input(self.GetControlPivot())
        # pylint: enable=protected-access
    else:
      for index in xrange(len(op.inputs)):
        x = op.inputs[index]
        real_x = self.AddValue(x)
        if real_x is not x:
          op._update_input(index, real_x)  # pylint: disable=protected-access

    if external_control_inputs:
      # Use an identity to pull control inputs as data inputs. Note that we
      # ignore ops which don't have outputs. TODO(phawkins): fix that.
      with ops.control_dependencies(None):
        self.Enter()
        external_control_inputs = [
            array_ops.identity(x.outputs[0]).op
            for x in external_control_inputs
            if x.outputs
        ]
        self.Exit()
      # pylint: disable=protected-access
      op._add_control_inputs(external_control_inputs)
      # pylint: enable=protected-access

    # Mark op's outputs as seen by this context and any outer contexts.
    output_names = [x.name for x in op.outputs]
    context = self
    while context is not None:
      # pylint: disable=protected-access
      context._values.update(output_names)
      context = context._outer_context
      # pylint: enable=protected-access

    if self._outer_context:
      self._outer_context.AddInnerOp(op)

  def AddValue(self, val: core_types.Tensor) -> core_types.Tensor:
    """Add `val` to the current context and its outer context recursively."""
    if not self._outer_context:
      return val

    if val.name in self._values:
      # Use the real value if it comes from outer context.
      result = self._external_values.get(val.name)
      return val if result is None else result

    result = val
    self._values.add(val.name)
    if self._outer_context:
      result = self._outer_context.AddValue(val)
      self._values.add(result.name)

    self._external_values[val.name] = result
    return result

  def AddInnerOp(self, op: ops.Operation):
    """Adds `op` to this context and propagates it to the outer context."""
    self.AddOp(op)
    if self._outer_context:
      self._outer_context.AddInnerOp(op)

  @property
  def grad_state(self):
    # Define the gradient loop state associated with the TPUReplicateContext to
    # be None as the TPUReplicateContext does not get nested nor does the
    # grad_state outside the TPUReplicateContext affect the graph inside so the
    # grad_state should be as if this is the top-level gradient state.
    return None

  @property
  def back_prop(self):
    """Forwards to the enclosing while context, if any."""
    if self.GetWhileContext():
      return self.GetWhileContext().back_prop
    return False

  def GetControlPivot(self) -> ops.Operation:
    """Returns the pivot op that anchors input-less ops in this context."""
    return self._pivot

  def RequiresUniqueFunctionRetracing(self):
    # More context: b/158152827. TPU stack uses the TPUReplicateContext to
    # create replicated variable handles and cluster TPU computations, thus we
    # always retrace a tf.function when the wrapped TPUReplicateContext changes.
    return True
class OutsideCompilationV2Context(control_flow_ops.ControlFlowContext):
  """The context for outside compilation in Tensorflow 2.0.

  Every op added in this context will be assigned an _xla_outside_compilation
  attribute.
  """

  def __init__(self, name: Text):
    control_flow_ops.ControlFlowContext.__init__(self)
    self._name = name

  def _tag_op(self, op):
    """Stamps `op` with this context's outside-compilation cluster name."""
    # pylint: disable=protected-access
    op._set_attr("_xla_outside_compilation",
                 attr_value_pb2.AttrValue(s=compat.as_bytes(self._name)))
    # pylint: enable=protected-access

  def AddOp(self, op: ops.Operation) -> None:
    if self._outer_context:
      self._outer_context.AddOp(op)
    self._tag_op(op)

  def AddInnerOp(self, op: ops.Operation) -> None:
    if self._outer_context:
      self._outer_context.AddInnerOp(op)
    self._tag_op(op)

  def to_control_flow_context_def(self, context_def, export_scope=None):
    raise NotImplementedError
@tf_export(v1=["tpu.outside_compilation"])
def outside_compilation(
    computation: Callable[..., Any], *args, **kwargs
) -> Any:
  """Builds part of a computation outside any current TPU replicate scope.

  `tf.tpu.outside_compilation()` is used to run ops in `computation` on CPU
  instead of running on TPU. For example, users can run ops that are not
  supported on TPU's (e.g. tf.summary.write()) by explicitly placing those
  ops on CPU's. Below usage of outside compilation will place ops in
  `computation_with_string_ops` on CPU.

  Example usage:

  ```python
  def computation_with_string_ops(x):
    # strings types are not supported on TPU's and below ops must
    # run on CPU instead.
    output = tf.strings.format('1{}', x)
    return tf.strings.to_number(output)

  def tpu_computation():
    # Expected output is 11.
    output = tf.tpu.outside_compilation(computation_with_string_ops, 1)
  ```

  Outside compilation should be called inside TPUReplicateContext. That is,
  `tf.tpu.outside_compilation()` should be called inside a function that is
  passed to `tpu.split_compile_and_replicate()` -- this is implied when
  outside compilation is invoked inside a function passed to TPUStrategy
  `run()`. If invoked outside of TPUReplicateContext,
  then this simply returns the result of `computation`, and therefore,
  would be a no-op. Note that outside compilation is different from
  `tf.distribute.experimental.TPUStrategy.merge_call()` as logic in
  outside compilation is replicated and executed separately for each
  replica. On the other hand, `merge_call()` requires a `merge_fn`
  to aggregate the inputs from different replicas and is executed only
  once.

  For variables placed in TPU device, which includes variables created inside
  TPUStrategy scope, outside compilation logic must not include variable
  read/write. For variables placed on host, which is the case when variables
  created via TPUEstimator, variable read/write is only allowed if the variable
  is not accessed by any other ops in the TPU computation. Variable read/write
  from outside compilation cluster is not visible from TPU computation and
  vice versa. Therefore, if outside compilation logic contains such host
  variables read/write ops and if the variables are accessed by TPU
  computation as well, then this may lead to deadlock.

  Internally, `tf.tpu.outside_compilation()` adds outside compilation
  attributes to all ops in `computation`. During later graph pass, these
  ops with outside compilation attribute is extracted out and replicated
  into a host-side graph. Inputs to this extract host-side graph is sent
  from TPU computation graph to host graph via a pair of XlaSendToHost and
  XlaRecvFromHost ops. Note that using `tf.tpu.outside_compilation()`
  may result in tensor transfer between TPU and CPU, leading to non-trivial
  performance impact.

  Args:
    computation: A Python function that builds the computation to
      place on the host.
    *args: the positional arguments for the computation.
    **kwargs: the keyword arguments for the computation.

  Returns:
    The Tensors returned by computation.

  Raises:
    NotImplementedError: if the control-flow context differs at the start and
      end of the outside_compilation scope.
  """
  args = [] if args is None else args
  graph = ops.get_default_graph()

  # If we are in TF 2 functions (control flow V2 functions, or tf.function()),
  # we need to attach _xla_outside_compilation attribute directly because we are
  # not in TPUReplicateContext.
  if isinstance(graph, func_graph.FuncGraph):
    try:
      tpu_context, _ = _enclosing_tpu_context_and_graph()
    except ValueError:
      logging.warning(
          "Outside compilation attempted outside TPUReplicateContext "
          "scope. As no enclosing TPUReplicateContext can be found, "
          "returning the result of `computation` as is.")
      return computation(*args, **kwargs)

    # pylint: disable=protected-access
    outside_compilation_name = str(tpu_context._outside_compilation_counter)
    tpu_context._outside_compilation_counter = (
        tpu_context._outside_compilation_counter + 1)
    # pylint: enable=protected-access

    outside_compilation_context = OutsideCompilationV2Context(
        outside_compilation_name)
    outside_compilation_context.Enter()
    # Note: `args` was already normalized at the top of the function; the
    # previous duplicated `args = [] if args is None else args` here was dead
    # code and has been removed.
    retval = computation(*args, **kwargs)
    outside_compilation_context.Exit()
    return retval

  # If we are in a TPUReplicateContext, signal that we are now
  # outside_compilation
  initial_context = graph._get_control_flow_context()  # pylint: disable=protected-access
  context = initial_context
  while context:
    if isinstance(context, TPUReplicateContext):
      context._EnterOutsideCompilationScope()  # pylint: disable=protected-access
    context = context.outer_context

  retval = computation(*args, **kwargs)

  # If we are in a TPUReplicateContext, signal that we are no longer
  # outside_compilation
  final_context = graph._get_control_flow_context()  # pylint: disable=protected-access
  if initial_context is not final_context:
    raise NotImplementedError(
        "Control-flow context cannot be different at start and end of an "
        "outside_compilation scope")
  context = initial_context
  while context:
    if isinstance(context, TPUReplicateContext):
      context._ExitOutsideCompilationScope()  # pylint: disable=protected-access
    context = context.outer_context

  return retval
@tf_export(v1=["tpu.PaddingSpec"])
class PaddingSpec(enum.IntEnum):
  """Represents the type of padding policies for tpu.replicate."""
  # By default the policy is set to AUTO, the dynamic input shape dimension
  # will be padded to the maximum of all the replicas.
  AUTO = 0
  # Bucketize the dynamic input shape dimension into a power of 2.
  POWER_OF_TWO = 1
@tf_export("tpu.XLAOptions")
class XLAOptions(
    collections.namedtuple("XLAOptions", [
        "use_spmd_for_xla_partitioning",
        "enable_xla_dynamic_padder",
    ])):
  """Options that control how XLA compiles TPU computations.

  Attributes:
    use_spmd_for_xla_partitioning: Boolean. When compiler partitioning is
      requested, selects XLA's SPMD partitioner instead of the MPMD
      partitioner.
    enable_xla_dynamic_padder: Boolean. Enables XLA's dynamic padder
      infrastructure for handling dynamically shaped inputs inside XLA;
      True by default. Setting it to False may improve device time when
      masking is already handled on the user side, but can cause correctness
      issues with dynamic shapes since XLA will then assume padded input
      shapes.
  """

  def __new__(cls,
              use_spmd_for_xla_partitioning=True,
              enable_xla_dynamic_padder=True):
    return super().__new__(cls, use_spmd_for_xla_partitioning,
                           enable_xla_dynamic_padder)
@tf_export(v1=["tpu.replicate"])
def replicate(
    computation: Callable[..., Any],
    inputs: Optional[List[List[core_types.Tensor]]] = None,
    infeed_queue: Optional[tpu_feed.InfeedQueue] = None,
    device_assignment: Optional[device_assignment_lib.DeviceAssignment] = None,
    name: Optional[Text] = None,
    maximum_shapes: Optional[Any] = None,
    padding_spec: Optional[PaddingSpec] = None,
    xla_options: Optional[XLAOptions] = None) -> List[Any]:
  """Builds a graph operator that runs a replicated TPU computation.
  Example for the basic usage that `inputs` has static shape:
  ```python
  def computation(x):
    x = x + 1
    return tf.math.reduce_mean(x)
  x = tf.convert_to_tensor([1., 2., 3.])
  y = tf.convert_to_tensor([4., 5., 6.])
  tf.compat.v1.tpu.replicate(computation, inputs=[[x], [y]])
  ```
  If the `inputs` has dynamic shapes and you would like to automatically
  bucketize the inputs to avoid XLA recompilation. See the advanced example
  below:
  ```python
  def computation(x):
    x = x + 1
    return tf.math.reduce_mean(x)
  # Assume input tensors in two replicas `x` and `y` both have dynamic shape
  # ([None, 2]).
  tf.compat.v1.tpu.replicate(
    computation,
    inputs=[x, y],
    maximum_shapes=[tf.TensorShape([None, None])],
    padding_spec=tf.compat.v1.tpu.PaddingSpec.POWER_OF_TWO)
  ```
  Args:
    computation: A Python function that builds the computation to replicate.
    inputs: A list of lists of input tensors or `None` (equivalent to
      `[[]]`), indexed by `[replica_num][input_num]`. All replicas must
      have the same number of inputs. Each input can be a nested structure
      containing values that are convertible to tensors. Note that passing an
      N-dimension list of compatible values will result in a N-dimension list of
      scalar tensors rather than a single Rank-N tensors. If you need different
      behavior, convert part of inputs to tensors with `tf.convert_to_tensor`.
    infeed_queue: If not `None`, the `InfeedQueue` from which to append a tuple
      of arguments as inputs to computation.
    device_assignment: If not `None`, a `DeviceAssignment` describing the
      mapping between logical cores in the computation with physical cores in
      the TPU topology. Uses a default device assignment if `None`. The
      `DeviceAssignment` may be omitted if each replica of the computation uses
      only one core, and there is either only one replica, or the number of
      replicas is equal to the number of cores in the TPU system.
    name: (Deprecated) Does nothing.
    maximum_shapes: A nested structure of tf.TensorShape representing the shape
      to which the respective component of each input element in each replica
      should be padded. Any unknown dimensions (e.g.
      tf.compat.v1.Dimension(None) in a tf.TensorShape or -1 in a tensor-like
      object) will be padded to the maximum size of that dimension over all
      replicas. The structure of `maximum_shapes` needs to be the same as
      `inputs[0]`.
    padding_spec: An enum specified by `tpu.PaddingSpec`. This describes the
      padding policy when the `inputs` to `tpu.replicate` is dynamic.
      One usage is to enable automatic bucketizing on the inputs by setting the
      value to `tpu.PaddingSpec.POWER_OF_TWO`, which can help to reduce the
      recompilation in the XLA side.
    xla_options: An instance of `tpu.XLAOptions` which indicates the options
      passed to XLA compiler. Use `None` for default options.
  Returns:
    A list of outputs, indexed by `[replica_num]` each output can be a nested
    structure same as what computation() returns with a few exceptions.
    Exceptions include:
      1) None output: a NoOp would be returned which control-depends on
         computation.
      2) Single value output: A tuple containing the value would be returned.
      3) Operation-only outputs: a NoOp would be returned which
         control-depends on computation.
      TODO(b/121383831): Investigate into removing these special cases.
  Raises:
    ValueError: If all replicas do not have equal numbers of input tensors.
    ValueError: If the number of inputs per replica does not match
      the number of formal parameters to `computation`.
    ValueError: If the static `inputs` dimensions don't match with the values
      given in `maximum_shapes`.
    ValueError: If the structure of inputs per replica does not match
      the structure of `maximum_shapes`.
  """
  # Delegates to split_compile_and_replicate, which returns
  # [compile_op, per_replica_outputs]; only the outputs (index 1) are exposed
  # here — the compile op still runs because the execute ops depend on it.
  return split_compile_and_replicate(
      computation,
      inputs,
      infeed_queue,
      device_assignment,
      name,
      maximum_shapes=maximum_shapes,
      padding_spec=padding_spec,
      xla_options=xla_options)[1]
def _ceil_to_pow_of_n(x, n):
  """Rounds `x` up to the nearest power of `n`, returned as an int32 tensor."""
  # exponent = ceil(log_n(x)); result = n ** exponent, computed in float32.
  x_float = math_ops.cast(x, dtypes.float32)
  exponent = math_ops.ceil(math_ops.log(x_float) / math_ops.log(n * 1.0))
  rounded = math_ops.pow(n * 1.0, exponent)
  return math_ops.cast(rounded, dtypes.int32)
def _pad_all_input(
    inputs: Iterable[core_types.Tensor],
    padded_shapes: List[Optional[tensor_shape.TensorShape]],
    padding_spec: PaddingSpec
) -> Tuple[List[List[Any]], List[dynamic_padding.PaddingMap]]:
  """Pad all input tensors given padded_shapes.
  The real shape tensors will be concatenated with the padded original inputs.
  Args:
    inputs: The original inputs, indexed `[replica][input_num]`.
    padded_shapes: A list of padded shapes for each input. If an entry is None,
      no padding is performed.
    padding_spec: An enum specified by `tpu.PaddingSpec`. This describes the
      padding policy when the `inputs` to `tf.tpu.replicate` is dynamic.
      One usage is to enable automatic bucketizing on the inputs by setting the
      value to `tpu.PaddingSpec.POWER_OF_TWO`, which can help to reduce the
      recompilation in the XLA side.
  Returns:
    The padded inputs (per replica, with the real-shape scalar tensors appended
    after the original arguments) and a PaddingMap list which maps the padded
    input dimension to the real shape argument index.
  """
  # --- Pass 1: collect static shapes and decide which dims need padding. ---
  # maximum_static_shapes[idx][i] indicates the maximum static size of ith
  # dimension of the idx input among all the replicas.
  maximum_static_shapes = []
  # need_padding[idx][i] indicates whether the ith dimension of the idx input
  # needs padding.
  need_padding = []
  # input_shape_tensors[idx][core_idx]: runtime shape of input idx on replica
  # core_idx.
  input_shape_tensors = []
  for core_idx, inputs_per_core in enumerate(inputs):
    for idx, input_tensor in enumerate(inputs_per_core):
      input_shape = input_tensor.get_shape().as_list()
      if core_idx == 0:
        input_shape_tensors.append([])
        maximum_static_shapes.append(input_shape)
        need_padding.append(np.full_like(input_shape, False, dtype=bool))
      else:
        # A dim needs padding when it is unknown (None) or differs from the
        # size seen on an earlier replica.
        for i, s in enumerate(input_shape):
          if s is None or s != maximum_static_shapes[idx][i]:
            need_padding[idx][i] = True
        # NOTE(review): `max` compares the two shape lists lexicographically,
        # not element-wise, and will raise comparing None to int in py3 —
        # presumably shapes here differ only in fully-known dims; confirm.
        maximum_static_shapes[idx] = max(input_shape,
                                         maximum_static_shapes[idx])
      # Append _POST_DEVICE_REWRITE_ATTR attributes to the real shape ops.
      real_input_shape = array_ops.shape(input_tensor)
      real_input_shape.op._set_attr(  # pylint: disable=protected-access
          _POST_DEVICE_REWRITE_ATTR,
          attr_value_pb2.AttrValue(b=True))
      input_shape_tensors[idx].append(real_input_shape)
  # Per-input element-wise maximum of the runtime shapes across replicas.
  maximum_shapes = []
  for shapes_per_input in input_shape_tensors:
    maximum_shapes.append(
        math_ops.reduce_max(array_ops.stack(shapes_per_input), axis=0))
  # --- Pass 2: build the padded inputs and the PaddingMap protos. ---
  padded_inputs = []
  real_shapes = []
  padding_maps = []
  for core_idx, inputs_per_core in enumerate(inputs):
    padded_inputs.append([])
    real_shapes.append([])
    # Index of the last original argument; real-shape scalars are appended
    # after it, so PaddingMap.padding_arg_index counts from here.
    real_shape_idx = len(inputs_per_core) - 1
    for idx, input_tensor in enumerate(inputs_per_core):
      input_shape_tensor = input_shape_tensors[idx][core_idx]
      input_shape = input_tensor.get_shape().as_list()
      padded_shape = padded_shapes[idx]
      # If we have no padded_shape, then skip padding.
      if any(need_padding[idx]) and padded_shape is not None:
        for i, s in enumerate(input_shape):
          if need_padding[idx][i]:
            # PaddingMaps are shared across replicas, so only build them once
            # (on replica 0); real-shape scalars are emitted per replica.
            if core_idx == 0:
              real_shape_idx += 1
              padding_map = dynamic_padding.PaddingMap()
              padding_map.arg_index = idx
              padding_map.shape_index = i
              padding_map.padding_arg_index = real_shape_idx
              padding_maps.append(padding_map)
            real_shapes[core_idx].append(
                math_ops.cast(input_shape_tensor[i], dtypes.int32))
        paddings = []
        for i, s in enumerate(padded_shape.dims):
          if need_padding[idx][i]:
            # The minimum padded dimension size is 2 as XLA doesn't support size
            # 1 dynamic size.
            minimum_dynamic_dim_size = 2
            if s.value is not None:
              # Pad to the given maximum value.
              max_dim_size = max(s.value, minimum_dynamic_dim_size)
            else:
              # If maximum value is not given, then pad to the maximum dimension
              # among all the cores.
              max_dim_size = math_ops.maximum(maximum_shapes[idx][i],
                                              minimum_dynamic_dim_size)
              if padding_spec == PaddingSpec.POWER_OF_TWO:
                max_dim_size = _ceil_to_pow_of_n(max_dim_size, 2)
            # Pad to the given maximum value.
            padding = [0, max_dim_size - input_shape_tensor[i]]
          else:
            padding = [0, 0]
          paddings.append(padding)
        if input_tensor.get_shape().is_fully_defined():
          # TODO(rxsang): This is a hack to make sure padded_input has dynamic
          # shapes, so any tf.size/tf.shape op performed on it won't be constant
          # folded. Do we have better ways to do it?
          padded_input = control_flow_ops.cond(
              array_ops.constant(True),
              lambda: array_ops.pad(input_tensor, paddings),  # pylint: disable=cell-var-from-loop
              lambda: input_tensor)
        else:
          padded_input = array_ops.pad(input_tensor, paddings)
        # Append _POST_DEVICE_REWRITE_ATTR attributes to all padded inputs.
        padded_input.op._set_attr(  # pylint: disable=protected-access
            _POST_DEVICE_REWRITE_ATTR,
            attr_value_pb2.AttrValue(b=True))
        padded_inputs[core_idx].append(padded_input)
      else:
        padded_inputs[core_idx].append(input_tensor)
  # Append the per-replica real-shape scalars after the original arguments so
  # padding_arg_index values computed above line up.
  num_replicas = len(padded_inputs)
  for i in range(num_replicas):
    padded_inputs[i].extend(real_shapes[i])
  return padded_inputs, padding_maps
def _flatten_and_filter_composite(maybe_composite, non_composite_output,
                                  composite_output=None):
  """Maps a value to a marker, expanded per component for composites.
  If `maybe_composite` is a `CompositeTensor`, returns a tuple repeating
  `composite_output` once for every flattened component of the composite.
  Otherwise `non_composite_output` is returned unchanged.
  This is useful for building a mask that lines up with
  `nest.flatten(data, expand_composites=True)`:
  ```python
  nest.flatten(data, expand_composites=True)
  ```
  and
  ```python
  nest.flatten(nest.map(
      data, lambda x: _flatten_and_filter_composite(x, False, True)))
  ```
  have the same length, and an entry of the second is True exactly when the
  corresponding tensor in the first came from expanding a composite tensor.
  Args:
    maybe_composite: A value to test for being a composite tensor.
    non_composite_output: The value to return when `maybe_composite` is not a
      composite.
    composite_output: the value to fill the output tuple with if
      `maybe_composite` is a composite.
  Returns:
    `non_composite_output` or a tuple with multiple copies of
    `composite_output`.
  """
  # Fast path: plain values pass straight through.
  if not isinstance(maybe_composite, composite_tensor.CompositeTensor):
    return non_composite_output
  flat_components = nest.flatten(maybe_composite, expand_composites=True)
  return (composite_output,) * len(flat_components)
def split_compile_and_replicate(
    computation: Callable[..., Any],
    inputs: Optional[List[List[core_types.Tensor]]] = None,
    infeed_queue: Optional[tpu_feed.InfeedQueue] = None,
    device_assignment: Optional[device_assignment_lib.DeviceAssignment] = None,
    name: Optional[Text] = None,
    use_tpu: bool = True,
    maximum_shapes: Optional[Any] = None,
    padding_spec: Optional[PaddingSpec] = None,
    xla_options: Optional[XLAOptions] = None,
) -> List[List[core_types.Tensor]]:
  """Builds graph operators that runs compilation and replicated computation.
  This is a lower level interface than replicate that returns a separate compile
  and execute output tensor. In the generated graph the compile op feeds into
  the execute op and no additional compilation is incurred when running the
  compile op before the execute op. The compile op returns additional
  information about the compilation but does not return the compiled program.
  Args:
    computation: A Python function that builds the computation to replicate.
    inputs: A list of lists of input tensors or `None` (equivalent to
      `[[]]`), indexed by `[replica_num][input_num]`. All replicas must
      have the same number of inputs. Each input can be a nested structure
      containing values that are convertible to tensors. Note that passing an
      N-dimension list of compatible values will result in a N-dimension list of
      scalar tensors rather than a single Rank-N tensors. If you need different
      behavior, convert part of inputs to tensors with `tf.convert_to_tensor`.
    infeed_queue: If not `None`, the `InfeedQueue` from which to append a tuple
      of arguments as inputs to computation.
    device_assignment: If not `None`, a `DeviceAssignment` describing the
      mapping between logical cores in the computation with physical cores in
      the TPU topology. Uses a default device assignment if `None`. The
      `DeviceAssignment` may be omitted if each replica of the computation uses
      only one core, and there is either only one replica, or the number of
      replicas is equal to the number of cores in the TPU system.
    name: (Deprecated) Does nothing.
    use_tpu: When false, the input `computation` is executed on the XLA CPU/GPU
      backends. Currently, only supports a default placement (computation is
      placed on GPU if one is available, and on CPU if not).
    maximum_shapes: A nested structure of tf.TensorShape representing the shape
      to which the respective component of each input element in each replica
      should be padded. Any unknown dimensions (e.g.
      tf.compat.v1.Dimension(None) in a tf.TensorShape or -1 in a tensor-like
      object) will be padded to the maximum size of that dimension over all
      replicas. The structure of `maximum_shapes` needs to be the same as
      `inputs[0]`.
    padding_spec: An enum specified by `tf.tpu.PaddingSpec`. This describes the
      padding policy when the `inputs` to `tf.tpu.replicate` is dynamic.
      One usage is to enable automatic bucketizing on the inputs by setting the
      value to `tpu.PaddingSpec.POWER_OF_TWO`, which can help to reduce the
      recompilation in the XLA side.
    xla_options: An instance of `tpu.XLAOptions` which indicates the options
      passed to XLA compiler. Use `None` for default options.
  Returns:
    A list of lists with the first list corresponding to the compile op and the
    second a list of output tensors, indexed by `[replica_num][output_num]`.
  Raises:
    ValueError: If all replicas do not have equal numbers of input tensors.
    ValueError: If the number of inputs per replica does not match
      the number of formal parameters to `computation`.
    ValueError: If the static `inputs` dimensions don't match with the values
      given in `maximum_shapes`.
    ValueError: If the structure of inputs per replica does not match
      the structure of `maximum_shapes`.
  """
  del name
  inputs = [[]] if inputs is None else inputs
  xla_options = xla_options or XLAOptions()
  metadata_kwargs = {}
  if device_assignment is not None:
    # Turn the Numpy array into a flattened list so we can pass it as an
    # operator attribute.
    metadata_kwargs = {
        "topology":
            device_assignment.topology.serialized(),
        "device_assignment":
            device_assignment.core_assignment.flatten().tolist()
    }
    metadata_kwargs["num_cores_per_replica"] = (
        device_assignment.num_cores_per_replica)
  # This entry is used for enabling automatic outside compilation.
  metadata_kwargs["allow_soft_placement"] = config.get_soft_device_placement()
  if config.get_soft_device_placement():
    logging.info("Automatic outside compilation is enabled. "
                 "Ops without XLA kernels will be automatically "
                 "placed on CPU.")
  if not isinstance(inputs, list):
    raise TypeError("tpu.replicate() inputs must be a list of lists/tuples, "
                    f"received {type(inputs)}")
  if any(not isinstance(inp, (list, tuple)) for inp in inputs):
    raise TypeError(
        "tpu.replicate() inputs must be a list of lists/tuples, "
        f"received types: {[type(inp) for inp in inputs]}")
  num_replicas = len(inputs)
  # No replicas? Nothing to do.
  if num_replicas == 0:
    return []
  # Checks all replicas have the same structure.
  for i in xrange(1, num_replicas):
    nest.assert_same_structure(inputs[0], inputs[i])
  # Flatten inputs. This structure may contain None values, which will be
  # handled later.
  flat_inputs_with_nones = [
      nest.flatten(per_replica_input, expand_composites=True)
      for per_replica_input in inputs
  ]
  # Mask parallel to one replica's inputs with True for tensors coming from
  # composites.
  is_composite = nest.flatten(nest.map_structure(
      lambda x: _flatten_and_filter_composite(x, False, True), inputs[0]))
  # Converts inputs to Tensors, replacing Nones with a placeholder 0 since
  # tpu_ops.tpu_replicated_input() can't handle non-Tensor values.
  flat_inputs = []
  for inp in flat_inputs_with_nones:
    flat_inputs.append([
        constant_op.constant(0) if x is None else ops.convert_to_tensor(x)
        for x in inp
    ])
  # Verifies that all replicas have matching numbers and types of inputs
  flat_input_types = [x.dtype for x in flat_inputs[0]]
  input_arity = len(inputs[0])
  flat_input_arity = len(flat_input_types)
  for i in range(num_replicas):
    if len(inputs[i]) != input_arity:
      raise ValueError("Replicas must have the same number of inputs. "
                       "Replica 0 had {} inputs, replica {} had {} "
                       "inputs.".format(input_arity, i, len(inputs[i])))
    types = [x.dtype for x in flat_inputs[i]]
    if types != flat_input_types:
      raise ValueError("Replicas must have matching input types. Replica 0 had "
                       "input types {}, replica {} had input types {}".format(
                           flat_input_types, i, types))
  arg_error = xla.check_function_argument_count(
      computation, input_arity, infeed_queue)
  if arg_error is not None:
    if infeed_queue is None:
      raise TypeError(
          "Supplied computation cannot be called with the specified inputs. "
          f"You specified {input_arity} inputs: {[i.name for i in inputs[0]]}, "
          f"but the computation needs{arg_error}")
    else:
      # BUGFIX: this previously had a stray trailing comma after the first
      # f-string, which made `TypeError` receive two separate message
      # arguments (rendered as a tuple) instead of one concatenated string.
      raise TypeError(
          "Supplied computation cannot be called with the specified inputs. "
          f"You specified {input_arity} inputs: {[i.name for i in inputs[0]]} "
          f"and {infeed_queue.number_of_tuple_elements} additional inputs "
          f"from infeed, but the computation needs {arg_error}")
  dynamic_shape_inputs = False
  if maximum_shapes:
    if infeed_queue:
      raise ValueError(
          "Dynamic input shapes are not supported with infeed queues")
    # Make sure maximum_shapes has the same structure as inputs.
    nest.assert_same_structure(inputs[0], maximum_shapes, check_types=False)
    # Flatten padded shapes:
    # For composite tensor components, we don't want to pad them. For each
    # entry of maximum_shapes that corresponds to a composite tensor, replace it
    # by a tuple of Nones of the same length as the number of components of the
    # composite tensor. When we flatten a second time, this makes
    # flat_maximum_shapes have the same length as flat_inputs[i]. We can then
    # avoid padding these tensors. The assumption is that they will be used by
    # outside compilation or that the components are statically shaped and will
    # be used by tpu compatible ops.
    flat_maximum_shapes = nest.flatten(
        [_flatten_and_filter_composite(x, y)
         for x, y in zip(nest.flatten(inputs[0]),
                         nest.flatten(maximum_shapes))])
    flat_maximum_shapes = [
        tensor_shape.TensorShape(s) if s is not None else None
        for s in flat_maximum_shapes
    ]
    nest.assert_same_structure(flat_inputs[0], flat_maximum_shapes,
                               check_types=False)
    unpadded_inputs = flat_inputs
    flat_inputs, padding_maps = _pad_all_input(unpadded_inputs,
                                               flat_maximum_shapes,
                                               padding_spec)
    if padding_maps:
      dynamic_shape_inputs = True
      logging.info("TPU has inputs with dynamic shapes: %s", unpadded_inputs[0])
  metadata_kwargs["step_marker_location"] = getattr(
      computation, "step_marker_location", "STEP_MARK_AT_ENTRY")
  metadata_kwargs["use_spmd_for_xla_partitioning"] = \
      xla_options.use_spmd_for_xla_partitioning
  graph = ops.get_default_graph()
  # Fan-in: Builds a TPUReplicatedInput node for each input.
  flat_replicated_inputs = []
  for i in range(0, len(flat_inputs[0])):
    replicas = [flat_inputs[replica][i] for replica in xrange(num_replicas)]
    flat_replicated_inputs.append(
        tpu_ops.tpu_replicated_input(
            replicas, name="input{}".format(i), index=i))
  if isinstance(graph, func_graph.FuncGraph):
    # When we are in Tensorflow 2.0 function, 'graph' will be a FuncGraph
    # object. If both outside graph and this function have a TPU cluster,
    # they will have the same cluster name and it will cause problems (because
    # we lower functional ops in Tensorflow 2.0). Append function name to
    # 'cluster_name' to avoid cluster name collision.
    cluster_name = graph.unique_name("cluster_" + graph.name)
  else:
    cluster_name = graph.unique_name("cluster")
  pivot = control_flow_ops.no_op(name=cluster_name + "/pivot")
  pivot._set_attr(_PIVOT_FOR_CLUSTER,  # pylint: disable=protected-access
                  attr_value_pb2.AttrValue(s=compat.as_bytes(cluster_name)))
  context = TPUReplicateContext(
      name=cluster_name, num_replicas=num_replicas, pivot=pivot)
  try:
    context.Enter()
    metadata = tpu_ops.tpu_replicate_metadata(
        num_replicas=num_replicas, use_tpu=use_tpu, **metadata_kwargs)
    with tpu_function.tpu_shard_context(
        num_replicas), ops.control_dependencies([metadata]):
      if dynamic_shape_inputs and xla_options.enable_xla_dynamic_padder:
        for padding_map in padding_maps:
          input_shape = flat_replicated_inputs[padding_map.arg_index].shape
          flat_replicated_inputs[
              padding_map.arg_index] = tf2xla.set_dynamic_dimension_size(
                  flat_replicated_inputs[padding_map.arg_index],
                  padding_map.shape_index,
                  flat_replicated_inputs[padding_map.padding_arg_index])
          flat_replicated_inputs[padding_map.arg_index].set_shape(input_shape)
      # Add identity ops so even unused inputs are "consumed" by the
      # computation. This is to avoid orphaned TPUReplicatedInput nodes.
      # TODO(phawkins): consider instead pruning unused TPUReplicatedInput
      # and eliding trivial TPUReplicatedInput/TPUReplicatedOutput pairs.
      flat_replicated_inputs = [
          array_ops.identity(x, name="replicated_input_{}".format(i))
          for i, x in enumerate(flat_replicated_inputs)
      ]
      for i, composite in zip(flat_replicated_inputs, is_composite):
        # pylint: disable=protected-access
        # Add an attribute to the identity node so that they could be removed in
        # encapsulate TPU computation pass if unused. However we don't remove
        # inputs when dynamic padding is enabled.
        # TODO(rxsang): Use other ways except argument index in padding_map so
        # outside compilation can work with dynamic padding correctly.
        if not dynamic_shape_inputs or composite:
          i.op._set_attr("_tpu_input_identity",
                         attr_value_pb2.AttrValue(b=True))
        # pylint: enable=protected-access
      # Clobber replicated placeholders with Nones.
      computation_inputs = [
          None if inp is None else replicated for replicated, inp in zip(
              flat_replicated_inputs, flat_inputs_with_nones[0])
      ]
      # Unflatten the computation inputs to match original input structure.
      computation_inputs = nest.pack_sequence_as(
          structure=inputs[0],
          flat_sequence=computation_inputs[:flat_input_arity],
          expand_composites=True)
      # If there is an infeed queue, adds the dequeued values to the
      # computation's inputs.
      if infeed_queue is not None:
        infeed_queue.set_number_of_shards(num_replicas)
        for t in infeed_queue.generate_dequeue_op():
          computation_inputs.append(t)
      # Only resource variables work inside a TPU computation, so turn on
      # resource variables for the computation.
      # TODO(phawkins): consider removing this code. It will
      # be less confusing to clients if they knowingly choose to use resource
      # variables.
      # Partitioned variables is not supported (b/112311320).
      vscope = variable_scope.get_variable_scope()
      saved_use_resource = vscope.use_resource
      saved_custom_getter = vscope.custom_getter
      def custom_getter(getter, name, *args, **kwargs):
        """Variables on TPU have a few restrictions."""
        partitioner = kwargs.get("partitioner", None)
        if partitioner is not None:
          kwargs["partitioner"] = None
          logging.warning(
              "Partitioned variables are not supported on TPU. Got "
              "`partitioner` that is %s for variable %s. "
              "Setting `partitioner` to `None`.", partitioner, name)
        if saved_custom_getter is None:
          return getter(name, *args, **kwargs)
        else:
          return saved_custom_getter(getter, name, *args, **kwargs)
      vscope.set_use_resource(True)
      vscope.set_custom_getter(custom_getter)
      outputs = computation(*computation_inputs)
      # NOTE(review): the vscope overrides are restored only on the success
      # path; if `computation` raises, use_resource/custom_getter stay
      # overridden. Presumably acceptable since graph construction aborts.
      vscope.set_use_resource(saved_use_resource)
      vscope.set_custom_getter(saved_custom_getter)
      outputs_is_flat = xla.is_flat(outputs)
      if outputs_is_flat:
        output_tensors, control_deps, pack_template = _postprocess_flat_outputs(
            outputs)
      else:
        output_tensors, control_deps, pack_template = (
            _postprocess_non_flat_outputs(outputs))
      # tensor_tracer imports tpu.py. Local import to tensor_tracer to avoid
      # import-cycle
      if typing.TYPE_CHECKING:
        tensor_tracer = Any
      else:
        # pylint: disable=g-import-not-at-top
        from tensorflow.python.tpu import tensor_tracer
        # pylint: enable=g-import-not-at-top
      if tensor_tracer.TensorTracer.is_enabled():
        tt = tensor_tracer.TensorTracer()
        output_tensors = tt.trace_tpu(ops.get_default_graph(),
                                      output_tensors, control_deps,
                                      num_replicas)
      context.ExitResult(output_tensors)
  finally:
    context.report_unsupported_operations()
    context.Exit()
    host_compute_core = context.HostComputeCore()
  if host_compute_core:
    attr_value = attr_value_pb2.AttrValue()
    attr_value.list.s.extend(compat.as_bytes(x) for x in host_compute_core)
    metadata._set_attr("host_compute_core", attr_value)  # pylint: disable=protected-access
  with ops.control_dependencies([metadata]):
    if use_tpu:
      compile_status = tpu_ops.tpu_compilation_result()
      op = compile_status.op
      attr_value = attr_value_pb2.AttrValue(s=compat.as_bytes(cluster_name))
      op._set_attr(_TPU_COMPILATION_STATUS_ATTR, attr_value)  # pylint: disable=protected-access
    else:
      compile_status = control_flow_ops.no_op(name="compilation_status")
  if not output_tensors:
    # Returns a list of NoOps dependent on the replication Op, indexed by
    # [replica_num].
    return [
        compile_status,
        [
            control_flow_ops.group(control_deps, name="shard_%d" % i)
            for i in range(num_replicas)
        ]
    ]
  # Fan-out: Builds a TPUReplicatedOutput node for each output.
  replicated_outputs = [[] for i in range(num_replicas)]
  for i, t in enumerate(output_tensors):
    # None values returned by the computation can't be sent to
    # tpu_ops.tpu_replicated_output(), we handle them specially here. We can
    # avoid the placeholder 0 routine required on the inputs since outputs are
    # replicated per-tensor, not per-replica, so we can skip replication.
    if t is None:
      for replica in range(num_replicas):
        replicated_outputs[replica].append(None)
      continue
    # Fan-out: Builds a TPUReplicatedOutput node for each output.
    ys = tpu_ops.tpu_replicated_output(
        t, num_replicas, name="output{}".format(i))
    # Wraps the outputs in identity operators so the names of any possible
    # `fetch` nodes are preserved by the replication rewrite.
    with ops.control_dependencies(control_deps):
      for replica in range(num_replicas):
        replicated_outputs[replica].append(
            array_ops.identity(
                ys[replica], name="output_%d_shard_%d" % (i, replica)))
  replicated_outputs = [
      nest.pack_sequence_as(pack_template, replica_outs, expand_composites=True)
      for replica_outs in replicated_outputs
  ]
  return [compile_status, replicated_outputs]
def _postprocess_flat_outputs(
    outputs: Any
) -> Tuple[List[Optional[core_types.Tensor]], List[ops.Operation], List[Any]]:
  """Validates flat outputs, adds back device assignments and other attrs.
  Args:
    outputs: Output from `computation` inside `tpu.rewrite`.
  Returns:
    - Tensors extracted from outputs.
    - Operations extracted from outputs.
    - A pack template for use with nest.pack_sequence_as to pack the tensors.
  """
  # Following code segment is to preserve legacy behavior. Previously we only
  # supported flat outputs and thus for consistency it was nice to convert even
  # single element into a tuple. But now that we support arbitrary output
  # structure, this is no longer necessary.
  # TODO(b/121383831): Migrate all legacy use cases and delete this special
  # case.
  # If the computation returns `None`, make it an empty tuple.
  if outputs is None:
    outputs = tuple()
  # For legacy / backwards compatibility reasons we return a list for "flat"
  # output values (even if the user's flat return value was a different type or
  # even just a scalar value) so use nest.flatten to compute a flat list pack
  # template.
  pack_template = nest.flatten(outputs, expand_composites=False)
  # Even though outputs is already "flat", we flatten any composites so their
  # component tensors can be tagged and replicated. The pack_template will be
  # used by the caller to repack the composite tensors.
  outputs = nest.flatten(outputs, expand_composites=True)
  # Append `no_op` here so that fetching any return value of this function
  # will trigger TPUExecute node.
  outputs += (control_flow_ops.no_op(),)
  maybe_convert = lambda x: None if x is None else ops.convert_to_tensor(x)
  try:
    with ops.device(core(0)):
      outputs = [
          o if isinstance(o, ops.Operation) else maybe_convert(o)
          for o in outputs
      ]
  except Exception as e:
    raise ValueError(
        "TPU function return values must all either be Operations or "
        f"convertible to Tensors. Got error: {e}")
  # Separates the returned Operations and Tensors.
  output_operations = [o for o in outputs if isinstance(o, ops.Operation)]
  output_tensors = [o for o in outputs if not isinstance(o, ops.Operation)]
  if outputs != output_tensors + output_operations:
    raise ValueError(
        "TPU functions must return zero-or more Tensor values followed by "
        "zero or more Operations.")
  # Trim operations off the end of the pack template. output_operations has 1
  # extra element due to the no-op that is added.
  if len(output_operations) > 1:
    pack_template = pack_template[:1 - len(output_operations)]
  # Wraps outputs in Identity ops. Otherwise a replicated input copied
  # straight to an output would bypass the replicate(). This would be bad
  # because the TPUReplicatedInput/TPUReplicatedOutput operator would not
  # be rewritten away, leading to a runtime error.
  # TODO(phawkins): extend the rewrite to elide these nodes instead.
  new_output_tensors = []
  for t in output_tensors:
    if t is None:
      new_output_tensors.append(None)
      # BUGFIX: without this `continue`, a None output fell through to
      # `t.device` below and raised AttributeError. `None` outputs are
      # preserved as-is, mirroring _postprocess_non_flat_outputs.
      continue
    with ops.device(t.device if t.device else core(0)):
      o = array_ops.identity(t)
      # pylint: disable=protected-access
      o.op._set_attr("_tpu_output_identity", attr_value_pb2.AttrValue(b=True))
      # pylint: enable=protected-access
      new_output_tensors.append(o)
  return new_output_tensors, output_operations, pack_template
def _postprocess_non_flat_outputs(
    outputs: Any
) -> Tuple[List[Optional[core_types.Tensor]], List[ops.Operation], List[Any]]:
  """Validates non-flat outputs, adds back device assignments and other attrs.
  Args:
    outputs: Output from `computation` inside `tpu.rewrite`.
  Returns:
    - Tensors extracted from outputs.
    - An empty Operations list because Operations are not allowed in non-flat
      outputs.
    - A pack template for use with nest.pack_sequence_as to pack the tensors.
  """
  # Flatten output items.
  flat_outputs = nest.flatten(outputs, expand_composites=True)
  # Convert all non-None non-Operation outputs to Tensors.
  # NOTE: flat_outputs is mutated in place below.
  for i, o in enumerate(flat_outputs):
    if o is None:
      # `None` entries are preserved as-is and repacked by the caller.
      flat_outputs[i] = None
      continue
    if isinstance(o, ops.Operation):
      raise ValueError(
          "tpu.rewrite does not support Operation as return value in non-flat "
          "output structure. You can set returned Operations as control "
          "dependencies of returned Tensors so Operations are triggered when "
          f'Tensors are evaluated. Operation found: "{o.name}"')
    try:
      o = ops.convert_to_tensor(o)
    except Exception as e:
      raise ValueError(
          "TPU function return values must all either be Operations or "
          f'convertible to Tensors. Got error: "{e}"')
    # Wraps outputs in Identity ops. Otherwise a replicated input copied
    # straight to an output would bypass the replicate(). This would be bad
    # because the TPUReplicatedInput/TPUReplicatedOutput operator would not
    # be rewritten away, leading to a runtime error.
    # TODO(phawkins): extend the rewrite to elide these nodes instead.
    with ops.device(o.device if o.device else core(0)):
      # First identity carries the _tpu_output_identity tag; a second identity
      # is stored so the tagged node is not itself the fetched output.
      o = array_ops.identity(o)
      # pylint: disable=protected-access
      o.op._set_attr("_tpu_output_identity", attr_value_pb2.AttrValue(b=True))
      # pylint: enable=protected-access
      flat_outputs[i] = array_ops.identity(o)
  # All flat_outputs are Tensors, and no Operations.
  # The original (unflattened) `outputs` structure serves as the pack template.
  return flat_outputs, [], outputs
def split_compile_and_shard(
    computation: Callable[..., Any],
    inputs: Optional[List[List[Optional[core_types.Tensor]]]] = None,
    num_shards: int = 1,
    input_shard_axes: Optional[List[int]] = None,
    outputs_from_all_shards: Union[bool, List[bool]] = True,
    output_shard_axes: Optional[List[int]] = None,
    infeed_queue: Optional[tpu_feed.InfeedQueue] = None,
    device_assignment: Optional[device_assignment_lib.DeviceAssignment] = None,
    name: Optional[Text] = None,
    xla_options: Optional[XLAOptions] = None,
    ) -> Tuple[ops.Operation, List[core_types.Tensor]]:
  """Shards `computation` for parallel execution.

  `inputs` must be a list of Tensors or None (equivalent to an empty list), each
  of which has a corresponding split axis (from `input_shard_axes`). Each input
  is split into `num_shards` pieces along the corresponding axis, and
  computation is applied to each shard in parallel.

  Tensors are broadcast to all shards if they are lexically captured by
  `computation`. e.g.,

  x = tf.constant(7)
  def computation():
    return x + 3
  ... = shard(computation, ...)

  If `outputs_from_all_shards` is true, the outputs from all shards of
  `computation` are concatenated back together along their `output_shard_axes`.
  Otherwise, each output is taken from an arbitrary shard.

  Inputs and outputs of the computation must be at least rank-1 Tensors.

  Args:
    computation: A Python function that builds a computation to apply to each
      shard of the input.
    inputs: A list of input tensors or None (equivalent to an empty list). Each
      input tensor has a corresponding shard axes, given by `input_shard_axes`,
      which must have size divisible by `num_shards`.
    num_shards: The number of shards.
    input_shard_axes: A list of dimensions along which to shard `inputs`, or
      `None`. `None` means "shard all inputs along dimension 0". If not `None`,
      there must be one dimension per input.
    outputs_from_all_shards: Boolean or list of boolean. For each output, if
      `True`, outputs from all shards are concatenated along the corresponding
      `output_shard_axes` entry. Otherwise, each output is taken
      from an arbitrary shard. If the argument is a boolean, the argument's
      value is used for each output.
    output_shard_axes: A list of dimensions along which to concatenate the
      outputs of `computation`, or `None`. `None` means "concatenate all outputs
      along dimension 0". If not `None`, there must be one dimension per output.
      Ignored if `outputs_from_all_shards` is False.
    infeed_queue: If not `None`, the `InfeedQueue` to use to augment the inputs
      of `computation`.
    device_assignment: If not `None`, a `DeviceAssignment` describing the
      mapping between logical cores in the computation with physical cores in
      the TPU topology. Uses a default device assignment if `None`. The
      `DeviceAssignment` may be omitted if each shard of the computation uses
      only one core, and there is either only one shard, or the number of shards
      is equal to the number of cores in the TPU system.
    name: (Deprecated) Does nothing.
    xla_options: An instance of `tpu.XLAOptions` which indicates the options
      passed to XLA compiler. Use `None` for default options.

  Returns:
    A tuple of (compile op, [output tensors]).

  Raises:
    ValueError: If num_shards <= 0
    ValueError: If len(input_shard_axes) != len(inputs)
    ValueError: If len(output_shard_axes) != len(outputs from `computation`)
  """
  # TODO(phawkins): consider adding support for broadcasting Tensors passed as
  # inputs.

  if num_shards <= 0:
    raise ValueError(
        f"num_shards must be a positive integer. Received {num_shards}")

  inputs = [] if inputs is None else inputs
  if not isinstance(inputs, list):
    raise TypeError("tpu.shard()'s inputs must be a list of Tensors or None. "
                    f"Received {type(inputs)}")

  # Converts inputs to Tensors.
  inputs = [ops.convert_to_tensor(x) for x in inputs]

  if input_shard_axes is None:
    input_shard_axes = [0] * len(inputs)
  if len(inputs) != len(input_shard_axes):
    raise ValueError("Length of input_shard_axes must be equal to the number "
                     f"of inputs. Received {len(inputs)} inputs and "
                     f"{len(input_shard_axes)} input_shard_axes.")

  if inputs:
    # Splits the `inputs` along the corresponding `input_shard_axes`, giving
    # lists with layout [input][shard]
    split_inputs = [
        array_ops.split(x, num_shards, axis=axis)
        for (axis, x) in zip(input_shard_axes, inputs)]

    # Transposes the input lists to have layout [shard][input]
    transposed_inputs = [list(i) for i in zip(*split_inputs)]
  else:
    # Build independent empty lists per shard. `[[]] * num_shards` would alias
    # one list object across every shard, so a mutation made on behalf of one
    # shard would silently affect all of them.
    transposed_inputs = [[] for _ in range(num_shards)]

  compile_op, outputs = split_compile_and_replicate(
      computation,
      transposed_inputs,
      infeed_queue=infeed_queue,
      device_assignment=device_assignment,
      name=name,
      xla_options=xla_options)

  # There must be at least one shard since num_shards > 0.
  # TODO(b/36647078) remove disable when pylint bug is fixed.
  # pylint: disable=indexing-exception
  if isinstance(outputs[0], ops.Operation):
    # pylint: enable=indexing-exception
    # There were no outputs from the computation and replicate returned a list
    # of NoOps with control dependencies on the computation. Return the first
    # one so it can be used as a control dependency or fetch node.
    # TODO(b/36647078) remove disable when pylint bug is fixed.
    # pylint: disable=indexing-exception
    return compile_op, [outputs[0]]
    # pylint: enable=indexing-exception

  # TODO(b/36647078) remove disable when pylint bug is fixed.
  # pylint: disable=indexing-exception
  num_outputs = len(outputs[0])
  # pylint: enable=indexing-exception

  if output_shard_axes is None:
    output_shard_axes = [0] * num_outputs
  if num_outputs != len(output_shard_axes):
    raise ValueError("Length of output_shard_axes must be equal to the number "
                     f"of outputs. Received {num_outputs} outputs "
                     f"and {len(output_shard_axes)} output_shard_axes.")

  if isinstance(outputs_from_all_shards, bool):
    outputs_from_all_shards = [outputs_from_all_shards] * num_outputs

  if num_outputs != len(outputs_from_all_shards):
    raise ValueError(
        "Length of outputs_from_all_shards must be equal to the number of "
        f"outputs. Received {num_outputs} outputs and "
        f"{len(outputs_from_all_shards)} outputs_from_all_shards.")

  results = []
  for (axis, all_shards, x) in zip(output_shard_axes, outputs_from_all_shards,
                                   zip(*outputs)):
    if all_shards:
      # Concatenate all of the outputs together (use stack for scalars).
      shape = x[0].shape
      is_scalar = shape is not None and (shape.ndims == 0)
      results.append((array_ops.stack(list(x)) if is_scalar
                      else array_ops.concat(list(x), axis=axis)))
    else:
      # TODO(phawkins): use a smarter policy, e.g., round-robin across shards.
      results.append(x[0])

  return compile_op, results
@tf_export(v1=["tpu.shard"])
def shard(
    computation: Callable[..., Any],
    inputs: Optional[List[core_types.Tensor]] = None,
    num_shards: int = 1,
    input_shard_axes: Optional[List[int]] = None,
    outputs_from_all_shards: Union[bool, List[bool]] = True,
    output_shard_axes: Optional[List[int]] = None,
    infeed_queue: Optional[tpu_feed.InfeedQueue] = None,
    device_assignment: Optional[device_assignment_lib.DeviceAssignment] = None,
    name: Optional[Text] = None,
    xla_options: Optional[XLAOptions] = None) -> List[core_types.Tensor]:
  """Shards `computation` along `input_shard_axes` and runs shards in parallel.

  Each entry of `inputs` (a list of Tensors, or None for no inputs) is split
  into `num_shards` pieces along its shard axis, and `computation` is applied
  to each shard in parallel. Tensors lexically captured by `computation` are
  broadcast to every shard, e.g.,

  x = tf.constant(7)
  def computation():
    return x + 3
  ... = shard(computation, ...)

  TODO(phawkins): consider adding support for broadcasting Tensors passed
  as inputs.

  When `outputs_from_all_shards` is true for an output, the per-shard results
  are concatenated back together along the matching `output_shard_axes` entry;
  otherwise that output is taken from an arbitrary shard. Inputs and outputs
  of the computation must be at least rank-1 Tensors.

  Args:
    computation: A Python function that builds the per-shard computation.
    inputs: A list of input tensors or None (equivalent to an empty list).
      Each tensor's shard axis (from `input_shard_axes`) must have size
      divisible by `num_shards`.
    num_shards: The number of shards.
    input_shard_axes: One split dimension per input, or `None` to split every
      input along dimension 0.
    outputs_from_all_shards: A boolean (applied to every output) or one boolean
      per output. True concatenates that output across shards; False takes it
      from an arbitrary shard.
    output_shard_axes: One concatenation dimension per output, or `None` for
      dimension 0 everywhere. Ignored when `outputs_from_all_shards` is False.
    infeed_queue: If not `None`, the `InfeedQueue` used to augment the inputs
      of `computation`.
    device_assignment: Optional `DeviceAssignment` describing the mapping
      between logical cores in the computation and physical TPU cores; a
      default assignment is used when `None`. It may be omitted if each shard
      uses only one core and either there is a single shard or the number of
      shards equals the number of cores in the TPU system.
    name: (Deprecated) Does nothing.
    xla_options: An instance of `tpu.XLAOptions` passed to the XLA compiler,
      or `None` for default options.

  Returns:
    A list of output tensors.

  Raises:
    ValueError: If num_shards <= 0
    ValueError: If len(input_shard_axes) != len(inputs)
    ValueError: If len(output_shard_axes) != len(outputs from `computation`)
  """
  # Delegate the real work and keep only the sharded outputs; the compile op
  # is intentionally discarded by this convenience wrapper.
  _, sharded_outputs = split_compile_and_shard(
      computation,
      inputs=inputs,
      num_shards=num_shards,
      input_shard_axes=input_shard_axes,
      outputs_from_all_shards=outputs_from_all_shards,
      output_shard_axes=output_shard_axes,
      infeed_queue=infeed_queue,
      device_assignment=device_assignment,
      name=name,
      xla_options=xla_options)
  return sharded_outputs
@tf_export(v1=["tpu.batch_parallel"])
def batch_parallel(
    computation: Callable[..., Any],
    inputs: Optional[List[List[Optional[core_types.Tensor]]]] = None,
    num_shards: int = 1,
    infeed_queue: Optional[tpu_feed.InfeedQueue] = None,
    device_assignment: Optional[device_assignment_lib.DeviceAssignment] = None,
    name: Optional[Text] = None,
    xla_options: Optional[XLAOptions] = None):
  """Shards `computation` along the batch (0-th) dimension.

  Convenience wrapper around shard(): every input is split into `num_shards`
  pieces along dimension 0, `computation` runs once per shard, and the outputs
  from all shards are concatenated back together along their 0-th dimension.

  Tensors lexically captured by `computation` are broadcast to every shard,
  e.g.,

  x = tf.constant(7)
  def computation():
    return x + 3
  ... = shard(computation, ...)

  Inputs and outputs of the computation must be at least rank-1 Tensors.

  Args:
    computation: A Python function that builds the per-shard computation.
    inputs: A list of input tensors or None (equivalent to an empty list). The
      0-th dimension of each Tensor must have size divisible by `num_shards`.
    num_shards: The number of shards.
    infeed_queue: If not `None`, the `InfeedQueue` from which to append a tuple
      of arguments as inputs to `computation`.
    device_assignment: Optional `DeviceAssignment` describing the mapping
      between logical cores in the computation and physical TPU cores; a
      default assignment is used when `None`. It may be omitted if each shard
      uses only one core and either there is a single shard or the number of
      shards equals the number of cores in the TPU system.
    name: (Deprecated) Does nothing.
    xla_options: An instance of `tpu.XLAOptions` passed to the XLA compiler,
      or `None` for default options.

  Returns:
    A list of output tensors.

  Raises:
    ValueError: If `num_shards <= 0`
  """
  # shard() defaults input_shard_axes/output_shard_axes to dimension 0, which
  # is exactly the batch-parallel behavior this wrapper promises.
  batched_outputs = shard(
      computation,
      inputs=inputs,
      num_shards=num_shards,
      infeed_queue=infeed_queue,
      device_assignment=device_assignment,
      name=name,
      xla_options=xla_options)
  return batched_outputs
@tf_export(v1=["tpu.rewrite"])
def rewrite(
    computation: Callable[..., Any],
    inputs: Optional[List[List[Optional[core_types.Tensor]]]] = None,
    infeed_queue: Optional[tpu_feed.InfeedQueue] = None,
    device_assignment: Optional[device_assignment_lib.DeviceAssignment] = None,
    name: Optional[Text] = None,
    xla_options: Optional[XLAOptions] = None) -> Any:
  """Rewrites `computation` so that it executes on a TPU system.

  Args:
    computation: A Python function building the computation to rewrite. With n
      inputs, `inputs` should be a list of n values. The function may return
      both tensors and operations; tensors must precede operations in the
      returned list, and the rewrite's return value contains tensors
      corresponding to the computation's tensor outputs. Every `Operation`
      created inside `computation` is executed whenever any returned output
      tensor is evaluated, not just the ones returned.
    inputs: A list of input tensors or `None` (equivalent to an empty list).
      Inputs may be nested structures of tensor-convertible values. Note that
      an N-dimension list of compatible values becomes an N-dimension list of
      scalar tensors rather than a single rank-N tensor; convert part of the
      inputs with `tf.convert_to_tensor` first if you need different behavior.
    infeed_queue: If not `None`, the `InfeedQueue` from which to append a tuple
      of arguments as inputs to `computation`.
    device_assignment: if not `None`, a `DeviceAssignment` describing the
      mapping between logical cores in the computation with physical cores in
      the TPU topology. May be omitted for a single-core computation, in which
      case the core attached to task 0, TPU device 0 is used.
    name: (Deprecated) Does nothing.
    xla_options: An instance of `tpu.XLAOptions` passed to the XLA compiler,
      or `None` for default options.

  Returns:
    Same data structure as if computation(*inputs) is called directly, with
    some exceptions for correctness:
      1) None output: a NoOp is returned which control-depends on computation.
      2) Single value output: a tuple containing the value is returned.
      3) Operation-only outputs: a NoOp is returned which control-depends on
         computation.
    TODO(b/121383831): Investigate into removing these special cases.
  """
  # replicate() expects one inputs-list per replica; wrap the single replica's
  # inputs and unwrap its outputs.
  replica_inputs = [inputs] if inputs is not None else None
  replicated_outputs = replicate(
      computation,
      replica_inputs,
      infeed_queue=infeed_queue,
      device_assignment=device_assignment,
      name=name,
      xla_options=xla_options)
  # TODO(b/36647078) remove disable when pylint bug is fixed.
  return replicated_outputs[0]  # pylint: disable=indexing-exception
# Operations that indicate some error in the user's inference graph.
# These are stateful variable ops; `_TPUInferenceContext` rejects them because
# variables should be accessed via variable_scope during inference rewriting
# (see the error raised in `_TPUInferenceContext._AddOpInternal`).
# NOTE: a set literal avoids building and discarding an intermediate list.
_DENYLISTED_INFERENCE_OPS = {
    "ReadVariableOp",
    "AssignVariableOp",
    "AssignAddVariableOp",
    "AssignSubVariableOp",
    "VarHandleOp",
    "Variable",
    "VariableV2",
}
def under_tpu_inference_context() -> bool:
  """Returns True if the caller is currently under a `_TPUInferenceContext`."""
  g = ops.get_default_graph()
  while g:
    # Walk the chain of control-flow contexts attached to this graph.
    ctx = g._get_control_flow_context()  # pylint: disable=protected-access
    while ctx is not None:
      if isinstance(ctx, _TPUInferenceContext):
        return True
      ctx = ctx.outer_context
    # Not found in this graph; continue the search in the enclosing graph.
    if isinstance(g, function._FuncGraph):  # pylint: disable=protected-access
      g = g._outer_graph  # pylint: disable=protected-access
    elif isinstance(g, func_graph.FuncGraph):
      g = g.outer_graph
    else:
      return False
class _TPUInferenceContext(control_flow_ops.XLAControlFlowContext):
  """A `ControlFlowContext` for nodes inside a TPU inference computation.

  Marks the enclosed ops as belonging to a tpu.rewrite_for_inference()
  computation and, when `check_ops` is set, rejects stateful variable ops
  (see `_DENYLISTED_INFERENCE_OPS`).
  """

  def __init__(self, name: Text, check_ops: bool = True):
    super().__init__()
    self._name = name
    self._check_ops = check_ops

  def AddOp(self, op):
    self._AddOpInternal(op)

  def _AddOpInternal(self, op):
    # pylint: disable=protected-access
    if self._check_ops and op.type in _DENYLISTED_INFERENCE_OPS:
      raise NotImplementedError(
          f"Operation of type {op.type} ({op.name}) is not supported on the "
          "TPU for inference. Execution will fail if this op is used in the "
          "graph. Make sure your variables are using variable_scope.")
    outer = self._outer_context
    if outer:
      outer.AddInnerOp(op)

  def AddValue(self, val):
    # Defer to the enclosing context when there is one.
    if self._outer_context:
      return self._outer_context.AddValue(val)
    return val

  def AddInnerOp(self, op):
    self._AddOpInternal(op)

  @property
  def grad_state(self):
    return None
def validate_inference_rewrite_for_variables(graph: ops.Graph):
  """Verifies that rewrite_for_inference() inserted GuaranteeConst ops.

  rewrite_for_inference() is supposed to append GuaranteeConstOps after
  ReadVariableOps, but that mechanism only works when variables are created
  and accessed through tf.compat.v1.get_variable(). Call this immediately
  after tpu.rewrite_for_inference() to confirm the rewrite took effect.

  Typical usages:
    tpu.validate_inference_rewrite_for_variables(
        tf.compat.v1.get_default_graph())

    tpu.validate_inference_rewrite_for_variables(sess.graph)

  Args:
    graph: The graph which needs to be validated.
  Raises:
    RuntimeError: if validation failed.
  """
  for op in graph.get_operations():
    if op.type == "GuaranteeConst":
      # At least one rewrite took effect; nothing further to check.
      return
  raise RuntimeError(
      "No GuaranteeConst ops found in the graph after running "
      "tpu.rewrite_for_inference(...). Please check that you are using "
      "tf.get_variable() to create and access variables in your tpu "
      "computation.")
def rewrite_for_inference(
    computation: Callable[..., Any],
    inputs: Optional[List[core_types.Tensor]] = None,
    infeed_queue: Optional[tpu_feed.InfeedQueue] = None,
    device_assignment: Optional[device_assignment_lib.DeviceAssignment] = None,
    name: Optional[Text] = None) -> List[core_types.Tensor]:
  """Rewrites `computation` for inference on a TPU system.

  Other than 'rewriting' the computation to run on a TPU, if using variables
  in your computation, it moves the ReadVariableOps outside the TPU
  computation, and adds GuaranteeConst ops just after the ReadVariableOps.
  This mechanism works only if you are using tf.compat.v1.get_variable() to
  create and access variables in your tpu computation. You can validate
  whether this worked, by calling validate_inference_rewrite_for_variables()
  method immediately after this method to check whether GuaranteeConstOps
  where added to the graph.

  Args:
    computation: A Python function that builds a computation to apply to the
      input. If the function takes n inputs, 'inputs' should be a list of n
      tensors. If the function returns m outputs, rewrite will return a list of
      m tensors.
    inputs: A list of input tensors or `None` (equivalent to an empty list).
    infeed_queue: If not `None`, the `InfeedQueue` from which to append a tuple
      of arguments as inputs to `computation`.
    device_assignment: if not `None`, a `DeviceAssignment` describing the
      mapping between logical cores in the computation with physical cores in
      the TPU topology. May be omitted for a single-core computation, in which
      case the core attached to task 0, TPU device 0 is used.
    name: The name of the operator.
  Returns:
    A list of output tensors.
  """

  def guarantee_const_getter(getter, name, *args, **kwargs):
    # Wrap each variable read in a GuaranteeConst so the value is treated as
    # constant for the duration of the computation.
    with ops.control_dependencies(None):
      return array_ops.guarantee_const(
          getter(name, *args, **kwargs), name=name + "/GuaranteeConst")

  def wrapped_computation(*args, **kwargs):
    """Execute computation under `_TPUInferenceContext`."""
    context = _TPUInferenceContext(
        name=ops.get_default_graph().unique_name("rewrite_for_inference"))
    try:
      context.Enter()

      vscope = variable_scope.get_variable_scope()
      prev_custom_getter = vscope.custom_getter
      prev_caching_device = vscope.caching_device
      vscope.set_custom_getter(guarantee_const_getter)
      vscope.set_caching_device(lambda op: op.device)

      try:
        result = computation(*args, **kwargs)
      finally:
        # Restore the variable scope even when `computation` raises; otherwise
        # the GuaranteeConst getter and caching device would leak into
        # unrelated graph construction after the failure.
        vscope.set_custom_getter(prev_custom_getter)
        vscope.set_caching_device(prev_caching_device)
    finally:
      context.Exit()
    return result

  # pylint: disable=undefined-variable
  return rewrite(
      wrapped_computation,
      inputs=inputs,
      infeed_queue=infeed_queue,
      device_assignment=device_assignment,
      name=name)
  # pylint: enable=undefined-variable
def prune_unconnected_ops_from_xla(prune_graph: ops.Graph):
  """Prunes unconnected ops as listed in _UNCONNECTED_OPS_TO_PRUNE.

  Args:
    prune_graph: A tensorflow graph from which we wish to prune unconnected ops
      as listed in _UNCONNECTED_OPS_TO_PRUNE.  In general, these ops should have
      no inputs and no consumers. These can often be left behind due to graph
      construction rewiring (for instance TF-Hub). While they never execute,
      they will cause XLA compile to fail so we strip them from XLA compile by
      removing the tpu_replicate attribute.
  """
  # Scan over the top level graph and all function graphs.
  for graph in [prune_graph] + list(prune_graph._functions.values()):  # pylint: disable=protected-access
    if not isinstance(graph, ops.Graph):
      continue
    for op in graph.get_operations():
      if op.type not in _UNCONNECTED_OPS_TO_PRUNE:
        continue
      # Only prune the op if none of its outputs are consumed by anything.
      outputs_consumed = any(output.consumers() for output in op.outputs)
      if not outputs_consumed:
        logging.info(
            "Pruning OP %s of type %s from XLA Compile due to "
            "it being disconnected.", op.name, op.type)
        op._clear_attr(_TPU_REPLICATE_ATTR)  # pylint: disable=protected-access
|
frreiss/tensorflow-fred
|
tensorflow/python/tpu/tpu.py
|
Python
|
apache-2.0
| 97,112 | 0.006683 |
# -*- coding: utf-8 -*-
# Copyright (c) 2018, Frappe Technologies and Contributors
# See license.txt
from __future__ import unicode_literals
import frappe
import unittest
class TestViewLog(unittest.TestCase):
	"""Checks that opening a document records the viewer in View Log."""

	def tearDown(self):
		# Restore the session user so later tests are unaffected by the
		# user switch performed below.
		frappe.set_user('Administrator')

	def test_if_user_is_added(self):
		event = frappe.get_doc({
			'doctype': 'Event',
			'subject': 'test event for view logs',
			'starts_on': '2018-06-04 14:11:00',
			'event_type': 'Public'
		}).insert()

		frappe.set_user('test@example.com')

		from frappe.desk.form.load import getdoc

		# Loading the form is what should create the View Log entry.
		getdoc('Event', event.name)

		viewed_by = frappe.get_value(
			doctype="View Log",
			filters={
				"reference_doctype": "Event",
				"reference_name": event.name
			},
			fieldname=['viewed_by']
		)

		self.assertEqual('test@example.com', viewed_by)
		self.assertNotEqual('test1@example.com', viewed_by)
|
vjFaLk/frappe
|
frappe/core/doctype/view_log/test_view_log.py
|
Python
|
mit
| 850 | 0.031765 |
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        """Widen Student.student_id to a unique 12-character field."""
        # Changing field 'Student.student_id'
        db.alter_column('publications_student', 'student_id', self.gf('django.db.models.fields.CharField')(unique=True, max_length=12))
def backwards(self, orm):
# Changing field 'Student.student_id'
db.alter_column('publications_student', 'student_id', self.gf('django.db.models.fields.CharField')(max_length=8, unique=True))
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'cms.cmsplugin': {
'Meta': {'object_name': 'CMSPlugin'},
'changed_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'creation_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 7, 3, 0, 0)'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.CMSPlugin']", 'null': 'True', 'blank': 'True'}),
'placeholder': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Placeholder']", 'null': 'True'}),
'plugin_type': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
'position': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
},
'cms.page': {
'Meta': {'ordering': "('site', 'tree_id', 'lft')", 'object_name': 'Page'},
'changed_by': ('django.db.models.fields.CharField', [], {'max_length': '70'}),
'changed_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.CharField', [], {'max_length': '70'}),
'creation_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'in_navigation': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'limit_visibility_in_menu': ('django.db.models.fields.SmallIntegerField', [], {'default': 'None', 'null': 'True', 'db_index': 'True', 'blank': 'True'}),
'login_required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'moderator_state': ('django.db.models.fields.SmallIntegerField', [], {'default': '1', 'blank': 'True'}),
'navigation_extenders': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '80', 'null': 'True', 'blank': 'True'}),
'page_flags': ('django.db.models.fields.TextField', [], {'null': True, 'blank': True}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['cms.Page']"}),
'placeholders': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['cms.Placeholder']", 'symmetrical': 'False'}),
'publication_date': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'publication_end_date': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'published': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'publisher_is_draft': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'publisher_public': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'publisher_draft'", 'unique': 'True', 'null': 'True', 'to': "orm['cms.Page']"}),
'publisher_state': ('django.db.models.fields.SmallIntegerField', [], {'default': '0', 'db_index': 'True'}),
'reverse_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '40', 'null': 'True', 'blank': 'True'}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sites.Site']"}),
'soft_root': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'template': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
},
'cms.placeholder': {
'Meta': {'object_name': 'Placeholder'},
'default_width': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'slot': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'})
},
'contacts_and_people.building': {
'Meta': {'ordering': "('site', 'street', 'number', 'name')", 'object_name': 'Building'},
'access_and_parking': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'building_access_and_parking'", 'null': 'True', 'to': "orm['cms.Placeholder']"}),
'additional_street_address': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'building_description'", 'null': 'True', 'to': "orm['cms.Placeholder']"}),
'getting_here': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'getting_here'", 'null': 'True', 'to': "orm['cms.Placeholder']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['filer.Image']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'latitude': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'longitude': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'map': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'number': ('django.db.models.fields.CharField', [], {'max_length': '10', 'blank': 'True'}),
'postcode': ('django.db.models.fields.CharField', [], {'max_length': '9', 'null': 'True', 'blank': 'True'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'place'", 'on_delete': 'models.PROTECT', 'to': "orm['contacts_and_people.Site']"}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'street': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'summary': ('django.db.models.fields.TextField', [], {'default': "''", 'max_length': '256'}),
'zoom': ('django.db.models.fields.IntegerField', [], {'default': '17', 'null': 'True', 'blank': 'True'})
},
'contacts_and_people.entity': {
'Meta': {'ordering': "['tree_id', 'lft']", 'object_name': 'Entity', '_ormbases': ['contacts_and_people.EntityLite']},
'abstract_entity': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'access_note': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'auto_contacts_page': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'auto_news_page': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'auto_publications_page': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'auto_vacancies_page': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'building': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contacts_and_people.Building']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'building_recapitulates_entity_name': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'contacts_page_intro': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'contacts_page_intro'", 'null': 'True', 'to': "orm['cms.Placeholder']"}),
'contacts_page_menu_title': ('django.db.models.fields.CharField', [], {'default': "'Contacts & people'", 'max_length': '50'}),
'display_parent': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'entitylite_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['contacts_and_people.EntityLite']", 'unique': 'True', 'primary_key': 'True'}),
'external_url': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'entity_item'", 'null': 'True', 'on_delete': 'models.PROTECT', 'to': "orm['links.ExternalLink']"}),
'image': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['filer.Image']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'news_page_intro': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'news_page_intro'", 'null': 'True', 'to': "orm['cms.Placeholder']"}),
'news_page_menu_title': ('django.db.models.fields.CharField', [], {'default': "'News & events'", 'max_length': '50'}),
'parent': ('mptt.fields.TreeForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['contacts_and_people.Entity']"}),
'precise_location': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'publications_page_menu_title': ('django.db.models.fields.CharField', [], {'default': "'Publications'", 'max_length': '50'}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'short_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '60', 'blank': 'True'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'vacancies_page_intro': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'vacancies_page_intro'", 'null': 'True', 'to': "orm['cms.Placeholder']"}),
'vacancies_page_menu_title': ('django.db.models.fields.CharField', [], {'default': "'Vacancies & studentships'", 'max_length': '50'}),
'website': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'entity'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['cms.Page']", 'blank': 'True', 'unique': 'True'})
},
'contacts_and_people.entitylite': {
'Meta': {'object_name': 'EntityLite'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'contacts_and_people.membership': {
'Meta': {'ordering': "('-importance_to_entity', 'person__surname')", 'object_name': 'Membership'},
'display_role': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'display_roles'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['contacts_and_people.Membership']"}),
'entity': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'members'", 'to': "orm['contacts_and_people.Entity']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'importance_to_entity': ('django.db.models.fields.IntegerField', [], {'default': '1', 'null': 'True', 'blank': 'True'}),
'importance_to_person': ('django.db.models.fields.IntegerField', [], {'default': '1', 'null': 'True', 'blank': 'True'}),
'key_contact': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'person': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'member_of'", 'to': "orm['contacts_and_people.Person']"}),
'role': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'})
},
'contacts_and_people.person': {
'Meta': {'ordering': "['surname', 'given_name', 'user']", 'object_name': 'Person', '_ormbases': ['contacts_and_people.PersonLite']},
'access_note': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'building': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contacts_and_people.Building']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'data_feed_locked': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'description': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Placeholder']", 'null': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'entities': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'people'", 'to': "orm['contacts_and_people.Entity']", 'through': "orm['contacts_and_people.Membership']", 'blank': 'True', 'symmetrical': 'False', 'null': 'True'}),
'external_url': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'person_item'", 'null': 'True', 'on_delete': 'models.PROTECT', 'to': "orm['links.ExternalLink']"}),
'image': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['filer.Image']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'institutional_username': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'override_entity': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'people_override'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['contacts_and_people.Entity']"}),
'personlite_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['contacts_and_people.PersonLite']", 'unique': 'True', 'primary_key': 'True'}),
'please_contact': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'contact_for'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['contacts_and_people.Person']"}),
'precise_location': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '60', 'blank': 'True'}),
'staff_id': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'person_user'", 'null': 'True', 'on_delete': 'models.PROTECT', 'to': "orm['auth.User']", 'blank': 'True', 'unique': 'True'})
},
'contacts_and_people.personlite': {
'Meta': {'object_name': 'PersonLite'},
'given_name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'middle_names': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'surname': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'title': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contacts_and_people.Title']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'})
},
'contacts_and_people.phonecontact': {
'Meta': {'ordering': "('label',)", 'object_name': 'PhoneContact'},
'area_code': ('django.db.models.fields.CharField', [], {'default': "'029'", 'max_length': '5'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'country_code': ('django.db.models.fields.CharField', [], {'default': "'44'", 'max_length': '5'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'internal_extension': ('django.db.models.fields.CharField', [], {'max_length': '6', 'null': 'True', 'blank': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'number': ('django.db.models.fields.CharField', [], {'max_length': '12'}),
'object_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'})
},
'contacts_and_people.site': {
'Meta': {'ordering': "('country', 'site_name', 'post_town')", 'object_name': 'Site'},
'country': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'description': ('django.db.models.fields.TextField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'post_town': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'site_name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50'})
},
'contacts_and_people.title': {
'Meta': {'ordering': "['title']", 'object_name': 'Title'},
'abbreviation': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '20'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'filer.file': {
'Meta': {'object_name': 'File'},
'_file_size': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'file': ('django.db.models.fields.files.FileField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'folder': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'all_files'", 'null': 'True', 'to': "orm['filer.Folder']"}),
'has_all_mandatory_data': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_public': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'blank': 'True'}),
'original_filename': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'owned_files'", 'null': 'True', 'to': "orm['auth.User']"}),
'polymorphic_ctype': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'polymorphic_filer.file_set'", 'null': 'True', 'to': "orm['contenttypes.ContentType']"}),
'sha1': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '40', 'blank': 'True'}),
'uploaded_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
},
'filer.folder': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('parent', 'name'),)", 'object_name': 'Folder'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'filer_owned_folders'", 'null': 'True', 'to': "orm['auth.User']"}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['filer.Folder']"}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'uploaded_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
},
'filer.image': {
'Meta': {'object_name': 'Image', '_ormbases': ['filer.File']},
'_height': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'_width': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'author': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'date_taken': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'default_alt_text': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'default_caption': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'file_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['filer.File']", 'unique': 'True', 'primary_key': 'True'}),
'must_always_publish_author_credit': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'must_always_publish_copyright': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'subject_location': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '64', 'null': 'True', 'blank': 'True'})
},
'links.externallink': {
'Meta': {'ordering': "['title']", 'object_name': 'ExternalLink'},
'description': ('django.db.models.fields.TextField', [], {'max_length': '256', 'blank': 'True'}),
'external_site': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'links'", 'null': 'True', 'on_delete': 'models.PROTECT', 'to': "orm['links.ExternalSite']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'kind': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'links'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['links.LinkType']"}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'url': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'links.externalsite': {
'Meta': {'ordering': "['domain']", 'object_name': 'ExternalSite'},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['links.ExternalSite']"}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'site': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
},
'links.linktype': {
'Meta': {'object_name': 'LinkType'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'scheme': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50'})
},
'publications.authored': {
'Meta': {'object_name': 'Authored'},
'bibliographic_record': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'authored'", 'to': "orm['publications.BibliographicRecord']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_a_favourite': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'publication': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'authored'", 'to': "orm['publications.Publication']"}),
'researcher': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'authored'", 'to': "orm['publications.Researcher']"}),
'reverse_sort_cue': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
'visible': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'publications.bibliographicrecord': {
'Meta': {'ordering': "['-publication_date']", 'object_name': 'BibliographicRecord'},
'abstract': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'associated_authors': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'authors': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'awarded_date': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'begin_page': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'book_author_type': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'commissioning_body': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'confidential': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'data_source': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'doi': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'edition': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'editors': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'end_page': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'filed_date': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'finish_date': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'first_author': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'id_at_source': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
'isbn_10': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'isbn_13': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'issn': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'issue': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'journal': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'journal_article_type': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'keywords': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'language': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'last_author': ('django.db.models.fields.TextField', [], {}),
'location': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'medium': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'name_of_conference': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'number': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'number_of_authors': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True'}),
'number_of_pages': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'number_of_pieces': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'parent_title': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'patent_number': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'patent_status': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'pii': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'place_of_publication': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'publication': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'bibliographic_records'", 'to': "orm['publications.Publication']"}),
'publication_date': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'publication_status': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'publisher': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'reference_count': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'series': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'start_date': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'times_cited': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'title': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'verification_status': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'version': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'volume': ('django.db.models.fields.TextField', [], {'null': 'True'})
},
'publications.bibliourl': {
'Meta': {'object_name': 'BiblioURL'},
'bibliographic_record': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'urls'", 'to': "orm['publications.BibliographicRecord']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'link': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'})
},
'publications.publication': {
'Meta': {'object_name': 'Publication'},
'created_when': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'guid': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_modified_when': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'needs_refetch': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'new_id': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'public_dspace_handle': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'})
},
'publications.publicationsplugin': {
'Meta': {'object_name': 'PublicationsPlugin', 'db_table': "'cmsplugin_publicationsplugin'", '_ormbases': ['cms.CMSPlugin']},
'cmsplugin_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['cms.CMSPlugin']", 'unique': 'True', 'primary_key': 'True'}),
'entity': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'publicationsplugin_plugin'", 'null': 'True', 'to': "orm['contacts_and_people.Entity']"}),
'favourites_only': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'format': ('django.db.models.fields.CharField', [], {'default': "'long'", 'max_length': '25'}),
'group_dates': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'heading_level': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '3'}),
'limit_to': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '5', 'null': 'True', 'blank': 'True'}),
'publications_heading_text': ('django.db.models.fields.CharField', [], {'default': "'Publications'", 'max_length': '50'})
},
'publications.researcher': {
'Meta': {'object_name': 'Researcher'},
'description': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'research_description'", 'null': 'True', 'to': "orm['cms.Placeholder']"}),
'person': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['contacts_and_people.Person']", 'unique': 'True', 'primary_key': 'True'}),
'publishes': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'symplectic_access': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'symplectic_id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'null': 'True', 'blank': 'True'}),
'symplectic_int_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'synopsis': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'research_synopsis'", 'null': 'True', 'to': "orm['cms.Placeholder']"})
},
'publications.student': {
'Meta': {'object_name': 'Student'},
'completed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'programme': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'researcher': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['publications.Researcher']", 'unique': 'True', 'primary_key': 'True'}),
'start_date': ('django.db.models.fields.DateField', [], {}),
'student_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '12'}),
'supervisors': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['publications.Supervisor']", 'through': "orm['publications.Supervision']", 'symmetrical': 'False'}),
'thesis': ('django.db.models.fields.CharField', [], {'max_length': '512'})
},
'publications.supervision': {
'Meta': {'object_name': 'Supervision'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'student': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['publications.Student']"}),
'supervisor': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['publications.Supervisor']"})
},
'publications.supervisor': {
'Meta': {'object_name': 'Supervisor'},
'researcher': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['publications.Researcher']", 'unique': 'True', 'primary_key': 'True'})
},
'sites.site': {
'Meta': {'ordering': "('domain',)", 'object_name': 'Site', 'db_table': "'django_site'"},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
}
}
complete_apps = ['publications']
|
evildmp/arkestra-publications
|
publications/migrations/0003_auto__chg_field_student_student_id.py
|
Python
|
bsd-2-clause
| 40,862 | 0.007905 |
# -*- coding: utf-8 -*-
"""Composing task work-flows.
.. seealso:
You should import these from :mod:`celery` and not this module.
"""
from __future__ import absolute_import, unicode_literals
import itertools
import operator
from collections import deque
from copy import deepcopy
from functools import partial as _partial
from functools import reduce
from operator import itemgetter
from kombu.utils.functional import fxrange, reprcall
from kombu.utils.objects import cached_property
from kombu.utils.uuid import uuid
from vine import barrier
from celery._state import current_app
from celery.five import PY3, python_2_unicode_compatible
from celery.local import try_import
from celery.result import GroupResult, allow_join_result
from celery.utils import abstract
from celery.utils.collections import ChainMap
from celery.utils.functional import _regen
from celery.utils.functional import chunks as _chunks
from celery.utils.functional import (is_list, maybe_list, regen,
seq_concat_item, seq_concat_seq)
from celery.utils.objects import getitem_property
from celery.utils.text import remove_repeating_from_task, truncate
try:
from collections.abc import MutableSequence
except ImportError:
# TODO: Remove this when we drop Python 2.7 support
from collections import MutableSequence
__all__ = (
'Signature', 'chain', 'xmap', 'xstarmap', 'chunks',
'group', 'chord', 'signature', 'maybe_signature',
)
# json in Python 2.7 borks if dict contains byte keys.
JSON_NEEDS_UNICODE_KEYS = PY3 and not try_import('simplejson')
def maybe_unroll_group(group):
    """Return the sole task of a single-member group, else the group itself."""
    # Issue #1656: a group of exactly one task collapses to that task.
    tasks = group.tasks
    try:
        size = len(tasks)
    except TypeError:
        # Lazily-built task sequences don't support len(); ask the
        # iterator for a length hint instead of consuming it outright.
        try:
            size = tasks.__length_hint__()
        except (AttributeError, TypeError):
            return group
        return list(tasks)[0] if size == 1 else group
    return tasks[0] if size == 1 else group
def task_name_from(task):
    """Return the name of a task instance/class, or pass a name through."""
    try:
        return task.name
    except AttributeError:
        # Already a task name (string) rather than a task object.
        return task
def _upgrade(fields, sig):
"""Used by custom signatures in .from_dict, to keep common fields."""
sig.update(chord_size=fields.get('chord_size'))
return sig
@abstract.CallableSignature.register
@python_2_unicode_compatible
class Signature(dict):
    """Task Signature.

    Class that wraps the arguments and execution options
    for a single task invocation.

    Used as the parts in a :class:`group` and other constructs,
    or to pass tasks around as callbacks while being compatible
    with serializers with a strict type subset.

    Signatures can also be created from tasks:

    - Using the ``.signature()`` method that has the same signature
      as ``Task.apply_async``:

        .. code-block:: pycon

            >>> add.signature(args=(1,), kwargs={'kw': 2}, options={})

    - or the ``.s()`` shortcut that works for star arguments:

        .. code-block:: pycon

            >>> add.s(1, kw=2)

    - the ``.s()`` shortcut does not allow you to specify execution options
      but there's a chaining `.set` method that returns the signature:

        .. code-block:: pycon

            >>> add.s(2, 2).set(countdown=10).set(expires=30).delay()

    Note:
        You should use :func:`~celery.signature` to create new signatures.
        The ``Signature`` class is the type returned by that function and
        should be used for ``isinstance`` checks for signatures.

    See Also:
        :ref:`guide-canvas` for the complete guide.

    Arguments:
        task (Union[Type[celery.app.task.Task], str]): Either a task
            class/instance, or the name of a task.
        args (Tuple): Positional arguments to apply.
        kwargs (Dict): Keyword arguments to apply.
        options (Dict): Additional options to :meth:`Task.apply_async`.

    Note:
        If the first argument is a :class:`dict`, the other
        arguments will be ignored and the values in the dict will be used
        instead::

            >>> s = signature('tasks.add', args=(2, 2))
            >>> signature(s)
            {'task': 'tasks.add', args=(2, 2), kwargs={}, options={}}
    """

    # Registry mapping ``subtask_type`` names to Signature subclasses;
    # populated by the :meth:`register_type` decorator below and consulted
    # by :meth:`from_dict` when reviving serialized signatures.
    TYPES = {}
    # _app: bound Celery app (lazy, see the ``app`` property);
    # _type: cached task class/instance when one was passed to __init__.
    _app = _type = None

    @classmethod
    def register_type(cls, name=None):
        # Class decorator: register a Signature subclass under ``name``
        # (or the subclass' own __name__) for from_dict() dispatch.
        def _inner(subclass):
            cls.TYPES[name or subclass.__name__] = subclass
            return subclass
        return _inner

    @classmethod
    def from_dict(cls, d, app=None):
        # Revive a signature from its serialized dict form, dispatching
        # to the subclass registered for its ``subtask_type`` (if any).
        typ = d.get('subtask_type')
        if typ:
            target_cls = cls.TYPES[typ]
            if target_cls is not cls:
                return target_cls.from_dict(d, app=app)
        return Signature(d, app=app)

    def __init__(self, task=None, args=None, kwargs=None, options=None,
                 type=None, subtask_type=None, immutable=False,
                 app=None, **ex):
        self._app = app
        if isinstance(task, dict):
            super(Signature, self).__init__(task)  # works like dict(d)
        else:
            # Also supports using task class/instance instead of string name.
            try:
                task_name = task.name
            except AttributeError:
                task_name = task
            else:
                # Remember the actual task object so ``self.type`` is cheap.
                self._type = task
            super(Signature, self).__init__(
                task=task_name, args=tuple(args or ()),
                kwargs=kwargs or {},
                options=dict(options or {}, **ex),
                subtask_type=subtask_type,
                immutable=immutable,
                chord_size=None,
            )

    def __call__(self, *partial_args, **partial_kwargs):
        """Call the task directly (in the current process)."""
        args, kwargs, _ = self._merge(partial_args, partial_kwargs, None)
        return self.type(*args, **kwargs)

    def delay(self, *partial_args, **partial_kwargs):
        """Shortcut to :meth:`apply_async` using star arguments."""
        return self.apply_async(partial_args, partial_kwargs)

    def apply(self, args=None, kwargs=None, **options):
        """Call task locally.
        Same as :meth:`apply_async` but executed the task inline instead
        of sending a task message.
        """
        args = args if args else ()
        kwargs = kwargs if kwargs else {}
        # Extra options set to None are dismissed
        options = {k: v for k, v in options.items() if v is not None}
        # For callbacks: extra args are prepended to the stored args.
        args, kwargs, options = self._merge(args, kwargs, options)
        return self.type.apply(args, kwargs, **options)

    def apply_async(self, args=None, kwargs=None, route_name=None, **options):
        """Apply this task asynchronously.
        Arguments:
            args (Tuple): Partial args to be prepended to the existing args.
            kwargs (Dict): Partial kwargs to be merged with existing kwargs.
            options (Dict): Partial options to be merged
                with existing options.
        Returns:
            ~@AsyncResult: promise of future evaluation.
        See also:
            :meth:`~@Task.apply_async` and the :ref:`guide-calling` guide.
        """
        args = args if args else ()
        kwargs = kwargs if kwargs else {}
        # Extra options set to None are dismissed
        options = {k: v for k, v in options.items() if v is not None}
        try:
            _apply = self._apply_async
        except IndexError:  # pragma: no cover
            # no tasks for chain, etc to find type
            return
        # For callbacks: extra args are prepended to the stored args.
        if args or kwargs or options:
            args, kwargs, options = self._merge(args, kwargs, options)
        else:
            args, kwargs, options = self.args, self.kwargs, self.options
        # pylint: disable=too-many-function-args
        #   Borks on this, as it's a property
        return _apply(args, kwargs, **options)

    def _merge(self, args=None, kwargs=None, options=None, force=False):
        # Merge partial (call-time) args/kwargs/options with the stored
        # ones.  Partial args are *prepended*; kwargs and options are
        # dict-merged with the call-time values winning.  When the
        # signature is immutable, args/kwargs are ignored unless *force*.
        args = args if args else ()
        kwargs = kwargs if kwargs else {}
        options = options if options else {}
        if self.immutable and not force:
            return (self.args, self.kwargs,
                    dict(self.options,
                         **options) if options else self.options)
        return (tuple(args) + tuple(self.args) if args else self.args,
                dict(self.kwargs, **kwargs) if kwargs else self.kwargs,
                dict(self.options, **options) if options else self.options)

    def clone(self, args=None, kwargs=None, **opts):
        """Create a copy of this signature.
        Arguments:
            args (Tuple): Partial args to be prepended to the existing args.
            kwargs (Dict): Partial kwargs to be merged with existing kwargs.
            options (Dict): Partial options to be merged with
                existing options.
        """
        args = args if args else ()
        kwargs = kwargs if kwargs else {}
        # need to deepcopy options so origins links etc. is not modified.
        if args or kwargs or opts:
            args, kwargs, opts = self._merge(args, kwargs, opts)
        else:
            args, kwargs, opts = self.args, self.kwargs, self.options
        signature = Signature.from_dict({'task': self.task,
                                         'args': tuple(args),
                                         'kwargs': kwargs,
                                         'options': deepcopy(opts),
                                         'subtask_type': self.subtask_type,
                                         'chord_size': self.chord_size,
                                         'immutable': self.immutable},
                                        app=self._app)
        signature._type = self._type
        return signature
    # ``partial`` is an alias kept for functools.partial-like spelling.
    partial = clone

    def freeze(self, _id=None, group_id=None, chord=None,
               root_id=None, parent_id=None):
        """Finalize the signature by adding a concrete task id.
        The task won't be called and you shouldn't call the signature
        twice after freezing it as that'll result in two task messages
        using the same task id.
        Returns:
            ~@AsyncResult: promise of future evaluation.
        """
        # pylint: disable=redefined-outer-name
        #   XXX chord is also a class in outer scope.
        opts = self.options
        try:
            # Keep an already-assigned id so freezing twice is idempotent.
            tid = opts['task_id']
        except KeyError:
            tid = opts['task_id'] = _id or uuid()
        if root_id:
            opts['root_id'] = root_id
        if parent_id:
            opts['parent_id'] = parent_id
        if 'reply_to' not in opts:
            opts['reply_to'] = self.app.oid
        if group_id:
            opts['group_id'] = group_id
        if chord:
            opts['chord'] = chord
        # pylint: disable=too-many-function-args
        #   Borks on this, as it's a property.
        return self.AsyncResult(tid)
    # Backwards-compatible alias used internally by canvas primitives.
    _freeze = freeze

    def replace(self, args=None, kwargs=None, options=None):
        """Replace the args, kwargs or options set for this signature.
        These are only replaced if the argument for the section is
        not :const:`None`.
        """
        signature = self.clone()
        if args is not None:
            signature.args = args
        if kwargs is not None:
            signature.kwargs = kwargs
        if options is not None:
            signature.options = options
        return signature

    def set(self, immutable=None, **options):
        """Set arbitrary execution options (same as ``.options.update(…)``).
        Returns:
            Signature: This is a chaining method call
                (i.e., it will return ``self``).
        """
        if immutable is not None:
            self.set_immutable(immutable)
        self.options.update(options)
        return self

    def set_immutable(self, immutable):
        # Mark whether call-time args/kwargs should be ignored (see _merge).
        self.immutable = immutable

    def _with_list_option(self, key):
        # Return options[key] coerced to a mutable list (creating it if
        # missing) so callbacks/errbacks can be appended safely.
        items = self.options.setdefault(key, [])
        if not isinstance(items, MutableSequence):
            items = self.options[key] = [items]
        return items

    def append_to_list_option(self, key, value):
        # Append ``value`` to the list option ``key``, skipping duplicates.
        items = self._with_list_option(key)
        if value not in items:
            items.append(value)
        return value

    def extend_list_option(self, key, value):
        # Extend the list option ``key`` with one value or a list of values.
        items = self._with_list_option(key)
        items.extend(maybe_list(value))

    def link(self, callback):
        """Add callback task to be applied if this task succeeds.
        Returns:
            Signature: the argument passed, for chaining
                or use with :func:`~functools.reduce`.
        """
        return self.append_to_list_option('link', callback)

    def link_error(self, errback):
        """Add callback task to be applied on error in task execution.
        Returns:
            Signature: the argument passed, for chaining
                or use with :func:`~functools.reduce`.
        """
        return self.append_to_list_option('link_error', errback)

    def on_error(self, errback):
        """Version of :meth:`link_error` that supports chaining.
        on_error chains the original signature, not the errback so::
            >>> add.s(2, 2).on_error(errback.s()).delay()
        calls the ``add`` task, not the ``errback`` task, but the
        reverse is true for :meth:`link_error`.
        """
        self.link_error(errback)
        return self

    def flatten_links(self):
        """Return a recursive list of dependencies.
        "unchain" if you will, but with links intact.
        """
        return list(itertools.chain.from_iterable(itertools.chain(
            [[self]],
            (link.flatten_links()
                for link in maybe_list(self.options.get('link')) or [])
        )))

    def __or__(self, other):
        # Implements the ``sig | sig`` canvas composition operator.
        # These could be implemented in each individual class,
        # I'm sure, but for now we have this.
        if isinstance(self, group):
            # group() | task -> chord
            return chord(self, body=other, app=self._app)
        elif isinstance(other, group):
            # unroll group with one member
            other = maybe_unroll_group(other)
            if isinstance(self, _chain):
                # chain | group() -> chain
                return _chain(seq_concat_item(
                    self.unchain_tasks(), other), app=self._app)
            # task | group() -> chain
            return _chain(self, other, app=self.app)
        if not isinstance(self, _chain) and isinstance(other, _chain):
            # task | chain -> chain
            return _chain(seq_concat_seq(
                (self,), other.unchain_tasks()), app=self._app)
        elif isinstance(other, _chain):
            # chain | chain -> chain
            return _chain(seq_concat_seq(
                self.unchain_tasks(), other.unchain_tasks()), app=self._app)
        elif isinstance(self, chord):
            # chord | task ->  attach to body
            sig = self.clone()
            sig.body = sig.body | other
            return sig
        elif isinstance(other, Signature):
            if isinstance(self, _chain):
                if self.tasks and isinstance(self.tasks[-1], group):
                    # CHAIN [last item is group] | TASK -> chord
                    sig = self.clone()
                    sig.tasks[-1] = chord(
                        sig.tasks[-1], other, app=self._app)
                    return sig
                elif self.tasks and isinstance(self.tasks[-1], chord):
                    # CHAIN [last item is chord] -> chain with chord body.
                    sig = self.clone()
                    sig.tasks[-1].body = sig.tasks[-1].body | other
                    return sig
                else:
                    # chain | task -> chain
                    return _chain(seq_concat_item(
                        self.unchain_tasks(), other), app=self._app)
            # task | task -> chain
            return _chain(self, other, app=self._app)
        return NotImplemented

    def election(self):
        # Broadcast an election for this task id to the cluster and
        # return a result handle for the elected execution.
        type = self.type
        app = type.app
        tid = self.options.get('task_id') or uuid()
        with app.producer_or_acquire(None) as producer:
            props = type.backend.on_task_call(producer, tid)
            app.control.election(tid, 'task',
                                 self.clone(task_id=tid, **props),
                                 connection=producer.connection)
            return type.AsyncResult(tid)

    def reprcall(self, *args, **kwargs):
        # Human-readable "task(args, kwargs)" rendering used by __repr__.
        args, kwargs, _ = self._merge(args, kwargs, {}, force=True)
        return reprcall(self['task'], args, kwargs)

    def __deepcopy__(self, memo):
        # NOTE: returns a *shallow* dict copy; deep-copying a signature
        # graph is handled explicitly by clone() where needed.
        memo[id(self)] = self
        return dict(self)

    def __invert__(self):
        # ``~sig`` is shorthand for "apply and wait for the result".
        return self.apply_async().get()

    def __reduce__(self):
        # for serialization, the task type is lazily loaded,
        # and not stored in the dict itself.
        return signature, (dict(self),)

    def __json__(self):
        return dict(self)

    def __repr__(self):
        return self.reprcall()

    if JSON_NEEDS_UNICODE_KEYS:  # pragma: no cover
        def items(self):
            # Python 2 + stdlib json: keys must be text, not bytes.
            for k, v in dict.items(self):
                yield k.decode() if isinstance(k, bytes) else k, v

    @property
    def name(self):
        # for duck typing compatibility with Task.name
        return self.task

    @cached_property
    def type(self):
        # The actual task object: either the one passed at construction
        # or looked up by name in the app's task registry.
        return self._type or self.app.tasks[self['task']]

    @cached_property
    def app(self):
        return self._app or current_app

    @cached_property
    def AsyncResult(self):
        try:
            return self.type.AsyncResult
        except KeyError:  # task not registered
            return self.app.AsyncResult

    @cached_property
    def _apply_async(self):
        try:
            return self.type.apply_async
        except KeyError:
            # Task not registered locally: send by name instead.
            return _partial(self.app.send_task, self['task'])

    # Convenience accessors into the underlying dict/options.
    id = getitem_property('options.task_id', 'Task UUID')
    parent_id = getitem_property('options.parent_id', 'Task parent UUID.')
    root_id = getitem_property('options.root_id', 'Task root UUID.')
    task = getitem_property('task', 'Name of task.')
    args = getitem_property('args', 'Positional arguments to task.')
    kwargs = getitem_property('kwargs', 'Keyword arguments to task.')
    options = getitem_property('options', 'Task execution options.')
    subtask_type = getitem_property('subtask_type', 'Type of signature')
    chord_size = getitem_property(
        'chord_size', 'Size of chord (if applicable)')
    immutable = getitem_property(
        'immutable', 'Flag set if no longer accepts new arguments')
def _prepare_chain_from_options(options, tasks, use_link):
    """Build the options mapping carrying the ``chain`` message field.
    When a group is published, one *shared* options dict is reused for
    every member, so mutating ``options['chain']`` in place would leak
    tasks from one member's chain into the next (issues #3379 and #5354,
    resolved in PR #5681).  To stay safe we never touch the original
    dict: a :class:`ChainMap` layers the per-call ``chain`` value on top
    of the shared options, and extension uses ``+`` to build a *new*
    list rather than ``extend``.
    """
    if use_link:
        # Protocol 1 wires tasks together via explicit links instead of
        # the chain message field.
        return ChainMap({'chain': None}, options)
    if 'chain' not in options:
        return ChainMap({'chain': tasks}, options)
    if tasks is not None:
        # A chain is already present: shadow it with a freshly
        # concatenated list so the shared options stay untouched.
        return ChainMap({'chain': options['chain'] + tasks},
                        options)
@Signature.register_type(name='chain')
@python_2_unicode_compatible
class _chain(Signature):
    """Internal chain implementation (see :class:`chain` for the API)."""

    tasks = getitem_property('kwargs.tasks', 'Tasks in chain.')

    @classmethod
    def from_dict(cls, d, app=None):
        # Revive a chain from its serialized dict form.
        tasks = d['kwargs']['tasks']
        if tasks:
            if isinstance(tasks, tuple):  # serializers may give us a tuple
                tasks = d['kwargs']['tasks'] = list(tasks)
            tasks = [maybe_signature(task, app=app) for task in tasks]
        return _upgrade(d, _chain(tasks, app=app, **d['options']))

    def __init__(self, *tasks, **options):
        tasks = (regen(tasks[0]) if len(tasks) == 1 and is_list(tasks[0])
                 else tasks)
        Signature.__init__(
            self, 'celery.chain', (), {'tasks': tasks}, **options
        )
        self._use_link = options.pop('use_link', None)
        self.subtask_type = 'chain'
        self._frozen = None

    def __call__(self, *args, **kwargs):
        if self.tasks:
            return self.apply_async(args, kwargs)

    def clone(self, *args, **kwargs):
        # Deep-clone: the contained task signatures are cloned too, so
        # mutating the copy cannot affect the original chain.
        to_signature = maybe_signature
        signature = Signature.clone(self, *args, **kwargs)
        signature.kwargs['tasks'] = [
            to_signature(sig, app=self._app, clone=True)
            for sig in signature.kwargs['tasks']
        ]
        return signature

    def unchain_tasks(self):
        # Clone chain's tasks assigning signatures from link_error
        # to each task
        tasks = [t.clone() for t in self.tasks]
        for sig in self.options.get('link_error', []):
            for task in tasks:
                task.link_error(sig)
        return tasks

    def apply_async(self, args=None, kwargs=None, **options):
        # python is best at unpacking kwargs, so .run is here to do that.
        args = args if args else ()
        # BUGFIX: the default used to be ``[]``; keyword arguments are a
        # mapping, so an empty dict is the correct neutral value.
        kwargs = kwargs if kwargs else {}
        app = self.app
        if app.conf.task_always_eager:
            with allow_join_result():
                return self.apply(args, kwargs, **options)
        return self.run(args, kwargs, app=app, **(
            dict(self.options, **options) if options else self.options))

    def run(self, args=None, kwargs=None, group_id=None, chord=None,
            task_id=None, link=None, link_error=None, publisher=None,
            producer=None, root_id=None, parent_id=None, app=None, **options):
        """Prepare the chain's tasks and publish the first one."""
        # pylint: disable=redefined-outer-name
        #   XXX chord is also a class in outer scope.
        args = args if args else ()
        # BUGFIX: as in apply_async, the empty default must be a dict.
        kwargs = kwargs if kwargs else {}
        app = app or self.app
        use_link = self._use_link
        if use_link is None and app.conf.task_protocol == 1:
            use_link = True
        args = (tuple(args) + tuple(self.args)
                if args and not self.immutable else self.args)
        tasks, results = self.prepare_steps(
            args, kwargs, self.tasks, root_id, parent_id, link_error, app,
            task_id, group_id, chord,
        )
        if results:
            if link:
                tasks[0].extend_list_option('link', link)
            # tasks is in reverse order (see prepare_steps), so the last
            # element is actually the first task to execute.
            first_task = tasks.pop()
            options = _prepare_chain_from_options(options, tasks, use_link)
            first_task.apply_async(**options)
            return results[0]

    def freeze(self, _id=None, group_id=None, chord=None,
               root_id=None, parent_id=None):
        # pylint: disable=redefined-outer-name
        #   XXX chord is also a class in outer scope.
        _, results = self._frozen = self.prepare_steps(
            self.args, self.kwargs, self.tasks, root_id, parent_id, None,
            self.app, _id, group_id, chord, clone=False,
        )
        return results[0]

    def prepare_steps(self, args, kwargs, tasks,
                      root_id=None, parent_id=None, link_error=None, app=None,
                      last_task_id=None, group_id=None, chord_body=None,
                      clone=True, from_dict=Signature.from_dict):
        """Link the chain's tasks together and freeze them.
        Returns a ``(tasks, results)`` pair where both lists are in
        *reverse* execution order (the worker pops tasks off the end).
        """
        app = app or self.app
        # use chain message field for protocol 2 and later.
        # this avoids pickle blowing the stack on the recursion
        # required by linking task together in a tree structure.
        # (why is pickle using recursion? or better yet why cannot python
        #  do tail call optimization making recursion actually useful?)
        use_link = self._use_link
        if use_link is None and app.conf.task_protocol == 1:
            use_link = True
        steps = deque(tasks)
        steps_pop = steps.pop
        steps_extend = steps.extend
        prev_task = None
        prev_res = None
        tasks, results = [], []
        i = 0
        # NOTE: We are doing this in reverse order.
        # The result is a list of tasks in reverse order, that is
        # passed as the ``chain`` message field.
        # As it's reversed the worker can just do ``chain.pop()`` to
        # get the next task in the chain.
        while steps:
            task = steps_pop()
            is_first_task, is_last_task = not steps, not i
            if not isinstance(task, abstract.CallableSignature):
                task = from_dict(task, app=app)
            if isinstance(task, group):
                task = maybe_unroll_group(task)
            # first task gets partial args from chain
            if clone:
                if is_first_task:
                    task = task.clone(args, kwargs)
                else:
                    task = task.clone()
            elif is_first_task:
                task.args = tuple(args) + tuple(task.args)
            if isinstance(task, _chain):
                # splice the chain
                steps_extend(task.tasks)
                continue
            if isinstance(task, group) and prev_task:
                # automatically upgrade group(...) | s to chord(group, s)
                # for chords we freeze by pretending it's a normal
                # signature instead of a group.
                tasks.pop()
                results.pop()
                try:
                    task = chord(
                        task, body=prev_task,
                        task_id=prev_res.task_id, root_id=root_id, app=app,
                    )
                except AttributeError:
                    # A GroupResult does not have a task_id since it consists
                    # of multiple tasks.
                    # We therefore, have to construct the chord without it.
                    # Issues #5467, #3585.
                    task = chord(
                        task, body=prev_task,
                        root_id=root_id, app=app,
                    )
            if is_last_task:
                # chain(task_id=id) means task id is set for the last task
                # in the chain.  If the chord is part of a chord/group
                # then that chord/group must synchronize based on the
                # last task in the chain, so we only set the group_id and
                # chord callback for the last task.
                res = task.freeze(
                    last_task_id,
                    root_id=root_id, group_id=group_id, chord=chord_body,
                )
            else:
                res = task.freeze(root_id=root_id)
            i += 1
            if prev_task:
                if use_link:
                    # link previous task to this task.
                    task.link(prev_task)
                if prev_res and not prev_res.parent:
                    prev_res.parent = res
            if link_error:
                for errback in maybe_list(link_error):
                    task.link_error(errback)
            tasks.append(task)
            results.append(res)
            prev_task, prev_res = task, res
            if isinstance(task, chord):
                app.backend.ensure_chords_allowed()
                # If the task is a chord, and the body is a chain
                # the chain has already been prepared, and res is
                # set to the last task in the callback chain.
                # We need to change that, so that it points to the
                # group result object.
                node = res
                while node.parent:
                    node = node.parent
                prev_res = node
        return tasks, results

    def apply(self, args=None, kwargs=None, **options):
        """Execute the chain eagerly, in the current process."""
        args = args if args else ()
        kwargs = kwargs if kwargs else {}
        last, (fargs, fkwargs) = None, (args, kwargs)
        for task in self.tasks:
            # Each task receives the previous task's result as argument.
            res = task.clone(fargs, fkwargs).apply(
                last and (last.get(),), **dict(self.options, **options))
            res.parent, last, (fargs, fkwargs) = last, res, (None, None)
        return last

    @property
    def app(self):
        # Fall back to the first task's app, then the current app.
        app = self._app
        if app is None:
            try:
                app = self.tasks[0]._app
            except LookupError:
                pass
        return app or current_app

    def __repr__(self):
        if not self.tasks:
            return '<{0}@{1:#x}: empty>'.format(
                type(self).__name__, id(self))
        return remove_repeating_from_task(
            self.tasks[0]['task'],
            ' | '.join(repr(t) for t in self.tasks))
class chain(_chain):
    """Chain tasks together.
    Each tasks follows one another,
    by being applied as a callback of the previous task.
    Note:
        If called with only one argument, then that argument must
        be an iterable of tasks to chain: this allows us
        to use generator expressions.
    Example:
        This is effectively :math:`((2 + 2) + 4)`:
        .. code-block:: pycon
            >>> res = chain(add.s(2, 2), add.s(4))()
            >>> res.get()
            8
        Calling a chain will return the result of the last task in the chain.
        You can get to the other tasks by following the ``result.parent``'s:
        .. code-block:: pycon
            >>> res.parent.get()
            4
        Using a generator expression:
        .. code-block:: pycon
            >>> lazy_chain = chain(add.s(i) for i in range(10))
            >>> res = lazy_chain(3)
    Arguments:
        *tasks (Signature): List of task signatures to chain.
            If only one argument is passed and that argument is
            an iterable, then that'll be used as the list of signatures
            to chain instead.  This means that you can use a generator
            expression.
    Returns:
        ~celery.chain: A lazy signature that can be called to apply the first
            task in the chain.  When that task succeeds the next task in the
            chain is applied, and so on.
    """

    # could be function, but must be able to reference as :class:`chain`.
    def __new__(cls, *tasks, **kwargs):
        # ``chain(X, Y, Z)`` must behave exactly like ``X | Y | Z``, so
        # plain positional signatures (or a single list of them) are
        # folded together with the | operator instead of instantiating.
        if tasks and not kwargs:
            single = len(tasks) == 1
            if not single or is_list(tasks[0]):
                seq = tasks[0] if single else tasks
                return reduce(operator.or_, seq)
        return super(chain, cls).__new__(cls, *tasks, **kwargs)
class _basemap(Signature):
    """Shared base for the map/starmap signature wrappers."""

    # Name of the backing celery task; filled in by each subclass.
    _task_name = None
    _unpack_args = itemgetter('task', 'it')

    @classmethod
    def from_dict(cls, d, app=None):
        # Revive from serialized form, preserving shared fields.
        task, it = cls._unpack_args(d['kwargs'])
        return _upgrade(d, cls(task, it, app=app, **d['options']))

    def __init__(self, task, it, **options):
        Signature.__init__(
            self, self._task_name, (),
            {'task': task, 'it': regen(it)}, immutable=True, **options
        )

    def apply_async(self, args=None, kwargs=None, **opts):
        args = args if args else ()
        kwargs = kwargs if kwargs else {}
        task, it = self._unpack_args(self.kwargs)
        # Generators must be materialized before serialization.
        return self.type.apply_async(
            (), {'task': task, 'it': list(it)},
            route_name=task_name_from(self.kwargs.get('task')), **opts
        )
@Signature.register_type()
@python_2_unicode_compatible
class xmap(_basemap):
    """Map operation for tasks.
    Note:
        Tasks executed sequentially in process, this is not a
        parallel operation like :class:`group`.
    """

    _task_name = 'celery.map'

    def __repr__(self):
        fun, iterable = self._unpack_args(self.kwargs)
        preview = truncate(repr(iterable), 100)
        return '[{0}(x) for x in {1}]'.format(fun.task, preview)
@Signature.register_type()
@python_2_unicode_compatible
class xstarmap(_basemap):
    """Map operation for tasks, using star arguments."""

    _task_name = 'celery.starmap'

    def __repr__(self):
        fun, iterable = self._unpack_args(self.kwargs)
        preview = truncate(repr(iterable), 100)
        return '[{0}(*x) for x in {1}]'.format(fun.task, preview)
@Signature.register_type()
class chunks(Signature):
    """Partition of tasks into chunks of size n."""

    _unpack_args = itemgetter('task', 'it', 'n')

    @classmethod
    def from_dict(cls, d, app=None):
        # Revive from serialized form, preserving shared fields.
        sig, iterable, size = cls._unpack_args(d['kwargs'])
        return _upgrade(d, chunks(sig, iterable, size,
                                  app=app, **d['options']))

    def __init__(self, task, it, n, **options):
        Signature.__init__(
            self, 'celery.chunks', (),
            {'task': task, 'it': regen(it), 'n': n},
            immutable=True, **options
        )

    def __call__(self, **options):
        return self.apply_async(**options)

    def apply_async(self, args=None, kwargs=None, **opts):
        args = args if args else ()
        kwargs = kwargs if kwargs else {}
        # Delegate to a group of starmap signatures, one per chunk.
        return self.group().apply_async(
            args, kwargs,
            route_name=task_name_from(self.kwargs.get('task')), **opts
        )

    def group(self):
        # Generators are evaluated here: each chunk gets its own slice.
        sig, iterable, size = self._unpack_args(self.kwargs)
        parts = (xstarmap(sig, part, app=self._app)
                 for part in _chunks(iter(iterable), size))
        return group(parts, app=self._app)

    @classmethod
    def apply_chunks(cls, task, it, n, app=None):
        return cls(task, it, n, app=app)()
def _maybe_group(tasks, app):
    """Coerce *tasks* into a flat list of signatures.

    Accepts a dict-serialized signature, a group/chain (whose inner task
    list is reused), a single signature, or any iterable of signatures.
    """
    if isinstance(tasks, dict):
        tasks = signature(tasks, app=app)
    if isinstance(tasks, (group, _chain)):
        return tasks.tasks
    if isinstance(tasks, abstract.CallableSignature):
        return [tasks]
    return [signature(t, app=app) for t in tasks]
@Signature.register_type()
@python_2_unicode_compatible
class group(Signature):
    """Creates a group of tasks to be executed in parallel.

    A group is lazy so you must call it to take action and evaluate
    the group.

    Note:
        If only one argument is passed, and that argument is an iterable
        then that'll be used as the list of tasks instead: this
        allows us to use ``group`` with generator expressions.

    Example:
        >>> lazy_group = group([add.s(2, 2), add.s(4, 4)])
        >>> promise = lazy_group()  # <-- evaluate: returns lazy result.
        >>> promise.get()  # <-- will wait for the task to return
        [4, 8]

    Arguments:
        *tasks (List[Signature]): A list of signatures that this group will
            call. If there's only one argument, and that argument is an
            iterable, then that'll define the list of signatures instead.
        **options (Any): Execution options applied to all tasks
            in the group.

    Returns:
        ~celery.group: signature that when called will then call all of the
            tasks in the group (and return a :class:`GroupResult` instance
            that can be used to inspect the state of the group).
    """

    # Proxy attribute: group members live in ``self.kwargs['tasks']``.
    tasks = getitem_property('kwargs.tasks', 'Tasks in group.')
    @classmethod
    def from_dict(cls, d, app=None):
        # Rebuild a group signature from its dict-serialized form.
        return _upgrade(
            d, group(d['kwargs']['tasks'], app=app, **d['options']),
        )
    def __init__(self, *tasks, **options):
        # A single iterable argument defines the member list directly
        # (supports generator expressions); wrap in ``regen`` so the
        # sequence can be re-consumed.
        if len(tasks) == 1:
            tasks = tasks[0]
            if isinstance(tasks, group):
                tasks = tasks.tasks
            if isinstance(tasks, abstract.CallableSignature):
                tasks = [tasks.clone()]
            if not isinstance(tasks, _regen):
                tasks = regen(tasks)
        Signature.__init__(
            self, 'celery.group', (), {'tasks': tasks}, **options
        )
        self.subtask_type = 'group'
    def __call__(self, *partial_args, **options):
        # Calling a group sends all member tasks.
        return self.apply_async(partial_args, **options)
    def skew(self, start=1.0, stop=None, step=1.0):
        # Stagger member execution by assigning increasing countdowns.
        it = fxrange(start, stop, step, repeatlast=True)
        for task in self.tasks:
            task.set(countdown=next(it))
        return self
    def apply_async(self, args=None, kwargs=None, add_to_parent=True,
                    producer=None, link=None, link_error=None, **options):
        """Send all member tasks and return a GroupResult.

        ``link``/``link_error`` are rejected: callbacks on a group must be
        expressed as a chord (or attached to individual tasks).
        """
        args = args if args else ()
        if link is not None:
            raise TypeError('Cannot add link to group: use a chord')
        if link_error is not None:
            raise TypeError(
                'Cannot add link to group: do that on individual tasks')
        app = self.app
        if app.conf.task_always_eager:
            return self.apply(args, kwargs, **options)
        if not self.tasks:
            return self.freeze()
        options, group_id, root_id = self._freeze_gid(options)
        tasks = self._prepared(self.tasks, [], group_id, root_id, app)
        # Barrier is fulfilled as each member result completes.
        p = barrier()
        results = list(self._apply_tasks(tasks, producer, app, p,
                                         args=args, kwargs=kwargs, **options))
        result = self.app.GroupResult(group_id, results, ready_barrier=p)
        p.finalize()
        # - Special case of group(A.s() | group(B.s(), C.s()))
        # That is, group with single item that's a chain but the
        # last task in that chain is a group.
        #
        # We cannot actually support arbitrary GroupResults in chains,
        # but this special case we can.
        if len(result) == 1 and isinstance(result[0], GroupResult):
            result = result[0]
        parent_task = app.current_worker_task
        if add_to_parent and parent_task:
            parent_task.add_trail(result)
        return result
    def apply(self, args=None, kwargs=None, **options):
        # Eager execution: run every member locally and collect the
        # EagerResults into a GroupResult.
        args = args if args else ()
        kwargs = kwargs if kwargs else {}
        app = self.app
        if not self.tasks:
            return self.freeze()  # empty group returns GroupResult
        options, group_id, root_id = self._freeze_gid(options)
        tasks = self._prepared(self.tasks, [], group_id, root_id, app)
        return app.GroupResult(group_id, [
            sig.apply(args=args, kwargs=kwargs, **options) for sig, _ in tasks
        ])
    def set_immutable(self, immutable):
        # Propagate immutability to every member.
        for task in self.tasks:
            task.set_immutable(immutable)
    def link(self, sig):
        # Simply link to first task
        sig = sig.clone().set(immutable=True)
        return self.tasks[0].link(sig)
    def link_error(self, sig):
        try:
            sig = sig.clone().set(immutable=True)
        except AttributeError:
            # See issue #5265. I don't use isinstance because current tests
            # pass a Mock object as argument.
            sig['immutable'] = True
            sig = Signature.from_dict(sig)
        return self.tasks[0].link_error(sig)
    def _prepared(self, tasks, partial_args, group_id, root_id, app,
                  CallableSignature=abstract.CallableSignature,
                  from_dict=Signature.from_dict,
                  isinstance=isinstance, tuple=tuple):
        # Yield (signature, frozen_result) pairs, recursively unrolling
        # nested groups.  Keyword defaults bind the lookups as locals for
        # speed in this hot path.
        for task in tasks:
            if isinstance(task, CallableSignature):
                # local sigs are always of type Signature, and we
                # clone them to make sure we don't modify the originals.
                task = task.clone()
            else:
                # serialized sigs must be converted to Signature.
                task = from_dict(task, app=app)
            if isinstance(task, group):
                # needs yield_from :(
                unroll = task._prepared(
                    task.tasks, partial_args, group_id, root_id, app,
                )
                for taskN, resN in unroll:
                    yield taskN, resN
            else:
                if partial_args and not task.immutable:
                    task.args = tuple(partial_args) + tuple(task.args)
                yield task, task.freeze(group_id=group_id, root_id=root_id)
    def _apply_tasks(self, tasks, producer=None, app=None, p=None,
                     add_to_parent=None, chord=None,
                     args=None, kwargs=None, **options):
        # Send each prepared member task, wiring its result into the
        # ready-barrier ``p``; yields the member results lazily.
        # pylint: disable=redefined-outer-name
        # XXX chord is also a class in outer scope.
        app = app or self.app
        with app.producer_or_acquire(producer) as producer:
            for sig, res in tasks:
                sig.apply_async(producer=producer, add_to_parent=False,
                                chord=sig.options.get('chord') or chord,
                                args=args, kwargs=kwargs,
                                **options)
                # adding callback to result, such that it will gradually
                # fulfill the barrier.
                #
                # Using barrier.add would use result.then, but we need
                # to add the weak argument here to only create a weak
                # reference to the object.
                if p and not p.cancelled and not p.ready:
                    p.size += 1
                    res.then(p, weak=True)
                yield res  # <-- r.parent, etc set in the frozen result.
    def _freeze_gid(self, options):
        # remove task_id and use that as the group_id,
        # if we don't remove it then every task will have the same id...
        options = dict(self.options, **options)
        options['group_id'] = group_id = (
            options.pop('task_id', uuid()))
        return options, group_id, options.get('root_id')
    def freeze(self, _id=None, group_id=None, chord=None,
               root_id=None, parent_id=None):
        """Finalize member ids and return the GroupResult in advance."""
        # pylint: disable=redefined-outer-name
        # XXX chord is also a class in outer scope.
        opts = self.options
        try:
            gid = opts['task_id']
        except KeyError:
            gid = opts['task_id'] = group_id or uuid()
        if group_id:
            opts['group_id'] = group_id
        if chord:
            opts['chord'] = chord
        root_id = opts.setdefault('root_id', root_id)
        parent_id = opts.setdefault('parent_id', parent_id)
        new_tasks = []
        # Need to unroll subgroups early so that chord gets the
        # right result instance for chord_unlock etc.
        results = list(self._freeze_unroll(
            new_tasks, group_id, chord, root_id, parent_id,
        ))
        if isinstance(self.tasks, MutableSequence):
            self.tasks[:] = new_tasks
        else:
            self.tasks = new_tasks
        return self.app.GroupResult(gid, results)
    _freeze = freeze
    def _freeze_unroll(self, new_tasks, group_id, chord, root_id, parent_id):
        # Depth-first flattening of nested groups while freezing each
        # leaf task; the flattened members are collected in ``new_tasks``.
        # pylint: disable=redefined-outer-name
        # XXX chord is also a class in outer scope.
        stack = deque(self.tasks)
        while stack:
            task = maybe_signature(stack.popleft(), app=self._app).clone()
            if isinstance(task, group):
                stack.extendleft(task.tasks)
            else:
                new_tasks.append(task)
                yield task.freeze(group_id=group_id,
                                  chord=chord, root_id=root_id,
                                  parent_id=parent_id)
    def __repr__(self):
        if self.tasks:
            return remove_repeating_from_task(
                self.tasks[0]['task'],
                'group({0.tasks!r})'.format(self))
        return 'group(<empty>)'
    def __len__(self):
        return len(self.tasks)
    @property
    def app(self):
        # Fall back to the first member's app, then the current app.
        app = self._app
        if app is None:
            try:
                app = self.tasks[0].app
            except LookupError:
                pass
        return app if app is not None else current_app
@Signature.register_type()
@python_2_unicode_compatible
class chord(Signature):
    r"""Barrier synchronization primitive.

    A chord consists of a header and a body.

    The header is a group of tasks that must complete before the callback is
    called.  A chord is essentially a callback for a group of tasks.

    The body is applied with the return values of all the header
    tasks as a list.

    Example:
        The chord:

        .. code-block:: pycon

            >>> res = chord([add.s(2, 2), add.s(4, 4)])(sum_task.s())

        is effectively :math:`\Sigma ((2 + 2) + (4 + 4))`:

        .. code-block:: pycon

            >>> res.get()
            12
    """

    @classmethod
    def from_dict(cls, d, app=None):
        # Rebuild a chord signature from its dict-serialized form.
        options = d.copy()
        args, options['kwargs'] = cls._unpack_args(**options['kwargs'])
        return _upgrade(d, cls(*args, app=app, **options))
    @staticmethod
    def _unpack_args(header=None, body=None, **kwargs):
        # Python signatures are better at extracting keys from dicts
        # than manually popping things off.
        return (header, body), kwargs
    def __init__(self, header, body=None, task='celery.chord',
                 args=None, kwargs=None, app=None, **options):
        args = args if args else ()
        kwargs = kwargs if kwargs else {}
        # Header is normalized to a list of signatures; the body may be
        # None (attached later when the chord is called).
        Signature.__init__(
            self, task, args,
            {'kwargs': kwargs, 'header': _maybe_group(header, app),
             'body': maybe_signature(body, app=app)}, app=app, **options
        )
        self.subtask_type = 'chord'
    def __call__(self, body=None, **options):
        # Calling a chord optionally attaches the body and sends it.
        return self.apply_async((), {'body': body} if body else {}, **options)
    def freeze(self, _id=None, group_id=None, chord=None,
               root_id=None, parent_id=None):
        """Freeze header and body, linking the body result to the header."""
        # pylint: disable=redefined-outer-name
        # XXX chord is also a class in outer scope.
        if not isinstance(self.tasks, group):
            self.tasks = group(self.tasks, app=self.app)
        header_result = self.tasks.freeze(
            parent_id=parent_id, root_id=root_id, chord=self.body)
        body_result = self.body.freeze(
            _id, root_id=root_id, chord=chord, group_id=group_id)
        # we need to link the body result back to the group result,
        # but the body may actually be a chain,
        # so find the first result without a parent
        node = body_result
        seen = set()
        while node:
            if node.id in seen:
                raise RuntimeError('Recursive result parents')
            seen.add(node.id)
            if node.parent is None:
                node.parent = header_result
                break
            node = node.parent
        self.id = self.tasks.id
        return body_result
    def apply_async(self, args=None, kwargs=None, task_id=None,
                    producer=None, publisher=None, connection=None,
                    router=None, result_cls=None, **options):
        """Send the chord: header group plus body callback."""
        args = args if args else ()
        kwargs = kwargs if kwargs else {}
        args = (tuple(args) + tuple(self.args)
                if args and not self.immutable else self.args)
        body = kwargs.pop('body', None) or self.kwargs['body']
        kwargs = dict(self.kwargs['kwargs'], **kwargs)
        body = body.clone(**options)
        app = self._get_app(body)
        tasks = (self.tasks.clone() if isinstance(self.tasks, group)
                 else group(self.tasks, app=app))
        if app.conf.task_always_eager:
            with allow_join_result():
                return self.apply(args, kwargs,
                                  body=body, task_id=task_id, **options)
        # chord([A, B, ...], C)
        return self.run(tasks, body, args, task_id=task_id, **options)
    def apply(self, args=None, kwargs=None,
              propagate=True, body=None, **options):
        # Eager execution: run the whole header locally, then apply the
        # body to the collected results.
        args = args if args else ()
        kwargs = kwargs if kwargs else {}
        body = self.body if body is None else body
        tasks = (self.tasks.clone() if isinstance(self.tasks, group)
                 else group(self.tasks, app=self.app))
        return body.apply(
            args=(tasks.apply(args, kwargs).get(propagate=propagate),),
        )
    def _traverse_tasks(self, tasks, value=None):
        # Walk the header, descending into nested groups (and groups at
        # the tail of chains); yields each leaf task, or ``value`` if set.
        stack = deque(tasks)
        while stack:
            task = stack.popleft()
            if isinstance(task, group):
                stack.extend(task.tasks)
            elif isinstance(task, _chain) and isinstance(task.tasks[-1], group):
                stack.extend(task.tasks[-1].tasks)
            else:
                yield task if value is None else value
    def __length_hint__(self):
        # Number of leaf tasks in the header (the chord size).
        tasks = (self.tasks.tasks if isinstance(self.tasks, group)
                 else self.tasks)
        return sum(self._traverse_tasks(tasks, 1))
    def run(self, header, body, partial_args, app=None, interval=None,
            countdown=1, max_retries=None, eager=False,
            task_id=None, **options):
        """Register the chord with the result backend and send the header."""
        app = app or self._get_app(body)
        group_id = header.options.get('task_id') or uuid()
        root_id = body.options.get('root_id')
        body.chord_size = self.__length_hint__()
        options = dict(self.options, **options) if options else self.options
        if options:
            options.pop('task_id', None)
            body.options.update(options)
        bodyres = body.freeze(task_id, root_id=root_id)
        # Chains should not be passed to the header tasks. See #3771
        options.pop('chain', None)
        # Neither should chords, for deeply nested chords to work
        options.pop('chord', None)
        options.pop('task_id', None)
        header_result = header.freeze(group_id=group_id, chord=body, root_id=root_id)
        if len(header_result) > 0:
            app.backend.apply_chord(
                header_result,
                body,
                interval=interval,
                countdown=countdown,
                max_retries=max_retries,
            )
            header_result = header(*partial_args, task_id=group_id, **options)
        # The execution of a chord body is normally triggered by its header's
        # tasks completing. If the header is empty this will never happen, so
        # we execute the body manually here.
        else:
            body.delay([])
        bodyres.parent = header_result
        return bodyres
    def clone(self, *args, **kwargs):
        signature = Signature.clone(self, *args, **kwargs)
        # need to make copy of body
        try:
            signature.kwargs['body'] = maybe_signature(
                signature.kwargs['body'], clone=True)
        except (AttributeError, KeyError):
            pass
        return signature
    def link(self, callback):
        # Callbacks attach to the body, not the header.
        self.body.link(callback)
        return callback
    def link_error(self, errback):
        # Error callbacks attach to the body, not the header.
        self.body.link_error(errback)
        return errback
    def set_immutable(self, immutable):
        # changes mutability of header only, not callback.
        for task in self.tasks:
            task.set_immutable(immutable)
    def __repr__(self):
        if self.body:
            if isinstance(self.body, _chain):
                return remove_repeating_from_task(
                    self.body.tasks[0]['task'],
                    '%({0} | {1!r})'.format(
                        self.body.tasks[0].reprcall(self.tasks),
                        chain(self.body.tasks[1:], app=self._app),
                    ),
                )
            return '%' + remove_repeating_from_task(
                self.body['task'], self.body.reprcall(self.tasks))
        return '<chord without body: {0.tasks!r}>'.format(self)
    @cached_property
    def app(self):
        return self._get_app(self.body)
    def _get_app(self, body=None):
        # Resolve the app from self, the header tasks, or the body,
        # falling back to the current app.
        app = self._app
        if app is None:
            try:
                tasks = self.tasks.tasks  # is a group
            except AttributeError:
                tasks = self.tasks
            if len(tasks):
                app = tasks[0]._app
            if app is None and body is not None:
                app = body._app
        return app if app is not None else current_app
    # Proxy attributes: header/body live in ``self.kwargs``.
    tasks = getitem_property('kwargs.header', 'Tasks in chord header.')
    body = getitem_property('kwargs.body', 'Body task of chord.')
def signature(varies, *args, **kwargs):
    """Create new signature.

    - if the first argument is a signature already then it's cloned.
    - if the first argument is a dict, then a Signature version is returned.

    Returns:
        Signature: The resulting signature.
    """
    app = kwargs.get('app')
    if not isinstance(varies, dict):
        return Signature(varies, *args, **kwargs)
    if isinstance(varies, abstract.CallableSignature):
        return varies.clone()
    return Signature.from_dict(varies, app=app)


subtask = signature  # noqa: E305 XXX compat
def maybe_signature(d, app=None, clone=False):
    """Ensure obj is a signature, or None.

    Arguments:
        d (Optional[Union[abstract.CallableSignature, Mapping]]):
            Signature or dict-serialized signature.
        app (celery.Celery):
            App to bind signature to.
        clone (bool):
            If *d* is already a signature, it will be cloned when this
            flag is enabled.

    Returns:
        Optional[abstract.CallableSignature]
    """
    if d is None:
        return None
    if isinstance(d, abstract.CallableSignature):
        if clone:
            d = d.clone()
    elif isinstance(d, dict):
        d = signature(d)
    if app is not None:
        d._app = app
    return d


maybe_subtask = maybe_signature  # noqa: E305 XXX compat
|
mdworks2016/work_development
|
Python/20_Third_Certification/venv/lib/python3.7/site-packages/celery/canvas.py
|
Python
|
apache-2.0
| 55,734 | 0.000036 |
from Screen import Screen
from Components.ServiceEventTracker import ServiceEventTracker
from Components.ActionMap import ActionMap
from Components.ConfigList import ConfigListScreen
from Components.ChoiceList import ChoiceList, ChoiceEntryComponent
from Components.config import config, ConfigSubsection, getConfigListEntry, ConfigNothing, ConfigSelection, ConfigOnOff
from Components.MultiContent import MultiContentEntryText
from Components.Sources.List import List
from Components.Sources.Boolean import Boolean
from Components.SystemInfo import SystemInfo
from enigma import iPlayableService
from Tools.ISO639 import LanguageCodes
from Tools.BoundFunction import boundFunction
# Which pane currently holds keyboard focus: the config list or the
# stream list.
FOCUS_CONFIG, FOCUS_STREAMS = range(2)
# Identifiers for the two pages of the dialog.
[PAGE_AUDIO, PAGE_SUBTITLES] = ["audio", "subtitles"]
class AudioSelection(Screen, ConfigListScreen):
    """Dialog for selecting the audio track or subtitle stream.

    Two-pane screen: a config list at the top (downmix, channel mode,
    page selector, plugins) and a stream list below.  Focus moves
    between the panes with up/down; color keys activate config entries.
    """
    def __init__(self, session, infobar=None, page=PAGE_AUDIO):
        Screen.__init__(self, session)
        self["streams"] = List([])
        self["key_red"] = Boolean(False)
        self["key_green"] = Boolean(False)
        self["key_yellow"] = Boolean(True)
        self["key_blue"] = Boolean(False)
        ConfigListScreen.__init__(self, [])
        self.infobar = infobar or self.session.infobar
        # Refill the lists whenever the running service updates its info.
        self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
            {
                iPlayableService.evUpdatedInfo: self.__updatedInfo
            })
        self.cached_subtitle_checked = False
        self.__selected_subtitle = None
        self["actions"] = ActionMap(["ColorActions", "SetupActions", "DirectionActions"],
        {
            "red": self.keyRed,
            "green": self.keyGreen,
            "yellow": self.keyYellow,
            "blue": self.keyBlue,
            "ok": self.keyOk,
            "cancel": self.cancel,
            "up": self.keyUp,
            "down": self.keyDown,
        }, -3)
        self.settings = ConfigSubsection()
        choicelist = [(PAGE_AUDIO,_("audio tracks")), (PAGE_SUBTITLES,_("Subtitles"))]
        self.settings.menupage = ConfigSelection(choices = choicelist, default=page)
        self.onLayoutFinish.append(self.__layoutFinished)
    def __layoutFinished(self):
        # Start with focus on the stream list; the notifier triggers the
        # initial fillList() call.
        self["config"].instance.setSelectionEnable(False)
        self.focus = FOCUS_STREAMS
        self.settings.menupage.addNotifier(self.fillList)
    def fillList(self, arg=None):
        """Rebuild the config entries and the stream list for the
        currently selected page (audio tracks or subtitles)."""
        streams = []
        conflist = []
        selectedidx = 0
        service = self.session.nav.getCurrentService()
        self.audioTracks = audio = service and service.audioTracks()
        n = audio and audio.getNumberOfTracks() or 0
        if self.settings.menupage.getValue() == PAGE_AUDIO:
            self.setTitle(_("Select audio track"))
            if SystemInfo["CanDownmixAC3"]:
                self.settings.downmix = ConfigOnOff(default=config.av.downmix_ac3.value)
                self.settings.downmix.addNotifier(self.changeAC3Downmix, initial_call = False)
                conflist.append(getConfigListEntry(_("AC3 downmix"), self.settings.downmix))
                self["key_red"].setBoolean(True)
            if n > 0:
                self.audioChannel = service.audioChannel()
                choicelist = [("0",_("left")), ("1",_("stereo")), ("2", _("right"))]
                self.settings.channelmode = ConfigSelection(choices = choicelist, default = str(self.audioChannel.getCurrentChannel()))
                self.settings.channelmode.addNotifier(self.changeMode, initial_call = False)
                conflist.append(getConfigListEntry(_("Channel"), self.settings.channelmode))
                self["key_green"].setBoolean(True)
                selectedAudio = self.audioTracks.getCurrentTrack()
                for x in range(n):
                    number = str(x)
                    i = audio.getTrackInfo(x)
                    # Track may carry several languages separated by '/'.
                    languages = i.getLanguage().split('/')
                    description = i.getDescription() or _("<unknown>")
                    selected = ""
                    language = ""
                    if selectedAudio == x:
                        selected = _("Running")
                        selectedidx = x
                    cnt = 0
                    for lang in languages:
                        if cnt:
                            language += ' / '
                        if LanguageCodes.has_key(lang):
                            language += LanguageCodes[lang][0]
                        elif lang == "und":
                            # NOTE(review): this translation result is
                            # discarded -- presumably it should be
                            # ``language += _("<unknown>")``; verify.
                            _("<unknown>")
                        else:
                            language += lang
                        cnt += 1
                    streams.append((x, "", number, description, language, selected))
            else:
                streams = []
                conflist.append(('',))
                self["key_green"].setBoolean(False)
        elif self.settings.menupage.getValue() == PAGE_SUBTITLES:
            self.setTitle(_("Subtitle selection"))
            conflist.append(('',))
            conflist.append(('',))
            self["key_red"].setBoolean(False)
            self["key_green"].setBoolean(False)
            if self.subtitlesEnabled():
                sel = self.infobar.selected_subtitle
            else:
                sel = None
            idx = 0
            subtitlelist = self.getSubtitleList()
            if len(subtitlelist):
                for x in subtitlelist:
                    number = str(x[1])
                    description = "?"
                    language = _("<unknown>")
                    selected = ""
                    if sel and x[:4] == sel[:4]:
                        selected = _("Running")
                        selectedidx = idx
                    if x[4] != "und":
                        if LanguageCodes.has_key(x[4]):
                            language = LanguageCodes[x[4]][0]
                        else:
                            language = x[4]
                    # x[0] encodes the subtitle source type.
                    if x[0] == 0:
                        description = "DVB"
                        number = "%x" % (x[1])
                    elif x[0] == 1:
                        description = "TTX"
                        number = "%x%02x" % (x[3],x[2])
                    elif x[0] == 2:
                        types = ("UTF-8 text","SSA / AAS",".SRT file")
                        description = types[x[2]]
                    streams.append((x, "", number, description, language, selected))
                    idx += 1
            else:
                streams = []
        conflist.append(getConfigListEntry(_("Menu"), self.settings.menupage))
        # Imported lazily to avoid a circular import at module load time.
        from Components.PluginComponent import plugins
        from Plugins.Plugin import PluginDescriptor
        if hasattr(self.infobar, "runPlugin"):
            # Small adapter binding a plugin to the infobar's runPlugin.
            class PluginCaller:
                def __init__(self, fnc, *args):
                    self.fnc = fnc
                    self.args = args
                def __call__(self, *args, **kwargs):
                    self.fnc(*self.args)
            Plugins = [ (p.name, PluginCaller(self.infobar.runPlugin, p)) for p in plugins.getPlugins(where = PluginDescriptor.WHERE_AUDIOMENU) ]
            if len(Plugins):
                # Only the first audio-menu plugin is shown (blue key).
                self["key_blue"].setBoolean(True)
                conflist.append(getConfigListEntry(Plugins[0][0], ConfigNothing()))
                self.plugincallfunc = Plugins[0][1]
            if len(Plugins) > 1:
                print "these plugins are installed but not displayed in the dialog box:", Plugins[1:]
        self["config"].list = conflist
        self["config"].l.setList(conflist)
        self["streams"].list = streams
        self["streams"].setIndex(selectedidx)
    def __updatedInfo(self):
        # Service info changed: rebuild both lists.
        self.fillList()
    def getSubtitleList(self):
        # Subtitle tuples from the running service, or an empty list.
        s = self.infobar and self.infobar.getCurrentServiceSubtitle()
        l = s and s.getSubtitleList() or [ ]
        return l
    def subtitlesEnabled(self):
        return self.infobar.subtitles_enabled
    def enableSubtitle(self, subtitles):
        # Switch to the given subtitle tuple (None disables subtitles).
        if self.infobar.selected_subtitle != subtitles:
            self.infobar.subtitles_enabled = False
            self.infobar.selected_subtitle = subtitles
            if subtitles:
                self.infobar.subtitles_enabled = True
    def changeAC3Downmix(self, downmix):
        # Persist the AC3 downmix toggle to the global AV config.
        if downmix.getValue() == True:
            config.av.downmix_ac3.value = True
        else:
            config.av.downmix_ac3.value = False
        config.av.downmix_ac3.save()
    def changeMode(self, mode):
        # Apply the left/stereo/right channel selection.
        if mode is not None:
            self.audioChannel.selectChannel(int(mode.getValue()))
    def changeAudio(self, audio):
        # Select the audio track by index, guarding against stale indices.
        track = int(audio)
        if isinstance(track, int):
            if self.session.nav.getCurrentService().audioTracks().getNumberOfTracks() > track:
                self.audioTracks.selectTrack(track)
    def keyLeft(self):
        if self.focus == FOCUS_CONFIG:
            ConfigListScreen.keyLeft(self)
        elif self.focus == FOCUS_STREAMS:
            self["streams"].setIndex(0)
    def keyRight(self, config = False):
        # In the config pane, right either changes a value or triggers
        # the plugin entry; in the stream pane it jumps to the last row.
        if config or self.focus == FOCUS_CONFIG:
            if self["config"].getCurrentIndex() < 3:
                ConfigListScreen.keyRight(self)
            elif hasattr(self, "plugincallfunc"):
                self.plugincallfunc()
        if self.focus == FOCUS_STREAMS and self["streams"].count() and config == False:
            self["streams"].setIndex(self["streams"].count()-1)
    def keyRed(self):
        if self["key_red"].getBoolean():
            self.colorkey(0)
    def keyGreen(self):
        if self["key_green"].getBoolean():
            self.colorkey(1)
    def keyYellow(self):
        if self["key_yellow"].getBoolean():
            self.colorkey(2)
    def keyBlue(self):
        if self["key_blue"].getBoolean():
            self.colorkey(3)
    def colorkey(self, idx):
        # Color keys activate the config entry at the given index.
        self["config"].setCurrentIndex(idx)
        self.keyRight(True)
    def keyUp(self):
        # Moving up from the top of the stream list enters the config pane.
        if self.focus == FOCUS_CONFIG:
            self["config"].instance.moveSelection(self["config"].instance.moveUp)
        elif self.focus == FOCUS_STREAMS:
            if self["streams"].getIndex() == 0:
                self["config"].instance.setSelectionEnable(True)
                self["streams"].style = "notselected"
                self["config"].setCurrentIndex(len(self["config"].getList())-1)
                self.focus = FOCUS_CONFIG
            else:
                self["streams"].selectPrevious()
    def keyDown(self):
        # Moving down past the last config entry enters the stream pane.
        if self.focus == FOCUS_CONFIG:
            if self["config"].getCurrentIndex() < len(self["config"].getList())-1:
                self["config"].instance.moveSelection(self["config"].instance.moveDown)
            else:
                self["config"].instance.setSelectionEnable(False)
                self["streams"].style = "default"
                self.focus = FOCUS_STREAMS
        elif self.focus == FOCUS_STREAMS:
            self["streams"].selectNext()
    def keyOk(self):
        # OK on a stream selects it (audio) or toggles it (subtitles),
        # then closes the dialog; in the config pane it acts like right.
        if self.focus == FOCUS_STREAMS and self["streams"].list:
            cur = self["streams"].getCurrent()
            if self.settings.menupage.getValue() == PAGE_AUDIO and cur[0] is not None:
                self.changeAudio(cur[2])
                self.__updatedInfo()
            if self.settings.menupage.getValue() == PAGE_SUBTITLES and cur[0] is not None:
                if self.infobar.selected_subtitle == cur[0]:
                    self.enableSubtitle(None)
                    selectedidx = self["streams"].getIndex()
                    self.__updatedInfo()
                    self["streams"].setIndex(selectedidx)
                else:
                    self.enableSubtitle(cur[0])
                    self.__updatedInfo()
            self.close(0)
        elif self.focus == FOCUS_CONFIG:
            self.keyRight()
    def cancel(self):
        self.close(0)
class SubtitleSelection(AudioSelection):
    """AudioSelection pre-opened on the subtitles page, reusing its skin."""
    def __init__(self, session, infobar=None):
        AudioSelection.__init__(self, session, infobar, page=PAGE_SUBTITLES)
        # Reuse the AudioSelection skin instead of requiring a new one.
        self.skinName = ["AudioSelection"]
|
popazerty/e2-dmm
|
lib/python/Screens/AudioSelectionExtended.py
|
Python
|
gpl-2.0
| 9,756 | 0.031673 |
import commonware.log
from datetime import datetime, timedelta
import re
from session_csrf import anonymous_csrf
from django.shortcuts import redirect, render, get_object_or_404
from django.contrib import messages
from django.utils.translation import ugettext_lazy as _
from django.views.generic.edit import CreateView, UpdateView
from django.views.generic.base import View, TemplateView
from django.views.generic import ListView, DetailView, FormView
from django.views.decorators.csrf import csrf_protect
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import User
from django.utils.decorators import method_decorator
from django.core.urlresolvers import reverse
from django.http import Http404
from stores.resources import DiscountResource, DiscountGroupResource, \
ItemResource, ItemImageResource, DetailItemResource, \
PendingShippingRequestResource
from stores.models import Store, Item, ItemImage, Discount, DiscountGroup, \
StoreDesign
from stores.forms import StoreCreationForm, StoreImageUploadForm, \
ItemCreationForm, ItemImageCreationForm, \
DiscountCreationForm, DiscountGroupCreationForm, \
ContactInformationForm, \
ItemSearchForm, StoreDesignForm
from cart.models import PersonalTag, SocialBuy, SocialTag, ShippingRequest, paypalAP
from cart.forms import SocialBuyForm
from friends.models import Friendship
from utils import LoginRequiredMixin, render_to_string, thumbnail
log = commonware.log.getLogger('shoppindomain')
@login_required
def search(request):
    """Render the item search page with a form bound to the query string."""
    context = {'form': ItemSearchForm(request.GET)}
    return render(request, 'stores/search.j.html', context)
# TODO store.get_buyers is slow.
# Maybe we should denormalize database and add to Store model m2m-field `buyers`
@login_required
@csrf_protect
def view_store(request, store_id=None):
    """Show a store page with its buyers split into friends and others."""
    store = get_object_or_404(Store, pk=int(store_id))
    friend_ids = [row['id'] for row in
                  Friendship.objects.friends_of(request.user).values('id')]
    buyers = (User.objects
              .filter(id__in=store.get_buyer_ids())
              .exclude(id=request.user.id))
    return render(request, 'stores/store.j.html', {
        'store': store,
        'friend_buyers': buyers.filter(id__in=friend_ids),
        'other_buyers': buyers.exclude(id__in=friend_ids),
    })
class CreateStoreView(LoginRequiredMixin, CreateView):
    """Create a new store owned by the current user.

    Users who already own a store just get the template rendered
    without a form.
    """
    model = Store
    template_name = 'stores/manage/create.j.html'

    def get_form_class(self):
        return StoreCreationForm

    def form_valid(self, form):
        # Attach the owner before persisting.
        new_store = form.save(commit=False)
        new_store.user = self.request.user
        new_store.save()
        return redirect(self.get_success_url())

    def get_success_url(self):
        return reverse('stores.create_store_done')

    def dispatch(self, request, *args, **kwargs):
        if not hasattr(request.user, 'store'):
            return super(CreateStoreView, self).dispatch(
                request, *args, **kwargs)
        # One store per user: short-circuit with the plain template.
        return render(request, self.template_name, {})
@login_required
@csrf_protect
def store_image(request):
    """Upload or display the store window image for the current owner."""
    if not hasattr(request.user, 'store'):
        raise Http404()
    store = request.user.store
    form = StoreImageUploadForm(instance=store)
    if request.method == 'POST':
        form = StoreImageUploadForm(request.POST, request.FILES,
                                    instance=store)
        if form.is_valid():
            form.save()
            return redirect(store_image)
    # GET, or POST with validation errors: render the (possibly bound) form.
    return render(request, 'stores/manage/image.j.html', {
        'image': request.user.store.window_image,
        'form': form
    })
class CreateStoreDoneView(LoginRequiredMixin, TemplateView):
    """Final step of store creation: link a PayPal account.

    Implements the PayPal Permissions API round trip: first request
    permissions (redirect URL in context), then, on callback with
    ``request_token``/``verification_code``, exchange them for the
    account's email and store it.
    """
    template_name = 'stores/manage/create_done.j.html'
    def get_context_data(self, **kwargs):
        request_token = self.request.GET.get('request_token', None)
        verification_code = self.request.GET.get('verification_code', None)
        store = self.request.user.store
        # Already linked but awaiting activation: nothing more to do.
        if store.paypal_email and not store.is_active:
            return {}
        if (not request_token is None) and (not verification_code is None):
            # Callback leg: exchange the request token for an access token.
            response = paypalAP.callPermissions('GetAccessToken',
                token=request_token,
                verifier=verification_code)
            # Fetch the user's basic personal data with that token.
            response = paypalAP.callPermissionsOnBehalf('GetBasicPersonalData',
                access_token = response.token,
                secret_token = response.tokenSecret,
                **{
                    'attributeList.attribute(0)': 'http://axschema.org/contact/email',
                    'attributeList.attribute(1)': 'http://schema.openid.net/contact/fullname'
                })
            personal_data = {}
            # PayPal returns flattened key/value pairs like
            # ``response.personalData(0).personalDataKey``; pair them up.
            key_re = re.compile(r'response.personalData\((?P<index>\d+)\)\.(?P<name>.+)')
            for key in response.raw:
                m = re.match(key_re, key)
                if not m is None and m.group('name') == 'personalDataKey':
                    personal_data[response.get(key)] = response.get('response.personalData(%s).personalDataValue' % m.group('index'))
            store.paypal_email = personal_data['http://axschema.org/contact/email']
            store.save()
            return {}
        else:
            # First leg: request permissions and hand the redirect URL
            # to the template.
            response = paypalAP.callPermissions('RequestPermissions',
                callback=self.request.build_absolute_uri(reverse('stores.create_store_done')),
                **{
                    'scope(0)': 'ACCESS_BASIC_PERSONAL_DATA',
                    'scope(1)':'REFUND'
                })
            return { 'paypal_url': paypalAP.generate_permissions_redirect_url(response.token) }
class ItemView(LoginRequiredMixin, DetailView):
    """Item detail page with serialized item data for the client."""
    model = Item
    context_object_name = 'item'
    template_name = 'stores/item.j.html'

    def get_context_data(self, **kwargs):
        context = super(ItemView, self).get_context_data(**kwargs)
        item = self.object
        # Serialize the item and its images for client-side rendering.
        context['item_details_json'] = DetailItemResource().to_json(
            obj=item, request=self.request)
        context['images_json'] = ItemImageResource().to_json(
            obj=item.images.all(), request=self.request)
        return context
class ManageContactInformationView(LoginRequiredMixin, FormView):
    """Edit the contact information of the current user's store."""
    template_name = 'stores/manage/contact_information.j.html'
    form_class = ContactInformationForm
    def get_initial(self):
        # Default country for new contact forms.
        return { 'country': 'AU' }
    def get_form_kwargs(self):
        # Bind the form to the owner's store instance.
        kwargs = super(ManageContactInformationView, self).get_form_kwargs()
        kwargs.update({ 'instance': self.request.user.store })
        return kwargs
    def form_valid(self, form):
        # Save and re-render the same page with an ``updated`` flag
        # instead of redirecting.
        form.save()
        return self.render_to_response(
            self.get_context_data(form=form,updated=True))
    def form_invalid(self, form):
        # NOTE(review): identical to FormView's default form_invalid;
        # kept for explicitness.
        return self.render_to_response(self.get_context_data(form=form))
class ManageItemsView(LoginRequiredMixin, TemplateView):
    """Store-owner page listing items, with item/image creation forms."""
    template_name = 'stores/manage/items.j.html'

    def get_context_data(self, **kwargs):
        # Only the first page of items is serialized up front; the
        # client fetches further pages via the API.
        per_page = 4
        store_items = self.request.user.store.items.all()
        first_page_json = ItemResource().to_json(
            obj=store_items[:per_page], request=self.request)
        kwargs.update({
            'items_json': first_page_json,
            'items_total': store_items.count(),
            'items_per_page': per_page,
            'item_form': ItemCreationForm(),
            'image_form': ItemImageCreationForm(),
        })
        return kwargs
class ManageDiscountsView(LoginRequiredMixin, TemplateView):
    """Store-owner page for viewing and creating discounts."""
    template_name = 'stores/manage/discounts.j.html'

    def get_context_data(self, **kwargs):
        context = super(ManageDiscountsView, self).get_context_data(**kwargs)
        store_discounts = Discount.objects.filter(
            store=self.request.user.store)
        context['discounts_json'] = DiscountResource().to_json(
            obj=store_discounts, request=self.request)
        context['discount_creation_form'] = DiscountCreationForm()
        return context
class ManageDiscountGroupsView(LoginRequiredMixin, TemplateView):
    """Store-owner page for managing discount groups.

    Serializes the owner's discount groups, discounts, and items for
    the client-side UI, plus the group creation form.
    """
    template_name = 'stores/manage/discount_groups.j.html'

    def get_context_data(self, **kwargs):
        context = super(ManageDiscountGroupsView, self).get_context_data(
            **kwargs)
        request = self.request
        owner_store = request.user.store
        groups_qs = DiscountGroup.objects.filter(
            discount__store=owner_store)
        discounts_qs = Discount.objects.filter(store=owner_store)
        context.update({
            'discount_groups_json': DiscountGroupResource().to_json(
                obj=groups_qs, request=request),
            'items_json': ItemResource().to_json(
                obj=owner_store.items.all(), request=request),
            'discounts_json': DiscountResource().to_json(
                obj=discounts_qs, request=request),
            'discount_group_form': DiscountGroupCreationForm(),
        })
        return context
class ManageShippingRequests(LoginRequiredMixin, TemplateView):
    """Dashboard page listing pending shipping requests for the store."""

    template_name = 'stores/manage/shipping.j.html'

    def get_context_data(self, **kwargs):
        context = super(ManageShippingRequests, self) \
            .get_context_data(**kwargs)
        resource = PendingShippingRequestResource()
        # Restrict the resource's queryset to what this user may see.
        pending = resource.apply_authorization_limits(
            self.request, resource._meta.queryset)
        context['shipping_requests_json'] = resource.to_json(
            obj=pending, request=self.request)
        return context
class ChangeStoreDesign(LoginRequiredMixin, UpdateView):
    """Edit the design record of the user's store, creating it on first use."""

    form_class = StoreDesignForm
    model = StoreDesign
    template_name = 'stores/manage/change_shopdesign.j.html'

    def get_object(self, queryset=None):
        # A store may not have a design yet; lazily create one.
        store = self.request.user.store
        try:
            return store.design
        except self.model.DoesNotExist:
            return self.model.objects.create(store=store)

    def get_success_url(self):
        return reverse('stores.design')

    def form_invalid(self, *args, **kwargs):
        messages.warning(self.request, _('Design is not saved!'))
        return super(ChangeStoreDesign, self).form_invalid(*args, **kwargs)

    def form_valid(self, *args, **kwargs):
        messages.success(self.request, _('Design saved successfully'))
        return super(ChangeStoreDesign, self).form_valid(*args, **kwargs)
|
softak/webfaction_demo
|
apps/stores/views.py
|
Python
|
bsd-3-clause
| 11,025 | 0.004535 |
from docs import *
|
cyphactor/lifecyclemanager
|
extra/plugins/docs/docs/__init__.py
|
Python
|
gpl-3.0
| 20 | 0 |
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for GetTagKey
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-resourcemanager
# [START cloudresourcemanager_v3_generated_TagKeys_GetTagKey_async]
from google.cloud import resourcemanager_v3
async def sample_get_tag_key():
    """Fetch a single TagKey by resource name and print it (async client)."""
    # Create a client
    client = resourcemanager_v3.TagKeysAsyncClient()

    # Initialize request argument(s)
    request = resourcemanager_v3.GetTagKeyRequest(
        name="name_value",
    )

    # Make the request
    response = await client.get_tag_key(request=request)

    # Handle the response
    print(response)
# [END cloudresourcemanager_v3_generated_TagKeys_GetTagKey_async]
|
googleapis/python-resource-manager
|
samples/generated_samples/cloudresourcemanager_v3_generated_tag_keys_get_tag_key_async.py
|
Python
|
apache-2.0
| 1,477 | 0.000677 |
from __future__ import division
from pyvx import *
from array import array
class TestDiv(object):
    """Checks true (/) and floor (//) division of a U8 image by a scalar."""
    def test_div(self):
        g = Graph()
        with g:
            # 3x4 single-channel U8 image filled with 0..11.
            img = Image(3, 4, DF_IMAGE_U8, array('B', range(12)))
            sa1 = img / 2
            sa2 = img // 2
            sa1.force()
            sa2.force()
        g.process()
        # True division yields fractional values; floor division truncates.
        assert [sa1.data[i] for i in range(6)] == [0, 0.5, 1.0, 1.5, 2.0, 2.5]
        assert [sa2.data[i] for i in range(6)] == [0, 0, 1, 1, 2, 2]
|
hakanardo/pyvx
|
old/test/test_truediv.py
|
Python
|
mit
| 494 | 0.002024 |
from django.core.exceptions import ObjectDoesNotExist
from django.http import Http404
from django.shortcuts import get_object_or_404
from django.test import TestCase
# Create your tests here.
from portfolio.apps.core.models import User
class UserModelTests(TestCase):
    """Checks that looking up a non-existent user fails loudly."""

    def test_admin_user_exist(self):
        # FIX: the original performed each assertion twice (once via a lambda
        # passed to assertRaises, once via the context-manager form) and bound
        # an unused ``e``; one context-manager assertion per behavior suffices.

        # ``get_object_or_404`` must translate the missing row into Http404.
        with self.assertRaises(Http404):
            get_object_or_404(User, phone="3124795074")

        # A raw ORM lookup raises the model's DoesNotExist, a subclass of
        # ObjectDoesNotExist.
        with self.assertRaises(ObjectDoesNotExist):
            User.objects.get(phone="3124795074")
|
i404ed/portfolio-dir
|
portfolio/apps/core/tests.py
|
Python
|
gpl-3.0
| 697 | 0.002869 |
import sys
import string
import re
import os
# Strip C-preprocessor line markers from the file named on the command line
# and echo the result to stdout (Python 2 script).
input_file = open(sys.argv[1])
prev_value = int(0)  # NOTE(review): never used after initialisation
true = 1  # poor-man's boolean; the loop exits via ``break`` at EOF
while true:
    input_line = input_file.readline()
    if input_line == "":
        break  # EOF
    # Remove ``#line N ...`` and ``# N "file"`` markers emitted by cpp so no
    # path information reaches downstream tools.
    input_line = re.sub('#line.*','',input_line)
    input_line = re.sub('# [0-9].*','',input_line)
    print input_line
|
ucolesanti/tinyos-wrappers
|
support/sdk/python/pynopath.py
|
Python
|
bsd-3-clause
| 296 | 0.037162 |
#! /usr/bin/python
import threading
import socket
import sys,time
import SocketServer,struct,select
global bufLen
global endflag
global socksPort
###################
socksPort = 50000 #Default socks5 proxy port
###################
endflag = []
bufLen = 4*1024
class startThreadSoket(threading.Thread):
    """Background thread running the local SOCKS5 proxy listener."""
    def __init__(self,socksPort):
        threading.Thread.__init__(self)
        self.socksPort = socksPort  # TCP port the proxy will bind
    def run(self):
        # Blocks forever in serve_forever(); started as a daemon thread.
        socket_bind(self.socksPort)
class control(threading.Thread):
    """Supervises one proxied link: spawns the two relay threads
    (client->server and server->client) and waits for both to finish."""
    def __init__(self,server_Conn,client_Conn,serverAddr,clientAddr,clientNum):
        threading.Thread.__init__(self)
        self.server_Conn = server_Conn
        self.client_Conn = client_Conn
        self.server_Addr = serverAddr
        self.client_Addr = clientAddr
        self.clientNum = clientNum  # 0-based link index
    def run(self):
        global endflag
        transferDataThreads = []
        thread = 2  # number of relay threads per link
        flag = self.clientNum  # index of this link's stop flag in ``endflag``
        endflag.append(False)
        y = transfer2Server(self.server_Conn,self.client_Conn,self.server_Addr,self.client_Addr,flag)
        y.setDaemon(True)
        z = transfer2Client(self.client_Conn,self.server_Conn,self.client_Addr,self.server_Addr,flag)
        z.setDaemon(True)
        transferDataThreads.append(y)
        transferDataThreads.append(z)
        for t in transferDataThreads:
            t.start()
        while True:
            alive = True
            for i in range(int(thread)):
                alive = alive and transferDataThreads[i].isAlive()
            if not alive:
                # Both relays ended; give sockets a moment, then report.
                time.sleep(3)
                print "[Link %s] Connection has closed." % self.clientNum
                break
            # NOTE(review): unconditional break — the loop body runs once
            # only, so the "closed" message is usually skipped; verify intent.
            break
class transfer2Server(threading.Thread):
def __init__(self,server_Conn,client_Conn,server_Addr,client_Addr,flag):
threading.Thread.__init__(self)
self.server_Conn = server_Conn
self.client_Conn = client_Conn
self.server_Addr = server_Addr
self.client_Conn = client_Conn
self.flag = flag
self.currentNum = self.flag+1
def run(self):
global bufLen
global endflag
servPeerName = self.server_Conn.getpeername()
clientPeerName = self.client_Conn.getpeername()
while True and not endflag[self.flag]:
try:
buf = self.client_Conn.recv(bufLen)
except:
print "Connection reset by peer.Program exit."
for m in endflag:
m = True
sys.exit()
if buf == '' or buf == '__closed__':
time.sleep(2)
self.client_Conn.close()
endflag[self.flag] = True
break
try:
self.server_Conn.send(buf)
print "[Link %s] %s --> %s : %s data" % (self.currentNum,clientPeerName,servPeerName,len(buf))
except:
endflag[self.flag] = True
time.sleep(2)
self.client_Conn.send('__closed__')
self.client_Conn.close()
break
class transfer2Client(threading.Thread):
    """Relay thread: pumps data from the remote server connection back to
    the local client connection until either side closes or the link's stop
    flag is raised."""
    def __init__(self,client_Conn,server_Conn,client_Addr,server_Addr,flag):
        threading.Thread.__init__(self)
        self.client_Conn = client_Conn
        self.server_Conn = server_Conn
        self.client_Addr = client_Addr
        self.server_Addr = server_Addr
        self.flag = flag  # index into the shared ``endflag`` list
        self.currentNum = flag+1  # 1-based link number for log output
    def run(self):
        global bufLen
        global endflag
        servPeerName = self.server_Conn.getpeername()
        clientPeerName = self.client_Conn.getpeername()
        while True and not endflag[self.flag]:
            buf = self.server_Conn.recv(bufLen)
            if buf == '':
                # Remote side closed: shut down this link.
                print "[Link %s] Server %s disconnect.End current thread." % (self.currentNum,clientPeerName)
                time.sleep(2)
                self.server_Conn.close()
                endflag[self.flag] = True
                break
            try:
                self.client_Conn.send(buf)
                print "[Link %s] %s --> %s : %s data" % (self.currentNum,servPeerName,clientPeerName,len(buf))
            except:
                # Local client went away; flag the link and close server side.
                endflag[self.flag] = True
                time.sleep(2)
                self.server_Conn.close()
                break
class ThreadingTCPServer(SocketServer.ThreadingMixIn, SocketServer.TCPServer): pass
class Socks5Server(SocketServer.StreamRequestHandler):
    """Minimal SOCKS5 request handler: no authentication, CONNECT only."""
    def handle_tcp(self, sock, remote):
        # Shuttle bytes in both directions until either side stops sending.
        fdset = [sock, remote]
        while True:
            r, w, e = select.select(fdset, [], [])
            if sock in r:
                if remote.send(sock.recv(4096)) <= 0: break
            if remote in r:
                if sock.send(remote.recv(4096)) <= 0: break
    def handle(self):
        try:
            pass
            sock = self.connection
            # Greeting: discard the client's method list, answer
            # "version 5, no authentication required".
            sock.recv(262)
            sock.send("\x05\x00");
            # Request header: VER CMD RSV ATYP.
            data = self.rfile.read(4)
            mode = ord(data[1])  # 1 == CONNECT
            addrtype = ord(data[3])
            if addrtype == 1:  # IPv4 address (4 raw bytes)
                addr = socket.inet_ntoa(self.rfile.read(4))
            elif addrtype == 3:  # domain name (length-prefixed)
                addr = self.rfile.read(ord(sock.recv(1)[0]))
            port = struct.unpack('>H', self.rfile.read(2))
            reply = "\x05\x00\x00\x01"  # success, IPv4 bind address follows
            try:
                if mode == 1:
                    remote = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                    remote.connect((addr, port[0]))
                    pass
                else:
                    reply = "\x05\x07\x00\x01"  # command not supported
                local = remote.getsockname()
                reply += socket.inet_aton(local[0]) + struct.pack(">H", local[1])
            except socket.error:
                # Connect failed: general failure reply with zeroed address.
                reply = '\x05\x05\x00\x01\x00\x00\x00\x00\x00\x00'
            sock.send(reply)
            if reply[1] == '\x00':
                if mode == 1:
                    # Handshake done: relay application data.
                    self.handle_tcp(sock, remote)
        except socket.error:
            pass
        except IndexError:
            pass
def socket_bind(socketPort):
    """Bind the SOCKS5 proxy on all interfaces and serve forever (blocking)."""
    socks_port = int(socketPort)
    server = ThreadingTCPServer(('', socks_port), Socks5Server)
    print 'Socks5 proxy bind port : %d' % socks_port + ' ok!'
    server.serve_forever()
def usage():
    """Print command-line usage and examples for the three modes."""
    print """
 reprocks_client\t1.0
 Code by H.K.T\temail:jlvsjp@qq.com
 Thanks to ringzero@557.im for socks5 proxy module!
 usage : %s -m 1 <reprocks_server_IP> <reprocks_server_port>
 %s -m 2 <transferIP> <transferPort> <reprocks_server_IP> <reprocks_server_port>
 %s -m 3 [bind_socket_port]
 example:
 %s -m 1 123.123.123.123 1230
 #Rebind socks5 proxy to reprocks_server.
 %s -m 2 127.0.0.1 22 123.123.123.123 1230
 #Just port transmit in reconnection method.
 %s -m 3 7070
 #Just start socks5 proxy.
 """ % (sys.argv[0],sys.argv[0],sys.argv[0],sys.argv[0],sys.argv[0],sys.argv[0])
def main():
    """Dispatch on the ``-m`` mode argument (see usage())."""
    global socksPort
    global endflag
    try:
        if len(sys.argv)>=3:
            if sys.argv[2]=='3':
                # Mode 3: run only the local SOCKS5 proxy (optional port).
                if len(sys.argv)==4:
                    socksPort = int(sys.argv[3])
                socket_bind(socksPort)
            elif sys.argv[2]=='1' and len(sys.argv)==5:
                # Mode 1: start the SOCKS5 proxy in the background, then
                # tunnel it out to the reprocks server.
                socksProxy = startThreadSoket(socksPort)
                socksProxy.setDaemon(True)
                socksProxy.start()
                reproket('localhost',socksPort,sys.argv[3],sys.argv[4])
            elif sys.argv[2]=='2':
                # Mode 2: plain port forwarding to the reprocks server.
                if len(sys.argv)==7:
                    reproket(sys.argv[3],sys.argv[4],sys.argv[5],sys.argv[6])
                else:
                    usage()
            else:
                usage()
    except KeyboardInterrupt:
        print "Catch ctrl+c pressed,program will exit."
        # NOTE(review): rebinding ``m`` does not modify ``endflag``; relay
        # threads never observe these stop flags — likely a bug.
        for m in endflag:
            m = True
def reproket(transmitIP,transmitPort,clientIP,clientPort):
    """Maintain paired connections between the local service (transmit*)
    and the remote reprocks server (client*), opening a new pair whenever
    the server signals ``__newLink__``."""
    serverAddr = (transmitIP,int(transmitPort))
    clientAddr = (clientIP,int(clientPort))
    serverLink = []  # connections to the local service, one per link
    clientLink = []  # connections to the reprocks server, one per link
    socketServer = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
    socketServer.connect(serverAddr)
    socketClient = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
    try:
        socketClient.connect(clientAddr)
    except:
        print "Cannot connect to reprocks server.Please run it fisrt or check the network!"
        time.sleep(1)
        sys.exit()
    print "Connect to reprocks server...success!!!"
    serverLink.append(socketServer)
    clientLink.append(socketClient)
    controlThreads = []
    clientNum = 0
    while True:
        try:
            # Wait for the server's request to open another link.
            newLinkFlag = clientLink[clientNum].recv(bufLen)
        except:
            print "[link %s] Connection reset by peer,program exit." % (clientNum+1)
            break
        if newLinkFlag == '__newLink__':
            # Pre-open the next pair, then hand the current pair to a
            # ``control`` supervisor that relays its traffic.
            nextClientLink = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
            nextClientLink.connect(clientAddr)
            print "[Link %s] Make a new connection to reprocks_server ok!" % (clientNum+1)
            nextServerLink = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
            nextServerLink.connect(serverAddr)
            print "[link %s] Make a new connection to socks5 proxy ok!" % (clientNum+1)
            temp = control(serverLink[clientNum],clientLink[clientNum],serverAddr,clientAddr,clientNum)
            temp.setDaemon(True)
            controlThreads.append(temp)
            controlThreads[clientNum].start()
            clientLink.append(nextClientLink)
            serverLink.append(nextServerLink)
            clientNum += 1
# Script entry point.
if __name__ == '__main__':
    main()
|
0ps/reprocks
|
client/reprocks_client.py
|
Python
|
mit
| 9,685 | 0.010532 |
import hashlib
import mimetypes
import os
import posixpath
import re
from time import time
from urlparse import urlsplit, urlunsplit
from werkzeug.exceptions import NotFound
from werkzeug.http import is_resource_modified, http_date
from spa.static.handlers import StaticHandler
from spa.utils import clean_path
class HashCache(object):
    """In-process cache of static-asset content hashes and rewritten CSS
    bodies, keyed by the un-hashed file path."""

    def __init__(self):
        self.path_hashes = {}
        self.contents = {}

    def get_path_hash(self, path):
        """Return the cached content hash for ``path``, or None if unknown."""
        return self.path_hashes[path] if path in self.path_hashes else None

    def set_path_hash(self, path, path_hash):
        """Record ``path_hash`` as the content hash of ``path``."""
        self.path_hashes[path] = path_hash

    def get_contents(self, path):
        """Return the cached rewritten body for ``path``, or None if unknown."""
        return self.contents[path] if path in self.contents else None

    def set_contents(self, path, contents):
        """Cache the rewritten body for ``path``."""
        self.contents[path] = contents
class CacheBustingStaticHandler(StaticHandler):
    """
    Static-file handler that serves files addressed by content-hashed names
    (``foo.<md5prefix>.css``) and, for CSS, rewrites ``url()`` / ``@import``
    references so they point at hashed names too.
    """

    # (compiled pattern, replacement template) pairs applied to CSS bodies.
    css_url_patterns = (
        (re.compile(r"""(url\(['"]{0,1}\s*(.*?)["']{0,1}\))""", re.IGNORECASE),
         """url("{hashed_url}")"""),
        (re.compile(r"""(@import\s*["']\s*(.*?)["'])""", re.IGNORECASE),
         """@import url("{hashed_url}")"""),
    )

    def __init__(self, app, req, params, directory, hash_cache, **kwargs):
        # ``hash_cache`` is shared across handler instances (see CacheBuster)
        # so hashes and rewritten bodies survive between requests.
        self.hash_cache = hash_cache
        return super(CacheBustingStaticHandler, self).__init__(
            app, req, params, directory, **kwargs
        )

    def get(self, filepath):
        """Serve ``filepath`` of the form ``name.<hash><ext>``; 404 unless
        the embedded hash matches the file's current content hash."""
        unhashed_path, path_hash = parse_hashed_filepath(filepath)
        if unhashed_path is None:
            return NotFound()

        if self.hash_cache.get_path_hash(unhashed_path) is None:
            # compute hash, and cache it.
            file = self.get_file(unhashed_path)
            if file is None:
                return NotFound()
            try:
                hash_str = get_hash(file.handle)
                self.hash_cache.set_path_hash(unhashed_path, hash_str)
            finally:
                file.handle.close()

        # If hash we were passed doesn't equal the one we've computed and
        # cached, then 404.
        if path_hash != self.hash_cache.get_path_hash(unhashed_path):
            return NotFound()

        # For CSS stylesheets only, we'll rewrite content so that url()
        # functions will point to hashed filenames instead of unhashed. The
        # rewritten CSS content will be kept in memory.
        if mimetypes.guess_type(filepath)[0] == 'text/css':
            return self.make_css_response(unhashed_path)

        return super(CacheBustingStaticHandler, self).get(unhashed_path)

    def make_css_response(self, filepath):
        """Return a WSGI app serving ``filepath`` with hashed-URL rewriting,
        conditional-GET (ETag/304) support, and caching headers."""
        def resp(environ, start_response):
            file = self.get_file(filepath)
            try:
                headers = [('Date', http_date())]
                if self.cache:
                    timeout = self.cache_timeout
                    etag = self.generate_etag(file.mtime, file.size, file.name)
                    headers += [
                        ('Etag', '"%s"' % etag),
                        ('Cache-Control', 'max-age=%d, public' % timeout)
                    ]
                    if not is_resource_modified(environ, etag, last_modified=file.mtime):
                        start_response('304 Not Modified', headers)
                        return []
                    headers.append(('Expires', http_date(time() + timeout)))
                else:
                    headers.append(('Cache-Control', 'public'))

                # Rewrite the stylesheet once, then serve from memory.
                contents = self.hash_cache.get_contents(filepath)
                if contents is None:
                    contents = file.handle.read()
                    for pat, tpl in self.css_url_patterns:
                        converter = self.get_converter(tpl)
                        contents = pat.sub(converter, contents)
                    self.hash_cache.set_contents(filepath, contents)

                headers.extend((
                    ('Content-Type', file.mimetype),
                    ('Content-Length', len(contents)),
                    ('Last-Modified', http_date(file.mtime))
                ))
                start_response('200 OK', headers)
                return [contents]
            finally:
                file.handle.close()
        return resp

    def get_converter(self, tpl):
        """Return a ``re.sub`` callback substituting hashed URLs into
        ``tpl``; anchors, external and data: URLs pass through untouched."""
        def converter(matchobj):
            matched, url = matchobj.groups()
            if url.startswith(('#', 'http:', 'https:', 'data:', '//')):
                return url
            return tpl.format(hashed_url=self.convert_css_url(url))
        return converter

    def convert_css_url(self, css_url):
        """Map a URL referenced inside a served CSS file to its
        content-hashed equivalent (``img/a.png`` -> ``img/a.<hash>.png``)."""
        split_url = urlsplit(css_url)
        url_path = split_url.path

        if not url_path.startswith('/'):
            abs_url_path = self.make_path_absolute(url_path)
        else:
            abs_url_path = posixpath.realpath(url_path)

        prefix = self.get_url_prefix()

        # now make the path as it would be passed in to this handler when
        # requested from the web. From there we can use existing methods on the
        # class to resolve to a real file.
        _, _, content_filepath = abs_url_path.partition(prefix)
        content_filepath = clean_path(content_filepath)
        content_file_hash = self.hash_cache.get_path_hash(content_filepath)
        if content_file_hash is None:
            content_file = self.get_file(content_filepath)
            if content_file is None:
                return 'NOT FOUND: "%s"' % url_path
            try:
                content_file_hash = get_hash(content_file.handle)
                # BUG FIX: store the freshly computed hash (mirrors get());
                # previously it was recomputed for every CSS reference because
                # the cache was never populated here.
                self.hash_cache.set_path_hash(content_filepath, content_file_hash)
            finally:
                content_file.handle.close()

        parts = list(split_url)
        parts[2] = add_hash_to_filepath(url_path, content_file_hash)
        url = urlunsplit(parts)

        # Special casing for a @font-face hack, like url(myfont.eot?#iefix")
        # http://www.fontspring.com/blog/the-new-bulletproof-font-face-syntax
        if '?#' in css_url:
            parts = list(urlsplit(url))
            if not parts[3]:
                parts[2] += '?'
            url = urlunsplit(parts)
        return url

    def get_url_prefix(self):
        """
        Return the mount point for this handler. So if you had a route like
        this:

            ('/foo/bar/static/<path:filepath>', 'foo', Handler)

        Then this function should return '/foo/bar/static/'
        """
        env = self.request.environ
        filepath = self.params['filepath']
        prefix, _, _ = (env['SCRIPT_NAME'] +
                        env['PATH_INFO']).rpartition(filepath)
        return prefix

    def make_path_absolute(self, path):
        """
        Given a relative url found inside the CSS file we're currently serving,
        return an absolute form of that URL.
        """
        env = self.request.environ
        pinfo = posixpath.dirname(env['PATH_INFO'])
        return posixpath.realpath(env['SCRIPT_NAME'] + pinfo + '/' + path)
def parse_hashed_filepath(filename, hash_len=12):
    """
    Given a name like '/static/my_file.deadbeef1234.txt', return a tuple of
    the file name without the hash, and the hash itself, like this:

        ('/static/my_file.txt', 'deadbeef1234')

    If no hash part is found, then return (None, None).
    """
    # BUG FIX: the character class used to be [0-9,a-f], which also matched a
    # literal comma; a hex hash contains only digits and a-f.
    pat = r'^(?P<before>.*)\.(?P<hash>[0-9a-f]{%s})(?P<after>.*?)$' % hash_len
    m = re.match(pat, filename)
    if m is None:
        return None, None
    parts = m.groupdict()
    return '{before}{after}'.format(**parts), parts['hash']
def add_hash_to_filepath(filepath, hash_str):
    """Insert ``hash_str`` before the extension: 'a/b.txt' -> 'a/b.<hash>.txt'."""
    directory, filename = os.path.split(filepath)
    stem, extension = os.path.splitext(filename)
    hashed_name = "%s.%s%s" % (stem, hash_str, extension)
    return os.path.join(directory, hashed_name)
def get_hash(lines, hash_len=12):
    """Return the first ``hash_len`` hex characters of the MD5 digest of all
    chunks yielded by ``lines`` (e.g. an open file handle)."""
    digest = hashlib.md5()
    for chunk in lines:
        digest.update(chunk)
    return digest.hexdigest()[:hash_len]
class CacheBuster(object):
    """
    Factory producing CacheBustingStaticHandler instances that all share one
    HashCache, so content hashes are computed at most once per process.
    """

    def __init__(self, directory):
        self.directory = directory
        self.hash_cache = HashCache()

    def __call__(self, app, req, params, **kwargs):
        kwargs.update(directory=self.directory, hash_cache=self.hash_cache)
        return CacheBustingStaticHandler(app, req, params, **kwargs)
|
pawpro/spa
|
spa/static/hashed.py
|
Python
|
bsd-3-clause
| 8,271 | 0.000484 |
# All Rights Reserved 2020
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import operator
from unittest import mock
from neutronclient.tests.unit.osc.v2 import fakes as test_fakes
from osc_lib import utils as osc_utils
from osc_lib.utils import columns as column_util
from oslo_utils import uuidutils
from neutron_taas.taas_client.osc import tap_flow as osc_tap_flow
from neutron_taas.tests.unit.taas_client.osc import fakes
# Columns / headers for the two listing modes, derived from the OSC plugin's
# attribute map of (column name, human heading, listing mode) triples.
columns_long = tuple(col for col, _, listing_mode in osc_tap_flow._attr_map
                     if listing_mode in (column_util.LIST_BOTH,
                                         column_util.LIST_LONG_ONLY))
headers_long = tuple(head for _, head, listing_mode in
                     osc_tap_flow._attr_map if listing_mode in
                     (column_util.LIST_BOTH, column_util.LIST_LONG_ONLY))

# Full attribute map sorted by heading, as used by the show/create output.
sorted_attr_map = sorted(osc_tap_flow._attr_map, key=operator.itemgetter(1))
sorted_columns = tuple(col for col, _, _ in sorted_attr_map)
sorted_headers = tuple(head for _, head, _ in sorted_attr_map)


def _get_data(attrs, columns=sorted_columns):
    # Project the fake resource dict onto the given column tuple.
    return osc_utils.get_dict_properties(attrs, columns)
class TestCreateTapService(test_fakes.TestNeutronClientOSCV2):
    """Tests the ``tap flow create`` OSC command.

    NOTE(review): the class name says "TapService" but every assertion here
    exercises tap-*flow* creation — consider renaming to TestCreateTapFlow.
    """

    # Display columns expected in the create command's output.
    columns = (
        'Direction',
        'ID',
        'Name',
        'Status',
        'Tenant',
        'source_port',
        'tap_service_id',
    )

    def setUp(self):
        super(TestCreateTapService, self).setUp()
        self.cmd = osc_tap_flow.CreateTapFlow(self.app, self.namespace)

    def test_create_tap_flow(self):
        """Test Create Tap Flow."""
        fake_tap_flow = fakes.FakeTapFlow.create_tap_flow(
            attrs={
                'source_port': uuidutils.generate_uuid(),
                'tap_service_id': uuidutils.generate_uuid()
            }
        )
        # The mocked client POSTs the resource and echoes it back.
        self.neutronclient.post = mock.Mock(
            return_value={osc_tap_flow.TAP_FLOW: fake_tap_flow})
        arg_list = [
            '--name', fake_tap_flow['name'],
            '--port', fake_tap_flow['source_port'],
            '--tap-service', fake_tap_flow['tap_service_id'],
            '--direction', fake_tap_flow['direction'],
        ]
        verify_list = [
            ('name', fake_tap_flow['name']),
            ('port', fake_tap_flow['source_port']),
            ('tap_service', fake_tap_flow['tap_service_id']),
        ]
        parsed_args = self.check_parser(self.cmd, arg_list, verify_list)
        # ``find_resource`` is consulted twice: first to resolve the port,
        # then the tap service — hence the ordered side_effect list.
        with mock.patch.object(self.neutronclient, 'find_resource') as nc_find:
            nc_find.side_effect = [
                {'id': fake_tap_flow['source_port']},
                {'id': fake_tap_flow['tap_service_id']}
            ]
            columns, data = self.cmd.take_action(parsed_args)
        self.neutronclient.post.assert_called_once_with(
            '/taas/tap_flows',
            body={
                osc_tap_flow.TAP_FLOW:
                {
                    'name': fake_tap_flow['name'],
                    'source_port': fake_tap_flow['source_port'],
                    'tap_service_id': fake_tap_flow['tap_service_id'],
                    'direction': fake_tap_flow['direction']
                }
            }
        )
        self.assertEqual(self.columns, columns)
        self.assertItemEqual(_get_data(fake_tap_flow), data)
class TestListTapFlow(test_fakes.TestNeutronClientOSCV2):
    """Tests the ``tap flow list`` OSC command."""

    def setUp(self):
        super(TestListTapFlow, self).setUp()
        self.cmd = osc_tap_flow.ListTapFlow(self.app, self.namespace)

    def test_list_tap_flows(self):
        """Test List Tap Flow."""
        fake_tap_flows = fakes.FakeTapFlow.create_tap_flows(
            attrs={
                'source_port': uuidutils.generate_uuid(),
                'tap_service_id': uuidutils.generate_uuid(),
            },
            count=2)
        self.neutronclient.list = mock.Mock(return_value=fake_tap_flows)
        arg_list = []
        verify_list = []
        parsed_args = self.check_parser(self.cmd, arg_list, verify_list)
        headers, data = self.cmd.take_action(parsed_args)
        self.neutronclient.list.assert_called_once()
        # Listing uses the --long column/header set.
        self.assertEqual(headers, list(headers_long))
        self.assertListItemEqual(
            list(data),
            [_get_data(fake_tap_flow, columns_long) for fake_tap_flow
             in fake_tap_flows[osc_tap_flow.TAP_FLOWS]]
        )
class TestDeleteTapFlow(test_fakes.TestNeutronClientOSCV2):
    """Tests the ``tap flow delete`` OSC command."""

    def setUp(self):
        super(TestDeleteTapFlow, self).setUp()
        # ``find_resource`` simply echoes the given name/id back as the id.
        self.neutronclient.find_resource = mock.Mock(
            side_effect=lambda _, name_or_id: {'id': name_or_id})
        self.cmd = osc_tap_flow.DeleteTapFlow(self.app, self.namespace)

    def test_delete_tap_flow(self):
        """Test Delete tap flow."""
        fake_tap_flow = fakes.FakeTapFlow.create_tap_flow(
            attrs={
                'source_port': uuidutils.generate_uuid(),
                'tap_service_id': uuidutils.generate_uuid(),
            }
        )
        self.neutronclient.delete = mock.Mock()
        arg_list = [
            fake_tap_flow['id'],
        ]
        verify_list = [
            (osc_tap_flow.TAP_FLOW, [fake_tap_flow['id']]),
        ]
        parsed_args = self.check_parser(self.cmd, arg_list, verify_list)
        result = self.cmd.take_action(parsed_args)
        # Delete issues a DELETE on the tap-flow resource path and returns
        # nothing on success.
        self.neutronclient.delete.assert_called_once_with(
            osc_tap_flow.resource_path % ('tap_flows',
                                          fake_tap_flow['id']))
        self.assertIsNone(result)
class TestShowTapFlow(test_fakes.TestNeutronClientOSCV2):
    """Tests the ``tap flow show`` OSC command."""

    def setUp(self):
        super(TestShowTapFlow, self).setUp()
        # ``find_resource`` simply echoes the given name/id back as the id.
        self.neutronclient.find_resource = mock.Mock(
            side_effect=lambda _, name_or_id: {'id': name_or_id})
        self.cmd = osc_tap_flow.ShowTapFlow(self.app, self.namespace)

    def test_show_tap_flow(self):
        """Test Show tap flow."""
        fake_tap_flow = fakes.FakeTapFlow.create_tap_flow(
            attrs={
                'source_port': uuidutils.generate_uuid(),
                'tap_service_id': uuidutils.generate_uuid(),
            }
        )
        self.neutronclient.get = mock.Mock(
            return_value={osc_tap_flow.TAP_FLOW: fake_tap_flow})
        arg_list = [
            fake_tap_flow['id'],
        ]
        verify_list = [
            (osc_tap_flow.TAP_FLOW, fake_tap_flow['id']),
        ]
        parsed_args = self.check_parser(self.cmd, arg_list, verify_list)
        headers, data = self.cmd.take_action(parsed_args)
        self.neutronclient.get.assert_called_once_with(
            osc_tap_flow.resource_path % ('tap_flows',
                                          fake_tap_flow['id']))
        # Show renders every attribute, sorted by heading.
        self.assertEqual(sorted_headers, headers)
        self.assertItemEqual(_get_data(fake_tap_flow), data)
class TestUpdateTapFlow(test_fakes.TestNeutronClientOSCV2):
    """Tests the ``tap flow set`` (update) OSC command."""

    _new_name = 'new_name'
    # Display columns expected in the update command's output.
    columns = (
        'Direction',
        'ID',
        'Name',
        'Status',
        'Tenant',
        'source_port',
        'tap_service_id',
    )

    def setUp(self):
        super(TestUpdateTapFlow, self).setUp()
        self.cmd = osc_tap_flow.UpdateTapFlow(self.app, self.namespace)
        # ``find_resource`` simply echoes the given name/id back as the id.
        self.neutronclient.find_resource = mock.Mock(
            side_effect=lambda _, name_or_id: {'id': name_or_id})

    def test_update_tap_flow(self):
        """Test update tap service"""
        fake_tap_flow = fakes.FakeTapFlow.create_tap_flow(
            attrs={
                'source_port': uuidutils.generate_uuid(),
                'tap_service_id': uuidutils.generate_uuid(),
            }
        )
        new_tap_flow = copy.deepcopy(fake_tap_flow)
        new_tap_flow['name'] = self._new_name
        self.neutronclient.put = mock.Mock(
            return_value={osc_tap_flow.TAP_FLOW: new_tap_flow})
        arg_list = [
            fake_tap_flow['id'],
            '--name', self._new_name,
        ]
        verify_list = [('name', self._new_name)]
        parsed_args = self.check_parser(self.cmd, arg_list, verify_list)
        columns, data = self.cmd.take_action(parsed_args)
        # Only the changed attribute is sent in the PUT body.
        attrs = {'name': self._new_name}
        self.neutronclient.put.assert_called_once_with(
            osc_tap_flow.resource_path % ('tap_flows',
                                          new_tap_flow['id']),
            {osc_tap_flow.TAP_FLOW: attrs})
        self.assertEqual(self.columns, columns)
        self.assertItemEqual(_get_data(new_tap_flow), data)
|
openstack/tap-as-a-service
|
neutron_taas/tests/unit/taas_client/osc/test_osc_tap_flow.py
|
Python
|
apache-2.0
| 9,088 | 0 |
#!/usr/bin/env python
"""
Process meme.txt files to
generate conservation plots
"""
import argparse
import csv
import sys
import seaborn as sns
import numpy as np
import matplotlib.pyplot as plt
from scipy.stats.stats import pearsonr
from Bio import motifs
def plot_meme_against_phylo(meme_record, phylo):
    # NOTE(review): only configures the seaborn theme; the actual plotting
    # lives in main(). Both arguments are currently unused — verify intent.
    sns.set(style="darkgrid")
def position_wise_profile(counts_dict, length):
    # Transpose {base: [count_at_pos0, count_at_pos1, ...]} into a list of
    # per-position {base: count} dicts. Relies on Python 2 ``map``/``zip``
    # returning lists; ``length`` is unused — TODO confirm intended.
    profile = map(dict, zip(*[[(k, v) for v in value] for k, value in counts_dict.items()]))
    return profile
def find_max_occurence(profile, max_count=2):
    """For each positional {symbol: count} dict in ``profile``, return the
    ``max_count`` (symbol, count) pairs with the highest counts, in
    ascending count order."""
    result = []
    for position in profile:
        ranked = sorted(position.items(), key=lambda item: item[1])
        result.append(ranked[-max_count:])
    return result
def main(argv):
    """Parse args, read one MEME motif and the phyloP scores, then plot the
    per-position relationship (scatter + paired bar trends, Python 2)."""
    parser = argparse.ArgumentParser(description='Process meme files')
    parser.add_argument('-i', '--meme', metavar='<meme_out>', help='Meme input file', required=True)
    parser.add_argument('-m', '--motif', metavar='<motif_no>', help='Motif number', required=True, type=int)
    parser.add_argument('-c', '--phylo', metavar='<phylo_out>', help='PhyloP conservation scores', required=True)
    parsed = parser.parse_args(argv)
    handle = open(parsed.meme)
    records = motifs.parse(handle, 'meme')
    record = records[parsed.motif-1]  # motif numbers are 1-based on the CLI
    # phyloP input is tab-separated; the score is the third column.
    phylo_data = csv.reader(open(parsed.phylo,'r'), delimiter='\t')
    phylo_scores = []
    for line in phylo_data:
        phylo_scores.append(float(line[2]))
    print "Motif length", record.length
    print "phylo length", len(phylo_scores)
    # Per position, take the count of the single most frequent nucleotide.
    profile = position_wise_profile(record.counts, record.length)
    max_occur = find_max_occurence(profile, max_count=1)
    motif_scores = []
    for position in max_occur:
        motif_scores.append(position[0][1])
    pr = pearsonr(np.array(motif_scores), np.array(phylo_scores))
    print 'Pearson correlation: {}'.format(pr)
    fig, ax = plt.subplots()
    ax= sns.regplot(y=np.array(motif_scores), x=np.array(phylo_scores), scatter=True)
    ax.set(ylabel="Count of most freq nucleotide", xlabel="PhyloP scores", title='CTCF | pearsonr = {}, p-val={}'.format(pr[0],pr[1]));
    fig.savefig('{}_motif{}_scatter.png'.format(parsed.phylo, parsed.motif))
    # Side-by-side bar charts of both signals along the motif positions.
    x = np.linspace(1,len(phylo_scores)+1,num=len(phylo_scores), endpoint=False)
    f, (ax1, ax2) = plt.subplots(2, 1)
    x1 = sns.barplot(x,y=np.array(motif_scores), ax=ax1)
    x2 = sns.barplot(x,y=np.array(phylo_scores), ax=ax2)
    x1.set(ylabel='Counts of most freq nucleotide', xlabel='Position in motif')
    x2.set(ylabel='Phylop Score', xlabel='Position in motif')
    f.tight_layout()
    f.savefig('{}_motif{}_trend.png'.format(parsed.phylo, parsed.motif))
|
saketkc/bio-tricks
|
meme_parser/meme_processory.py
|
Python
|
mit
| 2,759 | 0.007249 |
from django.conf.urls import patterns, include, url
from volunteers import views
# All volunteer routes are currently disabled (commented out); the module
# still exposes an empty ``urlpatterns`` so the URLconf include keeps working.
urlpatterns = patterns('',
    #url(r'^$', views.index, name='volunteer_index'),
    #url(r'^(?P<volunteer_id>\d+)/$', views.volunteer_detail, name='volunteer_detail'),
    #url(r'^AddTasks/$', views.add_tasks, name='add_tasks'),
    #url(r'^(?P<volunteer_id>\d+)/edit/$', views.volunteer_edit, name='volunteer_edit'),
)
|
cateee/fosdem-volunteers
|
volunteers/urls.py
|
Python
|
agpl-3.0
| 403 | 0.027295 |
from bok_choy.page_object import PageObject
from bok_choy.promise import EmptyPromise, fulfill_after, fulfill_before
class OpenResponsePage(PageObject):
"""
Open-ended response in the courseware.
"""
    @property
    def name(self):
        # Unique page identifier used by bok-choy.
        return "lms.open_response"

    @property
    def requirejs(self):
        # RequireJS modules this page waits for (none).
        return []

    @property
    def js_globals(self):
        # JavaScript globals this page waits for (none).
        return []
def url(self):
"""
Open-response isn't associated with a particular URL.
"""
raise NotImplemented
    def is_browser_on_page(self):
        # The combined open-ended xmodule wrapper marks this page.
        return self.is_css_present('section.xmodule_CombinedOpenEndedModule')
@property
def assessment_type(self):
"""
Return the type of assessment currently active.
Options are "self", "ai", or "peer"
"""
labels = self.css_text('section#combined-open-ended-status>div.statusitem-current')
if len(labels) < 1:
self.warning("Could not find assessment type label")
# Provide some tolerance to UI changes
label_compare = labels[0].lower().strip()
if 'self' in label_compare:
return 'self'
elif 'ai' in label_compare:
return 'ai'
elif 'peer' in label_compare:
return 'peer'
else:
raise ValueError("Unexpected assessment type: '{0}'".format(label))
    @property
    def prompt(self):
        """
        Return an HTML string representing the essay prompt.
        Returns "" (after logging a warning) when no prompt is found.
        """
        prompt_css = "section.open-ended-child>div.prompt"
        prompts = self.css_map(prompt_css, lambda el: el.html.strip())

        if len(prompts) == 0:
            self.warning("Could not find essay prompt on page.")
            return ""

        elif len(prompts) > 1:
            self.warning("Multiple essay prompts found on page; using the first one.")

        return prompts[0]

    @property
    def has_rubric(self):
        """
        Return a boolean indicating whether the rubric is available.
        """
        return self.is_css_present('div.rubric')

    @property
    def rubric_categories(self):
        """
        Return a list of categories available in the essay rubric.

        Example:
            ["Writing Applications", "Language Conventions"]

        The rubric is not always visible; if it's not available,
        this will return an empty list.
        """
        return self.css_text('span.rubric-category')

    @property
    def rubric_feedback(self):
        """
        Return a list of correct/incorrect feedback for each rubric category
        (e.g. from self-assessment).  Example: ['correct', 'incorrect']

        If no feedback is available, returns an empty list.
        If feedback could not be interpreted (unexpected CSS class),
        the list will contain a `None` item.

        NOTE(review): relies on Python 2 ``filter``/``map`` returning lists —
        confirm before porting to Python 3.
        """

        # Get the green checkmark / red x labels
        # We need to filter out the similar-looking CSS classes
        # for the rubric items that are NOT marked correct/incorrect
        feedback_css = 'div.rubric-label>label'
        labels = filter(
            lambda el_class: el_class != 'rubric-elements-info',
            self.css_map(feedback_css, lambda el: el['class'])
        )

        # Map CSS classes on the labels to correct/incorrect
        def map_feedback(css_class):
            if 'choicegroup_incorrect' in css_class:
                return 'incorrect'
            elif 'choicegroup_correct' in css_class:
                return 'correct'
            else:
                return None

        return map(map_feedback, labels)
    @property
    def alert_message(self):
        """
        Alert message displayed to the user.
        Returns "" when no alert is present.
        """
        alerts = self.css_text("div.open-ended-alert")

        if len(alerts) < 1:
            return ""
        else:
            return alerts[0]

    @property
    def grader_status(self):
        """
        Status message from the grader.
        If not present, return an empty string.
        """
        status_list = self.css_text('div.grader-status')

        if len(status_list) < 1:
            self.warning("No grader status found")
            return ""

        elif len(status_list) > 1:
            self.warning("Multiple grader statuses found; returning the first one")

        return status_list[0]
    def set_response(self, response_str):
        """
        Input a response to the prompt.
        Fills the `textarea.short-form-response` element with `response_str`.
        """
        input_css = "textarea.short-form-response"
        self.css_fill(input_css, response_str)
    def save_response(self):
        """
        Save the response for later submission.
        Clicks the save button, then waits until the page's alert message
        contains 'save' (case-insensitive) to confirm the save happened.
        """
        # Promise fulfilled once the on-page alert confirms the save
        status_msg_shown = EmptyPromise(
            lambda: 'save' in self.alert_message.lower(),
            "Status message saved"
        )
        with fulfill_after(status_msg_shown):
            self.css_click('input.save-button')
    def submit_response(self):
        """
        Submit a response for grading.
        Handles any browser confirmation dialog (via `handle_alert`), then
        waits on the promise appropriate for this page's `assessment_type`.
        """
        with fulfill_after(self._submitted_promise(self.assessment_type)):
            with self.handle_alert():
                self.css_click('input.submit-button')
def submit_self_assessment(self, scores):
"""
Submit a self-assessment rubric.
`scores` is a list of scores (0 to max score) for each category in the rubric.
"""
# Warn if we have the wrong number of scores
num_categories = len(self.rubric_categories)
if len(scores) != num_categories:
msg = "Recieved {0} scores but there are {1} rubric categories".format(
len(scores), num_categories
)
self.warning(msg)
# Set the score for each category
for score_index in range(len(scores)):
# Check that we have the enough radio buttons
category_css = "div.rubric>ul.rubric-list:nth-of-type({0})".format(score_index + 1)
if scores[score_index] > self.css_count(category_css + ' input.score-selection'):
msg = "Tried to select score {0} but there are only {1} options".format(score_num, len(inputs))
self.warning(msg)
# Check the radio button at the correct index
else:
input_css = (category_css +
">li.rubric-list-item:nth-of-type({0}) input.score-selection".format(
scores[score_index] + 1)
)
self.css_check(input_css)
# Wait for the button to become enabled
button_css = 'input.submit-button'
button_enabled = EmptyPromise(
lambda: all(self.css_map(button_css, lambda el: not el['disabled'])),
"Submit button enabled"
)
# Submit the assessment
with fulfill_before(button_enabled):
self.css_click(button_css)
    def _submitted_promise(self, assessment_type):
        """
        Return a `Promise` that the next step is visible after submitting.
        This will vary based on the type of assessment.
        `assessment_type` is either 'self', 'ai', or 'peer'
        """
        if assessment_type == 'self':
            # Self-assessment: the rubric appears for the student to fill in
            return EmptyPromise(lambda: self.has_rubric, "Rubric has appeared")
        elif assessment_type == 'ai':
            # AI grading: the grader status moves off 'Unanswered'
            return EmptyPromise(
                lambda: self.grader_status != 'Unanswered',
                "Problem status is no longer 'unanswered'"
            )
        elif assessment_type == 'peer':
            # Peer assessment: intentionally never fulfilled until implemented
            return EmptyPromise(lambda: False, "Peer assessment not yet implemented")
        else:
            # Unknown type: warn, and return an always-satisfied promise so
            # callers waiting on it do not hang
            self.warning("Unrecognized assessment type '{0}'".format(assessment_type))
            return EmptyPromise(lambda: True, "Unrecognized assessment type")
|
mjg2203/edx-platform-seas
|
common/test/bok_choy/edxapp_pages/lms/open_response.py
|
Python
|
agpl-3.0
| 7,752 | 0.001806 |
#!/usr/bin/env python
#-*-coding=utf-8-*-
|
rockyzhengwu/mlpractice
|
algorithm/perceptron.py
|
Python
|
mit
| 44 | 0.045455 |
# TODO: use a unit-testing library for asserts
# invoke with:
# ./pox.py --script=tests.topology.topology topology
#
# Maybe there is a less awkward way to invoke tests...
from pox.core import core
from pox.lib.revent import *
# The topology component must already be registered with core before this
# test script runs (see the invocation note at the top of the file).
topology = core.components['topology']


def autobinds_correctly():
  """Check that this test module can auto-bind event listeners to core.

  NOTE(review): `listenTo` (from pox.lib.revent) is assumed to raise on
  failure; this function only signals success by returning True.
  """
  topology.listenTo(core)
  return True


if not autobinds_correctly():
  # Fixed typo in the failure message ("Did no" -> "Did not").
  raise AssertionError("Did not autobind correctly")
|
sstjohn/pox
|
tests/topology/topology.py
|
Python
|
gpl-3.0
| 423 | 0.014184 |
# Version string for this pychess release.
VERSION = "0.12beta4"
# Human-readable codename attached to this release.
VERSION_NAME = "Anderssen"
|
btrent/knave
|
pychess/__init__.py
|
Python
|
gpl-3.0
| 49 | 0 |
# Prompt the user for the word-list filename (Python 2 `raw_input`).
f = raw_input('filename: ')
def make_word_dict(f):
    """
    Read the file named `f` and store each word as a dictionary key.

    Each line is treated as one word; trailing newline / carriage-return
    characters are stripped.  The stored value is the word's zero-based
    line index (callers only rely on key lookup, not the values).

    Return the dictionary.
    """
    d = dict()
    # `with` guarantees the file is closed even if reading fails
    # (the original left the file open on an exception).
    with open(f) as word_file:
        for i, word in enumerate(word_file):
            d[word.strip('\n\r')] = i
    return d
def rotate_word(s, n):
    """
    Rotate each char in a string by the given amount.
    Wrap around to the beginning (if necessary).

    Assumes `s` contains only lowercase ASCII letters.  Negative `n`
    rotates backwards (Python's modulo keeps the result in 0..25).
    """
    # Hoisted loop-invariant: the original recomputed ord('a') per char.
    start = ord('a')
    # join() builds the result in one pass instead of repeated string +=.
    return ''.join(chr((ord(c) - start + n) % 26 + start) for c in s)
if __name__ == '__main__':
    word_dict = make_word_dict(f)
    # Try rotations 1..13 for every word; half the circle is enough because
    # a pair (w, rotate(w, i)) is also reachable as (w', rotate(w', 26-i)).
    for k in word_dict:
        for i in range(1, 14):
            rot_k = rotate_word(k, i)
            # Report only genuine pairs (skip words that rotate onto themselves)
            if rot_k in word_dict:
                if not rot_k == k:
                    print k, 'and', rot_k, 'are a rotate pair'
|
sokolowskik/Tutorials
|
ThinkPython/chap11/ex10.py
|
Python
|
mit
| 980 | 0.006122 |
#
# Vortex OpenSplice
#
# This software and documentation are Copyright 2006 to TO_YEAR ADLINK
# Technology Limited, its affiliated companies and licensors. All rights
# reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
'''
Created on Dec 8, 2017
@author: prismtech
'''
import unittest
import struct
import countTest
if countTest.count_test == False:
import Sequence.basic.module_Sequence
class TestSequence(unittest.TestCase):
    """
    Round-trip tests for the generated Sequence_struct: serialize to bytes,
    unpack with the struct's own packing format, and deserialize into a
    fresh instance.
    """

    def _round_trip(self, data):
        """Serialize `data`, deserialize the bytes into a new struct, return it."""
        buffer = data._serialize()
        values = struct.unpack(data._get_packing_fmt(), buffer)
        data1 = Sequence.basic.module_Sequence.Sequence_struct()
        data1._deserialize(list(values))
        return data1

    def testCopyInCopyOut(self):
        """A populated sequence survives a serialize/deserialize round trip."""
        data = Sequence.basic.module_Sequence.Sequence_struct(
            long1=12,
            seq1=[21, 32, 43]
        )
        data1 = self._round_trip(data)
        self.assertEqual(data.long1, data1.long1)
        # Element-wise comparison, as in the original tests
        self.assertEqual(data.seq1[0], data1.seq1[0])
        self.assertEqual(data.seq1[1], data1.seq1[1])
        self.assertEqual(data.seq1[2], data1.seq1[2])

    def testCopyInCopyOutEmpty(self):
        """An empty sequence round-trips without error."""
        data = Sequence.basic.module_Sequence.Sequence_struct(
            long1=12,
            seq1=[]
        )
        data1 = self._round_trip(data)
        self.assertEqual(data.long1, data1.long1)
|
PrismTech/opensplice
|
src/api/dcps/python/test/TestSequence.py
|
Python
|
gpl-3.0
| 2,566 | 0.005456 |
import json
import unittest
import mock
from django.http import HttpResponseBadRequest
from base import (assert_auth_CREATE, assert_auth_READ, assert_auth_UPDATE, assert_auth_DELETE,
assert_auth_EXECUTE)
from pulp.server.exceptions import InvalidValue, MissingResource, MissingValue, OperationPostponed
from pulp.server.managers.consumer.group import query
from pulp.server.webservices.views import util
from pulp.server.webservices.views.consumer_groups import (serialize,
ConsumerGroupAssociateActionView,
ConsumerGroupBindingView,
ConsumerGroupBindingsView,
ConsumerGroupContentActionView,
ConsumerGroupResourceView,
ConsumerGroupSearchView,
ConsumerGroupUnassociateActionView,
ConsumerGroupView,)
class TestconsumerGroupView(unittest.TestCase):
"""
Test consumer groups view.
"""
@mock.patch('pulp.server.webservices.views.decorators._verify_auth',
new=assert_auth_READ())
@mock.patch(
'pulp.server.webservices.views.consumer_groups.generate_json_response_with_pulp_encoder')
@mock.patch('pulp.server.webservices.views.consumer_groups.ConsumerGroup.get_collection')
def test_get_all_consumer_groups(self, mock_collection, mock_resp):
"""
Test the consumer groups retrieval.
"""
consumer_mock = mock.MagicMock()
resp = [{'id': 'foo', 'display_name': 'bar'}]
consumer_mock.find.return_value = resp
mock_collection.return_value = consumer_mock
request = mock.MagicMock()
consumer_group = ConsumerGroupView()
response = consumer_group.get(request)
expected_cont = [{'id': 'foo', 'display_name': 'bar', '_href': '/v2/consumer_groups/foo/'}]
mock_resp.assert_called_once_with(expected_cont)
self.assertTrue(response is mock_resp.return_value)
@mock.patch('pulp.server.webservices.views.decorators._verify_auth',
new=assert_auth_CREATE())
@mock.patch('pulp.server.webservices.views.consumer_groups.generate_redirect_response')
@mock.patch(
'pulp.server.webservices.views.consumer_groups.generate_json_response_with_pulp_encoder')
@mock.patch('pulp.server.webservices.views.consumer_groups.factory')
def test_create_consumer_group(self, mock_factory, mock_resp, mock_redirect):
"""
Test consumer group creation.
"""
resp = {'id': 'foo', 'display_name': 'bar'}
expected_cont = {'id': 'foo', 'display_name': 'bar', '_href': '/v2/consumer_groups/foo/'}
request = mock.MagicMock()
request.body = json.dumps({'id': 'foo', 'display_name': 'bar'})
mock_factory.consumer_group_manager.return_value.create_consumer_group.return_value = resp
consumer_group = ConsumerGroupView()
response = consumer_group.post(request)
mock_resp.assert_called_once_with(expected_cont)
mock_redirect.assert_called_once_with(mock_resp.return_value, expected_cont['_href'])
self.assertTrue(response is mock_redirect.return_value)
@mock.patch('pulp.server.webservices.views.decorators._verify_auth',
new=assert_auth_CREATE())
def test_create_consumer_group_invalid_param(self):
"""
Test consumer group creation with invalid parameters.
"""
request = mock.MagicMock()
request.body = json.dumps({'id': 'foo', 'display_name': 'bar', 'invalid_param': 'some'})
consumer_group = ConsumerGroupView()
try:
response = consumer_group.post(request)
except InvalidValue, response:
pass
else:
raise AssertionError("Invalidvalue should be raised with invalid options")
self.assertEqual(response.http_status_code, 400)
self.assertEqual(response.error_data['property_names'], ['invalid_param'])
@mock.patch('pulp.server.webservices.views.decorators._verify_auth',
new=assert_auth_CREATE())
def test_create_consumer_group_missing_param(self):
"""
Test consumer group creation with missing required group id.
"""
request = mock.MagicMock()
request.body = json.dumps({'display_name': 'bar'})
consumer_group = ConsumerGroupView()
try:
response = consumer_group.post(request)
except MissingValue, response:
pass
else:
raise AssertionError("MissingValue should be raised with missing options")
self.assertEqual(response.http_status_code, 400)
self.assertEqual(response.error_data['property_names'], ['id'])
class TestconsumerGroupResourceView(unittest.TestCase):
    """
    Test consumer groups resource view.

    NOTE: stacked `mock.patch` decorators are applied bottom-up, so the
    first mock parameter after `self` corresponds to the bottom decorator.
    """
    @mock.patch('pulp.server.webservices.views.decorators._verify_auth',
                new=assert_auth_DELETE())
    @mock.patch('pulp.server.webservices.views.consumer_groups.generate_json_response')
    @mock.patch('pulp.server.webservices.views.consumer_groups.factory')
    def test_delete_consumer_group_resource(self, mock_factory, mock_resp):
        """
        Test consumer group delete resource.
        """
        mock_group_manager = mock.MagicMock()
        mock_factory.consumer_group_manager.return_value = mock_group_manager
        mock_group_manager.delete_consumer_group.return_value = None
        request = mock.MagicMock()
        consumer_group_resource = ConsumerGroupResourceView()
        response = consumer_group_resource.delete(request, 'test-group')
        mock_group_manager.delete_consumer_group.assert_called_once_with('test-group')
        # A successful delete responds with a JSON-encoded None body
        mock_resp.assert_called_once_with(None)
        self.assertTrue(response is mock_resp.return_value)
    @mock.patch('pulp.server.webservices.views.decorators._verify_auth',
                new=assert_auth_READ())
    @mock.patch(
        'pulp.server.webservices.views.consumer_groups.generate_json_response_with_pulp_encoder')
    @mock.patch('pulp.server.webservices.views.consumer_groups.ConsumerGroup.get_collection')
    def test_get_consumer_group_resource(self, mock_collection, mock_resp):
        """
        Test single consumer group retrieval.
        """
        consumer_mock = mock.MagicMock()
        consumer_mock.find_one.return_value = {'id': 'foo'}
        mock_collection.return_value = consumer_mock
        request = mock.MagicMock()
        consumer_group = ConsumerGroupResourceView()
        response = consumer_group.get(request, 'foo')
        # The view is expected to add the _href link to the group
        expected_cont = {'id': 'foo', '_href': '/v2/consumer_groups/foo/'}
        mock_resp.assert_called_once_with(expected_cont)
        self.assertTrue(response is mock_resp.return_value)
    @mock.patch('pulp.server.webservices.views.decorators._verify_auth',
                new=assert_auth_READ())
    @mock.patch('pulp.server.webservices.views.consumer_groups.ConsumerGroup.get_collection')
    def test_get_invalid_consumer_group_resource(self, mock_collection):
        """
        Test nonexistent consumer group retrieval.
        """
        mock_collection.return_value.find_one.return_value = None
        request = mock.MagicMock()
        consumer_group = ConsumerGroupResourceView()
        try:
            response = consumer_group.get(request, 'nonexistent_id')
        # Python 2 syntax: binds the raised exception to `response`
        except MissingResource, response:
            pass
        else:
            raise AssertionError("MissingResource should be raised with nonexistent_group")
        self.assertEqual(response.http_status_code, 404)
        self.assertEqual(response.error_data['resources'], {'consumer_group': 'nonexistent_id'})
    @mock.patch('pulp.server.webservices.views.decorators._verify_auth',
                new=assert_auth_UPDATE())
    @mock.patch(
        'pulp.server.webservices.views.consumer_groups.generate_json_response_with_pulp_encoder')
    @mock.patch('pulp.server.webservices.views.consumer_groups.factory')
    def test_update_consumer_group(self, mock_factory, mock_resp):
        """
        Test consumer group update.
        """
        resp = {'id': 'foo', 'display_name': 'bar'}
        expected_cont = {'id': 'foo', 'display_name': 'bar', '_href': '/v2/consumer_groups/foo/'}
        request = mock.MagicMock()
        request.body = json.dumps({'display_name': 'bar'})
        mock_factory.consumer_group_manager.return_value.update_consumer_group.return_value = resp
        consumer_group = ConsumerGroupResourceView()
        response = consumer_group.put(request, 'foo')
        mock_resp.assert_called_once_with(expected_cont)
        self.assertTrue(response is mock_resp.return_value)
class TestConsumerGroupSearchView(unittest.TestCase):
    """
    Tests for ConsumerGroupSearchView.
    """
    def test_class_attributes(self):
        """
        Ensure that class attributes are set correctly.
        """
        consumer_group_search = ConsumerGroupSearchView()
        # assertIsInstance gives a clearer failure message than
        # assertTrue(isinstance(...)) and is the idiomatic unittest check.
        self.assertIsInstance(consumer_group_search.manager,
                              query.ConsumerGroupQueryManager)
        self.assertEqual(consumer_group_search.response_builder,
                         util.generate_json_response_with_pulp_encoder)
        self.assertEqual(consumer_group_search.serializer, serialize)
class TestConsumerGroupAssociateActionView(unittest.TestCase):
    """
    Tests consumer group membership (associate action).
    """
    @mock.patch('pulp.server.webservices.views.decorators._verify_auth',
                new=assert_auth_EXECUTE())
    @mock.patch(
        'pulp.server.webservices.views.consumer_groups.generate_json_response_with_pulp_encoder')
    @mock.patch('pulp.server.webservices.views.consumer_groups.factory')
    def test_cons_group_association_view(self, mock_factory, mock_resp):
        """
        Test consumer group associate a consumer.
        """
        grp = {'id': 'my-group', 'consumer_ids': ['c1']}
        mock_factory.consumer_group_manager.return_value.associate.return_value = 'ok'
        mock_factory.consumer_group_query_manager.return_value.get_group.return_value = grp
        request = mock.MagicMock()
        request.body = json.dumps({'criteria': {'filters': {'id': 'c1'}}})
        consumer_group_associate = ConsumerGroupAssociateActionView()
        response = consumer_group_associate.post(request, 'my-group')
        # The view responds with the group's post-association consumer id list
        mock_resp.assert_called_once_with(['c1'])
        self.assertTrue(response is mock_resp.return_value)
class TestConsumerGroupUnassociateActionView(unittest.TestCase):
    """
    Tests consumer group membership (unassociate action).
    """
    @mock.patch('pulp.server.webservices.views.decorators._verify_auth',
                new=assert_auth_EXECUTE())
    @mock.patch(
        'pulp.server.webservices.views.consumer_groups.generate_json_response_with_pulp_encoder')
    @mock.patch('pulp.server.webservices.views.consumer_groups.factory')
    def test_cons_group_unassociation_view(self, mock_factory, mock_resp):
        """
        Test consumer group unassociate a consumer.
        """
        grp = {'id': 'my-group', 'consumer_ids': []}
        mock_factory.consumer_group_manager.return_value.unassociate.return_value = 'ok'
        mock_factory.consumer_group_query_manager.return_value.get_group.return_value = grp
        request = mock.MagicMock()
        request.body = json.dumps({'criteria': {'filters': {'id': 'c1'}}})
        consumer_group_unassociate = ConsumerGroupUnassociateActionView()
        response = consumer_group_unassociate.post(request, 'my-group')
        # After unassociation the group's consumer id list is empty
        mock_resp.assert_called_once_with([])
        self.assertTrue(response is mock_resp.return_value)
class TestConsumerGroupBindingsView(unittest.TestCase):
"""
Represents consumer group binding.
"""
@mock.patch('pulp.server.webservices.views.decorators._verify_auth',
new=assert_auth_CREATE())
@mock.patch('pulp.server.webservices.views.consumer_groups.factory')
@mock.patch('pulp.server.webservices.views.consumer_groups.model.Distributor.objects')
@mock.patch('pulp.server.webservices.views.consumer_groups.model.Repository.objects')
def test_verify_group_resources_repo(self, mock_repo_qs, mock_dist_qs, mock_factory):
"""
Test verify group resources with repo missing.
"""
mock_factory.consumer_group_query_manager.return_value.get_group.return_value = 'test-group'
mock_repo_qs().first.return_value = None
mock_dist_qs.get_or_404.side_effect = MissingResource
request = mock.MagicMock()
request.body = json.dumps({'repo_id': 'xxx', 'distributor_id': 'yyy'})
bind_view = ConsumerGroupBindingsView()
try:
response = bind_view.post(request, 'test-group')
except InvalidValue, response:
pass
else:
raise AssertionError("InvalidValue should be raised with nonexistent resources")
self.assertEqual(response.http_status_code, 400)
self.assertEqual(response.error_data['property_names'], ['repo_id', 'distributor_id'])
@mock.patch('pulp.server.webservices.views.decorators._verify_auth',
new=assert_auth_CREATE())
@mock.patch('pulp.server.webservices.views.consumer_groups.factory')
@mock.patch('pulp.server.webservices.views.consumer_groups.model.Distributor.objects')
@mock.patch('pulp.server.webservices.views.consumer_groups.model.Repository.objects')
def test_verify_group_resources_distributor(self, mock_repo_qs, mock_dist_qs, mock_f):
"""
Test verify group resources with distributor missing.
"""
mock_f.consumer_group_query_manager.return_value.get_group.return_value = 'test'
mock_repo_qs.first.return_value = 'xxx'
mock_dist_qs.get_or_404.side_effect = MissingResource
request = mock.MagicMock()
request.body = json.dumps({'repo_id': 'xxx', 'distributor_id': 'yyy'})
bind_view = ConsumerGroupBindingsView()
try:
response = bind_view.post(request, 'test-group')
except InvalidValue, response:
pass
else:
raise AssertionError("InvalidValue should be raised with nonexistent resources")
self.assertEqual(response.http_status_code, 400)
self.assertEqual(response.error_data['property_names'], ['distributor_id'])
@mock.patch('pulp.server.webservices.views.decorators._verify_auth',
new=assert_auth_CREATE())
@mock.patch('pulp.server.webservices.views.consumer_groups.model.Repository.objects')
@mock.patch('pulp.server.webservices.views.consumer_groups.model.Distributor.objects')
@mock.patch('pulp.server.webservices.views.consumer_groups.factory')
def test_verify_group_resources_group(self, mock_f, mock_dist_qs, mock_repo_qs):
"""
Test verify group resources with group id missing.
"""
mock_f.consumer_group_query_manager.return_value.get_group.side_effect = MissingResource
mock_repo_qs.first.return_value = 'xxx'
mock_dist_qs.get_or_404.return_value = 'yyy'
request = mock.MagicMock()
request.body = json.dumps({'repo_id': 'xxx', 'distributor_id': 'yyy'})
bind_view = ConsumerGroupBindingsView()
try:
response = bind_view.post(request, 'test-group')
except MissingResource, response:
pass
else:
raise AssertionError("MissingResource should be raised with nonexistent resources")
self.assertEqual(response.http_status_code, 404)
self.assertEqual(response.error_data['resources'], {'group_id': 'test-group'})
@mock.patch('pulp.server.webservices.views.decorators._verify_auth',
new=assert_auth_CREATE())
@mock.patch('pulp.server.webservices.views.consumer_groups.bind')
@mock.patch('pulp.server.webservices.views.consumer_groups.verify_group_resources')
def test_create_binding(self, mock_resources, mock_bind):
"""
Test bind consumer group to a repo.
"""
mock_resources.return_value = {}
request = mock.MagicMock()
request.body = json.dumps({'repo_id': 'xxx', 'distributor_id': 'yyy'})
bind_view = ConsumerGroupBindingsView()
self.assertRaises(OperationPostponed, bind_view.post, request, 'test-group')
bind_args_tuple = ('test-group', 'xxx', 'yyy', True, None, {})
mock_bind.apply_async.assert_called_once_with(bind_args_tuple)
@mock.patch('pulp.server.webservices.views.decorators._verify_auth',
new=assert_auth_CREATE())
@mock.patch('pulp.server.webservices.views.consumer_groups.verify_group_resources')
def test_create_binding_with_missing_group_id(self, mock_resources):
"""
Test bind consumer group to a repo when group id missing.
"""
mock_resources.return_value = {'group_id': 'nonexistent_id'}
request = mock.MagicMock()
request.body = json.dumps({})
bind_view = ConsumerGroupBindingsView()
try:
response = bind_view.post(request, 'nonexistent_id')
except MissingResource, response:
pass
else:
raise AssertionError("MissingResource should be raised with nonexistent_group")
self.assertEqual(response.http_status_code, 404)
self.assertEqual(response.error_data['resources'], {'group_id': 'nonexistent_id'})
@mock.patch('pulp.server.webservices.views.decorators._verify_auth',
new=assert_auth_CREATE())
@mock.patch('pulp.server.webservices.views.consumer_groups.verify_group_resources')
def test_create_binding_with_missing_repo_id(self, mock_resources):
"""
Test bind consumer group to a repo when repo id is missing.
"""
mock_resources.return_value = {'repo_id': 'nonexistent_id'}
request = mock.MagicMock()
request.body = json.dumps({})
bind_view = ConsumerGroupBindingsView()
try:
response = bind_view.post(request, 'test-group')
except InvalidValue, response:
pass
else:
raise AssertionError("InvalidValue should be raised with nonexistent_repo")
self.assertEqual(response.http_status_code, 400)
self.assertEqual(response.error_data['property_names'], ['repo_id'])
@mock.patch('pulp.server.webservices.views.decorators._verify_auth',
new=assert_auth_CREATE())
@mock.patch('pulp.server.webservices.views.consumer_groups.verify_group_resources')
def test_create_binding_with_invalid_param(self, mock_resources):
"""
Test bind consumer group to a repo witn invalid parameters.
"""
mock_resources.return_value = {'invalid_param': 'foo'}
request = mock.MagicMock()
request.body = json.dumps({})
bind_view = ConsumerGroupBindingsView()
try:
response = bind_view.post(request, 'test-group')
except InvalidValue, response:
pass
else:
raise AssertionError("Invalidvalue should be raised with invalid options")
self.assertEqual(response.http_status_code, 400)
self.assertEqual(response.error_data['property_names'], ['invalid_param'])
class TestConsumerGroupBindingView(unittest.TestCase):
    """
    Represents a specific consumer group binding.
    """
    @mock.patch('pulp.server.webservices.views.decorators._verify_auth',
                new=assert_auth_DELETE())
    @mock.patch('pulp.server.webservices.views.consumer_groups.unbind')
    @mock.patch('pulp.server.webservices.views.consumer_groups.verify_group_resources')
    def test_delete_binding(self, mock_resources, mock_unbind):
        """
        Test consumer group binding removal.
        """
        mock_resources.return_value = {}
        request = mock.MagicMock()
        unbind_view = ConsumerGroupBindingView()
        # The unbind is dispatched asynchronously, so the view raises
        # OperationPostponed instead of returning a response.
        self.assertRaises(OperationPostponed, unbind_view.delete, request,
                          "consumer_group_id", "repo_id", "distributor_id")
        unbind_args_tuple = ("consumer_group_id", "repo_id", "distributor_id", {})
        mock_unbind.apply_async.assert_called_once_with(unbind_args_tuple)
    @mock.patch('pulp.server.webservices.views.decorators._verify_auth',
                new=assert_auth_DELETE())
    @mock.patch('pulp.server.webservices.views.consumer_groups.verify_group_resources')
    def test_delete_non_existent_binding(self, mock_resources):
        """
        Test consumer group nonexistent binding removal.
        """
        mock_resources.return_value = {'repo_id': 'no_such_repo'}
        request = mock.MagicMock()
        unbind_view = ConsumerGroupBindingView()
        try:
            response = unbind_view.delete(request, 'test-group', 'no_such_repo', 'dist_id')
        # Python 2 syntax: binds the raised exception to `response`
        except MissingResource, response:
            pass
        else:
            raise AssertionError("MissingResource should be raised with missing options")
        self.assertEqual(response.http_status_code, 404)
        self.assertEqual(response.error_data['resources'], {'repo_id': 'no_such_repo'})
class TestConsumerGroupContentActionView(unittest.TestCase):
    """
    Test Consumer group content manipulation (install/update/uninstall).
    """
    @mock.patch('pulp.server.webservices.views.decorators._verify_auth',
                new=assert_auth_CREATE())
    def test_consumer_group_bad_request_content(self):
        """
        Test consumer group invalid content action.
        """
        request = mock.MagicMock()
        request.body = json.dumps('')
        consumer_group_content = ConsumerGroupContentActionView()
        # An unknown action name yields a plain 400 response, not an exception
        response = consumer_group_content.post(request, 'my-group', 'no_such_action')
        self.assertTrue(isinstance(response, HttpResponseBadRequest))
        self.assertEqual(response.status_code, 400)
    @mock.patch('pulp.server.webservices.views.decorators._verify_auth',
                new=assert_auth_CREATE())
    @mock.patch('pulp.server.webservices.views.consumer_groups.factory')
    def test_consumer_group_content_install(self, mock_factory):
        """
        Test consumer group content installation.
        """
        mock_factory.consumer_group_manager.return_value.install_content.return_value = 'ok'
        request = mock.MagicMock()
        request.body = json.dumps({"units": [], "options": {}})
        consumer_group_content = ConsumerGroupContentActionView()
        # Content actions are dispatched asynchronously (OperationPostponed)
        self.assertRaises(OperationPostponed, consumer_group_content.post, request,
                          'my-group', 'install')
        mock_factory.consumer_group_manager().install_content.assert_called_once_with(
            'my-group', [], {})
    @mock.patch('pulp.server.webservices.views.decorators._verify_auth',
                new=assert_auth_CREATE())
    @mock.patch('pulp.server.webservices.views.consumer_groups.factory')
    def test_consumer_group_content_update(self, mock_factory):
        """
        Test consumer group content update.
        """
        mock_factory.consumer_group_manager.return_value.update_content.return_value = 'ok'
        request = mock.MagicMock()
        request.body = json.dumps({"units": [], "options": {}})
        consumer_group_content = ConsumerGroupContentActionView()
        self.assertRaises(OperationPostponed, consumer_group_content.post, request,
                          'my-group', 'update')
        mock_factory.consumer_group_manager().update_content.assert_called_once_with(
            'my-group', [], {})
    @mock.patch('pulp.server.webservices.views.decorators._verify_auth',
                new=assert_auth_CREATE())
    @mock.patch('pulp.server.webservices.views.consumer_groups.factory')
    def test_consumer_group_content_uninstall(self, mock_factory):
        """
        Test consumer group content uninstall.
        """
        mock_factory.consumer_group_manager.return_value.uninstall_content.return_value = 'ok'
        request = mock.MagicMock()
        request.body = json.dumps({"units": [], "options": {}})
        consumer_group_content = ConsumerGroupContentActionView()
        self.assertRaises(OperationPostponed, consumer_group_content.post, request,
                          'my-group', 'uninstall')
        mock_factory.consumer_group_manager().uninstall_content.assert_called_once_with(
            'my-group', [], {})
|
ulif/pulp
|
server/test/unit/server/webservices/views/test_consumer_groups.py
|
Python
|
gpl-2.0
| 24,673 | 0.003364 |
#!/usr/bin/env python3
# coding: utf-8
import io
from setuptools import setup, find_packages
# http://blog.ionelmc.ro/2014/05/25/python-packaging/
setup(
    name="tfgraph",
    version="0.2",
    description="Python's Tensorflow Graph Library",
    author="garciparedes",
    author_email="sergio@garciparedes.me",
    url="http://tfgraph.readthedocs.io/en/latest/",
    download_url="https://github.com/tfgraph/tfgraph",
    keywords=[
        "tfg", "bigdata", "tensorflow",
        "graph theory", "pagerank", "university of valladolid",
    ],
    # Minimum interpreter version; matches the 3.5 classifier below.
    python_requires=">=3.5",
    install_requires=[
        "numpy>=1.11",
        "pandas>=0.20",
        "tensorflow>=1.0",
    ],
    tests_require=[
        "pytest"
    ],
    # Auto-discover all packages under the project root.
    packages=find_packages(),
    classifiers=[
        "Development Status :: 2 - Pre-Alpha",
        "Intended Audience :: Education",
        "Intended Audience :: Science/Research",
        "Operating System :: OS Independent",
        "Programming Language :: Python",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.5",
        "Programming Language :: Python :: Implementation :: CPython",
        "Topic :: Scientific/Engineering",
        "Topic :: Scientific/Engineering :: Mathematics",
    ],
    # Use the README as the long description shown on PyPI.
    long_description=io.open('README.rst', encoding='utf-8').read(),
    include_package_data=True,
    zip_safe=False,
)
|
tfgraph/tfgraph
|
setup.py
|
Python
|
apache-2.0
| 1,296 | 0 |
import pyglet
import util
class PhysicalObject(pyglet.sprite.Sprite):
    """A sprite with physical properties such as velocity."""

    # Playfield dimensions used for screen wrapping.  Previously hard-coded
    # inline in check_bounds; exposed as class attributes so a game or a
    # subclass can override them without changing behavior for existing code.
    SCREEN_WIDTH = 800
    SCREEN_HEIGHT = 600

    def __init__(self, *args, **kwargs):
        super(PhysicalObject, self).__init__(*args, **kwargs)

        # Velocity components (units per second, applied in update())
        self.velocity_x, self.velocity_y = 0.0, 0.0

        # Flags to toggle collision with bullets
        self.reacts_to_bullets = True
        self.is_bullet = False

        # Flag to remove this object from the game_object list
        self.dead = False

        # List of new objects to go in the game_objects list
        self.new_objects = []

        # Tell the game handler about any event handlers
        # Only applies to things with keyboard/mouse input
        self.event_handlers = []

    def update(self, dt):
        """Advance position by velocity over `dt` seconds; wrap at edges."""
        self.x += self.velocity_x * dt
        self.y += self.velocity_y * dt
        self.check_bounds()

    def check_bounds(self):
        """Use the classic Asteroids screen wrapping behavior."""
        # Let the sprite fully leave one edge (half the image past the
        # border) before it re-enters from the opposite edge.
        min_x = -self.image.width / 2
        min_y = -self.image.height / 2
        max_x = self.SCREEN_WIDTH + self.image.width / 2
        max_y = self.SCREEN_HEIGHT + self.image.height / 2
        if self.x < min_x:
            self.x = max_x
        if self.y < min_y:
            self.y = max_y
        if self.x > max_x:
            self.x = min_x
        if self.y > max_y:
            self.y = min_y

    def collides_with(self, other_object):
        """Determine if this object collides with another (distance test)."""
        # Ignore bullet collisions if we're supposed to
        if not self.reacts_to_bullets and other_object.is_bullet:
            return False
        if self.is_bullet and not other_object.reacts_to_bullets:
            return False

        # Center distance at which the two images touch,
        # assuming square resources
        collision_distance = self.image.width * 0.5 * self.scale \
            + other_object.image.width * 0.5 * other_object.scale

        # Get distance using position tuples
        actual_distance = util.distance(self.position, other_object.position)
        return (actual_distance <= collision_distance)

    def handle_collision_with(self, other_object):
        """Mark this object dead on collision with a different class of object."""
        # Same-class objects (e.g. two asteroids) pass through each other
        if other_object.__class__ is not self.__class__:
            self.dead = True
|
Akagi201/learning-python
|
pyglet/mygame/game/physicalobject.py
|
Python
|
mit
| 2,456 | 0.000814 |
# -*- coding: utf-8 -*-
from django.core.cache import cache
from django.shortcuts import render
from django.http import Http404
from styleguide.utils import (Styleguide, STYLEGUIDE_DIR_NAME,
STYLEGUIDE_DEBUG, STYLEGUIDE_CACHE_NAME,
STYLEGUIDE_ACCESS)
def index(request, module_name=None, component_name=None):
    """
    Render the styleguide index page.

    Access is gated by STYLEGUIDE_ACCESS (404 otherwise).  A cached
    Styleguide instance is reused unless STYLEGUIDE_DEBUG is set, in which
    case a fresh one is built on every request.
    """
    if not STYLEGUIDE_ACCESS(request.user):
        raise Http404()

    # In debug mode, skip the cache lookup entirely.
    guide = None if STYLEGUIDE_DEBUG else cache.get(STYLEGUIDE_CACHE_NAME)
    if guide is None:
        guide = Styleguide()
        cache.set(STYLEGUIDE_CACHE_NAME, guide, None)

    if module_name is not None:
        guide.set_current_module(module_name)

    template_name = "%s/index.html" % STYLEGUIDE_DIR_NAME
    return render(request, template_name, {'styleguide': guide})
|
andrefarzat/django-styleguide
|
styleguide/views.py
|
Python
|
mit
| 897 | 0 |
from template.test import TestCase, main
class MacroTest(TestCase):
    """Run the MACRO directive expectations in DATA against the engine."""
    def testMacro(self):
        # Same settings as before, built with dict() instead of a literal.
        options = dict(INCLUDE_PATH="test/src", TRIM=1, EVAL_PYTHON=True)
        self.Expect(DATA, options, self._callsign())
DATA = r"""
-- test --
[% MACRO foo INCLUDE foo -%]
foo: [% foo %]
foo(b): [% foo(a = b) %]
-- expect --
foo: This is the foo file, a is alpha
foo(b): This is the foo file, a is bravo
-- test --
foo: [% foo %].
-- expect --
foo: .
-- test --
[% MACRO foo(a) INCLUDE foo -%]
foo: [% foo %]
foo(c): [% foo(c) %]
-- expect --
foo: This is the foo file, a is
foo(c): This is the foo file, a is charlie
-- test --
[% BLOCK mypage %]
Header
[% content %]
Footer
[% END %]
[%- MACRO content BLOCK -%]
This is a macro which encapsulates a template block.
a: [% a -%]
[% END -%]
begin
[% INCLUDE mypage %]
mid
[% INCLUDE mypage a = 'New Alpha' %]
end
-- expect --
begin
Header
This is a macro which encapsulates a template block.
a: alpha
Footer
mid
Header
This is a macro which encapsulates a template block.
a: New Alpha
Footer
end
-- test --
[% BLOCK table %]
<table>
[% rows %]
</table>
[% END -%]
[% # define some dummy data
udata = [
{ id => 'foo', name => 'Fubar' },
{ id => 'bar', name => 'Babar' }
]
-%]
[% # define a macro to print each row of user data
MACRO user_summary INCLUDE user_row FOREACH user = udata
%]
[% # here's the block for each row
BLOCK user_row %]
<tr>
<td>[% user.id %]</td>
<td>[% user.name %]</td>
</tr>
[% END -%]
[% # now we can call the main table template, and alias our macro to 'rows'
INCLUDE table
rows = user_summary
%]
-- expect --
<table>
<tr>
<td>foo</td>
<td>Fubar</td>
</tr><tr>
<td>bar</td>
<td>Babar</td>
</tr>
</table>
-- test --
[% MACRO one BLOCK -%]
one: [% title %]
[% END -%]
[% saveone = one %]
[% MACRO two BLOCK; title="2[$title]" -%]
two: [% title %] -> [% saveone %]
[% END -%]
[% two(title="The Title") %]
-- expect --
two: 2[The Title] -> one:
-- test --
[% MACRO one BLOCK -%]
one: [% title %]
[% END -%]
[% saveone = \one %]
[% MACRO two BLOCK; title="2[$title]" -%]
two: [% title %] -> [% saveone %]
[% END -%]
[% two(title="The Title") %]
-- expect --
two: 2[The Title] -> one: 2[The Title]
-- test --
-- name number macro --
[% MACRO number(n) GET n.chunk(-3).join(',') -%]
[% number(1234567) %]
-- expect --
1,234,567
-- test --
-- name python macro --
[% MACRO triple(n) PYTHON %]
n = stash.get('n').value()
print n * 3
[% END -%]
[% triple(10) %]
-- expect --
30
"""
main()
|
aifeiasdf/Template-tookit
|
t/macro_test.py
|
Python
|
artistic-2.0
| 2,554 | 0.002741 |
"""add vendor, opportunity, category models
Revision ID: 29562eda8fbc
Revises: 3473ff14af7e
Create Date: 2015-05-28 02:31:47.039725
"""
# revision identifiers, used by Alembic.
revision = '29562eda8fbc'
down_revision = '3473ff14af7e'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create the category/vendor/opportunity tables (plus association table
    and indexes) and add two columns to existing tables."""
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('category',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('nigp_code', sa.Integer(), nullable=True),
    sa.Column('category', sa.String(length=255), nullable=True),
    sa.Column('subcategory', sa.String(length=255), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_category_id'), 'category', ['id'], unique=False)
    op.create_table('vendor',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('business_name', sa.String(length=255), nullable=False),
    sa.Column('email', sa.String(length=80), nullable=False),
    sa.Column('created_at', sa.DateTime(), nullable=False),
    sa.Column('first_name', sa.String(length=30), nullable=True),
    sa.Column('last_name', sa.String(length=30), nullable=True),
    sa.Column('phone_number', sa.String(length=20), nullable=True),
    sa.Column('fax_number', sa.String(length=20), nullable=True),
    sa.Column('minority_owned', sa.Boolean(), nullable=True),
    sa.Column('veteran_owned', sa.Boolean(), nullable=True),
    sa.Column('woman_owned', sa.Boolean(), nullable=True),
    sa.Column('disadvantaged_owned', sa.Boolean(), nullable=True),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('email')
    )
    op.create_index(op.f('ix_vendor_id'), 'vendor', ['id'], unique=False)
    # Many-to-many link between categories and vendors.
    op.create_table('category_vendor_association',
    sa.Column('category_id', sa.Integer(), nullable=True),
    sa.Column('vendor_id', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['category_id'], ['category.id'], ondelete='SET NULL'),
    sa.ForeignKeyConstraint(['vendor_id'], ['vendor.id'], ondelete='SET NULL')
    )
    op.create_index(op.f('ix_category_vendor_association_category_id'), 'category_vendor_association', ['category_id'], unique=False)
    op.create_index(op.f('ix_category_vendor_association_vendor_id'), 'category_vendor_association', ['vendor_id'], unique=False)
    op.create_table('opportunity',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('contract_id', sa.Integer(), nullable=False),
    sa.Column('created_at', sa.DateTime(), nullable=True),
    sa.Column('title', sa.String(length=255), nullable=True),
    sa.Column('department', sa.String(length=255), nullable=True),
    sa.Column('description', sa.Text(), nullable=True),
    sa.Column('category_id', sa.Integer(), nullable=False),
    sa.Column('bid_open', sa.DateTime(), nullable=True),
    sa.ForeignKeyConstraint(['category_id'], ['category.id'], ondelete='SET NULL'),
    sa.ForeignKeyConstraint(['contract_id'], ['contract.id'], ondelete='cascade'),
    sa.PrimaryKeyConstraint('id')
    )
    op.add_column('app_status', sa.Column('county_max_deadline', sa.DateTime(), nullable=True))
    op.add_column('line_item', sa.Column('percentage', sa.Boolean(), nullable=True))
    ### end Alembic commands ###
def downgrade():
    """Revert upgrade(): drop the new tables/indexes and the added columns,
    in reverse order of their creation."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('opportunity')
    op.drop_index(op.f('ix_category_vendor_association_vendor_id'), table_name='category_vendor_association')
    op.drop_index(op.f('ix_category_vendor_association_category_id'), table_name='category_vendor_association')
    op.drop_table('category_vendor_association')
    op.drop_index(op.f('ix_vendor_id'), table_name='vendor')
    op.drop_table('vendor')
    op.drop_index(op.f('ix_category_id'), table_name='category')
    op.drop_table('category')
    op.drop_column('line_item', 'percentage')
    op.drop_column('app_status', 'county_max_deadline')
    ### end Alembic commands ###
|
CityofPittsburgh/pittsburgh-purchasing-suite
|
migrations/versions/29562eda8fbc_add_vendor_opportunity_category_models.py
|
Python
|
bsd-3-clause
| 3,927 | 0.012478 |
#!/usr/bin/python3
# Example using a character LCD connected to a Raspberry Pi or BeagleBone Black.
import time
import datetime
import Adafruit_CharLCD as LCD
def file_get_contents(filename):
    """Return the entire contents of *filename* as a string (PHP-style helper)."""
    with open(filename) as handle:
        return handle.read()
# Raspberry Pi pin configuration (BCM GPIO numbers wired to the HD44780 pins):
# Raspberry Pi pin configuration:
lcd_rs = 24 # Note this might need to be changed to 21 for older revision Pi's.
lcd_en = 23
lcd_d4 = 9
lcd_d5 = 11
lcd_d6 = 10
lcd_d7 = 18
lcd_backlight = 8
# BeagleBone Black configuration:
# lcd_rs = 'P8_8'
# lcd_en = 'P8_10'
# lcd_d4 = 'P8_18'
# lcd_d5 = 'P8_16'
# lcd_d6 = 'P8_14'
# lcd_d7 = 'P8_12'
# lcd_backlight = 'P8_7'
# Define LCD column and row size for 16x2 LCD.
lcd_columns = 16
lcd_rows = 2
# Alternatively specify a 20x4 LCD.
# lcd_columns = 20
# lcd_rows = 4
# Initialize the LCD using the pins above.
lcd = LCD.Adafruit_CharLCD(lcd_rs, lcd_en, lcd_d4, lcd_d5, lcd_d6, lcd_d7,
                           lcd_columns, lcd_rows, lcd_backlight)
# NOTE(review): datestring is computed but never displayed - presumably
# leftover from an earlier version of this script.
datestring = datetime.datetime.now().strftime('%Y-%m-%d %H:%M')
# Show the current contents of the data file on the display.
lcd.clear()
lcd.message(file_get_contents("../data/lcd.txt") );
|
mhkyg/OrangePIStuff
|
lcd/lcd_update.py
|
Python
|
mit
| 1,190 | 0.012605 |
#!/usr/bin/env python
# coding: utf-8
#
# Copyright 2010 Alexandre Fiori
# based on the original Tornado by Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import cyclone.web
import sys
from twisted.internet import reactor
from twisted.python import log
class MainHandler(cyclone.web.RequestHandler):
    # Responds to GET / with a plain-text greeting.
    def get(self):
        self.write("Hello, world")
def main():
    """Start the demo server: log to stdout and serve on 127.0.0.1:8888."""
    log.startLogging(sys.stdout)
    application = cyclone.web.Application([
        (r"/", MainHandler)
    ])
    reactor.listenTCP(8888, application, interface="127.0.0.1")
    # Blocks until the Twisted reactor is stopped.
    reactor.run()
if __name__ == "__main__":
main()
|
shirk3y/cyclone
|
demos/helloworld/helloworld.py
|
Python
|
apache-2.0
| 1,114 | 0 |
#!/usr/bin/env python
'''
xtreme Rat Config Decoder
'''
__description__ = 'xtreme Rat Config Extractor'
__author__ = 'Kevin Breen http://techanarchy.net http://malwareconfig.com'
__version__ = '0.1'
__date__ = '2014/04/10'
#Standard Imports Go Here
import os
import sys
import string
from struct import unpack
from optparse import OptionParser
#Non Standard Imports
try:
import pefile
except ImportError:
print "Couldn't Import pefile. Try 'sudo pip install pefile'"
# Main Decode Function Goes Here
'''
data is a read of the file
Must return a python dict of values
'''
def run(data):
    """Extract and decode the Xtreme RAT config from raw PE file bytes.

    Dispatches on the decrypted blob length to pick the per-version parser.
    Returns a dict of config values, None for unsupported versions, or exits
    the process when no embedded config resource is found.
    """
    # RC4 key used by Xtreme RAT ("CONFIG" as a UTF-16LE-style string).
    key = "C\x00O\x00N\x00F\x00I\x00G"
    codedConfig = configExtract(data)
    if codedConfig is not None:
        rawConfig = rc4crypt(codedConfig, key)
        #1.3.x # Not implemented yet
        if len(rawConfig) == 0xe10:
            config = None
        #2.9.x #Not a stable extract
        elif len(rawConfig) == 0x1390 or len(rawConfig) == 0x1392:
            config = v29(rawConfig)
        #3.1 & 3.2
        elif len(rawConfig) == 0x5Cc:
            config = v32(rawConfig)
        #3.5
        elif len(rawConfig) == 0x7f0:
            config = v35(rawConfig)
        else:
            config = None
        return config
    else:
        print '[-] Coded config not found'
        sys.exit()
#Helper Functions Go Here
def rc4crypt(data, key): # modified for bad implemented key length
    """RC4-style encrypt/decrypt *data* (a str) with *key*.

    Deliberately reproduces Xtreme RAT's broken key scheduling, which only
    ever uses the first 6 characters of the key (``key[i % 6]``) - do not
    "fix" the modulus.  Encryption and decryption are the same operation.
    """
    # Key-scheduling algorithm (KSA).  list() is required: on Python 3,
    # range() is an immutable sequence and the item swaps below would raise
    # TypeError; on Python 2 this is an equivalent list copy.
    x = 0
    box = list(range(256))
    for i in range(256):
        x = (x + box[i] + ord(key[i % 6])) % 256
        box[i], box[x] = box[x], box[i]
    # Pseudo-random generation algorithm (PRGA): XOR keystream into data.
    x = 0
    y = 0
    out = []
    for char in data:
        x = (x + 1) % 256
        y = (y + box[x]) % 256
        box[x], box[y] = box[y], box[x]
        out.append(chr(ord(char) ^ box[(box[x] + box[y]) % 256]))
    return ''.join(out)
def configExtract(rawData):
    """Return the raw bytes of the 'XTREME' RT_RCDATA resource from a PE
    file, or None when the file cannot be parsed or has no such resource.
    """
    try:
        pe = pefile.PE(data=rawData)
        try:
            # Locate the RCDATA entry in the resource directory.
            rt_string_idx = [
            entry.id for entry in
            pe.DIRECTORY_ENTRY_RESOURCE.entries].index(pefile.RESOURCE_TYPE['RT_RCDATA'])
        except ValueError, e:
            return None
        except AttributeError, e:
            return None
        rt_string_directory = pe.DIRECTORY_ENTRY_RESOURCE.entries[rt_string_idx]
        for entry in rt_string_directory.directory.entries:
            if str(entry.name) == "XTREME":
                data_rva = entry.directory.entries[0].data.struct.OffsetToData
                size = entry.directory.entries[0].data.struct.Size
                data = pe.get_memory_mapped_image()[data_rva:data_rva+size]
                return data
    except:
        # NOTE(review): bare except deliberately treats any parse failure as
        # "no config found" rather than crashing on malformed samples.
        return None
def v29(rawConfig):
    """Decode a version 2.9.x config blob using its fixed field offsets.

    Offsets match the malware's hard-coded layout; domain ports are little-
    endian uint32 values at the start of the blob.  Returns a dict of
    human-readable settings.  (Renamed the local from ``dict`` to avoid
    shadowing the builtin.)
    """
    config = {}
    config["ID"] = getUnicodeString(rawConfig, 0x9e0)
    config["Group"] = getUnicodeString(rawConfig, 0xa5a)
    config["Version"] = getUnicodeString(rawConfig, 0xf2e) # use this to recalc offsets
    config["Mutex"] = getUnicodeString(rawConfig, 0xfaa)
    config["Install Dir"] = getUnicodeString(rawConfig, 0xb50)
    config["Install Name"] = getUnicodeString(rawConfig, 0xad6)
    config["HKLM"] = getUnicodeString(rawConfig, 0xc4f)
    config["HKCU"] = getUnicodeString(rawConfig, 0xcc8)
    config["Custom Reg Key"] = getUnicodeString(rawConfig, 0xdc0)
    config["Custom Reg Name"] = getUnicodeString(rawConfig, 0xe3a)
    config["Custom Reg Value"] = getUnicodeString(rawConfig, 0xa82)
    config["ActiveX Key"] = getUnicodeString(rawConfig, 0xd42)
    config["Injection"] = getUnicodeString(rawConfig, 0xbd2)
    config["FTP Server"] = getUnicodeString(rawConfig, 0x111c)
    config["FTP UserName"] = getUnicodeString(rawConfig, 0x1210)
    config["FTP Password"] = getUnicodeString(rawConfig, 0x128a)
    config["FTP Folder"] = getUnicodeString(rawConfig, 0x1196)
    config["Domain1"] = str(getUnicodeString(rawConfig, 0x50)+":"+str(unpack("<I",rawConfig[0:4])[0]))
    config["Domain2"] = str(getUnicodeString(rawConfig, 0xca)+":"+str(unpack("<I",rawConfig[4:8])[0]))
    config["Domain3"] = str(getUnicodeString(rawConfig, 0x144)+":"+str(unpack("<I",rawConfig[8:12])[0]))
    config["Domain4"] = str(getUnicodeString(rawConfig, 0x1be)+":"+str(unpack("<I",rawConfig[12:16])[0]))
    config["Domain5"] = str(getUnicodeString(rawConfig, 0x238)+":"+str(unpack("<I",rawConfig[16:20])[0]))
    config["Domain6"] = str(getUnicodeString(rawConfig, 0x2b2)+":"+str(unpack("<I",rawConfig[20:24])[0]))
    config["Domain7"] = str(getUnicodeString(rawConfig, 0x32c)+":"+str(unpack("<I",rawConfig[24:28])[0]))
    config["Domain8"] = str(getUnicodeString(rawConfig, 0x3a6)+":"+str(unpack("<I",rawConfig[28:32])[0]))
    config["Domain9"] = str(getUnicodeString(rawConfig, 0x420)+":"+str(unpack("<I",rawConfig[32:36])[0]))
    config["Domain10"] = str(getUnicodeString(rawConfig, 0x49a)+":"+str(unpack("<I",rawConfig[36:40])[0]))
    config["Domain11"] = str(getUnicodeString(rawConfig, 0x514)+":"+str(unpack("<I",rawConfig[40:44])[0]))
    config["Domain12"] = str(getUnicodeString(rawConfig, 0x58e)+":"+str(unpack("<I",rawConfig[44:48])[0]))
    config["Domain13"] = str(getUnicodeString(rawConfig, 0x608)+":"+str(unpack("<I",rawConfig[48:52])[0]))
    config["Domain14"] = str(getUnicodeString(rawConfig, 0x682)+":"+str(unpack("<I",rawConfig[52:56])[0]))
    config["Domain15"] = str(getUnicodeString(rawConfig, 0x6fc)+":"+str(unpack("<I",rawConfig[56:60])[0]))
    config["Domain16"] = str(getUnicodeString(rawConfig, 0x776)+":"+str(unpack("<I",rawConfig[60:64])[0]))
    config["Domain17"] = str(getUnicodeString(rawConfig, 0x7f0)+":"+str(unpack("<I",rawConfig[64:68])[0]))
    config["Domain18"] = str(getUnicodeString(rawConfig, 0x86a)+":"+str(unpack("<I",rawConfig[68:72])[0]))
    config["Domain19"] = str(getUnicodeString(rawConfig, 0x8e4)+":"+str(unpack("<I",rawConfig[72:76])[0]))
    config["Domain20"] = str(getUnicodeString(rawConfig, 0x95e)+":"+str(unpack("<I",rawConfig[76:80])[0]))
    return config
def v32(rawConfig):
    """Decode a version 3.1/3.2 config blob using its fixed field offsets.

    Returns a dict of human-readable settings.  (Renamed the local from
    ``dict`` to avoid shadowing the builtin.)
    """
    config = {}
    config["ID"] = getUnicodeString(rawConfig, 0x1b4)
    config["Group"] = getUnicodeString(rawConfig, 0x1ca)
    config["Version"] = getUnicodeString(rawConfig, 0x2bc)
    config["Mutex"] = getUnicodeString(rawConfig, 0x2d4)
    config["Install Dir"] = getUnicodeString(rawConfig, 0x1f8)
    config["Install Name"] = getUnicodeString(rawConfig, 0x1e2)
    config["HKLM"] = getUnicodeString(rawConfig, 0x23a)
    config["HKCU"] = getUnicodeString(rawConfig, 0x250)
    config["ActiveX Key"] = getUnicodeString(rawConfig, 0x266)
    config["Injection"] = getUnicodeString(rawConfig, 0x216)
    config["FTP Server"] = getUnicodeString(rawConfig, 0x35e)
    config["FTP UserName"] = getUnicodeString(rawConfig, 0x402)
    config["FTP Password"] = getUnicodeString(rawConfig, 0x454)
    config["FTP Folder"] = getUnicodeString(rawConfig, 0x3b0)
    # Domain ports are little-endian uint32 values at the start of the blob.
    config["Domain1"] = str(getUnicodeString(rawConfig, 0x14)+":"+str(unpack("<I",rawConfig[0:4])[0]))
    config["Domain2"] = str(getUnicodeString(rawConfig, 0x66)+":"+str(unpack("<I",rawConfig[4:8])[0]))
    config["Domain3"] = str(getUnicodeString(rawConfig, 0xb8)+":"+str(unpack("<I",rawConfig[8:12])[0]))
    config["Domain4"] = str(getUnicodeString(rawConfig, 0x10a)+":"+str(unpack("<I",rawConfig[12:16])[0]))
    config["Domain5"] = str(getUnicodeString(rawConfig, 0x15c)+":"+str(unpack("<I",rawConfig[16:20])[0]))
    config["Msg Box Title"] = getUnicodeString(rawConfig, 0x50c)
    config["Msg Box Text"] = getUnicodeString(rawConfig, 0x522)
    return config
def v35(rawConfig):
    """Decode a version 3.5 config blob using its fixed field offsets.

    Same layout as v32 except the FTP/message-box fields moved.  Returns a
    dict of human-readable settings.  (Renamed the local from ``dict`` to
    avoid shadowing the builtin.)
    """
    config = {}
    config["ID"] = getUnicodeString(rawConfig, 0x1b4)
    config["Group"] = getUnicodeString(rawConfig, 0x1ca)
    config["Version"] = getUnicodeString(rawConfig, 0x2d8)
    config["Mutex"] = getUnicodeString(rawConfig, 0x2f0)
    config["Install Dir"] = getUnicodeString(rawConfig, 0x1f8)
    config["Install Name"] = getUnicodeString(rawConfig, 0x1e2)
    config["HKLM"] = getUnicodeString(rawConfig, 0x23a)
    config["HKCU"] = getUnicodeString(rawConfig, 0x250)
    config["ActiveX Key"] = getUnicodeString(rawConfig, 0x266)
    config["Injection"] = getUnicodeString(rawConfig, 0x216)
    config["FTP Server"] = getUnicodeString(rawConfig, 0x380)
    config["FTP UserName"] = getUnicodeString(rawConfig, 0x422)
    config["FTP Password"] = getUnicodeString(rawConfig, 0x476)
    config["FTP Folder"] = getUnicodeString(rawConfig, 0x3d2)
    # Domain ports are little-endian uint32 values at the start of the blob.
    config["Domain1"] = str(getUnicodeString(rawConfig, 0x14)+":"+str(unpack("<I",rawConfig[0:4])[0]))
    config["Domain2"] = str(getUnicodeString(rawConfig, 0x66)+":"+str(unpack("<I",rawConfig[4:8])[0]))
    config["Domain3"] = str(getUnicodeString(rawConfig, 0xb8)+":"+str(unpack("<I",rawConfig[8:12])[0]))
    config["Domain4"] = str(getUnicodeString(rawConfig, 0x10a)+":"+str(unpack("<I",rawConfig[12:16])[0]))
    config["Domain5"] = str(getUnicodeString(rawConfig, 0x15c)+":"+str(unpack("<I",rawConfig[16:20])[0]))
    config["Msg Box Title"] = getUnicodeString(rawConfig, 0x52c)
    config["Msg Box Text"] = getUnicodeString(rawConfig, 0x542)
    return config
def getString(buf,pos):
    """Read a NUL-terminated ASCII string from *buf* starting at *pos*.

    Returns None when the string at that offset is empty.
    """
    chars = []
    for ch in buf[pos:]:
        if ord(ch) == 0:
            break
        chars.append(ch)
    # An empty result means "no string here".
    return ''.join(chars) or None
def getUnicodeString(buf,pos):
    """Read a wide (UTF-16LE-style) string from *buf* starting at *pos*.

    Scans until two consecutive non-printable bytes (the wide-char NUL
    terminator), then strips the interleaved NUL bytes.  Returns None when
    nothing was read.

    Fix: the original indexed ``buf[pos+i+1]`` without a bounds check and
    raised IndexError whenever the buffer ended on a printable character;
    the lookahead now treats end-of-buffer as a non-printable (0) byte.
    """
    out = ""
    end = len(buf)
    for i in range(len(buf[pos:])):
        cur = ord(buf[pos + i])
        # Bounds-guarded one-byte lookahead (0 == terminator past the end).
        nxt = ord(buf[pos + i + 1]) if pos + i + 1 < end else 0
        if not (32 <= cur <= 126) and not (32 <= nxt <= 126):
            out += "\x00"
            break
        out += buf[pos + i]
    if out == "":
        return None
    return out.replace("\x00", "")
#Recursive Function Goes Here
# Main
# Command-line entry point: decode one sample, print or save its config.
if __name__ == "__main__":
    parser = OptionParser(usage='usage: %prog inFile outConfig\n' + __description__, version='%prog ' + __version__)
    parser.add_option("-r", "--recursive", action='store_true', default=False, help="Recursive Mode")
    (options, args) = parser.parse_args()
    # If we dont have args quit with help page
    if len(args) > 0:
        pass
    else:
        parser.print_help()
        sys.exit()
    # if we want a recursive extract run this function
    if options.recursive == True:
        print "[+] Sorry No Recursive Yet Check Back Soon"
        parser.print_help()
        sys.exit()
    # If not recurisve try to open file
    try:
        print "[+] Reading file"
        fileData = open(args[0], 'rb').read()
    except:
        # NOTE(review): execution continues after this message, so run()
        # below will hit a NameError on fileData - consider sys.exit() here.
        print "[+] Couldn't Open File {0}".format(args[0])
    #Run the config extraction
    print "[+] Searching for Config"
    config = run(fileData)
    #If we have a config figure out where to dump it out.
    if config == None:
        print "[+] Config not found"
        sys.exit()
    #if you gave me two args im going to assume the 2nd arg is where you want to save the file
    if len(args) == 2:
        print "[+] Writing Config to file {0}".format(args[1])
        with open(args[1], 'a') as outFile:
            for key, value in sorted(config.iteritems()):
                # Strip non-printable bytes before writing.
                clean_value = filter(lambda x: x in string.printable, value)
                outFile.write("Key: {0}\t Value: {1}\n".format(key,clean_value))
    # if no seconds arg then assume you want it printing to screen
    else:
        print "[+] Printing Config to screen"
        for key, value in sorted(config.iteritems()):
            clean_value = filter(lambda x: x in string.printable, value)
            print " [-] Key: {0}\t Value: {1}".format(key,clean_value)
        print "[+] End of Config"
|
0x0mar/RATDecoders
|
Xtreme.py
|
Python
|
gpl-3.0
| 10,565 | 0.030951 |
from __future__ import absolute_import
from . import TypeSlots
from .ExprNodes import not_a_constant
import cython
cython.declare(UtilityCode=object, EncodedString=object, BytesLiteral=object,
Nodes=object, ExprNodes=object, PyrexTypes=object, Builtin=object,
UtilNodes=object)
from . import Nodes
from . import ExprNodes
from . import PyrexTypes
from . import Visitor
from . import Builtin
from . import UtilNodes
from . import Options
from .Code import UtilityCode
from .StringEncoding import EncodedString, BytesLiteral
from .Errors import error
from .ParseTreeTransforms import SkipDeclarations
import copy
import codecs
try:
from __builtin__ import reduce
except ImportError:
from functools import reduce
try:
from __builtin__ import basestring
except ImportError:
basestring = str # Python 3
def load_c_utility(name):
    # Shorthand for loading a cached utility-code snippet from Optimize.c.
    return UtilityCode.load_cached(name, "Optimize.c")
def unwrap_coerced_node(node, coercion_nodes=(ExprNodes.CoerceToPyTypeNode, ExprNodes.CoerceFromPyTypeNode)):
    """Strip a single to/from-Python coercion wrapper from *node*, if present."""
    if not isinstance(node, coercion_nodes):
        return node
    return node.arg
def unwrap_node(node):
    """Follow ResultRefNode indirections down to the underlying expression."""
    if isinstance(node, UtilNodes.ResultRefNode):
        return unwrap_node(node.expression)
    return node
def is_common_value(a, b):
    """Return True if *a* and *b* are trivially known to denote the same value.

    Only recognises identical simple names and identical (non-Python)
    attribute chains; anything else conservatively compares unequal.
    """
    a = unwrap_node(a)
    b = unwrap_node(b)
    if isinstance(a, ExprNodes.NameNode) and isinstance(b, ExprNodes.NameNode):
        return a.name == b.name
    if isinstance(a, ExprNodes.AttributeNode) and isinstance(b, ExprNodes.AttributeNode):
        return (not a.is_py_attr
                and is_common_value(a.obj, b.obj)
                and a.attribute == b.attribute)
    return False
def filter_none_node(node):
    """Map an AST node whose constant value is None to None; pass others through."""
    if node is None or node.constant_result is None:
        return None
    return node
class IterationTransform(Visitor.EnvTransform):
"""Transform some common for-in loop patterns into efficient C loops:
- for-in-dict loop becomes a while loop calling PyDict_Next()
- for-in-enumerate is replaced by an external counter variable
- for-in-range loop becomes a plain C for loop
"""
def visit_PrimaryCmpNode(self, node):
if node.is_ptr_contains():
# for t in operand2:
# if operand1 == t:
# res = True
# break
# else:
# res = False
pos = node.pos
result_ref = UtilNodes.ResultRefNode(node)
if isinstance(node.operand2, ExprNodes.IndexNode):
base_type = node.operand2.base.type.base_type
else:
base_type = node.operand2.type.base_type
target_handle = UtilNodes.TempHandle(base_type)
target = target_handle.ref(pos)
cmp_node = ExprNodes.PrimaryCmpNode(
pos, operator=u'==', operand1=node.operand1, operand2=target)
if_body = Nodes.StatListNode(
pos,
stats = [Nodes.SingleAssignmentNode(pos, lhs=result_ref, rhs=ExprNodes.BoolNode(pos, value=1)),
Nodes.BreakStatNode(pos)])
if_node = Nodes.IfStatNode(
pos,
if_clauses=[Nodes.IfClauseNode(pos, condition=cmp_node, body=if_body)],
else_clause=None)
for_loop = UtilNodes.TempsBlockNode(
pos,
temps = [target_handle],
body = Nodes.ForInStatNode(
pos,
target=target,
iterator=ExprNodes.IteratorNode(node.operand2.pos, sequence=node.operand2),
body=if_node,
else_clause=Nodes.SingleAssignmentNode(pos, lhs=result_ref, rhs=ExprNodes.BoolNode(pos, value=0))))
for_loop = for_loop.analyse_expressions(self.current_env())
for_loop = self.visit(for_loop)
new_node = UtilNodes.TempResultFromStatNode(result_ref, for_loop)
if node.operator == 'not_in':
new_node = ExprNodes.NotNode(pos, operand=new_node)
return new_node
else:
self.visitchildren(node)
return node
def visit_ForInStatNode(self, node):
self.visitchildren(node)
return self._optimise_for_loop(node, node.iterator.sequence)
def _optimise_for_loop(self, node, iterator, reversed=False):
if iterator.type is Builtin.dict_type:
# like iterating over dict.keys()
if reversed:
# CPython raises an error here: not a sequence
return node
return self._transform_dict_iteration(
node, dict_obj=iterator, method=None, keys=True, values=False)
# C array (slice) iteration?
if iterator.type.is_ptr or iterator.type.is_array:
return self._transform_carray_iteration(node, iterator, reversed=reversed)
if iterator.type is Builtin.bytes_type:
return self._transform_bytes_iteration(node, iterator, reversed=reversed)
if iterator.type is Builtin.unicode_type:
return self._transform_unicode_iteration(node, iterator, reversed=reversed)
# the rest is based on function calls
if not isinstance(iterator, ExprNodes.SimpleCallNode):
return node
if iterator.args is None:
arg_count = iterator.arg_tuple and len(iterator.arg_tuple.args) or 0
else:
arg_count = len(iterator.args)
if arg_count and iterator.self is not None:
arg_count -= 1
function = iterator.function
# dict iteration?
if function.is_attribute and not reversed and not arg_count:
base_obj = iterator.self or function.obj
method = function.attribute
# in Py3, items() is equivalent to Py2's iteritems()
is_safe_iter = self.global_scope().context.language_level >= 3
if not is_safe_iter and method in ('keys', 'values', 'items'):
# try to reduce this to the corresponding .iter*() methods
if isinstance(base_obj, ExprNodes.SimpleCallNode):
inner_function = base_obj.function
if (inner_function.is_name and inner_function.name == 'dict'
and inner_function.entry
and inner_function.entry.is_builtin):
# e.g. dict(something).items() => safe to use .iter*()
is_safe_iter = True
keys = values = False
if method == 'iterkeys' or (is_safe_iter and method == 'keys'):
keys = True
elif method == 'itervalues' or (is_safe_iter and method == 'values'):
values = True
elif method == 'iteritems' or (is_safe_iter and method == 'items'):
keys = values = True
if keys or values:
return self._transform_dict_iteration(
node, base_obj, method, keys, values)
# enumerate/reversed ?
if iterator.self is None and function.is_name and \
function.entry and function.entry.is_builtin:
if function.name == 'enumerate':
if reversed:
# CPython raises an error here: not a sequence
return node
return self._transform_enumerate_iteration(node, iterator)
elif function.name == 'reversed':
if reversed:
# CPython raises an error here: not a sequence
return node
return self._transform_reversed_iteration(node, iterator)
# range() iteration?
if Options.convert_range and node.target.type.is_int:
if iterator.self is None and function.is_name and \
function.entry and function.entry.is_builtin and \
function.name in ('range', 'xrange'):
return self._transform_range_iteration(node, iterator, reversed=reversed)
return node
def _transform_reversed_iteration(self, node, reversed_function):
args = reversed_function.arg_tuple.args
if len(args) == 0:
error(reversed_function.pos,
"reversed() requires an iterable argument")
return node
elif len(args) > 1:
error(reversed_function.pos,
"reversed() takes exactly 1 argument")
return node
arg = args[0]
# reversed(list/tuple) ?
if arg.type in (Builtin.tuple_type, Builtin.list_type):
node.iterator.sequence = arg.as_none_safe_node("'NoneType' object is not iterable")
node.iterator.reversed = True
return node
return self._optimise_for_loop(node, arg, reversed=True)
PyBytes_AS_STRING_func_type = PyrexTypes.CFuncType(
PyrexTypes.c_char_ptr_type, [
PyrexTypes.CFuncTypeArg("s", Builtin.bytes_type, None)
])
PyBytes_GET_SIZE_func_type = PyrexTypes.CFuncType(
PyrexTypes.c_py_ssize_t_type, [
PyrexTypes.CFuncTypeArg("s", Builtin.bytes_type, None)
])
def _transform_bytes_iteration(self, node, slice_node, reversed=False):
target_type = node.target.type
if not target_type.is_int and target_type is not Builtin.bytes_type:
# bytes iteration returns bytes objects in Py2, but
# integers in Py3
return node
unpack_temp_node = UtilNodes.LetRefNode(
slice_node.as_none_safe_node("'NoneType' is not iterable"))
slice_base_node = ExprNodes.PythonCapiCallNode(
slice_node.pos, "PyBytes_AS_STRING",
self.PyBytes_AS_STRING_func_type,
args = [unpack_temp_node],
is_temp = 0,
)
len_node = ExprNodes.PythonCapiCallNode(
slice_node.pos, "PyBytes_GET_SIZE",
self.PyBytes_GET_SIZE_func_type,
args = [unpack_temp_node],
is_temp = 0,
)
return UtilNodes.LetNode(
unpack_temp_node,
self._transform_carray_iteration(
node,
ExprNodes.SliceIndexNode(
slice_node.pos,
base = slice_base_node,
start = None,
step = None,
stop = len_node,
type = slice_base_node.type,
is_temp = 1,
),
reversed = reversed))
PyUnicode_READ_func_type = PyrexTypes.CFuncType(
PyrexTypes.c_py_ucs4_type, [
PyrexTypes.CFuncTypeArg("kind", PyrexTypes.c_int_type, None),
PyrexTypes.CFuncTypeArg("data", PyrexTypes.c_void_ptr_type, None),
PyrexTypes.CFuncTypeArg("index", PyrexTypes.c_py_ssize_t_type, None)
])
init_unicode_iteration_func_type = PyrexTypes.CFuncType(
PyrexTypes.c_int_type, [
PyrexTypes.CFuncTypeArg("s", PyrexTypes.py_object_type, None),
PyrexTypes.CFuncTypeArg("length", PyrexTypes.c_py_ssize_t_ptr_type, None),
PyrexTypes.CFuncTypeArg("data", PyrexTypes.c_void_ptr_ptr_type, None),
PyrexTypes.CFuncTypeArg("kind", PyrexTypes.c_int_ptr_type, None)
],
exception_value = '-1')
def _transform_unicode_iteration(self, node, slice_node, reversed=False):
if slice_node.is_literal:
# try to reduce to byte iteration for plain Latin-1 strings
try:
bytes_value = BytesLiteral(slice_node.value.encode('latin1'))
except UnicodeEncodeError:
pass
else:
bytes_slice = ExprNodes.SliceIndexNode(
slice_node.pos,
base=ExprNodes.BytesNode(
slice_node.pos, value=bytes_value,
constant_result=bytes_value,
type=PyrexTypes.c_char_ptr_type).coerce_to(
PyrexTypes.c_uchar_ptr_type, self.current_env()),
start=None,
stop=ExprNodes.IntNode(
slice_node.pos, value=str(len(bytes_value)),
constant_result=len(bytes_value),
type=PyrexTypes.c_py_ssize_t_type),
type=Builtin.unicode_type, # hint for Python conversion
)
return self._transform_carray_iteration(node, bytes_slice, reversed)
unpack_temp_node = UtilNodes.LetRefNode(
slice_node.as_none_safe_node("'NoneType' is not iterable"))
start_node = ExprNodes.IntNode(
node.pos, value='0', constant_result=0, type=PyrexTypes.c_py_ssize_t_type)
length_temp = UtilNodes.TempHandle(PyrexTypes.c_py_ssize_t_type)
end_node = length_temp.ref(node.pos)
if reversed:
relation1, relation2 = '>', '>='
start_node, end_node = end_node, start_node
else:
relation1, relation2 = '<=', '<'
kind_temp = UtilNodes.TempHandle(PyrexTypes.c_int_type)
data_temp = UtilNodes.TempHandle(PyrexTypes.c_void_ptr_type)
counter_temp = UtilNodes.TempHandle(PyrexTypes.c_py_ssize_t_type)
target_value = ExprNodes.PythonCapiCallNode(
slice_node.pos, "__Pyx_PyUnicode_READ",
self.PyUnicode_READ_func_type,
args = [kind_temp.ref(slice_node.pos),
data_temp.ref(slice_node.pos),
counter_temp.ref(node.target.pos)],
is_temp = False,
)
if target_value.type != node.target.type:
target_value = target_value.coerce_to(node.target.type,
self.current_env())
target_assign = Nodes.SingleAssignmentNode(
pos = node.target.pos,
lhs = node.target,
rhs = target_value)
body = Nodes.StatListNode(
node.pos,
stats = [target_assign, node.body])
loop_node = Nodes.ForFromStatNode(
node.pos,
bound1=start_node, relation1=relation1,
target=counter_temp.ref(node.target.pos),
relation2=relation2, bound2=end_node,
step=None, body=body,
else_clause=node.else_clause,
from_range=True)
setup_node = Nodes.ExprStatNode(
node.pos,
expr = ExprNodes.PythonCapiCallNode(
slice_node.pos, "__Pyx_init_unicode_iteration",
self.init_unicode_iteration_func_type,
args = [unpack_temp_node,
ExprNodes.AmpersandNode(slice_node.pos, operand=length_temp.ref(slice_node.pos),
type=PyrexTypes.c_py_ssize_t_ptr_type),
ExprNodes.AmpersandNode(slice_node.pos, operand=data_temp.ref(slice_node.pos),
type=PyrexTypes.c_void_ptr_ptr_type),
ExprNodes.AmpersandNode(slice_node.pos, operand=kind_temp.ref(slice_node.pos),
type=PyrexTypes.c_int_ptr_type),
],
is_temp = True,
result_is_used = False,
utility_code=UtilityCode.load_cached("unicode_iter", "Optimize.c"),
))
return UtilNodes.LetNode(
unpack_temp_node,
UtilNodes.TempsBlockNode(
node.pos, temps=[counter_temp, length_temp, data_temp, kind_temp],
body=Nodes.StatListNode(node.pos, stats=[setup_node, loop_node])))
    def _transform_carray_iteration(self, node, slice_node, reversed=False):
        """Replace iteration over a C array (or a slice of one) by a
        ForFromStatNode that runs a C pointer from the start address to
        the stop address.

        node       -- the for-in loop node being optimised
        slice_node -- the iterable: a SliceIndexNode, an IndexNode with a
                      SliceNode index, or a plain C array expression
        reversed   -- True when iterating via reversed(); bounds (and the
                      step sign, if given) are swapped accordingly

        Returns the rewritten loop, or the unchanged 'node' when start,
        stop or step cannot be determined at compile time.
        """
        neg_step = False
        # --- extract start/stop/step for the supported iterable forms ---
        if isinstance(slice_node, ExprNodes.SliceIndexNode):
            slice_base = slice_node.base
            start = filter_none_node(slice_node.start)
            stop = filter_none_node(slice_node.stop)
            step = None
            if not stop:
                if not slice_base.type.is_pyobject:
                    error(slice_node.pos, "C array iteration requires known end index")
                return node
        elif isinstance(slice_node, ExprNodes.IndexNode):
            assert isinstance(slice_node.index, ExprNodes.SliceNode)
            slice_base = slice_node.base
            index = slice_node.index
            start = filter_none_node(index.start)
            stop = filter_none_node(index.stop)
            step = filter_none_node(index.step)
            if step:
                # only a non-zero compile-time constant step is supported,
                # and the bound towards which it runs must be known
                if not isinstance(step.constant_result, (int,long)) \
                       or step.constant_result == 0 \
                       or step.constant_result > 0 and not stop \
                       or step.constant_result < 0 and not start:
                    if not slice_base.type.is_pyobject:
                        error(step.pos, "C array iteration requires known step size and end index")
                    return node
                else:
                    # step sign is handled internally by ForFromStatNode
                    step_value = step.constant_result
                    if reversed:
                        step_value = -step_value
                    neg_step = step_value < 0
                    step = ExprNodes.IntNode(step.pos, type=PyrexTypes.c_py_ssize_t_type,
                                             value=str(abs(step_value)),
                                             constant_result=abs(step_value))
        elif slice_node.type.is_array:
            # whole array: iterate from 0 to the declared size
            if slice_node.type.size is None:
                error(slice_node.pos, "C array iteration requires known end index")
                return node
            slice_base = slice_node
            start = None
            stop = ExprNodes.IntNode(
                slice_node.pos, value=str(slice_node.type.size),
                type=PyrexTypes.c_py_ssize_t_type, constant_result=slice_node.type.size)
            step = None
        else:
            if not slice_node.type.is_pyobject:
                error(slice_node.pos, "C array iteration requires known end index")
            return node
        if start:
            start = start.coerce_to(PyrexTypes.c_py_ssize_t_type, self.current_env())
        if stop:
            stop = stop.coerce_to(PyrexTypes.c_py_ssize_t_type, self.current_env())
        if stop is None:
            if neg_step:
                # negative step runs down to (and excluding) index -1
                stop = ExprNodes.IntNode(
                    slice_node.pos, value='-1', type=PyrexTypes.c_py_ssize_t_type, constant_result=-1)
            else:
                error(slice_node.pos, "C array iteration requires known step size and end index")
                return node
        if reversed:
            if not start:
                start = ExprNodes.IntNode(slice_node.pos, value="0",  constant_result=0,
                                          type=PyrexTypes.c_py_ssize_t_type)
            # if step was provided, it was already negated above
            start, stop = stop, start
        # --- build the pointer-typed loop bounds ---
        ptr_type = slice_base.type
        if ptr_type.is_array:
            ptr_type = ptr_type.element_ptr_type()
        carray_ptr = slice_base.coerce_to_simple(self.current_env())
        if start and start.constant_result != 0:
            start_ptr_node = ExprNodes.AddNode(
                start.pos,
                operand1=carray_ptr,
                operator='+',
                operand2=start,
                type=ptr_type)
        else:
            start_ptr_node = carray_ptr
        if stop and stop.constant_result != 0:
            stop_ptr_node = ExprNodes.AddNode(
                stop.pos,
                operand1=ExprNodes.CloneNode(carray_ptr),
                operator='+',
                operand2=stop,
                type=ptr_type
                ).coerce_to_simple(self.current_env())
        else:
            stop_ptr_node = ExprNodes.CloneNode(carray_ptr)
        counter = UtilNodes.TempHandle(ptr_type)
        counter_temp = counter.ref(node.target.pos)
        # --- derive the per-iteration value assigned to the loop target ---
        if slice_base.type.is_string and node.target.type.is_pyobject:
            # special case: char* -> bytes/unicode
            if slice_node.type is Builtin.unicode_type:
                target_value = ExprNodes.CastNode(
                    ExprNodes.DereferenceNode(
                        node.target.pos, operand=counter_temp,
                        type=ptr_type.base_type),
                    PyrexTypes.c_py_ucs4_type).coerce_to(
                        node.target.type, self.current_env())
            else:
                # char* -> bytes coercion requires slicing, not indexing
                target_value = ExprNodes.SliceIndexNode(
                    node.target.pos,
                    start=ExprNodes.IntNode(node.target.pos, value='0',
                                            constant_result=0,
                                            type=PyrexTypes.c_int_type),
                    stop=ExprNodes.IntNode(node.target.pos, value='1',
                                           constant_result=1,
                                           type=PyrexTypes.c_int_type),
                    base=counter_temp,
                    type=Builtin.bytes_type,
                    is_temp=1)
        elif node.target.type.is_ptr and not node.target.type.assignable_from(ptr_type.base_type):
            # Allow iteration with pointer target to avoid copy.
            target_value = counter_temp
        else:
            # TODO: can this safely be replaced with DereferenceNode() as above?
            target_value = ExprNodes.IndexNode(
                node.target.pos,
                index=ExprNodes.IntNode(node.target.pos, value='0',
                                        constant_result=0,
                                        type=PyrexTypes.c_int_type),
                base=counter_temp,
                is_buffer_access=False,
                type=ptr_type.base_type)
        if target_value.type != node.target.type:
            target_value = target_value.coerce_to(node.target.type,
                                                  self.current_env())
        target_assign = Nodes.SingleAssignmentNode(
            pos = node.target.pos,
            lhs = node.target,
            rhs = target_value)
        # prepend the target assignment to the original loop body
        body = Nodes.StatListNode(
            node.pos,
            stats = [target_assign, node.body])
        relation1, relation2 = self._find_for_from_node_relations(neg_step, reversed)
        for_node = Nodes.ForFromStatNode(
            node.pos,
            bound1=start_ptr_node, relation1=relation1,
            target=counter_temp,
            relation2=relation2, bound2=stop_ptr_node,
            step=step, body=body,
            else_clause=node.else_clause,
            from_range=True)
        return UtilNodes.TempsBlockNode(
            node.pos, temps=[counter],
            body=for_node)
    def _transform_enumerate_iteration(self, node, enumerate_function):
        """Replace 'for i, x in enumerate(it[, start])' by iteration over
        'it' with an explicit counter variable that is incremented in the
        loop body.

        node               -- the for-in loop node being optimised
        enumerate_function -- the enumerate() call node

        Returns the rewritten loop (wrapped in a LetNode holding the
        counter), or the unchanged 'node' when the pattern does not apply.
        """
        args = enumerate_function.arg_tuple.args
        if len(args) == 0:
            error(enumerate_function.pos,
                  "enumerate() requires an iterable argument")
            return node
        elif len(args) > 2:
            error(enumerate_function.pos,
                  "enumerate() takes at most 2 arguments")
            return node
        if not node.target.is_sequence_constructor:
            # leave this untouched for now
            return node
        targets = node.target.args
        if len(targets) != 2:
            # leave this untouched for now
            return node
        enumerate_target, iterable_target = targets
        counter_type = enumerate_target.type
        if not counter_type.is_pyobject and not counter_type.is_int:
            # nothing we can do here, I guess
            return node
        # counter start value: explicit second argument or literal 0
        if len(args) == 2:
            start = unwrap_coerced_node(args[1]).coerce_to(counter_type, self.current_env())
        else:
            start = ExprNodes.IntNode(enumerate_function.pos,
                                      value='0',
                                      type=counter_type,
                                      constant_result=0)
        temp = UtilNodes.LetRefNode(start)
        # expression that computes 'counter + 1' for each iteration
        inc_expression = ExprNodes.AddNode(
            enumerate_function.pos,
            operand1 = temp,
            operand2 = ExprNodes.IntNode(node.pos, value='1',
                                         type=counter_type,
                                         constant_result=1),
            operator = '+',
            type = counter_type,
            #inplace = True,   # not worth using in-place operation for Py ints
            is_temp = counter_type.is_pyobject
            )
        # assign the counter to the enumerate target, then increment it
        loop_body = [
            Nodes.SingleAssignmentNode(
                pos = enumerate_target.pos,
                lhs = enumerate_target,
                rhs = temp),
            Nodes.SingleAssignmentNode(
                pos = enumerate_target.pos,
                lhs = temp,
                rhs = inc_expression)
            ]
        if isinstance(node.body, Nodes.StatListNode):
            node.body.stats = loop_body + node.body.stats
        else:
            loop_body.append(node.body)
            node.body = Nodes.StatListNode(
                node.body.pos,
                stats = loop_body)
        # iterate over the plain iterable instead of the enumerate() call
        node.target = iterable_target
        node.item = node.item.coerce_to(iterable_target.type, self.current_env())
        node.iterator.sequence = args[0]
        # recurse into loop to check for further optimisations
        return UtilNodes.LetNode(temp, self._optimise_for_loop(node, node.iterator.sequence))
def _find_for_from_node_relations(self, neg_step_value, reversed):
if reversed:
if neg_step_value:
return '<', '<='
else:
return '>', '>='
else:
if neg_step_value:
return '>=', '>'
else:
return '<=', '<'
def _transform_range_iteration(self, node, range_function, reversed=False):
args = range_function.arg_tuple.args
if len(args) < 3:
step_pos = range_function.pos
step_value = 1
step = ExprNodes.IntNode(step_pos, value='1', constant_result=1)
else:
step = args[2]
step_pos = step.pos
if not isinstance(step.constant_result, (int, long)):
# cannot determine step direction
return node
step_value = step.constant_result
if step_value == 0:
# will lead to an error elsewhere
return node
step = ExprNodes.IntNode(step_pos, value=str(step_value),
constant_result=step_value)
if len(args) == 1:
bound1 = ExprNodes.IntNode(range_function.pos, value='0',
constant_result=0)
bound2 = args[0].coerce_to_integer(self.current_env())
else:
bound1 = args[0].coerce_to_integer(self.current_env())
bound2 = args[1].coerce_to_integer(self.current_env())
relation1, relation2 = self._find_for_from_node_relations(step_value < 0, reversed)
bound2_ref_node = None
if reversed:
bound1, bound2 = bound2, bound1
abs_step = abs(step_value)
if abs_step != 1:
if (isinstance(bound1.constant_result, (int, long)) and
isinstance(bound2.constant_result, (int, long))):
# calculate final bounds now
if step_value < 0:
begin_value = bound2.constant_result
end_value = bound1.constant_result
bound1_value = begin_value - abs_step * ((begin_value - end_value - 1) // abs_step) - 1
else:
begin_value = bound1.constant_result
end_value = bound2.constant_result
bound1_value = end_value + abs_step * ((begin_value - end_value - 1) // abs_step) + 1
bound1 = ExprNodes.IntNode(
bound1.pos, value=str(bound1_value), constant_result=bound1_value,
type=PyrexTypes.spanning_type(bound1.type, bound2.type))
else:
# evaluate the same expression as above at runtime
bound2_ref_node = UtilNodes.LetRefNode(bound2)
spanning_type = PyrexTypes.spanning_type(bound1.type, bound2.type)
spanning_step_type = PyrexTypes.spanning_type(spanning_type, step.type)
if step_value < 0:
begin_value = bound2_ref_node
end_value = bound1
final_op = '-'
else:
begin_value = bound1
end_value = bound2_ref_node
final_op = '+'
bound1 = ExprNodes.binop_node(
bound1.pos,
operand1=ExprNodes.binop_node(
bound1.pos,
operand1=bound2_ref_node,
operator=final_op, # +/-
operand2=ExprNodes.MulNode(
bound1.pos,
operand1=ExprNodes.IntNode(
bound1.pos,
value=str(abs_step),
constant_value=abs_step,
type=spanning_step_type),
operator='*',
operand2=ExprNodes.DivNode(
bound1.pos,
operand1=ExprNodes.SubNode(
bound1.pos,
operand1=ExprNodes.SubNode(
bound1.pos,
operand1=begin_value,
operator='-',
operand2=end_value,
type=spanning_type),
operator='-',
operand2=ExprNodes.IntNode(
bound1.pos,
value='1',
constant_result=1),
type=spanning_step_type),
operator='//',
operand2=ExprNodes.IntNode(
bound1.pos,
value=str(abs_step),
constant_value=abs_step,
type=spanning_step_type),
type=spanning_step_type),
type=spanning_step_type),
type=spanning_step_type),
operator=final_op, # +/-
operand2=ExprNodes.IntNode(
bound1.pos,
value='1',
constant_result=1),
type=spanning_type)
if step_value < 0:
step_value = -step_value
step.value = str(step_value)
step.constant_result = step_value
step = step.coerce_to_integer(self.current_env())
if not bound2.is_literal:
# stop bound must be immutable => keep it in a temp var
bound2_is_temp = True
bound2 = bound2_ref_node or UtilNodes.LetRefNode(bound2)
else:
bound2_is_temp = False
for_node = Nodes.ForFromStatNode(
node.pos,
target=node.target,
bound1=bound1, relation1=relation1,
relation2=relation2, bound2=bound2,
step=step, body=node.body,
else_clause=node.else_clause,
from_range=True)
if bound2_is_temp:
for_node = UtilNodes.LetNode(bound2, for_node)
return for_node
    def _transform_dict_iteration(self, node, dict_obj, method, keys, values):
        """Replace iteration over a dict (or its keys()/values()/items()
        methods) by a while loop driven by __Pyx_dict_iterator() /
        DictIterationNextNode.

        node     -- the for-in loop node being optimised
        dict_obj -- expression for the dict being iterated
        method   -- method name ('keys', 'values', 'items') or None for
                    plain dict iteration
        keys, values -- booleans selecting what the loop target receives
        """
        temps = []
        # temp holding the iterated object (dict or item iterator)
        temp = UtilNodes.TempHandle(PyrexTypes.py_object_type)
        temps.append(temp)
        dict_temp = temp.ref(dict_obj.pos)
        # temp holding the current iteration position
        temp = UtilNodes.TempHandle(PyrexTypes.c_py_ssize_t_type)
        temps.append(temp)
        pos_temp = temp.ref(node.pos)
        # --- map the loop target(s) onto key/value/tuple targets ---
        key_target = value_target = tuple_target = None
        if keys and values:
            if node.target.is_sequence_constructor:
                if len(node.target.args) == 2:
                    key_target, value_target = node.target.args
                else:
                    # unusual case that may or may not lead to an error
                    return node
            else:
                tuple_target = node.target
        elif keys:
            key_target = node.target
        else:
            value_target = node.target
        if isinstance(node.body, Nodes.StatListNode):
            body = node.body
        else:
            body = Nodes.StatListNode(pos = node.body.pos,
                                      stats = [node.body])
        # keep original length to guard against dict modification
        dict_len_temp = UtilNodes.TempHandle(PyrexTypes.c_py_ssize_t_type)
        temps.append(dict_len_temp)
        dict_len_temp_addr = ExprNodes.AmpersandNode(
            node.pos, operand=dict_len_temp.ref(dict_obj.pos),
            type=PyrexTypes.c_ptr_type(dict_len_temp.type))
        # flag: is the iterated object a real dict (fast path) or not?
        temp = UtilNodes.TempHandle(PyrexTypes.c_int_type)
        temps.append(temp)
        is_dict_temp = temp.ref(node.pos)
        is_dict_temp_addr = ExprNodes.AmpersandNode(
            node.pos, operand=is_dict_temp,
            type=PyrexTypes.c_ptr_type(temp.type))
        # node that advances the iteration and fills the targets
        iter_next_node = Nodes.DictIterationNextNode(
            dict_temp, dict_len_temp.ref(dict_obj.pos), pos_temp,
            key_target, value_target, tuple_target,
            is_dict_temp)
        iter_next_node = iter_next_node.analyse_expressions(self.current_env())
        body.stats[0:0] = [iter_next_node]
        if method:
            method_node = ExprNodes.StringNode(
                dict_obj.pos, is_identifier=True, value=method)
            dict_obj = dict_obj.as_none_safe_node(
                "'NoneType' object has no attribute '%s'",
                error = "PyExc_AttributeError",
                format_args = [method])
        else:
            method_node = ExprNodes.NullNode(dict_obj.pos)
            dict_obj = dict_obj.as_none_safe_node("'NoneType' object is not iterable")
        def flag_node(value):
            # build a 0/1 IntNode from a Python truth value
            value = value and 1 or 0
            return ExprNodes.IntNode(node.pos, value=str(value), constant_result=value)
        result_code = [
            Nodes.SingleAssignmentNode(
                node.pos,
                lhs = pos_temp,
                rhs = ExprNodes.IntNode(node.pos, value='0',
                                        constant_result=0)),
            Nodes.SingleAssignmentNode(
                dict_obj.pos,
                lhs = dict_temp,
                rhs = ExprNodes.PythonCapiCallNode(
                    dict_obj.pos,
                    "__Pyx_dict_iterator",
                    self.PyDict_Iterator_func_type,
                    utility_code = UtilityCode.load_cached("dict_iter", "Optimize.c"),
                    args = [dict_obj, flag_node(dict_obj.type is Builtin.dict_type),
                            method_node, dict_len_temp_addr, is_dict_temp_addr,
                            ],
                    is_temp=True,
                )),
            # the while loop terminates via DictIterationNextNode
            Nodes.WhileStatNode(
                node.pos,
                condition = None,
                body = body,
                else_clause = node.else_clause
                )
            ]
        return UtilNodes.TempsBlockNode(
            node.pos, temps=temps,
            body=Nodes.StatListNode(
                node.pos,
                stats = result_code
                ))
PyDict_Iterator_func_type = PyrexTypes.CFuncType(
PyrexTypes.py_object_type, [
PyrexTypes.CFuncTypeArg("dict", PyrexTypes.py_object_type, None),
PyrexTypes.CFuncTypeArg("is_dict", PyrexTypes.c_int_type, None),
PyrexTypes.CFuncTypeArg("method_name", PyrexTypes.py_object_type, None),
PyrexTypes.CFuncTypeArg("p_orig_length", PyrexTypes.c_py_ssize_t_ptr_type, None),
PyrexTypes.CFuncTypeArg("p_is_dict", PyrexTypes.c_int_ptr_type, None),
])
class SwitchTransform(Visitor.EnvTransform):
    """
    This transformation tries to turn long if statements into C switch statements.
    The requirement is that every clause be an (or of) var == value, where the var
    is common among all clauses and both var and value are ints.
    """
    # sentinel returned by the extract_*() helpers when no switch can be built
    NO_MATCH = (None, None, None)
    def extract_conditions(self, cond, allow_not_in):
        # Return (not_in, tested_var, [value nodes]) for a switchable
        # condition, or NO_MATCH.  'allow_not_in' permits negated tests
        # ('!=' / 'not in'), which invert the case/else bodies later on.
        while True:
            # unwrap nodes that do not change the compared value
            if isinstance(cond, (ExprNodes.CoerceToTempNode,
                                 ExprNodes.CoerceToBooleanNode)):
                cond = cond.arg
            elif isinstance(cond, ExprNodes.BoolBinopResultNode):
                cond = cond.arg.arg
            elif isinstance(cond, UtilNodes.EvalWithTempExprNode):
                # this is what we get from the FlattenInListTransform
                cond = cond.subexpression
            elif isinstance(cond, ExprNodes.TypecastNode):
                cond = cond.operand
            else:
                break
        if isinstance(cond, ExprNodes.PrimaryCmpNode):
            if cond.cascade is not None:
                return self.NO_MATCH
            elif cond.is_c_string_contains() and \
                   isinstance(cond.operand2, (ExprNodes.UnicodeNode, ExprNodes.BytesNode)):
                # 'c in "<chars>"' => switch over the character values
                not_in = cond.operator == 'not_in'
                if not_in and not allow_not_in:
                    return self.NO_MATCH
                if isinstance(cond.operand2, ExprNodes.UnicodeNode) and \
                       cond.operand2.contains_surrogates():
                    # dealing with surrogates leads to different
                    # behaviour on wide and narrow Unicode
                    # platforms => refuse to optimise this case
                    return self.NO_MATCH
                return not_in, cond.operand1, self.extract_in_string_conditions(cond.operand2)
            elif not cond.is_python_comparison():
                if cond.operator == '==':
                    not_in = False
                elif allow_not_in and cond.operator == '!=':
                    not_in = True
                else:
                    return self.NO_MATCH
                # this looks somewhat silly, but it does the right
                # checks for NameNode and AttributeNode
                if is_common_value(cond.operand1, cond.operand1):
                    if cond.operand2.is_literal:
                        return not_in, cond.operand1, [cond.operand2]
                    elif getattr(cond.operand2, 'entry', None) \
                             and cond.operand2.entry.is_const:
                        return not_in, cond.operand1, [cond.operand2]
                if is_common_value(cond.operand2, cond.operand2):
                    if cond.operand1.is_literal:
                        return not_in, cond.operand2, [cond.operand1]
                    elif getattr(cond.operand1, 'entry', None) \
                             and cond.operand1.entry.is_const:
                        return not_in, cond.operand2, [cond.operand1]
        elif isinstance(cond, ExprNodes.BoolBinopNode):
            # 'a == x or a == y' (or the De Morgan dual with '!=' / 'and')
            if cond.operator == 'or' or (allow_not_in and cond.operator == 'and'):
                allow_not_in = (cond.operator == 'and')
                not_in_1, t1, c1 = self.extract_conditions(cond.operand1, allow_not_in)
                not_in_2, t2, c2 = self.extract_conditions(cond.operand2, allow_not_in)
                if t1 is not None and not_in_1 == not_in_2 and is_common_value(t1, t2):
                    if (not not_in_1) or allow_not_in:
                        return not_in_1, t1, c1+c2
        return self.NO_MATCH
    def extract_in_string_conditions(self, string_literal):
        # Return the list of character value nodes for 'c in <literal>'.
        if isinstance(string_literal, ExprNodes.UnicodeNode):
            charvals = list(map(ord, set(string_literal.value)))
            charvals.sort()
            return [ ExprNodes.IntNode(string_literal.pos, value=str(charval),
                                       constant_result=charval)
                     for charval in charvals ]
        else:
            # this is a bit tricky as Py3's bytes type returns
            # integers on iteration, whereas Py2 returns 1-char byte
            # strings
            characters = string_literal.value
            characters = list(set([ characters[i:i+1] for i in range(len(characters)) ]))
            characters.sort()
            return [ ExprNodes.CharNode(string_literal.pos, value=charval,
                                        constant_result=charval)
                     for charval in characters ]
    def extract_common_conditions(self, common_var, condition, allow_not_in):
        # Like extract_conditions(), but additionally require the tested
        # variable to match 'common_var' (if given) and all types to be
        # int/enum so a C switch is possible.
        not_in, var, conditions = self.extract_conditions(condition, allow_not_in)
        if var is None:
            return self.NO_MATCH
        elif common_var is not None and not is_common_value(var, common_var):
            return self.NO_MATCH
        elif not (var.type.is_int or var.type.is_enum) or sum([not (cond.type.is_int or cond.type.is_enum) for cond in conditions]):
            return self.NO_MATCH
        return not_in, var, conditions
    def has_duplicate_values(self, condition_values):
        # duplicated values don't work in a switch statement
        seen = set()
        for value in condition_values:
            if value.has_constant_result():
                if value.constant_result in seen:
                    return True
                seen.add(value.constant_result)
            else:
                # this isn't completely safe as we don't know the
                # final C value, but this is about the best we can do
                try:
                    if value.entry.cname in seen:
                        return True
                except AttributeError:
                    return True  # play safe
                seen.add(value.entry.cname)
        return False
    def visit_IfStatNode(self, node):
        # Turn an if/elif chain over a common int variable into a switch.
        if not self.current_directives.get('optimize.use_switch'):
            self.visitchildren(node)
            return node
        common_var = None
        cases = []
        for if_clause in node.if_clauses:
            _, common_var, conditions = self.extract_common_conditions(
                common_var, if_clause.condition, False)
            if common_var is None:
                self.visitchildren(node)
                return node
            cases.append(Nodes.SwitchCaseNode(pos = if_clause.pos,
                                              conditions = conditions,
                                              body = if_clause.body))
        condition_values = [
            cond for case in cases for cond in case.conditions]
        if len(condition_values) < 2:
            self.visitchildren(node)
            return node
        if self.has_duplicate_values(condition_values):
            self.visitchildren(node)
            return node
        common_var = unwrap_node(common_var)
        switch_node = Nodes.SwitchStatNode(pos = node.pos,
                                           test = common_var,
                                           cases = cases,
                                           else_clause = node.else_clause)
        return switch_node
    def visit_CondExprNode(self, node):
        # 'a if x in (...) else b' => switch assigning a or b.
        if not self.current_directives.get('optimize.use_switch'):
            self.visitchildren(node)
            return node
        not_in, common_var, conditions = self.extract_common_conditions(
            None, node.test, True)
        if common_var is None \
               or len(conditions) < 2 \
               or self.has_duplicate_values(conditions):
            self.visitchildren(node)
            return node
        return self.build_simple_switch_statement(
            node, common_var, conditions, not_in,
            node.true_val, node.false_val)
    def visit_BoolBinopNode(self, node):
        # 'x == a or x == b or ...' => switch assigning True/False.
        if not self.current_directives.get('optimize.use_switch'):
            self.visitchildren(node)
            return node
        not_in, common_var, conditions = self.extract_common_conditions(
            None, node, True)
        if common_var is None \
               or len(conditions) < 2 \
               or self.has_duplicate_values(conditions):
            self.visitchildren(node)
            node.wrap_operands(self.current_env())  # in case we changed the operands
            return node
        return self.build_simple_switch_statement(
            node, common_var, conditions, not_in,
            ExprNodes.BoolNode(node.pos, value=True, constant_result=True),
            ExprNodes.BoolNode(node.pos, value=False, constant_result=False))
    def visit_PrimaryCmpNode(self, node):
        # 'x in (a, b, ...)' => switch assigning True/False.
        if not self.current_directives.get('optimize.use_switch'):
            self.visitchildren(node)
            return node
        not_in, common_var, conditions = self.extract_common_conditions(
            None, node, True)
        if common_var is None \
               or len(conditions) < 2 \
               or self.has_duplicate_values(conditions):
            self.visitchildren(node)
            return node
        return self.build_simple_switch_statement(
            node, common_var, conditions, not_in,
            ExprNodes.BoolNode(node.pos, value=True, constant_result=True),
            ExprNodes.BoolNode(node.pos, value=False, constant_result=False))
    def build_simple_switch_statement(self, node, common_var, conditions,
                                      not_in, true_val, false_val):
        # Build a switch with one case (true value) and an else clause
        # (false value), writing the result into a temp that replaces
        # the original expression node.
        result_ref = UtilNodes.ResultRefNode(node)
        true_body = Nodes.SingleAssignmentNode(
            node.pos,
            lhs=result_ref,
            rhs=true_val.coerce_to(node.type, self.current_env()),
            first=True)
        false_body = Nodes.SingleAssignmentNode(
            node.pos,
            lhs=result_ref,
            rhs=false_val.coerce_to(node.type, self.current_env()),
            first=True)
        if not_in:
            # negated test => swap the branch bodies
            true_body, false_body = false_body, true_body
        cases = [Nodes.SwitchCaseNode(pos = node.pos,
                                      conditions = conditions,
                                      body = true_body)]
        common_var = unwrap_node(common_var)
        switch_node = Nodes.SwitchStatNode(pos = node.pos,
                                           test = common_var,
                                           cases = cases,
                                           else_clause = false_body)
        replacement = UtilNodes.TempResultFromStatNode(result_ref, switch_node)
        return replacement
    def visit_EvalWithTempExprNode(self, node):
        if not self.current_directives.get('optimize.use_switch'):
            self.visitchildren(node)
            return node
        # drop unused expression temp from FlattenInListTransform
        orig_expr = node.subexpression
        temp_ref = node.lazy_temp
        self.visitchildren(node)
        if node.subexpression is not orig_expr:
            # node was restructured => check if temp is still used
            if not Visitor.tree_contains(node.subexpression, temp_ref):
                return node.subexpression
        return node
    visit_Node = Visitor.VisitorTransform.recurse_to_children
class FlattenInListTransform(Visitor.VisitorTransform, SkipDeclarations):
    """
    This transformation flattens "x in [val1, ..., valn]" into a sequential list
    of comparisons.
    """
    def visit_PrimaryCmpNode(self, node):
        # Replace 'x in (v1, ..., vn)' by '(x == v1) or ... or (x == vn)'
        # (resp. 'and' of '!=' for 'not in'), keeping 'x' in a temp so it
        # is only evaluated once.
        self.visitchildren(node)
        if node.cascade is not None:
            return node
        elif node.operator == 'in':
            conjunction = 'or'
            eq_or_neq = '=='
        elif node.operator == 'not_in':
            conjunction = 'and'
            eq_or_neq = '!='
        else:
            return node
        if not isinstance(node.operand2, (ExprNodes.TupleNode,
                                          ExprNodes.ListNode,
                                          ExprNodes.SetNode)):
            return node
        args = node.operand2.args
        if len(args) == 0:
            # note: lhs may have side effects
            return node
        lhs = UtilNodes.ResultRefNode(node.operand1)
        conds = []
        temps = []
        for arg in args:
            try:
                # Trial optimisation to avoid redundant temp
                # assignments. However, since is_simple() is meant to
                # be called after type analysis, we ignore any errors
                # and just play safe in that case.
                is_simple_arg = arg.is_simple()
            except Exception:
                is_simple_arg = False
            if not is_simple_arg:
                # must evaluate all non-simple RHS before doing the comparisons
                arg = UtilNodes.LetRefNode(arg)
                temps.append(arg)
            cond = ExprNodes.PrimaryCmpNode(
                pos = node.pos,
                operand1 = lhs,
                operator = eq_or_neq,
                operand2 = arg,
                cascade = None)
            conds.append(ExprNodes.TypecastNode(
                pos = node.pos,
                operand = cond,
                type = PyrexTypes.c_bint_type))
        def concat(left, right):
            # combine two comparisons with 'or' (in) or 'and' (not in)
            return ExprNodes.BoolBinopNode(
                pos = node.pos,
                operator = conjunction,
                operand1 = left,
                operand2 = right)
        condition = reduce(concat, conds)
        # wrap in temp assignments: innermost holds the lhs value
        new_node = UtilNodes.EvalWithTempExprNode(lhs, condition)
        for temp in temps[::-1]:
            new_node = UtilNodes.EvalWithTempExprNode(temp, new_node)
        return new_node
    visit_Node = Visitor.VisitorTransform.recurse_to_children
class DropRefcountingTransform(Visitor.VisitorTransform):
    """Drop ref-counting in safe places.
    """
    visit_Node = Visitor.VisitorTransform.recurse_to_children
    def visit_ParallelAssignmentNode(self, node):
        """
        Parallel swap assignments like 'a,b = b,a' are safe.
        """
        left_names, right_names = [], []
        left_indices, right_indices = [], []
        temps = []
        # collect the operands of all (simple) assignments in the node
        for stat in node.stats:
            if isinstance(stat, Nodes.SingleAssignmentNode):
                if not self._extract_operand(stat.lhs, left_names,
                                             left_indices, temps):
                    return node
                if not self._extract_operand(stat.rhs, right_names,
                                             right_indices, temps):
                    return node
            elif isinstance(stat, Nodes.CascadedAssignmentNode):
                # FIXME
                return node
            else:
                return node
        if left_names or right_names:
            # lhs/rhs names must be a non-redundant permutation
            lnames = [ path for path, n in left_names ]
            rnames = [ path for path, n in right_names ]
            if set(lnames) != set(rnames):
                return node
            if len(set(lnames)) != len(right_names):
                return node
        if left_indices or right_indices:
            # base name and index of index nodes must be a
            # non-redundant permutation
            lindices = []
            for lhs_node in left_indices:
                index_id = self._extract_index_id(lhs_node)
                if not index_id:
                    return node
                lindices.append(index_id)
            rindices = []
            for rhs_node in right_indices:
                index_id = self._extract_index_id(rhs_node)
                if not index_id:
                    return node
                rindices.append(index_id)
            if set(lindices) != set(rindices):
                return node
            if len(set(lindices)) != len(right_indices):
                return node
            # really supporting IndexNode requires support in
            # __Pyx_GetItemInt(), so let's stop short for now
            return node
        # it's a pure permutation => refcounting can be dropped
        temp_args = [t.arg for t in temps]
        for temp in temps:
            temp.use_managed_ref = False
        for _, name_node in left_names + right_names:
            if name_node not in temp_args:
                name_node.use_managed_ref = False
        for index_node in left_indices + right_indices:
            index_node.use_managed_ref = False
        return node
    def _extract_operand(self, node, names, indices, temps):
        # Classify an assignment operand: append ('dotted.path', node) to
        # 'names' for (attribute) names, index nodes to 'indices', temps
        # to 'temps'.  Returns False when the operand is not supported.
        node = unwrap_node(node)
        if not node.type.is_pyobject:
            return False
        if isinstance(node, ExprNodes.CoerceToTempNode):
            temps.append(node)
            node = node.arg
        name_path = []
        obj_node = node
        while isinstance(obj_node, ExprNodes.AttributeNode):
            if obj_node.is_py_attr:
                return False
            name_path.append(obj_node.member)
            obj_node = obj_node.obj
        if isinstance(obj_node, ExprNodes.NameNode):
            name_path.append(obj_node.name)
            names.append( ('.'.join(name_path[::-1]), node) )
        elif isinstance(node, ExprNodes.IndexNode):
            if node.base.type != Builtin.list_type:
                return False
            if not node.index.type.is_int:
                return False
            if not isinstance(node.base, ExprNodes.NameNode):
                return False
            indices.append(node)
        else:
            return False
        return True
    def _extract_index_id(self, index_node):
        # Return a hashable (base name, index) identifier for an index
        # node, or None when the index form is not supported.
        base = index_node.base
        index = index_node.index
        if isinstance(index, ExprNodes.NameNode):
            index_val = index.name
        elif isinstance(index, ExprNodes.ConstNode):
            # FIXME:
            return None
        else:
            return None
        return (base.name, index_val)
class EarlyReplaceBuiltinCalls(Visitor.EnvTransform):
"""Optimize some common calls to builtin types *before* the type
analysis phase and *after* the declarations analysis phase.
This transform cannot make use of any argument types, but it can
restructure the tree in a way that the type analysis phase can
respond to.
Introducing C function calls here may not be a good idea. Move
them to the OptimizeBuiltinCalls transform instead, which runs
after type analysis.
"""
# only intercept on call nodes
visit_Node = Visitor.VisitorTransform.recurse_to_children
def visit_SimpleCallNode(self, node):
self.visitchildren(node)
function = node.function
if not self._function_is_builtin_name(function):
return node
return self._dispatch_to_handler(node, function, node.args)
def visit_GeneralCallNode(self, node):
self.visitchildren(node)
function = node.function
if not self._function_is_builtin_name(function):
return node
arg_tuple = node.positional_args
if not isinstance(arg_tuple, ExprNodes.TupleNode):
return node
args = arg_tuple.args
return self._dispatch_to_handler(
node, function, args, node.keyword_args)
def _function_is_builtin_name(self, function):
if not function.is_name:
return False
env = self.current_env()
entry = env.lookup(function.name)
if entry is not env.builtin_scope().lookup_here(function.name):
return False
# if entry is None, it's at least an undeclared name, so likely builtin
return True
def _dispatch_to_handler(self, node, function, args, kwargs=None):
if kwargs is None:
handler_name = '_handle_simple_function_%s' % function.name
else:
handler_name = '_handle_general_function_%s' % function.name
handle_call = getattr(self, handler_name, None)
if handle_call is not None:
if kwargs is None:
return handle_call(node, args)
else:
return handle_call(node, args, kwargs)
return node
def _inject_capi_function(self, node, cname, func_type, utility_code=None):
node.function = ExprNodes.PythonCapiFunctionNode(
node.function.pos, node.function.name, cname, func_type,
utility_code = utility_code)
def _error_wrong_arg_count(self, function_name, node, args, expected=None):
if not expected: # None or 0
arg_str = ''
elif isinstance(expected, basestring) or expected > 1:
arg_str = '...'
elif expected == 1:
arg_str = 'x'
else:
arg_str = ''
if expected is not None:
expected_str = 'expected %s, ' % expected
else:
expected_str = ''
error(node.pos, "%s(%s) called with wrong number of args, %sfound %d" % (
function_name, arg_str, expected_str, len(args)))
# specific handlers for simple call nodes
def _handle_simple_function_float(self, node, pos_args):
if not pos_args:
return ExprNodes.FloatNode(node.pos, value='0.0')
if len(pos_args) > 1:
self._error_wrong_arg_count('float', node, pos_args, 1)
arg_type = getattr(pos_args[0], 'type', None)
if arg_type in (PyrexTypes.c_double_type, Builtin.float_type):
return pos_args[0]
return node
def _handle_simple_function_slice(self, node, pos_args):
arg_count = len(pos_args)
start = step = None
if arg_count == 1:
stop, = pos_args
elif arg_count == 2:
start, stop = pos_args
elif arg_count == 3:
start, stop, step = pos_args
else:
self._error_wrong_arg_count('slice', node, pos_args)
return node
return ExprNodes.SliceNode(
node.pos,
start=start or ExprNodes.NoneNode(node.pos),
stop=stop,
step=step or ExprNodes.NoneNode(node.pos))
    class YieldNodeCollector(Visitor.TreeVisitor):
        # Collects yield expressions (and the expression statements that
        # wrap them) while walking a generator body.
        def __init__(self):
            Visitor.TreeVisitor.__init__(self)
            self.yield_stat_nodes = {}  # yield node -> enclosing ExprStatNode
            self.yield_nodes = []       # all yield expression nodes found
        visit_Node = Visitor.TreeVisitor.visitchildren
        # XXX: disable inlining while it's not back supported
        # NOTE: the double-underscore prefix (name mangling) keeps the
        # methods below out of the visitor dispatch, disabling them.
        def __visit_YieldExprNode(self, node):
            self.yield_nodes.append(node)
            self.visitchildren(node)
        def __visit_ExprStatNode(self, node):
            self.visitchildren(node)
            if node.expr in self.yield_nodes:
                self.yield_stat_nodes[node.expr] = node
        def __visit_GeneratorExpressionNode(self, node):
            # enable when we support generic generator expressions
            #
            # everything below this node is out of scope
            pass
def _find_single_yield_expression(self, node):
collector = self.YieldNodeCollector()
collector.visitchildren(node)
if len(collector.yield_nodes) != 1:
return None, None
yield_node = collector.yield_nodes[0]
try:
return (yield_node.arg, collector.yield_stat_nodes[yield_node])
except KeyError:
return None, None
def _handle_simple_function_all(self, node, pos_args):
"""Transform
_result = all(x for L in LL for x in L)
into
for L in LL:
for x in L:
if not x:
_result = False
break
else:
continue
break
else:
_result = True
"""
return self._transform_any_all(node, pos_args, False)
def _handle_simple_function_any(self, node, pos_args):
"""Transform
_result = any(x for L in LL for x in L)
into
for L in LL:
for x in L:
if x:
_result = True
break
else:
continue
break
else:
_result = False
"""
return self._transform_any_all(node, pos_args, True)
    def _transform_any_all(self, node, pos_args, is_any):
        """Shared implementation of the any()/all() genexpr inlining.

        Replaces the single yield statement inside the generator loop by
        an 'if' that assigns the short-circuit result and breaks out of
        all nested loops; the loops' else clauses propagate the break and
        finally assign the fall-through result.
        """
        if len(pos_args) != 1:
            return node
        if not isinstance(pos_args[0], ExprNodes.GeneratorExpressionNode):
            return node
        gen_expr_node = pos_args[0]
        loop_node = gen_expr_node.loop
        yield_expression, yield_stat_node = self._find_single_yield_expression(loop_node)
        if yield_expression is None:
            return node
        if is_any:
            condition = yield_expression
        else:
            # all() short-circuits on a *falsy* item, so negate the test
            condition = ExprNodes.NotNode(yield_expression.pos, operand = yield_expression)
        # holds the boolean result of the inlined expression
        result_ref = UtilNodes.ResultRefNode(pos=node.pos, type=PyrexTypes.c_bint_type)
        # "if <condition>: _result = <is_any>; break"
        test_node = Nodes.IfStatNode(
            yield_expression.pos,
            else_clause = None,
            if_clauses = [ Nodes.IfClauseNode(
                yield_expression.pos,
                condition = condition,
                body = Nodes.StatListNode(
                    node.pos,
                    stats = [
                        Nodes.SingleAssignmentNode(
                            node.pos,
                            lhs = result_ref,
                            rhs = ExprNodes.BoolNode(yield_expression.pos, value = is_any,
                                                     constant_result = is_any)),
                        Nodes.BreakStatNode(node.pos)
                        ])) ]
            )
        # chain "else: continue" / "break" through the nested loops so an
        # inner break escapes the entire loop nest
        loop = loop_node
        while isinstance(loop.body, Nodes.LoopNode):
            next_loop = loop.body
            loop.body = Nodes.StatListNode(loop.body.pos, stats = [
                loop.body,
                Nodes.BreakStatNode(yield_expression.pos)
                ])
            next_loop.else_clause = Nodes.ContinueStatNode(yield_expression.pos)
            loop = next_loop
        # fall-through: loop completed without a break -> opposite result
        loop_node.else_clause = Nodes.SingleAssignmentNode(
            node.pos,
            lhs = result_ref,
            rhs = ExprNodes.BoolNode(yield_expression.pos, value = not is_any,
                                     constant_result = not is_any))
        Visitor.recursively_replace_node(loop_node, yield_stat_node, test_node)
        return ExprNodes.InlinedGeneratorExpressionNode(
            gen_expr_node.pos, loop = loop_node, result_node = result_ref,
            expr_scope = gen_expr_node.expr_scope, orig_func = is_any and 'any' or 'all')
PySequence_List_func_type = PyrexTypes.CFuncType(
Builtin.list_type,
[PyrexTypes.CFuncTypeArg("it", PyrexTypes.py_object_type, None)])
    def _handle_simple_function_sorted(self, node, pos_args):
        """Transform sorted(genexpr) and sorted([listcomp]) into
        [listcomp].sort(). CPython just reads the iterable into a
        list and calls .sort() on it. Expanding the iterable in a
        listcomp is still faster and the result can be sorted in
        place.
        """
        if len(pos_args) != 1:
            return node
        if isinstance(pos_args[0], ExprNodes.ComprehensionNode) \
                and pos_args[0].type is Builtin.list_type:
            # already a list comprehension - sort its result in place
            listcomp_node = pos_args[0]
            loop_node = listcomp_node.loop
        elif isinstance(pos_args[0], ExprNodes.GeneratorExpressionNode):
            # turn the genexpr into a list comprehension by replacing its
            # single yield with a list append
            gen_expr_node = pos_args[0]
            loop_node = gen_expr_node.loop
            yield_expression, yield_stat_node = self._find_single_yield_expression(loop_node)
            if yield_expression is None:
                return node
            append_node = ExprNodes.ComprehensionAppendNode(
                yield_expression.pos, expr = yield_expression)
            Visitor.recursively_replace_node(loop_node, yield_stat_node, append_node)
            listcomp_node = ExprNodes.ComprehensionNode(
                gen_expr_node.pos, loop = loop_node,
                append = append_node, type = Builtin.list_type,
                expr_scope = gen_expr_node.expr_scope,
                has_local_scope = True)
            append_node.target = listcomp_node
        elif isinstance(pos_args[0], (ExprNodes.ListNode, ExprNodes.TupleNode)):
            # sorted([a, b, c]) or sorted((a, b, c)). The result of the latter
            # is a list in CPython, so change it into one.
            expr = pos_args[0].as_list()
            listcomp_node = loop_node = expr
        else:
            # Interestingly, PySequence_List works on a lot of non-sequence
            # things as well.
            listcomp_node = loop_node = ExprNodes.PythonCapiCallNode(
                node.pos, "PySequence_List", self.PySequence_List_func_type,
                args=pos_args, is_temp=True)
        # assign the list to a temp, call .sort() on it, and yield the temp
        result_node = UtilNodes.ResultRefNode(
            pos = loop_node.pos, type = Builtin.list_type, may_hold_none=False)
        listcomp_assign_node = Nodes.SingleAssignmentNode(
            node.pos, lhs = result_node, rhs = listcomp_node, first = True)
        sort_method = ExprNodes.AttributeNode(
            node.pos, obj = result_node, attribute = EncodedString('sort'),
            # entry ? type ?
            needs_none_check = False)
        sort_node = Nodes.ExprStatNode(
            node.pos, expr = ExprNodes.SimpleCallNode(
                node.pos, function = sort_method, args = []))
        sort_node.analyse_declarations(self.current_env())
        return UtilNodes.TempResultFromStatNode(
            result_node,
            Nodes.StatListNode(node.pos, stats = [ listcomp_assign_node, sort_node ]))
    def _handle_simple_function_sum(self, node, pos_args):
        """Transform sum(genexpr) into an equivalent inlined aggregation loop.

        Only applies when the yielded expression is an integer literal, so
        the inlined '+' cannot change semantics (e.g. no __radd__ surprises).
        An optional second argument provides the start value (default 0).
        """
        if len(pos_args) not in (1,2):
            return node
        if not isinstance(pos_args[0], (ExprNodes.GeneratorExpressionNode,
                                        ExprNodes.ComprehensionNode)):
            return node
        gen_expr_node = pos_args[0]
        loop_node = gen_expr_node.loop
        if isinstance(gen_expr_node, ExprNodes.GeneratorExpressionNode):
            yield_expression, yield_stat_node = self._find_single_yield_expression(loop_node)
            if yield_expression is None:
                return node
        else: # ComprehensionNode
            yield_stat_node = gen_expr_node.append
            yield_expression = yield_stat_node.expr
            try:
                if not yield_expression.is_literal or not yield_expression.type.is_int:
                    return node
            except AttributeError:
                return node # in case we don't have a type yet
            # special case: old Py2 backwards compatible "sum([int_const for ...])"
            # can safely be unpacked into a genexpr
        if len(pos_args) == 1:
            start = ExprNodes.IntNode(node.pos, value='0', constant_result=0)
        else:
            start = pos_args[1]
        # accumulator for the running sum
        result_ref = UtilNodes.ResultRefNode(pos=node.pos, type=PyrexTypes.py_object_type)
        # replace the yield by "_result = _result + <item>"
        add_node = Nodes.SingleAssignmentNode(
            yield_expression.pos,
            lhs = result_ref,
            rhs = ExprNodes.binop_node(node.pos, '+', result_ref, yield_expression)
            )
        Visitor.recursively_replace_node(loop_node, yield_stat_node, add_node)
        exec_code = Nodes.StatListNode(
            node.pos,
            stats = [
                Nodes.SingleAssignmentNode(
                    start.pos,
                    lhs = UtilNodes.ResultRefNode(pos=node.pos, expression=result_ref),
                    rhs = start,
                    first = True),
                loop_node
                ])
        return ExprNodes.InlinedGeneratorExpressionNode(
            gen_expr_node.pos, loop = exec_code, result_node = result_ref,
            expr_scope = gen_expr_node.expr_scope, orig_func = 'sum',
            has_local_scope = gen_expr_node.has_local_scope)
    def _handle_simple_function_min(self, node, pos_args):
        # min(a, b, ...) -> cascade of conditional expressions using '<'
        return self._optimise_min_max(node, pos_args, '<')
    def _handle_simple_function_max(self, node, pos_args):
        # max(a, b, ...) -> cascade of conditional expressions using '>'
        return self._optimise_min_max(node, pos_args, '>')
    def _optimise_min_max(self, node, args, operator):
        """Replace min(a,b,...) and max(a,b,...) by explicit comparison code.

        Builds a right-nested chain of conditional expressions, e.g.
        min(a, b, c) -> (c if c < t1 else t1) where t1 = (b if b < a else a),
        with each intermediate result held in a temp so every argument is
        evaluated exactly once, left to right.
        """
        if len(args) <= 1:
            if len(args) == 1 and args[0].is_sequence_constructor:
                # min((a, b, c)) - unpack the literal sequence
                args = args[0].args
            else:
                # leave this to Python
                return node
        cascaded_nodes = list(map(UtilNodes.ResultRefNode, args[1:]))
        last_result = args[0]
        for arg_node in cascaded_nodes:
            result_ref = UtilNodes.ResultRefNode(last_result)
            last_result = ExprNodes.CondExprNode(
                arg_node.pos,
                true_val = arg_node,
                false_val = result_ref,
                test = ExprNodes.PrimaryCmpNode(
                    arg_node.pos,
                    operand1 = arg_node,
                    operator = operator,
                    operand2 = result_ref,
                    )
                )
            last_result = UtilNodes.EvalWithTempExprNode(result_ref, last_result)
        # wrap in reverse order so outer temps are evaluated first
        for ref_node in cascaded_nodes[::-1]:
            last_result = UtilNodes.EvalWithTempExprNode(ref_node, last_result)
        return last_result
    # NOTE: the '_DISABLED_' prefix keeps this handler from being
    # dispatched; see the comment below for why it is kept around.
    def _DISABLED_handle_simple_function_tuple(self, node, pos_args):
        if not pos_args:
            return ExprNodes.TupleNode(node.pos, args=[], constant_result=())
        # This is a bit special - for iterables (including genexps),
        # Python actually overallocates and resizes a newly created
        # tuple incrementally while reading items, which we can't
        # easily do without explicit node support. Instead, we read
        # the items into a list and then copy them into a tuple of the
        # final size. This takes up to twice as much memory, but will
        # have to do until we have real support for genexps.
        result = self._transform_list_set_genexpr(node, pos_args, Builtin.list_type)
        if result is not node:
            return ExprNodes.AsTupleNode(node.pos, arg=result)
        return node
def _handle_simple_function_frozenset(self, node, pos_args):
"""Replace frozenset([...]) by frozenset((...)) as tuples are more efficient.
"""
if len(pos_args) != 1:
return node
if pos_args[0].is_sequence_constructor and not pos_args[0].args:
del pos_args[0]
elif isinstance(pos_args[0], ExprNodes.ListNode):
pos_args[0] = pos_args[0].as_tuple()
return node
def _handle_simple_function_list(self, node, pos_args):
if not pos_args:
return ExprNodes.ListNode(node.pos, args=[], constant_result=[])
return self._transform_list_set_genexpr(node, pos_args, Builtin.list_type)
def _handle_simple_function_set(self, node, pos_args):
if not pos_args:
return ExprNodes.SetNode(node.pos, args=[], constant_result=set())
return self._transform_list_set_genexpr(node, pos_args, Builtin.set_type)
    def _transform_list_set_genexpr(self, node, pos_args, target_type):
        """Replace set(genexpr) and list(genexpr) by a literal comprehension.

        Works by substituting the generator's single yield statement with
        a comprehension append into the target container.
        """
        if len(pos_args) > 1:
            return node
        if not isinstance(pos_args[0], ExprNodes.GeneratorExpressionNode):
            return node
        gen_expr_node = pos_args[0]
        loop_node = gen_expr_node.loop
        yield_expression, yield_stat_node = self._find_single_yield_expression(loop_node)
        if yield_expression is None:
            # not exactly one plain yield - cannot inline safely
            return node
        append_node = ExprNodes.ComprehensionAppendNode(
            yield_expression.pos,
            expr = yield_expression)
        Visitor.recursively_replace_node(loop_node, yield_stat_node, append_node)
        comp = ExprNodes.ComprehensionNode(
            node.pos,
            has_local_scope = True,
            expr_scope = gen_expr_node.expr_scope,
            loop = loop_node,
            append = append_node,
            type = target_type)
        append_node.target = comp
        return comp
    def _handle_simple_function_dict(self, node, pos_args):
        """Replace dict( (a,b) for ... ) by a literal { a:b for ... }.

        Requires the generator to yield exactly one 2-tuple per item, so
        it can be split into key and value expressions of a dict
        comprehension.
        """
        if len(pos_args) == 0:
            return ExprNodes.DictNode(node.pos, key_value_pairs=[], constant_result={})
        if len(pos_args) > 1:
            return node
        if not isinstance(pos_args[0], ExprNodes.GeneratorExpressionNode):
            return node
        gen_expr_node = pos_args[0]
        loop_node = gen_expr_node.loop
        yield_expression, yield_stat_node = self._find_single_yield_expression(loop_node)
        if yield_expression is None:
            return node
        if not isinstance(yield_expression, ExprNodes.TupleNode):
            return node
        if len(yield_expression.args) != 2:
            return node
        # replace "yield (k, v)" by a dict item append "d[k] = v"
        append_node = ExprNodes.DictComprehensionAppendNode(
            yield_expression.pos,
            key_expr = yield_expression.args[0],
            value_expr = yield_expression.args[1])
        Visitor.recursively_replace_node(loop_node, yield_stat_node, append_node)
        dictcomp = ExprNodes.ComprehensionNode(
            node.pos,
            has_local_scope = True,
            expr_scope = gen_expr_node.expr_scope,
            loop = loop_node,
            append = append_node,
            type = Builtin.dict_type)
        append_node.target = dictcomp
        return dictcomp
# specific handlers for general call nodes
def _handle_general_function_dict(self, node, pos_args, kwargs):
"""Replace dict(a=b,c=d,...) by the underlying keyword dict
construction which is done anyway.
"""
if len(pos_args) > 0:
return node
if not isinstance(kwargs, ExprNodes.DictNode):
return node
return kwargs
class InlineDefNodeCalls(Visitor.NodeRefCleanupMixin, Visitor.EnvTransform):
    """Inline calls to module-level Python functions when control flow
    analysis can prove which function object a name refers to.
    """
    visit_Node = Visitor.VisitorTransform.recurse_to_children

    def get_constant_value_node(self, name_node):
        """Return the single statically-known value assigned to
        *name_node*, or None when it cannot be determined.
        """
        cf_state = name_node.cf_state
        if cf_state is None or cf_state.cf_is_null:
            return None
        entry = self.current_env().lookup(name_node.name)
        if not entry:
            return None
        assignments = entry.cf_assignments
        if not assignments or len(assignments) != 1:
            # not just a single assignment in all closures
            return None
        return assignments[0].rhs

    def visit_SimpleCallNode(self, node):
        self.visitchildren(node)
        if self.current_directives.get('optimize.inline_defnode_calls'):
            func_name = node.function
            if func_name.is_name:
                func = self.get_constant_value_node(func_name)
                if isinstance(func, ExprNodes.PyCFunctionNode):
                    inlined = ExprNodes.InlinedDefNodeCallNode(
                        node.pos, function_name=func_name,
                        function=func, args=node.args)
                    if inlined.can_be_inlined():
                        return self.replace(node, inlined)
        return node
class OptimizeBuiltinCalls(Visitor.NodeRefCleanupMixin,
                           Visitor.MethodDispatcherTransform):
    """Optimize some common method calls and instantiation patterns
    for builtin types *after* the type analysis phase.

    Running after type analysis, this transform can only perform
    function replacements that do not alter the function return type
    in a way that was not anticipated by the type analysis.
    """
    ### cleanup to avoid redundant coercions to/from Python types
    # NOTE: the leading underscore (instead of 'visit_') keeps this
    # handler from being dispatched, i.e. it is intentionally disabled.
    def _visit_PyTypeTestNode(self, node):
        # disabled - appears to break assignments in some cases, and
        # also drops a None check, which might still be required
        """Flatten redundant type checks after tree changes.
        """
        old_arg = node.arg
        self.visitchildren(node)
        if old_arg is node.arg or node.arg.type != node.type:
            return node
        return node.arg
    # NOTE: the leading underscore (instead of 'visit_') keeps this
    # handler from being dispatched, i.e. it is intentionally disabled.
    def _visit_TypecastNode(self, node):
        # disabled - the user may have had a reason to put a type
        # cast, even if it looks redundant to Cython
        """
        Drop redundant type casts.
        """
        self.visitchildren(node)
        if node.type == node.operand.type:
            return node.operand
        return node
def visit_ExprStatNode(self, node):
"""
Drop useless coercions.
"""
self.visitchildren(node)
if isinstance(node.expr, ExprNodes.CoerceToPyTypeNode):
node.expr = node.expr.arg
return node
def visit_CoerceToBooleanNode(self, node):
"""Drop redundant conversion nodes after tree changes.
"""
self.visitchildren(node)
arg = node.arg
if isinstance(arg, ExprNodes.PyTypeTestNode):
arg = arg.arg
if isinstance(arg, ExprNodes.CoerceToPyTypeNode):
if arg.type in (PyrexTypes.py_object_type, Builtin.bool_type):
return arg.arg.coerce_to_boolean(self.current_env())
return node
    def visit_CoerceFromPyTypeNode(self, node):
        """Drop redundant conversion nodes after tree changes.

        Also, optimise away calls to Python's builtin int() and
        float() if the result is going to be coerced back into a C
        type anyway.
        """
        self.visitchildren(node)
        arg = node.arg
        if not arg.type.is_pyobject:
            # no Python conversion left at all, just do a C coercion instead
            if node.type == arg.type:
                return arg
            else:
                return arg.coerce_to(node.type, self.current_env())
        if isinstance(arg, ExprNodes.PyTypeTestNode):
            arg = arg.arg
        if arg.is_literal:
            # literal of a matching kind -> coerce the constant directly
            if (node.type.is_int and isinstance(arg, ExprNodes.IntNode) or
                    node.type.is_float and isinstance(arg, ExprNodes.FloatNode) or
                    node.type.is_int and isinstance(arg, ExprNodes.BoolNode)):
                return arg.coerce_to(node.type, self.current_env())
        elif isinstance(arg, ExprNodes.CoerceToPyTypeNode):
            if arg.type is PyrexTypes.py_object_type:
                if node.type.assignable_from(arg.arg.type):
                    # completely redundant C->Py->C coercion
                    return arg.arg.coerce_to(node.type, self.current_env())
        elif isinstance(arg, ExprNodes.SimpleCallNode):
            if node.type.is_int or node.type.is_float:
                # e.g. <int>(int(x)) or <double>(float(x))
                return self._optimise_numeric_cast_call(node, arg)
        elif isinstance(arg, ExprNodes.IndexNode) and not arg.is_buffer_access:
            # e.g. <char>(some_bytes[i]) with a C integer index
            index_node = arg.index
            if isinstance(index_node, ExprNodes.CoerceToPyTypeNode):
                index_node = index_node.arg
            if index_node.type.is_int:
                return self._optimise_int_indexing(node, arg, index_node)
        return node
PyBytes_GetItemInt_func_type = PyrexTypes.CFuncType(
PyrexTypes.c_char_type, [
PyrexTypes.CFuncTypeArg("bytes", Builtin.bytes_type, None),
PyrexTypes.CFuncTypeArg("index", PyrexTypes.c_py_ssize_t_type, None),
PyrexTypes.CFuncTypeArg("check_bounds", PyrexTypes.c_int_type, None),
],
exception_value = "((char)-1)",
exception_check = True)
    def _optimise_int_indexing(self, coerce_node, arg, index_node):
        """Replace a C coercion of bytes[int_index] by a direct C-level
        item read (__Pyx_PyBytes_GetItemInt), honouring the current
        'boundscheck' directive.
        """
        env = self.current_env()
        bound_check_bool = env.directives['boundscheck'] and 1 or 0
        if arg.base.type is Builtin.bytes_type:
            if coerce_node.type in (PyrexTypes.c_char_type, PyrexTypes.c_uchar_type):
                # bytes[index] -> char
                bound_check_node = ExprNodes.IntNode(
                    coerce_node.pos, value=str(bound_check_bool),
                    constant_result=bound_check_bool)
                node = ExprNodes.PythonCapiCallNode(
                    coerce_node.pos, "__Pyx_PyBytes_GetItemInt",
                    self.PyBytes_GetItemInt_func_type,
                    args=[
                        arg.base.as_none_safe_node("'NoneType' object is not subscriptable"),
                        index_node.coerce_to(PyrexTypes.c_py_ssize_t_type, env),
                        bound_check_node,
                        ],
                    is_temp=True,
                    utility_code=UtilityCode.load_cached(
                        'bytes_index', 'StringTools.c'))
                if coerce_node.type is not PyrexTypes.c_char_type:
                    node = node.coerce_to(coerce_node.type, env)
                return node
        return coerce_node
    def _optimise_numeric_cast_call(self, node, arg):
        """Replace <C type>(int(x)) / <C type>(float(x)) by a plain C
        typecast of x when the Python-level call is redundant.
        """
        function = arg.function
        if not isinstance(function, ExprNodes.NameNode) \
               or not function.type.is_builtin_type \
               or not isinstance(arg.arg_tuple, ExprNodes.TupleNode):
            return node
        args = arg.arg_tuple.args
        if len(args) != 1:
            return node
        func_arg = args[0]
        if isinstance(func_arg, ExprNodes.CoerceToPyTypeNode):
            # unwrap the C value that was boxed for the call
            func_arg = func_arg.arg
        elif func_arg.type.is_pyobject:
            # play safe: Python conversion might work on all sorts of things
            return node
        if function.name == 'int':
            if func_arg.type.is_int or node.type.is_int:
                if func_arg.type == node.type:
                    return func_arg
                elif node.type.assignable_from(func_arg.type) or func_arg.type.is_float:
                    return ExprNodes.TypecastNode(
                        node.pos, operand=func_arg, type=node.type)
        elif function.name == 'float':
            if func_arg.type.is_float or node.type.is_float:
                if func_arg.type == node.type:
                    return func_arg
                elif node.type.assignable_from(func_arg.type) or func_arg.type.is_float:
                    return ExprNodes.TypecastNode(
                        node.pos, operand=func_arg, type=node.type)
        return node
def _error_wrong_arg_count(self, function_name, node, args, expected=None):
if not expected: # None or 0
arg_str = ''
elif isinstance(expected, basestring) or expected > 1:
arg_str = '...'
elif expected == 1:
arg_str = 'x'
else:
arg_str = ''
if expected is not None:
expected_str = 'expected %s, ' % expected
else:
expected_str = ''
error(node.pos, "%s(%s) called with wrong number of args, %sfound %d" % (
function_name, arg_str, expected_str, len(args)))
    ### generic fallbacks
    def _handle_function(self, node, function_name, function, arg_list, kwargs):
        # generic fallback: no optimisation available for this function call
        return node
    def _handle_method(self, node, type_name, attr_name, function,
                       arg_list, is_unbound_method, kwargs):
        """
        Try to inject C-API calls for unbound method calls to builtin types.
        While the method declarations in Builtin.py already handle this, we
        can additionally resolve bound and unbound methods here that were
        assigned to variables ahead of time.
        """
        if kwargs:
            return node
        if not function or not function.is_attribute or not function.obj.is_name:
            # cannot track unbound method calls over more than one indirection as
            # the names might have been reassigned in the meantime
            return node
        type_entry = self.current_env().lookup(type_name)
        if not type_entry:
            return node
        # rebuild "TypeName.method" and try to resolve it as a C method
        method = ExprNodes.AttributeNode(
            node.function.pos,
            obj=ExprNodes.NameNode(
                function.pos,
                name=type_name,
                entry=type_entry,
                type=type_entry.type),
            attribute=attr_name,
            is_called=True).analyse_as_unbound_cmethod_node(self.current_env())
        if method is None:
            return node
        args = node.args
        if args is None and node.arg_tuple:
            args = node.arg_tuple.args
        call_node = ExprNodes.SimpleCallNode(
            node.pos,
            function=method,
            args=args)
        if not is_unbound_method:
            # bound call: the original object becomes the self argument
            call_node.self = function.obj
        call_node.analyse_c_function_call(self.current_env())
        call_node.analysed = True
        return call_node.coerce_to(node.type, self.current_env())
### builtin types
PyDict_Copy_func_type = PyrexTypes.CFuncType(
Builtin.dict_type, [
PyrexTypes.CFuncTypeArg("dict", Builtin.dict_type, None)
])
def _handle_simple_function_dict(self, node, function, pos_args):
"""Replace dict(some_dict) by PyDict_Copy(some_dict).
"""
if len(pos_args) != 1:
return node
arg = pos_args[0]
if arg.type is Builtin.dict_type:
arg = arg.as_none_safe_node("'NoneType' is not iterable")
return ExprNodes.PythonCapiCallNode(
node.pos, "PyDict_Copy", self.PyDict_Copy_func_type,
args = [arg],
is_temp = node.is_temp
)
return node
PySequence_List_func_type = PyrexTypes.CFuncType(
Builtin.list_type,
[PyrexTypes.CFuncTypeArg("it", PyrexTypes.py_object_type, None)])
def _handle_simple_function_list(self, node, function, pos_args):
"""Turn list(ob) into PySequence_List(ob).
"""
if len(pos_args) != 1:
return node
arg = pos_args[0]
return ExprNodes.PythonCapiCallNode(
node.pos, "PySequence_List", self.PySequence_List_func_type,
args=pos_args, is_temp=node.is_temp)
PyList_AsTuple_func_type = PyrexTypes.CFuncType(
Builtin.tuple_type, [
PyrexTypes.CFuncTypeArg("list", Builtin.list_type, None)
])
PySequence_Tuple_func_type = PyrexTypes.CFuncType(
Builtin.tuple_type,
[PyrexTypes.CFuncTypeArg("it", PyrexTypes.py_object_type, None)])
def _handle_simple_function_tuple(self, node, function, pos_args):
"""Replace tuple([...]) by PyList_AsTuple or PySequence_Tuple.
"""
if len(pos_args) != 1:
return node
arg = pos_args[0]
if arg.type is Builtin.tuple_type and not arg.may_be_none():
return arg
if arg.type is Builtin.list_type:
pos_args[0] = arg.as_none_safe_node(
"'NoneType' object is not iterable")
return ExprNodes.PythonCapiCallNode(
node.pos, "PyList_AsTuple", self.PyList_AsTuple_func_type,
args=pos_args, is_temp=node.is_temp)
else:
return ExprNodes.PythonCapiCallNode(
node.pos, "PySequence_Tuple", self.PySequence_Tuple_func_type,
args=pos_args, is_temp=node.is_temp)
PySet_New_func_type = PyrexTypes.CFuncType(
Builtin.set_type, [
PyrexTypes.CFuncTypeArg("it", PyrexTypes.py_object_type, None)
])
    def _handle_simple_function_set(self, node, function, pos_args):
        """Replace set([...]) by a set literal and set(it) by PySet_New(it)."""
        if len(pos_args) != 1:
            return node
        if pos_args[0].is_sequence_constructor:
            # We can optimise set([x,y,z]) safely into a set literal,
            # but only if we create all items before adding them -
            # adding an item may raise an exception if it is not
            # hashable, but creating the later items may have
            # side-effects.
            args = []
            temps = []
            for arg in pos_args[0].args:
                if not arg.is_simple():
                    # evaluate non-trivial items up front via temps
                    arg = UtilNodes.LetRefNode(arg)
                    temps.append(arg)
                args.append(arg)
            result = ExprNodes.SetNode(node.pos, is_temp=1, args=args)
            self.replace(node, result)
            # wrap in reverse order so the first temp is outermost
            for temp in temps[::-1]:
                result = UtilNodes.EvalWithTempExprNode(temp, result)
            return result
        else:
            # PySet_New(it) is better than a generic Python call to set(it)
            return self.replace(node, ExprNodes.PythonCapiCallNode(
                node.pos, "PySet_New",
                self.PySet_New_func_type,
                args=pos_args,
                is_temp=node.is_temp,
                py_name="set"))
PyFrozenSet_New_func_type = PyrexTypes.CFuncType(
Builtin.frozenset_type, [
PyrexTypes.CFuncTypeArg("it", PyrexTypes.py_object_type, None)
])
def _handle_simple_function_frozenset(self, node, function, pos_args):
if not pos_args:
pos_args = [ExprNodes.NullNode(node.pos)]
elif len(pos_args) > 1:
return node
elif pos_args[0].type is Builtin.frozenset_type and not pos_args[0].may_be_none():
return pos_args[0]
# PyFrozenSet_New(it) is better than a generic Python call to frozenset(it)
return ExprNodes.PythonCapiCallNode(
node.pos, "__Pyx_PyFrozenSet_New",
self.PyFrozenSet_New_func_type,
args=pos_args,
is_temp=node.is_temp,
utility_code=UtilityCode.load_cached('pyfrozenset_new', 'Builtins.c'),
py_name="frozenset")
PyObject_AsDouble_func_type = PyrexTypes.CFuncType(
PyrexTypes.c_double_type, [
PyrexTypes.CFuncTypeArg("obj", PyrexTypes.py_object_type, None),
],
exception_value = "((double)-1)",
exception_check = True)
def _handle_simple_function_float(self, node, function, pos_args):
"""Transform float() into either a C type cast or a faster C
function call.
"""
# Note: this requires the float() function to be typed as
# returning a C 'double'
if len(pos_args) == 0:
return ExprNodes.FloatNode(
node, value="0.0", constant_result=0.0
).coerce_to(Builtin.float_type, self.current_env())
elif len(pos_args) != 1:
self._error_wrong_arg_count('float', node, pos_args, '0 or 1')
return node
func_arg = pos_args[0]
if isinstance(func_arg, ExprNodes.CoerceToPyTypeNode):
func_arg = func_arg.arg
if func_arg.type is PyrexTypes.c_double_type:
return func_arg
elif node.type.assignable_from(func_arg.type) or func_arg.type.is_numeric:
return ExprNodes.TypecastNode(
node.pos, operand=func_arg, type=node.type)
return ExprNodes.PythonCapiCallNode(
node.pos, "__Pyx_PyObject_AsDouble",
self.PyObject_AsDouble_func_type,
args = pos_args,
is_temp = node.is_temp,
utility_code = load_c_utility('pyobject_as_double'),
py_name = "float")
PyNumber_Int_func_type = PyrexTypes.CFuncType(
PyrexTypes.py_object_type, [
PyrexTypes.CFuncTypeArg("o", PyrexTypes.py_object_type, None)
])
def _handle_simple_function_int(self, node, function, pos_args):
"""Transform int() into a faster C function call.
"""
if len(pos_args) == 0:
return ExprNodes.IntNode(node.pos, value="0", constant_result=0,
type=PyrexTypes.py_object_type)
elif len(pos_args) != 1:
return node # int(x, base)
func_arg = pos_args[0]
if isinstance(func_arg, ExprNodes.CoerceToPyTypeNode):
return node # handled in visit_CoerceFromPyTypeNode()
if func_arg.type.is_pyobject and node.type.is_pyobject:
return ExprNodes.PythonCapiCallNode(
node.pos, "PyNumber_Int", self.PyNumber_Int_func_type,
args=pos_args, is_temp=True)
return node
def _handle_simple_function_bool(self, node, function, pos_args):
"""Transform bool(x) into a type coercion to a boolean.
"""
if len(pos_args) == 0:
return ExprNodes.BoolNode(
node.pos, value=False, constant_result=False
).coerce_to(Builtin.bool_type, self.current_env())
elif len(pos_args) != 1:
self._error_wrong_arg_count('bool', node, pos_args, '0 or 1')
return node
else:
# => !!<bint>(x) to make sure it's exactly 0 or 1
operand = pos_args[0].coerce_to_boolean(self.current_env())
operand = ExprNodes.NotNode(node.pos, operand = operand)
operand = ExprNodes.NotNode(node.pos, operand = operand)
# coerce back to Python object as that's the result we are expecting
return operand.coerce_to_pyobject(self.current_env())
### builtin functions
Pyx_strlen_func_type = PyrexTypes.CFuncType(
PyrexTypes.c_size_t_type, [
PyrexTypes.CFuncTypeArg("bytes", PyrexTypes.c_char_ptr_type, None)
])
Pyx_Py_UNICODE_strlen_func_type = PyrexTypes.CFuncType(
PyrexTypes.c_size_t_type, [
PyrexTypes.CFuncTypeArg("unicode", PyrexTypes.c_py_unicode_ptr_type, None)
])
PyObject_Size_func_type = PyrexTypes.CFuncType(
PyrexTypes.c_py_ssize_t_type, [
PyrexTypes.CFuncTypeArg("obj", PyrexTypes.py_object_type, None)
],
exception_value="-1")
_map_to_capi_len_function = {
Builtin.unicode_type : "__Pyx_PyUnicode_GET_LENGTH",
Builtin.bytes_type : "PyBytes_GET_SIZE",
Builtin.list_type : "PyList_GET_SIZE",
Builtin.tuple_type : "PyTuple_GET_SIZE",
Builtin.dict_type : "PyDict_Size",
Builtin.set_type : "PySet_Size",
Builtin.frozenset_type : "__Pyx_PyFrozenSet_Size",
}.get
_ext_types_with_pysize = set(["cpython.array.array"])
    def _handle_simple_function_len(self, node, function, pos_args):
        """Replace len(char*) by the equivalent call to strlen(),
        len(Py_UNICODE) by the equivalent Py_UNICODE_strlen() and
        len(known_builtin_type) by an equivalent C-API call.
        """
        if len(pos_args) != 1:
            self._error_wrong_arg_count('len', node, pos_args, 1)
            return node
        arg = pos_args[0]
        if isinstance(arg, ExprNodes.CoerceToPyTypeNode):
            # look through the Python boxing of a C value
            arg = arg.arg
        if arg.type.is_string:
            new_node = ExprNodes.PythonCapiCallNode(
                node.pos, "strlen", self.Pyx_strlen_func_type,
                args = [arg],
                is_temp = node.is_temp,
                utility_code = UtilityCode.load_cached("IncludeStringH", "StringTools.c"))
        elif arg.type.is_pyunicode_ptr:
            new_node = ExprNodes.PythonCapiCallNode(
                node.pos, "__Pyx_Py_UNICODE_strlen", self.Pyx_Py_UNICODE_strlen_func_type,
                args = [arg],
                is_temp = node.is_temp)
        elif arg.type.is_pyobject:
            cfunc_name = self._map_to_capi_len_function(arg.type)
            if cfunc_name is None:
                arg_type = arg.type
                if ((arg_type.is_extension_type or arg_type.is_builtin_type)
                    and arg_type.entry.qualified_name in self._ext_types_with_pysize):
                    cfunc_name = 'Py_SIZE'
                else:
                    return node
            # the C macros do not handle None - guard explicitly
            arg = arg.as_none_safe_node(
                "object of type 'NoneType' has no len()")
            new_node = ExprNodes.PythonCapiCallNode(
                node.pos, cfunc_name, self.PyObject_Size_func_type,
                args = [arg],
                is_temp = node.is_temp)
        elif arg.type.is_unicode_char:
            # a single character always has length 1
            return ExprNodes.IntNode(node.pos, value='1', constant_result=1,
                                     type=node.type)
        else:
            return node
        if node.type not in (PyrexTypes.c_size_t_type, PyrexTypes.c_py_ssize_t_type):
            new_node = new_node.coerce_to(node.type, self.current_env())
        return new_node
Pyx_Type_func_type = PyrexTypes.CFuncType(
Builtin.type_type, [
PyrexTypes.CFuncTypeArg("object", PyrexTypes.py_object_type, None)
])
def _handle_simple_function_type(self, node, function, pos_args):
"""Replace type(o) by a macro call to Py_TYPE(o).
"""
if len(pos_args) != 1:
return node
node = ExprNodes.PythonCapiCallNode(
node.pos, "Py_TYPE", self.Pyx_Type_func_type,
args = pos_args,
is_temp = False)
return ExprNodes.CastNode(node, PyrexTypes.py_object_type)
Py_type_check_func_type = PyrexTypes.CFuncType(
PyrexTypes.c_bint_type, [
PyrexTypes.CFuncTypeArg("arg", PyrexTypes.py_object_type, None)
])
    def _handle_simple_function_isinstance(self, node, function, pos_args):
        """Replace isinstance() checks against builtin types by the
        corresponding C-API call.

        For isinstance(x, (A, B, ...)), each type in the tuple becomes a
        separate C-level test and the tests are joined with 'or'.
        """
        if len(pos_args) != 2:
            return node
        arg, types = pos_args
        temps = []
        if isinstance(types, ExprNodes.TupleNode):
            types = types.args
            if len(types) == 1 and not types[0].type is Builtin.type_type:
                return node  # nothing to improve here
            if arg.is_attribute or not arg.is_simple():
                # multiple tests - evaluate the tested object only once
                arg = UtilNodes.ResultRefNode(arg)
                temps.append(arg)
        elif types.type is Builtin.type_type:
            types = [types]
        else:
            return node
        tests = []
        test_nodes = []
        env = self.current_env()
        for test_type_node in types:
            builtin_type = None
            if test_type_node.is_name:
                if test_type_node.entry:
                    entry = env.lookup(test_type_node.entry.name)
                    if entry and entry.type and entry.type.is_builtin_type:
                        builtin_type = entry.type
            if builtin_type is Builtin.type_type:
                # all types have type "type", but there's only one 'type'
                if entry.name != 'type' or not (
                        entry.scope and entry.scope.is_builtin_scope):
                    builtin_type = None
            if builtin_type is not None:
                # builtin type known at compile time -> dedicated check macro
                type_check_function = entry.type.type_check_function(exact=False)
                if type_check_function in tests:
                    # same check already emitted for an earlier type
                    continue
                tests.append(type_check_function)
                type_check_args = [arg]
            elif test_type_node.type is Builtin.type_type:
                type_check_function = '__Pyx_TypeCheck'
                type_check_args = [arg, test_type_node]
            else:
                # arbitrary expression - fall back to PyObject_IsInstance()
                if not test_type_node.is_literal:
                    test_type_node = UtilNodes.ResultRefNode(test_type_node)
                    temps.append(test_type_node)
                type_check_function = 'PyObject_IsInstance'
                type_check_args = [arg, test_type_node]
            test_nodes.append(
                ExprNodes.PythonCapiCallNode(
                    test_type_node.pos, type_check_function, self.Py_type_check_func_type,
                    args=type_check_args,
                    is_temp=True,
                    ))
        def join_with_or(a, b, make_binop_node=ExprNodes.binop_node):
            or_node = make_binop_node(node.pos, 'or', a, b)
            or_node.type = PyrexTypes.c_bint_type
            or_node.wrap_operands(env)
            return or_node
        test_node = reduce(join_with_or, test_nodes).coerce_to(node.type, env)
        # wrap the temps in reverse order so the first is outermost
        for temp in temps[::-1]:
            test_node = UtilNodes.EvalWithTempExprNode(temp, test_node)
        return test_node
def _handle_simple_function_ord(self, node, function, pos_args):
"""Unpack ord(Py_UNICODE) and ord('X').
"""
if len(pos_args) != 1:
return node
arg = pos_args[0]
if isinstance(arg, ExprNodes.CoerceToPyTypeNode):
if arg.arg.type.is_unicode_char:
return ExprNodes.TypecastNode(
arg.pos, operand=arg.arg, type=PyrexTypes.c_int_type
).coerce_to(node.type, self.current_env())
elif isinstance(arg, ExprNodes.UnicodeNode):
if len(arg.value) == 1:
return ExprNodes.IntNode(
arg.pos, type=PyrexTypes.c_int_type,
value=str(ord(arg.value)),
constant_result=ord(arg.value)
).coerce_to(node.type, self.current_env())
elif isinstance(arg, ExprNodes.StringNode):
if arg.unicode_value and len(arg.unicode_value) == 1 \
and ord(arg.unicode_value) <= 255: # Py2/3 portability
return ExprNodes.IntNode(
arg.pos, type=PyrexTypes.c_int_type,
value=str(ord(arg.unicode_value)),
constant_result=ord(arg.unicode_value)
).coerce_to(node.type, self.current_env())
return node
### special methods
Pyx_tp_new_func_type = PyrexTypes.CFuncType(
PyrexTypes.py_object_type, [
PyrexTypes.CFuncTypeArg("type", PyrexTypes.py_object_type, None),
PyrexTypes.CFuncTypeArg("args", Builtin.tuple_type, None),
])
Pyx_tp_new_kwargs_func_type = PyrexTypes.CFuncType(
PyrexTypes.py_object_type, [
PyrexTypes.CFuncTypeArg("type", PyrexTypes.py_object_type, None),
PyrexTypes.CFuncTypeArg("args", Builtin.tuple_type, None),
PyrexTypes.CFuncTypeArg("kwargs", Builtin.dict_type, None),
])
    def _handle_any_slot__new__(self, node, function, args,
                                is_unbound_method, kwargs=None):
        """Replace 'exttype.__new__(exttype, ...)' by a call to exttype->tp_new()

        Only applies when the two type references are provably the same; for
        extension types defined in the current module the tp_new slot function
        is called directly, otherwise a generic __Pyx_tp_new* helper is used.
        """
        obj = function.obj
        if not is_unbound_method or len(args) < 1:
            return node
        type_arg = args[0]
        if not obj.is_name or not type_arg.is_name:
            # play safe
            return node
        if obj.type != Builtin.type_type or type_arg.type != Builtin.type_type:
            # not a known type, play safe
            return node
        if not type_arg.type_entry or not obj.type_entry:
            if obj.name != type_arg.name:
                return node
            # otherwise, we know it's a type and we know it's the same
            # type for both - that should do
        elif type_arg.type_entry != obj.type_entry:
            # different types - may or may not lead to an error at runtime
            return node

        # remaining positional arguments are passed on as an args tuple
        args_tuple = ExprNodes.TupleNode(node.pos, args=args[1:])
        args_tuple = args_tuple.analyse_types(
            self.current_env(), skip_children=True)

        if type_arg.type_entry:
            ext_type = type_arg.type_entry.type
            if (ext_type.is_extension_type and ext_type.typeobj_cname and
                    ext_type.scope.global_scope() == self.current_env().global_scope()):
                # known type in current module
                tp_slot = TypeSlots.ConstructorSlot("tp_new", '__new__')
                slot_func_cname = TypeSlots.get_slot_function(ext_type.scope, tp_slot)
                if slot_func_cname:
                    cython_scope = self.context.cython_scope
                    PyTypeObjectPtr = PyrexTypes.CPtrType(
                        cython_scope.lookup('PyTypeObject').type)
                    pyx_tp_new_kwargs_func_type = PyrexTypes.CFuncType(
                        PyrexTypes.py_object_type, [
                            PyrexTypes.CFuncTypeArg("type", PyTypeObjectPtr, None),
                            PyrexTypes.CFuncTypeArg("args", PyrexTypes.py_object_type, None),
                            PyrexTypes.CFuncTypeArg("kwargs", PyrexTypes.py_object_type, None),
                            ])

                    type_arg = ExprNodes.CastNode(type_arg, PyTypeObjectPtr)
                    if not kwargs:
                        kwargs = ExprNodes.NullNode(node.pos, type=PyrexTypes.py_object_type)  # hack?
                    # call the slot function directly, bypassing the type object
                    return ExprNodes.PythonCapiCallNode(
                        node.pos, slot_func_cname,
                        pyx_tp_new_kwargs_func_type,
                        args=[type_arg, args_tuple, kwargs],
                        is_temp=True)
        else:
            # arbitrary variable, needs a None check for safety
            type_arg = type_arg.as_none_safe_node(
                "object.__new__(X): X is not a type object (NoneType)")

        utility_code = UtilityCode.load_cached('tp_new', 'ObjectHandling.c')
        if kwargs:
            return ExprNodes.PythonCapiCallNode(
                node.pos, "__Pyx_tp_new_kwargs", self.Pyx_tp_new_kwargs_func_type,
                args=[type_arg, args_tuple, kwargs],
                utility_code=utility_code,
                is_temp=node.is_temp
                )
        else:
            return ExprNodes.PythonCapiCallNode(
                node.pos, "__Pyx_tp_new", self.Pyx_tp_new_func_type,
                args=[type_arg, args_tuple],
                utility_code=utility_code,
                is_temp=node.is_temp
                )
### methods of builtin types
PyObject_Append_func_type = PyrexTypes.CFuncType(
PyrexTypes.c_returncode_type, [
PyrexTypes.CFuncTypeArg("list", PyrexTypes.py_object_type, None),
PyrexTypes.CFuncTypeArg("item", PyrexTypes.py_object_type, None),
],
exception_value="-1")
def _handle_simple_method_object_append(self, node, function, args, is_unbound_method):
"""Optimistic optimisation as X.append() is almost always
referring to a list.
"""
if len(args) != 2 or node.result_is_used:
return node
return ExprNodes.PythonCapiCallNode(
node.pos, "__Pyx_PyObject_Append", self.PyObject_Append_func_type,
args=args,
may_return_none=False,
is_temp=node.is_temp,
result_is_used=False,
utility_code=load_c_utility('append')
)
PyByteArray_Append_func_type = PyrexTypes.CFuncType(
PyrexTypes.c_returncode_type, [
PyrexTypes.CFuncTypeArg("bytearray", PyrexTypes.py_object_type, None),
PyrexTypes.CFuncTypeArg("value", PyrexTypes.c_int_type, None),
],
exception_value="-1")
PyByteArray_AppendObject_func_type = PyrexTypes.CFuncType(
PyrexTypes.c_returncode_type, [
PyrexTypes.CFuncTypeArg("bytearray", PyrexTypes.py_object_type, None),
PyrexTypes.CFuncTypeArg("value", PyrexTypes.py_object_type, None),
],
exception_value="-1")
    def _handle_simple_method_bytearray_append(self, node, function, args, is_unbound_method):
        """Optimise bytearray.append(value) into a C helper call, dispatching
        on whether the value is a C integer, a single-character string
        literal, or an arbitrary Python object.
        """
        if len(args) != 2:
            return node
        func_name = "__Pyx_PyByteArray_Append"
        func_type = self.PyByteArray_Append_func_type

        value = unwrap_coerced_node(args[1])
        if value.type.is_int or isinstance(value, ExprNodes.IntNode):
            # integer value: pass it as a plain C int
            value = value.coerce_to(PyrexTypes.c_int_type, self.current_env())
            utility_code = UtilityCode.load_cached("ByteArrayAppend", "StringTools.c")
        elif value.is_string_literal:
            # single-character literal: pass the char value directly
            if not value.can_coerce_to_char_literal():
                return node
            value = value.coerce_to(PyrexTypes.c_char_type, self.current_env())
            utility_code = UtilityCode.load_cached("ByteArrayAppend", "StringTools.c")
        elif value.type.is_pyobject:
            # arbitrary object: use the object-based helper
            func_name = "__Pyx_PyByteArray_AppendObject"
            func_type = self.PyByteArray_AppendObject_func_type
            utility_code = UtilityCode.load_cached("ByteArrayAppendObject", "StringTools.c")
        else:
            return node

        new_node = ExprNodes.PythonCapiCallNode(
            node.pos, func_name, func_type,
            args=[args[0], value],
            may_return_none=False,
            is_temp=node.is_temp,
            utility_code=utility_code,
        )
        if node.result_is_used:
            new_node = new_node.coerce_to(node.type, self.current_env())
        return new_node
PyObject_Pop_func_type = PyrexTypes.CFuncType(
PyrexTypes.py_object_type, [
PyrexTypes.CFuncTypeArg("list", PyrexTypes.py_object_type, None),
])
PyObject_PopIndex_func_type = PyrexTypes.CFuncType(
PyrexTypes.py_object_type, [
PyrexTypes.CFuncTypeArg("list", PyrexTypes.py_object_type, None),
PyrexTypes.CFuncTypeArg("index", PyrexTypes.c_py_ssize_t_type, None),
PyrexTypes.CFuncTypeArg("is_signed", PyrexTypes.c_int_type, None),
],
has_varargs=True) # to fake the additional macro args that lack a proper C type
    def _handle_simple_method_list_pop(self, node, function, args, is_unbound_method):
        # delegate to the generic pop() optimisation with the list fast path enabled
        return self._handle_simple_method_object_pop(
            node, function, args, is_unbound_method, is_list=True)
    def _handle_simple_method_object_pop(self, node, function, args, is_unbound_method, is_list=False):
        """Optimistic optimisation as X.pop([n]) is almost always
        referring to a list.

        With is_list=True the list-specific helper is used and a None check
        is inserted on the receiver.
        """
        if not args:
            return node
        obj = args[0]
        if is_list:
            type_name = 'List'
            obj = obj.as_none_safe_node(
                "'NoneType' object has no attribute '%s'",
                error="PyExc_AttributeError",
                format_args=['pop'])
        else:
            type_name = 'Object'
        if len(args) == 1:
            # no-argument form: X.pop()
            return ExprNodes.PythonCapiCallNode(
                node.pos, "__Pyx_Py%s_Pop" % type_name,
                self.PyObject_Pop_func_type,
                args=[obj],
                may_return_none=True,
                is_temp=node.is_temp,
                utility_code=load_c_utility('pop'),
            )
        elif len(args) == 2:
            # indexed form: X.pop(n)
            index = unwrap_coerced_node(args[1])
            orig_index_type = index.type
            if not index.type.is_int:
                if is_list or isinstance(index, ExprNodes.IntNode):
                    index = index.coerce_to(PyrexTypes.c_py_ssize_t_type, self.current_env())
                else:
                    return node
            elif not PyrexTypes.numeric_type_fits(index.type, PyrexTypes.c_py_ssize_t_type):
                return node
            # real type might still be larger at runtime
            if not orig_index_type.is_int:
                orig_index_type = index.type
            if not orig_index_type.create_to_py_utility_code(self.current_env()):
                return node
            convert_func = orig_index_type.to_py_function
            conversion_type = PyrexTypes.CFuncType(
                PyrexTypes.py_object_type, [PyrexTypes.CFuncTypeArg("intval", orig_index_type, None)])
            return ExprNodes.PythonCapiCallNode(
                node.pos, "__Pyx_Py%s_PopIndex" % type_name,
                self.PyObject_PopIndex_func_type,
                args=[obj, index,
                      ExprNodes.IntNode(index.pos, value=str(orig_index_type.signed and 1 or 0),
                                        constant_result=orig_index_type.signed and 1 or 0,
                                        type=PyrexTypes.c_int_type),
                      ExprNodes.RawCNameExprNode(index.pos, PyrexTypes.c_void_type,
                                                 orig_index_type.empty_declaration_code()),
                      ExprNodes.RawCNameExprNode(index.pos, conversion_type, convert_func)],
                may_return_none=True,
                is_temp=node.is_temp,
                utility_code=load_c_utility("pop_index"),
            )

        return node
single_param_func_type = PyrexTypes.CFuncType(
PyrexTypes.c_returncode_type, [
PyrexTypes.CFuncTypeArg("obj", PyrexTypes.py_object_type, None),
],
exception_value = "-1")
def _handle_simple_method_list_sort(self, node, function, args, is_unbound_method):
"""Call PyList_Sort() instead of the 0-argument l.sort().
"""
if len(args) != 1:
return node
return self._substitute_method_call(
node, function, "PyList_Sort", self.single_param_func_type,
'sort', is_unbound_method, args).coerce_to(node.type, self.current_env)
Pyx_PyDict_GetItem_func_type = PyrexTypes.CFuncType(
PyrexTypes.py_object_type, [
PyrexTypes.CFuncTypeArg("dict", PyrexTypes.py_object_type, None),
PyrexTypes.CFuncTypeArg("key", PyrexTypes.py_object_type, None),
PyrexTypes.CFuncTypeArg("default", PyrexTypes.py_object_type, None),
])
def _handle_simple_method_dict_get(self, node, function, args, is_unbound_method):
"""Replace dict.get() by a call to PyDict_GetItem().
"""
if len(args) == 2:
args.append(ExprNodes.NoneNode(node.pos))
elif len(args) != 3:
self._error_wrong_arg_count('dict.get', node, args, "2 or 3")
return node
return self._substitute_method_call(
node, function,
"__Pyx_PyDict_GetItemDefault", self.Pyx_PyDict_GetItem_func_type,
'get', is_unbound_method, args,
may_return_none = True,
utility_code = load_c_utility("dict_getitem_default"))
Pyx_PyDict_SetDefault_func_type = PyrexTypes.CFuncType(
PyrexTypes.py_object_type, [
PyrexTypes.CFuncTypeArg("dict", PyrexTypes.py_object_type, None),
PyrexTypes.CFuncTypeArg("key", PyrexTypes.py_object_type, None),
PyrexTypes.CFuncTypeArg("default", PyrexTypes.py_object_type, None),
PyrexTypes.CFuncTypeArg("is_safe_type", PyrexTypes.c_int_type, None),
])
def _handle_simple_method_dict_setdefault(self, node, function, args, is_unbound_method):
"""Replace dict.setdefault() by calls to PyDict_GetItem() and PyDict_SetItem().
"""
if len(args) == 2:
args.append(ExprNodes.NoneNode(node.pos))
elif len(args) != 3:
self._error_wrong_arg_count('dict.setdefault', node, args, "2 or 3")
return node
key_type = args[1].type
if key_type.is_builtin_type:
is_safe_type = int(key_type.name in
'str bytes unicode float int long bool')
elif key_type is PyrexTypes.py_object_type:
is_safe_type = -1 # don't know
else:
is_safe_type = 0 # definitely not
args.append(ExprNodes.IntNode(
node.pos, value=str(is_safe_type), constant_result=is_safe_type))
return self._substitute_method_call(
node, function,
"__Pyx_PyDict_SetDefault", self.Pyx_PyDict_SetDefault_func_type,
'setdefault', is_unbound_method, args,
may_return_none=True,
utility_code=load_c_utility('dict_setdefault'))
### unicode type methods
PyUnicode_uchar_predicate_func_type = PyrexTypes.CFuncType(
PyrexTypes.c_bint_type, [
PyrexTypes.CFuncTypeArg("uchar", PyrexTypes.c_py_ucs4_type, None),
])
def _inject_unicode_predicate(self, node, function, args, is_unbound_method):
if is_unbound_method or len(args) != 1:
return node
ustring = args[0]
if not isinstance(ustring, ExprNodes.CoerceToPyTypeNode) or \
not ustring.arg.type.is_unicode_char:
return node
uchar = ustring.arg
method_name = function.attribute
if method_name == 'istitle':
# istitle() doesn't directly map to Py_UNICODE_ISTITLE()
utility_code = UtilityCode.load_cached(
"py_unicode_istitle", "StringTools.c")
function_name = '__Pyx_Py_UNICODE_ISTITLE'
else:
utility_code = None
function_name = 'Py_UNICODE_%s' % method_name.upper()
func_call = self._substitute_method_call(
node, function,
function_name, self.PyUnicode_uchar_predicate_func_type,
method_name, is_unbound_method, [uchar],
utility_code = utility_code)
if node.type.is_pyobject:
func_call = func_call.coerce_to_pyobject(self.current_env)
return func_call
_handle_simple_method_unicode_isalnum = _inject_unicode_predicate
_handle_simple_method_unicode_isalpha = _inject_unicode_predicate
_handle_simple_method_unicode_isdecimal = _inject_unicode_predicate
_handle_simple_method_unicode_isdigit = _inject_unicode_predicate
_handle_simple_method_unicode_islower = _inject_unicode_predicate
_handle_simple_method_unicode_isnumeric = _inject_unicode_predicate
_handle_simple_method_unicode_isspace = _inject_unicode_predicate
_handle_simple_method_unicode_istitle = _inject_unicode_predicate
_handle_simple_method_unicode_isupper = _inject_unicode_predicate
PyUnicode_uchar_conversion_func_type = PyrexTypes.CFuncType(
PyrexTypes.c_py_ucs4_type, [
PyrexTypes.CFuncTypeArg("uchar", PyrexTypes.c_py_ucs4_type, None),
])
def _inject_unicode_character_conversion(self, node, function, args, is_unbound_method):
if is_unbound_method or len(args) != 1:
return node
ustring = args[0]
if not isinstance(ustring, ExprNodes.CoerceToPyTypeNode) or \
not ustring.arg.type.is_unicode_char:
return node
uchar = ustring.arg
method_name = function.attribute
function_name = 'Py_UNICODE_TO%s' % method_name.upper()
func_call = self._substitute_method_call(
node, function,
function_name, self.PyUnicode_uchar_conversion_func_type,
method_name, is_unbound_method, [uchar])
if node.type.is_pyobject:
func_call = func_call.coerce_to_pyobject(self.current_env)
return func_call
_handle_simple_method_unicode_lower = _inject_unicode_character_conversion
_handle_simple_method_unicode_upper = _inject_unicode_character_conversion
_handle_simple_method_unicode_title = _inject_unicode_character_conversion
PyUnicode_Splitlines_func_type = PyrexTypes.CFuncType(
Builtin.list_type, [
PyrexTypes.CFuncTypeArg("str", Builtin.unicode_type, None),
PyrexTypes.CFuncTypeArg("keepends", PyrexTypes.c_bint_type, None),
])
def _handle_simple_method_unicode_splitlines(self, node, function, args, is_unbound_method):
"""Replace unicode.splitlines(...) by a direct call to the
corresponding C-API function.
"""
if len(args) not in (1,2):
self._error_wrong_arg_count('unicode.splitlines', node, args, "1 or 2")
return node
self._inject_bint_default_argument(node, args, 1, False)
return self._substitute_method_call(
node, function,
"PyUnicode_Splitlines", self.PyUnicode_Splitlines_func_type,
'splitlines', is_unbound_method, args)
PyUnicode_Split_func_type = PyrexTypes.CFuncType(
Builtin.list_type, [
PyrexTypes.CFuncTypeArg("str", Builtin.unicode_type, None),
PyrexTypes.CFuncTypeArg("sep", PyrexTypes.py_object_type, None),
PyrexTypes.CFuncTypeArg("maxsplit", PyrexTypes.c_py_ssize_t_type, None),
]
)
def _handle_simple_method_unicode_split(self, node, function, args, is_unbound_method):
"""Replace unicode.split(...) by a direct call to the
corresponding C-API function.
"""
if len(args) not in (1,2,3):
self._error_wrong_arg_count('unicode.split', node, args, "1-3")
return node
if len(args) < 2:
args.append(ExprNodes.NullNode(node.pos))
self._inject_int_default_argument(
node, args, 2, PyrexTypes.c_py_ssize_t_type, "-1")
return self._substitute_method_call(
node, function,
"PyUnicode_Split", self.PyUnicode_Split_func_type,
'split', is_unbound_method, args)
PyString_Tailmatch_func_type = PyrexTypes.CFuncType(
PyrexTypes.c_bint_type, [
PyrexTypes.CFuncTypeArg("str", PyrexTypes.py_object_type, None), # bytes/str/unicode
PyrexTypes.CFuncTypeArg("substring", PyrexTypes.py_object_type, None),
PyrexTypes.CFuncTypeArg("start", PyrexTypes.c_py_ssize_t_type, None),
PyrexTypes.CFuncTypeArg("end", PyrexTypes.c_py_ssize_t_type, None),
PyrexTypes.CFuncTypeArg("direction", PyrexTypes.c_int_type, None),
],
exception_value = '-1')
    def _handle_simple_method_unicode_endswith(self, node, function, args, is_unbound_method):
        # direction +1 selects matching at the end of the string
        return self._inject_tailmatch(
            node, function, args, is_unbound_method, 'unicode', 'endswith',
            unicode_tailmatch_utility_code, +1)
    def _handle_simple_method_unicode_startswith(self, node, function, args, is_unbound_method):
        # direction -1 selects matching at the start of the string
        return self._inject_tailmatch(
            node, function, args, is_unbound_method, 'unicode', 'startswith',
            unicode_tailmatch_utility_code, -1)
    def _inject_tailmatch(self, node, function, args, is_unbound_method, type_name,
                          method_name, utility_code, direction):
        """Replace unicode.startswith(...) and unicode.endswith(...)
        by a direct call to the corresponding C-API function.

        'direction' is +1 for endswith() and -1 for startswith().
        """
        if len(args) not in (2,3,4):
            self._error_wrong_arg_count('%s.%s' % (type_name, method_name), node, args, "2-4")
            return node
        # optional start/end slice bounds default to the full string
        self._inject_int_default_argument(
            node, args, 2, PyrexTypes.c_py_ssize_t_type, "0")
        self._inject_int_default_argument(
            node, args, 3, PyrexTypes.c_py_ssize_t_type, "PY_SSIZE_T_MAX")
        args.append(ExprNodes.IntNode(
            node.pos, value=str(direction), type=PyrexTypes.c_int_type))

        method_call = self._substitute_method_call(
            node, function,
            "__Pyx_Py%s_Tailmatch" % type_name.capitalize(),
            self.PyString_Tailmatch_func_type,
            method_name, is_unbound_method, args,
            utility_code = utility_code)
        return method_call.coerce_to(Builtin.bool_type, self.current_env())
PyUnicode_Find_func_type = PyrexTypes.CFuncType(
PyrexTypes.c_py_ssize_t_type, [
PyrexTypes.CFuncTypeArg("str", Builtin.unicode_type, None),
PyrexTypes.CFuncTypeArg("substring", PyrexTypes.py_object_type, None),
PyrexTypes.CFuncTypeArg("start", PyrexTypes.c_py_ssize_t_type, None),
PyrexTypes.CFuncTypeArg("end", PyrexTypes.c_py_ssize_t_type, None),
PyrexTypes.CFuncTypeArg("direction", PyrexTypes.c_int_type, None),
],
exception_value = '-2')
    def _handle_simple_method_unicode_find(self, node, function, args, is_unbound_method):
        # direction +1 => forward search
        return self._inject_unicode_find(
            node, function, args, is_unbound_method, 'find', +1)
    def _handle_simple_method_unicode_rfind(self, node, function, args, is_unbound_method):
        # direction -1 => backward search
        return self._inject_unicode_find(
            node, function, args, is_unbound_method, 'rfind', -1)
    def _inject_unicode_find(self, node, function, args, is_unbound_method,
                             method_name, direction):
        """Replace unicode.find(...) and unicode.rfind(...) by a
        direct call to the corresponding C-API function.

        'direction' is +1 for find() and -1 for rfind().
        """
        if len(args) not in (2,3,4):
            self._error_wrong_arg_count('unicode.%s' % method_name, node, args, "2-4")
            return node
        # optional start/end slice bounds default to the full string
        self._inject_int_default_argument(
            node, args, 2, PyrexTypes.c_py_ssize_t_type, "0")
        self._inject_int_default_argument(
            node, args, 3, PyrexTypes.c_py_ssize_t_type, "PY_SSIZE_T_MAX")
        args.append(ExprNodes.IntNode(
            node.pos, value=str(direction), type=PyrexTypes.c_int_type))

        method_call = self._substitute_method_call(
            node, function, "PyUnicode_Find", self.PyUnicode_Find_func_type,
            method_name, is_unbound_method, args)
        # the C call returns a Py_ssize_t index => convert to a Python int
        return method_call.coerce_to_pyobject(self.current_env())
PyUnicode_Count_func_type = PyrexTypes.CFuncType(
PyrexTypes.c_py_ssize_t_type, [
PyrexTypes.CFuncTypeArg("str", Builtin.unicode_type, None),
PyrexTypes.CFuncTypeArg("substring", PyrexTypes.py_object_type, None),
PyrexTypes.CFuncTypeArg("start", PyrexTypes.c_py_ssize_t_type, None),
PyrexTypes.CFuncTypeArg("end", PyrexTypes.c_py_ssize_t_type, None),
],
exception_value = '-1')
def _handle_simple_method_unicode_count(self, node, function, args, is_unbound_method):
"""Replace unicode.count(...) by a direct call to the
corresponding C-API function.
"""
if len(args) not in (2,3,4):
self._error_wrong_arg_count('unicode.count', node, args, "2-4")
return node
self._inject_int_default_argument(
node, args, 2, PyrexTypes.c_py_ssize_t_type, "0")
self._inject_int_default_argument(
node, args, 3, PyrexTypes.c_py_ssize_t_type, "PY_SSIZE_T_MAX")
method_call = self._substitute_method_call(
node, function, "PyUnicode_Count", self.PyUnicode_Count_func_type,
'count', is_unbound_method, args)
return method_call.coerce_to_pyobject(self.current_env())
PyUnicode_Replace_func_type = PyrexTypes.CFuncType(
Builtin.unicode_type, [
PyrexTypes.CFuncTypeArg("str", Builtin.unicode_type, None),
PyrexTypes.CFuncTypeArg("substring", PyrexTypes.py_object_type, None),
PyrexTypes.CFuncTypeArg("replstr", PyrexTypes.py_object_type, None),
PyrexTypes.CFuncTypeArg("maxcount", PyrexTypes.c_py_ssize_t_type, None),
])
def _handle_simple_method_unicode_replace(self, node, function, args, is_unbound_method):
"""Replace unicode.replace(...) by a direct call to the
corresponding C-API function.
"""
if len(args) not in (3,4):
self._error_wrong_arg_count('unicode.replace', node, args, "3-4")
return node
self._inject_int_default_argument(
node, args, 3, PyrexTypes.c_py_ssize_t_type, "-1")
return self._substitute_method_call(
node, function, "PyUnicode_Replace", self.PyUnicode_Replace_func_type,
'replace', is_unbound_method, args)
PyUnicode_AsEncodedString_func_type = PyrexTypes.CFuncType(
Builtin.bytes_type, [
PyrexTypes.CFuncTypeArg("obj", Builtin.unicode_type, None),
PyrexTypes.CFuncTypeArg("encoding", PyrexTypes.c_char_ptr_type, None),
PyrexTypes.CFuncTypeArg("errors", PyrexTypes.c_char_ptr_type, None),
])
PyUnicode_AsXyzString_func_type = PyrexTypes.CFuncType(
Builtin.bytes_type, [
PyrexTypes.CFuncTypeArg("obj", Builtin.unicode_type, None),
])
_special_encodings = ['UTF8', 'UTF16', 'Latin1', 'ASCII',
'unicode_escape', 'raw_unicode_escape']
_special_codecs = [ (name, codecs.getencoder(name))
for name in _special_encodings ]
def _handle_simple_method_unicode_encode(self, node, function, args, is_unbound_method):
"""Replace unicode.encode(...) by a direct C-API call to the
corresponding codec.
"""
if len(args) < 1 or len(args) > 3:
self._error_wrong_arg_count('unicode.encode', node, args, '1-3')
return node
string_node = args[0]
if len(args) == 1:
null_node = ExprNodes.NullNode(node.pos)
return self._substitute_method_call(
node, function, "PyUnicode_AsEncodedString",
self.PyUnicode_AsEncodedString_func_type,
'encode', is_unbound_method, [string_node, null_node, null_node])
parameters = self._unpack_encoding_and_error_mode(node.pos, args)
if parameters is None:
return node
encoding, encoding_node, error_handling, error_handling_node = parameters
if encoding and isinstance(string_node, ExprNodes.UnicodeNode):
# constant, so try to do the encoding at compile time
try:
value = string_node.value.encode(encoding, error_handling)
except:
# well, looks like we can't
pass
else:
value = BytesLiteral(value)
value.encoding = encoding
return ExprNodes.BytesNode(
string_node.pos, value=value, type=Builtin.bytes_type)
if encoding and error_handling == 'strict':
# try to find a specific encoder function
codec_name = self._find_special_codec_name(encoding)
if codec_name is not None:
encode_function = "PyUnicode_As%sString" % codec_name
return self._substitute_method_call(
node, function, encode_function,
self.PyUnicode_AsXyzString_func_type,
'encode', is_unbound_method, [string_node])
return self._substitute_method_call(
node, function, "PyUnicode_AsEncodedString",
self.PyUnicode_AsEncodedString_func_type,
'encode', is_unbound_method,
[string_node, encoding_node, error_handling_node])
PyUnicode_DecodeXyz_func_ptr_type = PyrexTypes.CPtrType(PyrexTypes.CFuncType(
Builtin.unicode_type, [
PyrexTypes.CFuncTypeArg("string", PyrexTypes.c_char_ptr_type, None),
PyrexTypes.CFuncTypeArg("size", PyrexTypes.c_py_ssize_t_type, None),
PyrexTypes.CFuncTypeArg("errors", PyrexTypes.c_char_ptr_type, None),
]))
_decode_c_string_func_type = PyrexTypes.CFuncType(
Builtin.unicode_type, [
PyrexTypes.CFuncTypeArg("string", PyrexTypes.c_char_ptr_type, None),
PyrexTypes.CFuncTypeArg("start", PyrexTypes.c_py_ssize_t_type, None),
PyrexTypes.CFuncTypeArg("stop", PyrexTypes.c_py_ssize_t_type, None),
PyrexTypes.CFuncTypeArg("encoding", PyrexTypes.c_char_ptr_type, None),
PyrexTypes.CFuncTypeArg("errors", PyrexTypes.c_char_ptr_type, None),
PyrexTypes.CFuncTypeArg("decode_func", PyUnicode_DecodeXyz_func_ptr_type, None),
])
_decode_bytes_func_type = PyrexTypes.CFuncType(
Builtin.unicode_type, [
PyrexTypes.CFuncTypeArg("string", PyrexTypes.py_object_type, None),
PyrexTypes.CFuncTypeArg("start", PyrexTypes.c_py_ssize_t_type, None),
PyrexTypes.CFuncTypeArg("stop", PyrexTypes.c_py_ssize_t_type, None),
PyrexTypes.CFuncTypeArg("encoding", PyrexTypes.c_char_ptr_type, None),
PyrexTypes.CFuncTypeArg("errors", PyrexTypes.c_char_ptr_type, None),
PyrexTypes.CFuncTypeArg("decode_func", PyUnicode_DecodeXyz_func_ptr_type, None),
])
_decode_cpp_string_func_type = None # lazy init
    def _handle_simple_method_bytes_decode(self, node, function, args, is_unbound_method):
        """Replace char*.decode() by a direct C-API call to the
        corresponding codec, possibly resolving a slice on the char*.

        Handles C strings, C++ std::string and Python bytes/bytearray
        receivers, each through a dedicated __Pyx_decode_* helper.
        """
        if not (1 <= len(args) <= 3):
            self._error_wrong_arg_count('bytes.decode', node, args, '1-3')
            return node

        # normalise input nodes
        string_node = args[0]
        start = stop = None
        if isinstance(string_node, ExprNodes.SliceIndexNode):
            # unpack a slice so the sliced region can be decoded directly
            index_node = string_node
            string_node = index_node.base
            start, stop = index_node.start, index_node.stop
            if not start or start.constant_result == 0:
                start = None
        if isinstance(string_node, ExprNodes.CoerceToPyTypeNode):
            string_node = string_node.arg

        string_type = string_node.type
        if string_type in (Builtin.bytes_type, Builtin.bytearray_type):
            # insert a None check matching Python's error message style
            if is_unbound_method:
                string_node = string_node.as_none_safe_node(
                    "descriptor '%s' requires a '%s' object but received a 'NoneType'",
                    format_args=['decode', string_type.name])
            else:
                string_node = string_node.as_none_safe_node(
                    "'NoneType' object has no attribute '%s'",
                    error="PyExc_AttributeError",
                    format_args=['decode'])
        elif not string_type.is_string and not string_type.is_cpp_string:
            # nothing to optimise here
            return node

        parameters = self._unpack_encoding_and_error_mode(node.pos, args)
        if parameters is None:
            return node
        encoding, encoding_node, error_handling, error_handling_node = parameters

        # normalise the slice bounds to Py_ssize_t values
        if not start:
            start = ExprNodes.IntNode(node.pos, value='0', constant_result=0)
        elif not start.type.is_int:
            start = start.coerce_to(PyrexTypes.c_py_ssize_t_type, self.current_env())
        if stop and not stop.type.is_int:
            stop = stop.coerce_to(PyrexTypes.c_py_ssize_t_type, self.current_env())

        # try to find a specific encoder function
        codec_name = None
        if encoding is not None:
            codec_name = self._find_special_codec_name(encoding)
        if codec_name is not None:
            # pass the dedicated C-API decoder as a function pointer
            decode_function = ExprNodes.RawCNameExprNode(
                node.pos, type=self.PyUnicode_DecodeXyz_func_ptr_type,
                cname="PyUnicode_Decode%s" % codec_name)
            encoding_node = ExprNodes.NullNode(node.pos)
        else:
            decode_function = ExprNodes.NullNode(node.pos)

        # build the helper function call
        temps = []
        if string_type.is_string:
            # C string
            if not stop:
                # use strlen() to find the string length, just as CPython would
                if not string_node.is_name:
                    string_node = UtilNodes.LetRefNode(string_node) # used twice
                    temps.append(string_node)
                stop = ExprNodes.PythonCapiCallNode(
                    string_node.pos, "strlen", self.Pyx_strlen_func_type,
                    args=[string_node],
                    is_temp=False,
                    utility_code=UtilityCode.load_cached("IncludeStringH", "StringTools.c"),
                ).coerce_to(PyrexTypes.c_py_ssize_t_type, self.current_env())
            helper_func_type = self._decode_c_string_func_type
            utility_code_name = 'decode_c_string'
        elif string_type.is_cpp_string:
            # C++ std::string
            if not stop:
                stop = ExprNodes.IntNode(node.pos, value='PY_SSIZE_T_MAX',
                                         constant_result=ExprNodes.not_a_constant)
            if self._decode_cpp_string_func_type is None:
                # lazy init to reuse the C++ string type
                self._decode_cpp_string_func_type = PyrexTypes.CFuncType(
                    Builtin.unicode_type, [
                        PyrexTypes.CFuncTypeArg("string", string_type, None),
                        PyrexTypes.CFuncTypeArg("start", PyrexTypes.c_py_ssize_t_type, None),
                        PyrexTypes.CFuncTypeArg("stop", PyrexTypes.c_py_ssize_t_type, None),
                        PyrexTypes.CFuncTypeArg("encoding", PyrexTypes.c_char_ptr_type, None),
                        PyrexTypes.CFuncTypeArg("errors", PyrexTypes.c_char_ptr_type, None),
                        PyrexTypes.CFuncTypeArg("decode_func", self.PyUnicode_DecodeXyz_func_ptr_type, None),
                    ])
            helper_func_type = self._decode_cpp_string_func_type
            utility_code_name = 'decode_cpp_string'
        else:
            # Python bytes/bytearray object
            if not stop:
                stop = ExprNodes.IntNode(node.pos, value='PY_SSIZE_T_MAX',
                                         constant_result=ExprNodes.not_a_constant)
            helper_func_type = self._decode_bytes_func_type
            if string_type is Builtin.bytes_type:
                utility_code_name = 'decode_bytes'
            else:
                utility_code_name = 'decode_bytearray'

        node = ExprNodes.PythonCapiCallNode(
            node.pos, '__Pyx_%s' % utility_code_name, helper_func_type,
            args=[string_node, start, stop, encoding_node, error_handling_node, decode_function],
            is_temp=node.is_temp,
            utility_code=UtilityCode.load_cached(utility_code_name, 'StringTools.c'),
        )

        # wrap any temporaries (e.g. the strlen() input) around the call
        for temp in temps[::-1]:
            node = UtilNodes.EvalWithTempExprNode(temp, node)
        return node
_handle_simple_method_bytearray_decode = _handle_simple_method_bytes_decode
def _find_special_codec_name(self, encoding):
try:
requested_codec = codecs.getencoder(encoding)
except LookupError:
return None
for name, codec in self._special_codecs:
if codec == requested_codec:
if '_' in name:
name = ''.join([s.capitalize()
for s in name.split('_')])
return name
return None
def _unpack_encoding_and_error_mode(self, pos, args):
null_node = ExprNodes.NullNode(pos)
if len(args) >= 2:
encoding, encoding_node = self._unpack_string_and_cstring_node(args[1])
if encoding_node is None:
return None
else:
encoding = None
encoding_node = null_node
if len(args) == 3:
error_handling, error_handling_node = self._unpack_string_and_cstring_node(args[2])
if error_handling_node is None:
return None
if error_handling == 'strict':
error_handling_node = null_node
else:
error_handling = 'strict'
error_handling_node = null_node
return (encoding, encoding_node, error_handling, error_handling_node)
    def _unpack_string_and_cstring_node(self, node):
        """Return (string_value, cstring_node) for a string argument node.

        The first item is the compile-time string value where known (else
        None), the second a node of C char* type, or None if the argument
        cannot be represented as a C string.
        """
        if isinstance(node, ExprNodes.CoerceToPyTypeNode):
            node = node.arg
        if isinstance(node, ExprNodes.UnicodeNode):
            encoding = node.value
            node = ExprNodes.BytesNode(
                node.pos, value=BytesLiteral(encoding.utf8encode()),
                type=PyrexTypes.c_char_ptr_type)
        elif isinstance(node, (ExprNodes.StringNode, ExprNodes.BytesNode)):
            # ISO-8859-1 maps each byte to the code point of the same value
            encoding = node.value.decode('ISO-8859-1')
            node = ExprNodes.BytesNode(
                node.pos, value=node.value, type=PyrexTypes.c_char_ptr_type)
        elif node.type is Builtin.bytes_type:
            encoding = None
            node = node.coerce_to(PyrexTypes.c_char_ptr_type, self.current_env())
        elif node.type.is_string:
            encoding = None
        else:
            encoding = node = None
        return encoding, node
    def _handle_simple_method_str_endswith(self, node, function, args, is_unbound_method):
        # direction +1 selects matching at the end of the string
        return self._inject_tailmatch(
            node, function, args, is_unbound_method, 'str', 'endswith',
            str_tailmatch_utility_code, +1)
    def _handle_simple_method_str_startswith(self, node, function, args, is_unbound_method):
        # direction -1 selects matching at the start of the string
        return self._inject_tailmatch(
            node, function, args, is_unbound_method, 'str', 'startswith',
            str_tailmatch_utility_code, -1)
    def _handle_simple_method_bytes_endswith(self, node, function, args, is_unbound_method):
        # direction +1 selects matching at the end of the string
        return self._inject_tailmatch(
            node, function, args, is_unbound_method, 'bytes', 'endswith',
            bytes_tailmatch_utility_code, +1)
    def _handle_simple_method_bytes_startswith(self, node, function, args, is_unbound_method):
        # direction -1 selects matching at the start of the string
        return self._inject_tailmatch(
            node, function, args, is_unbound_method, 'bytes', 'startswith',
            bytes_tailmatch_utility_code, -1)
''' # disabled for now, enable when we consider it worth it (see StringTools.c)
def _handle_simple_method_bytearray_endswith(self, node, function, args, is_unbound_method):
return self._inject_tailmatch(
node, function, args, is_unbound_method, 'bytearray', 'endswith',
bytes_tailmatch_utility_code, +1)
def _handle_simple_method_bytearray_startswith(self, node, function, args, is_unbound_method):
return self._inject_tailmatch(
node, function, args, is_unbound_method, 'bytearray', 'startswith',
bytes_tailmatch_utility_code, -1)
'''
### helpers
    def _substitute_method_call(self, node, function, name, func_type,
                                attr_name, is_unbound_method, args=(),
                                utility_code=None, is_temp=None,
                                may_return_none=ExprNodes.PythonCapiCallNode.may_return_none):
        """Build a PythonCapiCallNode replacing the method call *node*.

        Wraps the 'self' argument in a None check: unbound calls use the
        descriptor-style error message, bound calls raise AttributeError.
        Literal 'self' arguments cannot be None and are left untouched.
        """
        args = list(args)
        if args and not args[0].is_literal:
            self_arg = args[0]
            if is_unbound_method:
                self_arg = self_arg.as_none_safe_node(
                    "descriptor '%s' requires a '%s' object but received a 'NoneType'",
                    format_args=[attr_name, function.obj.name])
            else:
                self_arg = self_arg.as_none_safe_node(
                    "'NoneType' object has no attribute '%s'",
                    error = "PyExc_AttributeError",
                    format_args = [attr_name])
            args[0] = self_arg
        if is_temp is None:
            is_temp = node.is_temp
        return ExprNodes.PythonCapiCallNode(
            node.pos, name, func_type,
            args = args,
            is_temp = is_temp,
            utility_code = utility_code,
            may_return_none = may_return_none,
            result_is_used = node.result_is_used,
            )
def _inject_int_default_argument(self, node, args, arg_index, type, default_value):
assert len(args) >= arg_index
if len(args) == arg_index:
args.append(ExprNodes.IntNode(node.pos, value=str(default_value),
type=type, constant_result=default_value))
else:
args[arg_index] = args[arg_index].coerce_to(type, self.current_env())
    def _inject_bint_default_argument(self, node, args, arg_index, default_value):
        """Ensure the optional boolean argument at *arg_index* is present:
        append a BoolNode carrying *default_value* if it was omitted,
        otherwise coerce the given argument to a boolean.
        """
        assert len(args) >= arg_index
        if len(args) == arg_index:
            default_value = bool(default_value)
            args.append(ExprNodes.BoolNode(node.pos, value=default_value,
                                           constant_result=default_value))
        else:
            args[arg_index] = args[arg_index].coerce_to_boolean(self.current_env())
unicode_tailmatch_utility_code = UtilityCode.load_cached('unicode_tailmatch', 'StringTools.c')
bytes_tailmatch_utility_code = UtilityCode.load_cached('bytes_tailmatch', 'StringTools.c')
str_tailmatch_utility_code = UtilityCode.load_cached('str_tailmatch', 'StringTools.c')
class ConstantFolding(Visitor.VisitorTransform, SkipDeclarations):
"""Calculate the result of constant expressions to store it in
``expr_node.constant_result``, and replace trivial cases by their
constant result.
General rules:
- We calculate float constants to make them available to the
compiler, but we do not aggregate them into a single literal
node to prevent any loss of precision.
- We recursively calculate constants from non-literal nodes to
make them available to the compiler, but we only aggregate
literal nodes at each step. Non-literal nodes are never merged
into a single node.
"""
def __init__(self, reevaluate=False):
"""
The reevaluate argument specifies whether constant values that were
previously computed should be recomputed.
"""
super(ConstantFolding, self).__init__()
self.reevaluate = reevaluate
    def _calculate_const(self, node):
        """Compute and cache the compile-time value of *node* in
        ``node.constant_result`` (or ``not_a_constant`` if unknown).

        A node can only be constant if all of its children are.  Errors
        that user code could legitimately trigger during folding (bad
        index, overflow, ...) simply mean 'not a constant' here; they will
        surface at runtime instead.
        """
        if (not self.reevaluate and
                node.constant_result is not ExprNodes.constant_value_not_set):
            return
        # make sure we always set the value
        not_a_constant = ExprNodes.not_a_constant
        node.constant_result = not_a_constant
        # check if all children are constant
        children = self.visitchildren(node)
        for child_result in children.values():
            if type(child_result) is list:
                for child in child_result:
                    if getattr(child, 'constant_result', not_a_constant) is not_a_constant:
                        return
            elif getattr(child_result, 'constant_result', not_a_constant) is not_a_constant:
                return
        # now try to calculate the real constant value
        try:
            node.calculate_constant_result()
            # if node.constant_result is not ExprNodes.not_a_constant:
            #     print node.__class__.__name__, node.constant_result
        except (ValueError, TypeError, KeyError, IndexError, AttributeError, ArithmeticError):
            # ignore all 'normal' errors here => no constant result
            pass
        except Exception:
            # this looks like a real error
            import traceback, sys
            traceback.print_exc(file=sys.stdout)
    # Literal node classes ordered from narrowest to widest numeric type.
    NODE_TYPE_ORDER = [ExprNodes.BoolNode, ExprNodes.CharNode,
                       ExprNodes.IntNode, ExprNodes.FloatNode]

    def _widest_node_class(self, *nodes):
        """Return the widest literal node class among *nodes* following
        NODE_TYPE_ORDER (bool < char < int < float), or None if any node's
        class is outside of that ordering.
        """
        try:
            return self.NODE_TYPE_ORDER[
                max(map(self.NODE_TYPE_ORDER.index, map(type, nodes)))]
        except ValueError:
            return None
    def _bool_node(self, node, value):
        """Create a constant BoolNode for *value* at the position of *node*."""
        value = bool(value)
        return ExprNodes.BoolNode(node.pos, value=value, constant_result=value)
def visit_ExprNode(self, node):
self._calculate_const(node)
return node
def visit_UnopNode(self, node):
self._calculate_const(node)
if not node.has_constant_result():
if node.operator == '!':
return self._handle_NotNode(node)
return node
if not node.operand.is_literal:
return node
if node.operator == '!':
return self._bool_node(node, node.constant_result)
elif isinstance(node.operand, ExprNodes.BoolNode):
return ExprNodes.IntNode(node.pos, value=str(int(node.constant_result)),
type=PyrexTypes.c_int_type,
constant_result=int(node.constant_result))
elif node.operator == '+':
return self._handle_UnaryPlusNode(node)
elif node.operator == '-':
return self._handle_UnaryMinusNode(node)
return node
_negate_operator = {
'in': 'not_in',
'not_in': 'in',
'is': 'is_not',
'is_not': 'is'
}.get
def _handle_NotNode(self, node):
operand = node.operand
if isinstance(operand, ExprNodes.PrimaryCmpNode):
operator = self._negate_operator(operand.operator)
if operator:
node = copy.copy(operand)
node.operator = operator
node = self.visit_PrimaryCmpNode(node)
return node
def _handle_UnaryMinusNode(self, node):
def _negate(value):
if value.startswith('-'):
value = value[1:]
else:
value = '-' + value
return value
node_type = node.operand.type
if isinstance(node.operand, ExprNodes.FloatNode):
# this is a safe operation
return ExprNodes.FloatNode(node.pos, value=_negate(node.operand.value),
type=node_type,
constant_result=node.constant_result)
if node_type.is_int and node_type.signed or \
isinstance(node.operand, ExprNodes.IntNode) and node_type.is_pyobject:
return ExprNodes.IntNode(node.pos, value=_negate(node.operand.value),
type=node_type,
longness=node.operand.longness,
constant_result=node.constant_result)
return node
def _handle_UnaryPlusNode(self, node):
if (node.operand.has_constant_result() and
node.constant_result == node.operand.constant_result):
return node.operand
return node
def visit_BoolBinopNode(self, node):
self._calculate_const(node)
if not node.operand1.has_constant_result():
return node
if node.operand1.constant_result:
if node.operator == 'and':
return node.operand2
else:
return node.operand1
else:
if node.operator == 'and':
return node.operand1
else:
return node.operand2
def visit_BinopNode(self, node):
self._calculate_const(node)
if node.constant_result is ExprNodes.not_a_constant:
return node
if isinstance(node.constant_result, float):
return node
operand1, operand2 = node.operand1, node.operand2
if not operand1.is_literal or not operand2.is_literal:
return node
# now inject a new constant node with the calculated value
try:
type1, type2 = operand1.type, operand2.type
if type1 is None or type2 is None:
return node
except AttributeError:
return node
if type1.is_numeric and type2.is_numeric:
widest_type = PyrexTypes.widest_numeric_type(type1, type2)
else:
widest_type = PyrexTypes.py_object_type
target_class = self._widest_node_class(operand1, operand2)
if target_class is None:
return node
elif target_class is ExprNodes.BoolNode and node.operator in '+-//<<%**>>':
# C arithmetic results in at least an int type
target_class = ExprNodes.IntNode
elif target_class is ExprNodes.CharNode and node.operator in '+-//<<%**>>&|^':
# C arithmetic results in at least an int type
target_class = ExprNodes.IntNode
if target_class is ExprNodes.IntNode:
unsigned = getattr(operand1, 'unsigned', '') and \
getattr(operand2, 'unsigned', '')
longness = "LL"[:max(len(getattr(operand1, 'longness', '')),
len(getattr(operand2, 'longness', '')))]
new_node = ExprNodes.IntNode(pos=node.pos,
unsigned=unsigned, longness=longness,
value=str(int(node.constant_result)),
constant_result=int(node.constant_result))
# IntNode is smart about the type it chooses, so we just
# make sure we were not smarter this time
if widest_type.is_pyobject or new_node.type.is_pyobject:
new_node.type = PyrexTypes.py_object_type
else:
new_node.type = PyrexTypes.widest_numeric_type(widest_type, new_node.type)
else:
if target_class is ExprNodes.BoolNode:
node_value = node.constant_result
else:
node_value = str(node.constant_result)
new_node = target_class(pos=node.pos, type = widest_type,
value = node_value,
constant_result = node.constant_result)
return new_node
def visit_AddNode(self, node):
self._calculate_const(node)
if node.constant_result is ExprNodes.not_a_constant:
return node
if node.operand1.is_string_literal and node.operand2.is_string_literal:
# some people combine string literals with a '+'
str1, str2 = node.operand1, node.operand2
if isinstance(str1, ExprNodes.UnicodeNode) and isinstance(str2, ExprNodes.UnicodeNode):
bytes_value = None
if str1.bytes_value is not None and str2.bytes_value is not None:
if str1.bytes_value.encoding == str2.bytes_value.encoding:
bytes_value = BytesLiteral(str1.bytes_value + str2.bytes_value)
bytes_value.encoding = str1.bytes_value.encoding
string_value = EncodedString(node.constant_result)
return ExprNodes.UnicodeNode(
str1.pos, value=string_value, constant_result=node.constant_result, bytes_value=bytes_value)
elif isinstance(str1, ExprNodes.BytesNode) and isinstance(str2, ExprNodes.BytesNode):
if str1.value.encoding == str2.value.encoding:
bytes_value = BytesLiteral(node.constant_result)
bytes_value.encoding = str1.value.encoding
return ExprNodes.BytesNode(str1.pos, value=bytes_value, constant_result=node.constant_result)
# all other combinations are rather complicated
# to get right in Py2/3: encodings, unicode escapes, ...
return self.visit_BinopNode(node)
def visit_MulNode(self, node):
self._calculate_const(node)
if node.operand1.is_sequence_constructor:
return self._calculate_constant_seq(node, node.operand1, node.operand2)
if isinstance(node.operand1, ExprNodes.IntNode) and \
node.operand2.is_sequence_constructor:
return self._calculate_constant_seq(node, node.operand2, node.operand1)
return self.visit_BinopNode(node)
def _calculate_constant_seq(self, node, sequence_node, factor):
if factor.constant_result != 1 and sequence_node.args:
if isinstance(factor.constant_result, (int, long)) and factor.constant_result <= 0:
del sequence_node.args[:]
sequence_node.mult_factor = None
elif sequence_node.mult_factor is not None:
if (isinstance(factor.constant_result, (int, long)) and
isinstance(sequence_node.mult_factor.constant_result, (int, long))):
value = sequence_node.mult_factor.constant_result * factor.constant_result
sequence_node.mult_factor = ExprNodes.IntNode(
sequence_node.mult_factor.pos,
value=str(value), constant_result=value)
else:
# don't know if we can combine the factors, so don't
return self.visit_BinopNode(node)
else:
sequence_node.mult_factor = factor
return sequence_node
def visit_PrimaryCmpNode(self, node):
# calculate constant partial results in the comparison cascade
self.visitchildren(node, ['operand1'])
left_node = node.operand1
cmp_node = node
while cmp_node is not None:
self.visitchildren(cmp_node, ['operand2'])
right_node = cmp_node.operand2
cmp_node.constant_result = not_a_constant
if left_node.has_constant_result() and right_node.has_constant_result():
try:
cmp_node.calculate_cascaded_constant_result(left_node.constant_result)
except (ValueError, TypeError, KeyError, IndexError, AttributeError, ArithmeticError):
pass # ignore all 'normal' errors here => no constant result
left_node = right_node
cmp_node = cmp_node.cascade
if not node.cascade:
if node.has_constant_result():
return self._bool_node(node, node.constant_result)
return node
# collect partial cascades: [[value, CmpNode...], [value, CmpNode, ...], ...]
cascades = [[node.operand1]]
final_false_result = []
def split_cascades(cmp_node):
if cmp_node.has_constant_result():
if not cmp_node.constant_result:
# False => short-circuit
final_false_result.append(self._bool_node(cmp_node, False))
return
else:
# True => discard and start new cascade
cascades.append([cmp_node.operand2])
else:
# not constant => append to current cascade
cascades[-1].append(cmp_node)
if cmp_node.cascade:
split_cascades(cmp_node.cascade)
split_cascades(node)
cmp_nodes = []
for cascade in cascades:
if len(cascade) < 2:
continue
cmp_node = cascade[1]
pcmp_node = ExprNodes.PrimaryCmpNode(
cmp_node.pos,
operand1=cascade[0],
operator=cmp_node.operator,
operand2=cmp_node.operand2,
constant_result=not_a_constant)
cmp_nodes.append(pcmp_node)
last_cmp_node = pcmp_node
for cmp_node in cascade[2:]:
last_cmp_node.cascade = cmp_node
last_cmp_node = cmp_node
last_cmp_node.cascade = None
if final_false_result:
# last cascade was constant False
cmp_nodes.append(final_false_result[0])
elif not cmp_nodes:
# only constants, but no False result
return self._bool_node(node, True)
node = cmp_nodes[0]
if len(cmp_nodes) == 1:
if node.has_constant_result():
return self._bool_node(node, node.constant_result)
else:
for cmp_node in cmp_nodes[1:]:
node = ExprNodes.BoolBinopNode(
node.pos,
operand1=node,
operator='and',
operand2=cmp_node,
constant_result=not_a_constant)
return node
def visit_CondExprNode(self, node):
self._calculate_const(node)
if not node.test.has_constant_result():
return node
if node.test.constant_result:
return node.true_val
else:
return node.false_val
def visit_IfStatNode(self, node):
self.visitchildren(node)
# eliminate dead code based on constant condition results
if_clauses = []
for if_clause in node.if_clauses:
condition = if_clause.condition
if condition.has_constant_result():
if condition.constant_result:
# always true => subsequent clauses can safely be dropped
node.else_clause = if_clause.body
break
# else: false => drop clause
else:
# unknown result => normal runtime evaluation
if_clauses.append(if_clause)
if if_clauses:
node.if_clauses = if_clauses
return node
elif node.else_clause:
return node.else_clause
else:
return Nodes.StatListNode(node.pos, stats=[])
def visit_SliceIndexNode(self, node):
self._calculate_const(node)
# normalise start/stop values
if node.start is None or node.start.constant_result is None:
start = node.start = None
else:
start = node.start.constant_result
if node.stop is None or node.stop.constant_result is None:
stop = node.stop = None
else:
stop = node.stop.constant_result
# cut down sliced constant sequences
if node.constant_result is not not_a_constant:
base = node.base
if base.is_sequence_constructor and base.mult_factor is None:
base.args = base.args[start:stop]
return base
elif base.is_string_literal:
base = base.as_sliced_node(start, stop)
if base is not None:
return base
return node
def visit_ComprehensionNode(self, node):
self.visitchildren(node)
if isinstance(node.loop, Nodes.StatListNode) and not node.loop.stats:
# loop was pruned already => transform into literal
if node.type is Builtin.list_type:
return ExprNodes.ListNode(
node.pos, args=[], constant_result=[])
elif node.type is Builtin.set_type:
return ExprNodes.SetNode(
node.pos, args=[], constant_result=set())
elif node.type is Builtin.dict_type:
return ExprNodes.DictNode(
node.pos, key_value_pairs=[], constant_result={})
return node
def visit_ForInStatNode(self, node):
self.visitchildren(node)
sequence = node.iterator.sequence
if isinstance(sequence, ExprNodes.SequenceNode):
if not sequence.args:
if node.else_clause:
return node.else_clause
else:
# don't break list comprehensions
return Nodes.StatListNode(node.pos, stats=[])
# iterating over a list literal? => tuples are more efficient
if isinstance(sequence, ExprNodes.ListNode):
node.iterator.sequence = sequence.as_tuple()
return node
def visit_WhileStatNode(self, node):
self.visitchildren(node)
if node.condition and node.condition.has_constant_result():
if node.condition.constant_result:
node.condition = None
node.else_clause = None
else:
return node.else_clause
return node
def visit_ExprStatNode(self, node):
self.visitchildren(node)
if not isinstance(node.expr, ExprNodes.ExprNode):
# ParallelRangeTransform does this ...
return node
# drop unused constant expressions
if node.expr.has_constant_result():
return None
return node
# in the future, other nodes can have their own handler method here
# that can replace them with a constant result node
visit_Node = Visitor.VisitorTransform.recurse_to_children
class FinalOptimizePhase(Visitor.CythonTransform, Visitor.NodeRefCleanupMixin):
"""
This visitor handles several commuting optimizations, and is run
just before the C code generation phase.
The optimizations currently implemented in this class are:
- eliminate None assignment and refcounting for first assignment.
- isinstance -> typecheck for cdef types
- eliminate checks for None and/or types that became redundant after tree changes
- replace Python function calls that look like method calls by a faster PyMethodCallNode
"""
def visit_SingleAssignmentNode(self, node):
"""Avoid redundant initialisation of local variables before their
first assignment.
"""
self.visitchildren(node)
if node.first:
lhs = node.lhs
lhs.lhs_of_first_assignment = True
return node
def visit_SimpleCallNode(self, node):
"""
Replace generic calls to isinstance(x, type) by a more efficient type check.
Replace likely Python method calls by a specialised PyMethodCallNode.
"""
self.visitchildren(node)
function = node.function
if function.type.is_cfunction and function.is_name:
if function.name == 'isinstance' and len(node.args) == 2:
type_arg = node.args[1]
if type_arg.type.is_builtin_type and type_arg.type.name == 'type':
cython_scope = self.context.cython_scope
function.entry = cython_scope.lookup('PyObject_TypeCheck')
function.type = function.entry.type
PyTypeObjectPtr = PyrexTypes.CPtrType(cython_scope.lookup('PyTypeObject').type)
node.args[1] = ExprNodes.CastNode(node.args[1], PyTypeObjectPtr)
elif (self.current_directives.get("optimize.unpack_method_calls")
and node.is_temp and function.type.is_pyobject):
# optimise simple Python methods calls
if isinstance(node.arg_tuple, ExprNodes.TupleNode) and not (
node.arg_tuple.mult_factor or (node.arg_tuple.is_literal and node.arg_tuple.args)):
# simple call, now exclude calls to objects that are definitely not methods
may_be_a_method = True
if function.type is Builtin.type_type:
may_be_a_method = False
elif function.is_name:
if function.entry.is_builtin:
may_be_a_method = False
elif function.cf_state:
# local functions/classes are definitely not methods
non_method_nodes = (ExprNodes.PyCFunctionNode, ExprNodes.ClassNode, ExprNodes.Py3ClassNode)
may_be_a_method = any(
assignment.rhs and not isinstance(assignment.rhs, non_method_nodes)
for assignment in function.cf_state)
if may_be_a_method:
node = self.replace(node, ExprNodes.PyMethodCallNode.from_node(
node, function=function, arg_tuple=node.arg_tuple, type=node.type))
return node
def visit_PyTypeTestNode(self, node):
"""Remove tests for alternatively allowed None values from
type tests when we know that the argument cannot be None
anyway.
"""
self.visitchildren(node)
if not node.notnone:
if not node.arg.may_be_none():
node.notnone = True
return node
def visit_NoneCheckNode(self, node):
"""Remove None checks from expressions that definitely do not
carry a None value.
"""
self.visitchildren(node)
if not node.arg.may_be_none():
return node.arg
return node
class ConsolidateOverflowCheck(Visitor.CythonTransform):
    """
    This class facilitates the sharing of overflow checking among all nodes
    of a nested arithmetic expression. For example, given the expression
    a*b + c, where a, b, and c are all possibly overflowing ints, the entire
    sequence will be evaluated and the overflow bit checked only at the end.
    """
    # Outermost overflow-checked arithmetic node of the expression currently
    # being visited, or None while outside of such an expression.
    overflow_bit_node = None

    def visit_Node(self, node):
        """Generic visit: a non-arithmetic node interrupts the chain, so its
        children start a fresh overflow context of their own.
        """
        if self.overflow_bit_node is not None:
            saved = self.overflow_bit_node
            self.overflow_bit_node = None
            self.visitchildren(node)
            self.overflow_bit_node = saved
        else:
            self.visitchildren(node)
        return node

    def visit_NumBinopNode(self, node):
        """Fold the overflow checks of a nested arithmetic expression into
        its outermost node: inner nodes only set the shared overflow bit,
        which is then tested once at the top level.
        """
        if node.overflow_check and node.overflow_fold:
            top_level_overflow = self.overflow_bit_node is None
            if top_level_overflow:
                self.overflow_bit_node = node
            else:
                # inner node: defer the actual check to the top-level node
                node.overflow_bit_node = self.overflow_bit_node
                node.overflow_check = False
            self.visitchildren(node)
            if top_level_overflow:
                self.overflow_bit_node = None
        else:
            self.visitchildren(node)
        return node
|
ABcDexter/cython
|
Cython/Compiler/Optimize.py
|
Python
|
apache-2.0
| 169,504 | 0.004501 |
"""Views for the ``event_rsvp`` app."""
from django.contrib.auth.decorators import login_required
from django.core.urlresolvers import reverse
from django.http import Http404, HttpResponseRedirect
from django.utils import timezone
from django.utils.decorators import method_decorator
from django.views.generic import (
CreateView,
DeleteView,
DetailView,
ListView,
UpdateView,
)
from .forms import EventForm, GuestForm
from .models import Event, Guest
from .signals import post_guest_create
#--------#
# Mixins #
#--------#
class StaffMixin(object):
    """Mixin to let only staff members pass.

    Note that a 404 (not a 403) is raised for authenticated non-staff
    users.
    """
    @method_decorator(login_required)
    def dispatch(self, request, *args, **kwargs):
        if not request.user.is_staff:
            raise Http404
        return super(StaffMixin, self).dispatch(request, *args, **kwargs)
class EventViewMixin(object):
"""Mixin to handle event-specific options."""
model = Event
form_class = EventForm
def get_form_kwargs(self):
kwargs = super(EventViewMixin, self).get_form_kwargs()
kwargs.update({'created_by': self.request.user})
return kwargs
def get_success_url(self):
return reverse('rsvp_event_staff')
class EventSecurityMixin(object):
    """Mixin to handle event-specific security options.

    Verifies that the year/month/day captured from the URL match the
    event's actual start date and redirects to the canonical URL
    otherwise.  Subclasses must define ``url_mode`` ('absolute', 'update'
    or 'delete'); it selects the ``get_<url_mode>_url`` method used to
    build the redirect target.
    """
    def dispatch(self, request, *args, **kwargs):
        self.kwargs = kwargs
        self.object = self.get_object()
        date = self.object.start
        # Check the right starting date within the slug
        if (date.year != int(kwargs.get('year'))
                or date.month != int(kwargs.get('month'))
                or date.day != int(kwargs.get('day'))):
            # stale/forged date in the URL => redirect to the canonical URL
            redirect_url = getattr(self.object, 'get_{0}_url'.format(
                self.url_mode))
            return HttpResponseRedirect(redirect_url())
        return super(EventSecurityMixin, self).dispatch(request, *args,
                                                        **kwargs)
class GuestViewMixin(object):
"""Mixin to handle guest-specific functions."""
model = Guest
form_class = GuestForm
def dispatch(self, request, *args, **kwargs):
try:
self.event = Event.objects.get(slug=kwargs.get('event_slug'))
except Event.DoesNotExist:
raise Http404
return super(GuestViewMixin, self).dispatch(request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super(GuestViewMixin, self).get_context_data(**kwargs)
context.update({'event': self.event, 'user': self.request.user})
if (self.request.user.is_authenticated()
or self.event.allow_anonymous_rsvp):
context.update({'permission_to_book': True})
return context
def get_form_kwargs(self):
kwargs = super(GuestViewMixin, self).get_form_kwargs()
kwargs.update({'event': self.event, 'user': self.request.user})
return kwargs
def get_success_url(self):
return self.event.get_absolute_url()
class GuestSecurityMixin(object):
    """Mixin to handle guest-specific security options."""
    def get_object(self, *args, **kwargs):
        """Return the guest, raising a 404 if it belongs to a different
        event than the one resolved from the URL (``self.event``)."""
        obj = super(GuestSecurityMixin, self).get_object(*args, **kwargs)
        if obj.event != self.event:
            raise Http404
        return obj
#--------#
# Views #
#--------#
class EventListView(ListView):
"""List view to display upcoming events."""
def get_queryset(self):
return Event.objects.filter(start__gt=timezone.now(),
is_published=True)
def get_context_data(self, **kwargs):
context = super(EventListView, self).get_context_data(**kwargs)
if self.request.user.is_authenticated():
context.update({
'my_participations': self.request.user.guest_set.all()})
return context
class EventDetailView(EventSecurityMixin, EventViewMixin, DetailView):
"""Detail view to display information of an event."""
url_mode = 'absolute'
def dispatch(self, request, *args, **kwargs):
self.kwargs = kwargs
self.object = self.get_object()
if not self.object.is_published and not request.user.is_staff:
raise Http404
return super(EventDetailView, self).dispatch(request, *args, **kwargs)
class EventCreateView(StaffMixin, EventViewMixin, CreateView):
"""Create view to handle information of an event."""
pass
class EventUpdateView(StaffMixin, EventSecurityMixin, EventViewMixin,
UpdateView):
"""Update view to handle information of an event."""
url_mode = 'update'
class EventDeleteView(StaffMixin, EventSecurityMixin, EventViewMixin,
DeleteView):
"""Delete view to remove the relevant event."""
url_mode = 'delete'
class EventCreateFromTemplateView(StaffMixin, EventViewMixin, CreateView):
"""Create view to create information of an event from a template."""
@method_decorator(login_required)
def dispatch(self, request, *args, **kwargs):
try:
# Check if it's really a template
self.template = Event.objects.get(pk=kwargs.get('pk'),
template_name__gt='')
except Event.DoesNotExist:
raise Http404
return super(EventCreateFromTemplateView, self).dispatch(
request, *args, **kwargs)
def get_form_kwargs(self):
kwargs = super(EventCreateFromTemplateView, self).get_form_kwargs()
kwargs.update({'instance': self.template,
'create_from_template': True})
return kwargs
class StaffDashboardView(StaffMixin, ListView):
    """View to display event related functions and lists."""
    model = Event
    template_name = 'event_rsvp/staff_dashboard.html'

    def get_context_data(self, **kwargs):
        """Split the event list into templates, upcoming, currently
        running and past events for the dashboard template."""
        context = super(StaffDashboardView, self).get_context_data(**kwargs)
        # Events carrying a template_name are reusable templates rather
        # than real events, so they are separated out first.
        templates = self.object_list.exclude(template_name__exact='')
        self.object_list = self.object_list.filter(template_name__exact='')
        context.update({
            'upcoming': self.object_list.filter(start__gt=timezone.now()),
            'current': self.object_list.filter(start__lte=timezone.now(),
                                               end__gte=timezone.now()),
            'past': self.object_list.filter(end__lt=timezone.now()),
            'templates': templates,
        })
        return context
class GuestDetailView(StaffMixin, GuestSecurityMixin, GuestViewMixin,
DetailView):
"""View to display guest related functions and lists."""
pass
class GuestCreateView(GuestViewMixin, CreateView):
"""Create view to add a guest to an event."""
def form_valid(self, form):
resp = super(GuestCreateView, self).form_valid(form)
post_guest_create.send(
sender=self, request=self.request, user=form.user,
event=form.event)
return resp
def get_form_kwargs(self):
kwargs = super(GuestCreateView, self).get_form_kwargs()
if self.request.user.is_authenticated():
kwargs.update({'initial': {
'name': self.request.user.get_full_name(),
'email': self.request.user.email}})
return kwargs
class GuestUpdateView(GuestSecurityMixin, GuestViewMixin, UpdateView):
    """Update view to handle a guest.

    Staff members may edit any guest.  Regular users may only edit a
    guest entry linked to their own account; anonymous guest entries
    (``user`` unset) are staff-only.
    """
    @method_decorator(login_required)
    def dispatch(self, request, *args, **kwargs):
        try:
            self.event = Event.objects.get(slug=kwargs.get('event_slug'))
        except Event.DoesNotExist:
            raise Http404
        self.kwargs = kwargs
        # get_object() (GuestSecurityMixin) relies on self.event, so the
        # event lookup above must happen first.
        self.object = self.get_object()
        # BUGFIX: the previous condition joined the ownership checks with
        # `and`, so as soon as the guest had *any* user attached, every
        # authenticated user could edit it.  Non-staff users must own the
        # guest entry to proceed.
        if not request.user.is_staff and (
                not self.object.user or self.object.user != request.user):
            raise Http404
        # Deliberately skip GuestViewMixin.dispatch (it would repeat the
        # event lookup) and continue with UpdateView's dispatch.
        return super(GuestViewMixin, self).dispatch(request, *args, **kwargs)
class GuestDeleteView(StaffMixin, GuestViewMixin, GuestSecurityMixin,
DeleteView):
"""Delete view to remove the relevant guest."""
pass
|
bitmazk/django-event-rsvp
|
event_rsvp/views.py
|
Python
|
mit
| 8,144 | 0.000491 |
#
# Copyright (c) Elliot Peele <elliot@bentlogic.net>
#
# This program is distributed under the terms of the MIT License as found
# in a file called LICENSE. If it is not present, the license
# is always available at http://www.opensource.org/licenses/mit-license.php.
#
# This program is distributed in the hope that it will be useful, but
# without any warrenty; without even the implied warranty of merchantability
# or fitness for a particular purpose. See the MIT License for full details.
#
import time
from datetime import datetime
from base64 import b64decode
from sqlalchemy import Column
from sqlalchemy import ForeignKey
from sqlalchemy import Binary
from sqlalchemy import String
from sqlalchemy import Integer
from sqlalchemy import Boolean
from sqlalchemy import DateTime
from sqlalchemy import Unicode
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import backref
from sqlalchemy.orm import relationship
from sqlalchemy.orm import sessionmaker
from sqlalchemy.orm import scoped_session
from sqlalchemy.orm import synonym
from zope.sqlalchemy import ZopeTransactionExtension
from cryptography.hazmat.primitives.kdf.scrypt import Scrypt
from cryptography.hazmat.backends import default_backend
from .util import oauth2_settings
from .generators import gen_token
from .generators import gen_client_id
from .generators import gen_client_secret
DBSession = scoped_session(sessionmaker(extension=ZopeTransactionExtension()))
Base = declarative_base()
backend = default_backend()
class Oauth2Client(Base):
    """A registered OAuth2 client application.

    The client secret is never stored in clear text: assigning to
    ``client_secret`` stores a Scrypt hash of the value, salted with
    either the per-instance salt or the ``oauth2_provider.salt`` setting.
    """
    __tablename__ = 'oauth2_provider_clients'
    id = Column(Integer, primary_key=True)
    client_id = Column(Unicode(64), unique=True, nullable=False)
    _client_secret = Column(Binary(255), nullable=False)
    revoked = Column(Boolean, default=False)
    revocation_date = Column(DateTime)

    # Optional per-instance base64-encoded salt; falls back to the
    # oauth2_provider.salt configuration value when not set.
    _salt = None

    def __init__(self, salt=None):
        self._salt = salt
        self.client_id = gen_client_id()
        self.client_secret = gen_client_secret()

    def new_client_secret(self):
        """Generate, store (hashed) and return a fresh client secret."""
        secret = gen_client_secret()
        self.client_secret = secret
        return secret

    def _get_client_secret(self):
        # Returns the stored Scrypt hash, not the clear-text secret.
        return self._client_secret

    def _set_client_secret(self, client_secret):
        if self._salt:
            salt = b64decode(self._salt.encode('utf-8'))
        else:
            try:
                if not oauth2_settings('salt'):
                    raise ValueError(
                        'oauth2_provider.salt configuration required.'
                    )
                salt = b64decode(oauth2_settings('salt').encode('utf-8'))
            except AttributeError:
                # NOTE(review): presumably raised by oauth2_settings() when
                # no settings registry is configured; the secret is then
                # silently left unset — confirm this is intended.
                return
        kdf = Scrypt(
            salt=salt,
            length=64,
            n=2 ** 14,
            r=8,
            p=1,
            backend=backend
        )
        try:
            # Accept str input; bytes input raises TypeError and is kept.
            client_secret = bytes(client_secret, 'utf-8')
        except TypeError:
            pass
        self._client_secret = kdf.derive(client_secret)

    # Expose the hashed column through a read/write property so that
    # ``client.client_secret = ...`` transparently hashes the value.
    client_secret = synonym('_client_secret', descriptor=property(
        _get_client_secret, _set_client_secret))

    def revoke(self):
        """Mark the client as revoked and record the revocation time."""
        self.revoked = True
        self.revocation_date = datetime.utcnow()

    def isRevoked(self):
        """Return whether the client has been revoked."""
        return self.revoked
class Oauth2RedirectUri(Base):
__tablename__ = 'oauth2_provider_redirect_uris'
id = Column(Integer, primary_key=True)
uri = Column(Unicode(256), unique=True, nullable=False)
client_id = Column(Integer, ForeignKey(Oauth2Client.id))
client = relationship(Oauth2Client, backref=backref('redirect_uris'))
def __init__(self, client, uri):
self.client = client
self.uri = uri
class Oauth2Code(Base):
    """Short-lived authorization code issued to a client for a user.

    ``expires_in`` defaults to ten minutes; ``isRevoked`` lazily revokes
    the code once that window has passed.
    """
    __tablename__ = 'oauth2_provider_codes'
    id = Column(Integer, primary_key=True)
    user_id = Column(Integer, nullable=False)
    authcode = Column(Unicode(64), unique=True, nullable=False)
    expires_in = Column(Integer, nullable=False, default=10*60)
    revoked = Column(Boolean, default=False)
    revocation_date = Column(DateTime)
    creation_date = Column(DateTime, default=datetime.utcnow)

    client_id = Column(Integer, ForeignKey(Oauth2Client.id))
    client = relationship(Oauth2Client, backref=backref('authcode'))

    def __init__(self, client, user_id):
        self.client = client
        self.user_id = user_id
        self.authcode = gen_token(self.client)

    def revoke(self):
        """Mark the code as revoked and record when it happened."""
        self.revoked = True
        self.revocation_date = datetime.utcnow()

    def isRevoked(self):
        """Return True if the code is revoked, expiring it first if needed.

        BUGFIX: this previously read the non-existent attribute
        ``create_date`` (the column is ``creation_date``) and called the
        misspelled ``datetime.frometimestamp``; both raised
        AttributeError.  It now mirrors Oauth2Token.isRevoked.
        """
        expiry = time.mktime(self.creation_date.timetuple()) + self.expires_in
        if datetime.fromtimestamp(expiry) < datetime.utcnow():
            self.revoke()
        return self.revoked
class Oauth2Token(Base):
    """An access/refresh token pair issued to a client for a user.

    ``expires_in`` defaults to one hour; ``isRevoked`` lazily revokes the
    token once that window has passed.
    """
    __tablename__ = 'oauth2_provider_tokens'
    id = Column(Integer, primary_key=True)
    user_id = Column(Integer, nullable=False)
    access_token = Column(Unicode(64), unique=True, nullable=False)
    refresh_token = Column(Unicode(64), unique=True, nullable=False)
    expires_in = Column(Integer, nullable=False, default=60*60)
    revoked = Column(Boolean, default=False)
    revocation_date = Column(DateTime)
    creation_date = Column(DateTime, default=datetime.utcnow)

    client_id = Column(Integer, ForeignKey(Oauth2Client.id))
    client = relationship(Oauth2Client, backref=backref('tokens'))

    def __init__(self, client, user_id):
        self.client = client
        self.user_id = user_id
        self.access_token = gen_token(self.client)
        self.refresh_token = gen_token(self.client)

    def revoke(self):
        """Mark the token as revoked and record when it happened."""
        self.revoked = True
        self.revocation_date = datetime.utcnow()

    def isRevoked(self):
        """Return True if the token is revoked, expiring it first if needed."""
        expiry = time.mktime(self.creation_date.timetuple()) + self.expires_in
        if datetime.fromtimestamp(expiry) < datetime.utcnow():
            self.revoke()
        return self.revoked

    def refresh(self):
        """
        Generate a new token for this client.
        """
        cls = self.__class__
        self.revoke()
        return cls(self.client, self.user_id)

    def asJSON(self, **kwargs):
        """Return a dict representation of the token.  Extra keyword
        arguments are included, with the token fields taking precedence
        over same-named keys."""
        token = {
            'access_token': self.access_token,
            'refresh_token': self.refresh_token,
            'user_id': self.user_id,
            'expires_in': self.expires_in,
        }
        kwargs.update(token)
        return kwargs
def initialize_sql(engine, settings):
    """Bind the session and metadata to *engine* and create all tables.

    NOTE(review): *settings* is currently unused; presumably kept for
    interface compatibility with the Pyramid app factory — confirm.
    """
    DBSession.configure(bind=engine)
    Base.metadata.bind = engine
    Base.metadata.create_all(engine)
|
elliotpeele/pyramid_oauth2_provider
|
pyramid_oauth2_provider/models.py
|
Python
|
mit
| 6,539 | 0 |
import numpy as np
from ..utils import check_random_state
class ChainWorld(object):
    """A one-dimensional chain MDP with terminal states at both ends.

    The agent starts on the junction state between a left arm of
    ``left_length`` states and a right arm of ``right_length`` states.
    Action 0 moves left, action 1 moves right.  Reaching either end of
    the chain pays the corresponding terminal reward; every other
    transition pays ``on_chain_reward``.  With probability
    ``p_return_to_start`` a step teleports the agent back to the start
    instead of moving.
    """
    def __init__(self, left_length, left_reward, right_length, right_reward,
                 on_chain_reward, p_return_to_start, random_state=None):
        self.left_length = left_length
        self.left_reward = left_reward
        self.right_length = right_length
        self.right_reward = right_reward
        self.on_chain_reward = on_chain_reward
        self.p_return_to_start = p_return_to_start
        # One state per chain position plus the starting junction.
        self.num_states = self.left_length + self.right_length + 1
        self.num_actions = 2
        self.random_state = check_random_state(random_state)
        self.reset()

    def reset(self):
        """Put the agent back on the junction state."""
        self.state = self.left_length

    def observe(self):
        """Return the current (fully observable) state index."""
        return self.state

    def is_terminal(self, state):
        """Return True for the two end-of-chain states."""
        return state in (0, self.num_states - 1)

    def perform_action(self, action):
        """Apply *action* and return the ``(next_state, reward)`` pair."""
        teleport = (self.p_return_to_start
                    and self.random_state.rand() < self.p_return_to_start)
        if teleport:
            self.reset()
        else:
            self.state += 1 if action else -1
        if self.state == 0:
            reward = self.left_reward
        elif self.state == self.num_states - 1:
            reward = self.right_reward
        else:
            reward = self.on_chain_reward
        return self.observe(), reward

    def get_max_reward(self):
        """Return the largest terminal reward."""
        return max(self.left_reward, self.right_reward)
|
dustinvtran/bayesrl
|
bayesrl/environments/chainworld.py
|
Python
|
mit
| 1,441 | 0.002082 |
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
    """Build the shared Tatooine Hutt-assassin large camp POI building.

    Called by the template loader; *kernel* is supplied by the engine and
    unused by this particular template.  Keep hand edits between the
    BEGIN/END MODIFICATIONS markers, per the autogeneration notice above.
    """
    result = Building()

    result.template = "object/building/poi/shared_tatooine_hutt_assassin_camp_large1.iff"
    result.attribute_template_id = -1
    result.stfName("poi_n","base_poi_building")

    #### BEGIN MODIFICATIONS ####
    #### END MODIFICATIONS ####

    return result
|
obi-two/Rebelion
|
data/scripts/templates/object/building/poi/shared_tatooine_hutt_assassin_camp_large1.py
|
Python
|
mit
| 465 | 0.047312 |
"""Preprocessing with artifact detection, SSP, and ICA."""
# Authors: Alexandre Gramfort <alexandre.gramfort@inria.fr>
# Matti Hämäläinen <msh@nmr.mgh.harvard.edu>
# Martin Luessi <mluessi@nmr.mgh.harvard.edu>
# Denis Engemann <denis.engemann@gmail.com>
#
# License: BSD-3-Clause
from .annotate_amplitude import annotate_amplitude
from .flat import annotate_flat
from .maxfilter import apply_maxfilter
from .ssp import compute_proj_ecg, compute_proj_eog
from .eog import find_eog_events, create_eog_epochs
from .ecg import find_ecg_events, create_ecg_epochs
from .ica import (ICA, ica_find_eog_events, ica_find_ecg_events,
get_score_funcs, read_ica, corrmap, read_ica_eeglab)
from .otp import oversampled_temporal_projection
from ._peak_finder import peak_finder
from .infomax_ import infomax
from .stim import fix_stim_artifact
from .maxwell import (maxwell_filter, find_bad_channels_maxwell,
compute_maxwell_basis)
from .realign import realign_raw
from .xdawn import Xdawn
from ._csd import compute_current_source_density
from . import nirs
from .artifact_detection import (annotate_movement, compute_average_dev_head_t,
annotate_muscle_zscore, annotate_break)
from ._regress import regress_artifact
from ._fine_cal import (compute_fine_calibration, read_fine_calibration,
write_fine_calibration)
from .annotate_nan import annotate_nan
from .interpolate import equalize_bads
from . import ieeg
from ._css import cortical_signal_suppression
|
wmvanvliet/mne-python
|
mne/preprocessing/__init__.py
|
Python
|
bsd-3-clause
| 1,572 | 0 |
from .hub import Hub
class Client(object):
    """Top-level API client bound to a single device MAC address."""

    def __init__(self, mac):
        """Remember the MAC address used to scope every later call."""
        self.__mac__ = mac

    def hub(self, hub):
        """Return a :class:`Hub` handle for *hub* under this client's MAC."""
        mac = self.__mac__
        return Hub(mac, hub)
|
fancl20/pili-python
|
pili/client.py
|
Python
|
mit
| 163 | 0.006135 |
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
import logging
from time import sleep
from sanji.core import Sanji
from sanji.connection.mqtt import Mqtt
REQ_RESOURCE = "/system/firmware"
class View(Sanji):
    """End-to-end test view exercising the /system/firmware resource.

    Each numbered case below publishes a request over MQTT and stops the
    bundle as soon as a response code differs from the expected one.
    """

    # This function will be executed after registered.
    def run(self):
        # NOTE(review): the loop calls self.stop() unconditionally at the
        # end of the first iteration, so the upper bound of 100 is never
        # reached -- confirm whether repeated runs were ever intended.
        for count in xrange(0, 100, 1):

            # Normal CRUD Operation
            # self.publish.[get, put, delete, post](...)
            # One-to-One Messaging
            # self.publish.direct.[get, put, delete, post](...)
            # (if block=True return Message, else return mqtt mid number)
            # Arguments
            # (resource[, data=None, block=True, timeout=60])

            # case 1: test GET
            print "GET %s" % REQ_RESOURCE
            res = self.publish.get(REQ_RESOURCE)
            if res.code != 200:
                print "GET is supported, code 200 is expected"
                print res.to_json()
                self.stop()

            # case 2: test PUT with no data
            sleep(2)
            print "PUT %s" % REQ_RESOURCE
            res = self.publish.put(REQ_RESOURCE, None)
            if res.code != 400:
                print "data is required, code 400 is expected"
                print res.to_json()
                self.stop()

            # case 3: test PUT with empty data (no required attributes)
            sleep(2)
            print "PUT %s" % REQ_RESOURCE
            res = self.publish.put(REQ_RESOURCE, data={})
            if res.code != 400:
                print "data.reset, data.server, or data.upgrade is required," \
                      " code 400 is expected"
                print res.to_json()
                self.stop()

            # case 4: test PUT with reset=0
            sleep(2)
            print "PUT %s" % REQ_RESOURCE
            res = self.publish.put(REQ_RESOURCE, data={"reset": 0})
            if res.code != 200:
                print "data.reset=0 should reply code 200"
                print res.to_json()
                self.stop()

            # case 5: test PUT with reset=1 (setdef)
            sleep(2)
            print "PUT %s" % REQ_RESOURCE
            res = self.publish.put(REQ_RESOURCE, data={"reset": 1})
            if res.code != 200:
                # NOTE(review): unlike the other failure branches this one
                # does not self.stop() -- presumably because reset=1
                # restarts the device anyway; confirm.
                print "data.reset=1 should reply code 200 and cause setdef"
                print res.to_json()

            # case 6: test PUT with server="something"
            sleep(2)
            print "PUT %s" % REQ_RESOURCE
            res = self.publish.put(REQ_RESOURCE,
                                   data={"server": "test.server"})
            if res.code != 200:
                print "data.reset=0 should reply code 200"
                print res.to_json()
                self.stop()
            # Read the resource back to verify the server value persisted.
            print "GET %s" % REQ_RESOURCE
            res = self.publish.get(REQ_RESOURCE)
            if res.code != 200:
                print "GET is supported, code 200 is expected"
                print res.to_json()
                self.stop()
            elif "test.server" != res.data["server"]:
                print "PUT failed, server (%s) should be \"test.server\"" \
                    % res.data["server"]
                self.stop()

            # case 7: test PUT with upgrade=0
            sleep(2)
            print "PUT %s" % REQ_RESOURCE
            res = self.publish.put(REQ_RESOURCE, data={"upgrade": 0})
            if res.code != 200:
                print "data.upgrade=0 should reply code 200"
                print res.to_json()
                self.stop()

            # case 8: test PUT with upgrade=1 (upgradehfm)
            sleep(2)
            print "PUT %s" % REQ_RESOURCE
            res = self.publish.put(REQ_RESOURCE, data={"upgrade": 1})
            if res.code != 200:
                # NOTE(review): no self.stop() here either -- presumably
                # because upgrade=1 triggers a firmware upgrade; confirm.
                print "data.upgrade=1 should reply code 200 and cause" \
                      "upgradehfm"
                print res.to_json()

            # stop the test view
            self.stop()
if __name__ == "__main__":
    # Verbose log format including the source line; level=0 shows every
    # message emitted during the test run.
    FORMAT = "%(asctime)s - %(levelname)s - %(lineno)s - %(message)s"
    logging.basicConfig(level=0, format=FORMAT)
    logger = logging.getLogger("Firmware")
    # Connect over MQTT and run the test view until it stops itself.
    view = View(connection=Mqtt())
    view.start()
|
Sanji-IO/sanji-firmware
|
tests/test_e2e/view_firmware.py
|
Python
|
gpl-2.0
| 4,216 | 0 |
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for api module."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import functools
import gc
import imp
import os
import re
import textwrap
import types
import numpy as np
from tensorflow.python.autograph import utils
from tensorflow.python.autograph.core import ag_ctx
from tensorflow.python.autograph.core import converter
from tensorflow.python.autograph.impl import api
from tensorflow.python.autograph.pyct import inspect_utils
from tensorflow.python.autograph.pyct import parser
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.eager import def_function
from tensorflow.python.eager import function
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import test_util
from tensorflow.python.keras.engine import sequential
from tensorflow.python.keras.layers import core
from tensorflow.python.ops import gen_math_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
from tensorflow.python.util import function_utils
from tensorflow.python.util import tf_decorator
from tensorflow.python.util import tf_inspect
tf = utils.fake_tf()
global_n = 2
class TestResource(object):
  """Simple stateful object used by the leak-detection tests below."""

  def __init__(self):
    # Arbitrary payload; the tests only check that instances get collected.
    self.x = 3
class ApiTest(test.TestCase):
@test_util.run_deprecated_v1
def test_decorator_recursive(self):
class TestClass(object):
def called_member(self, a):
if a < 0:
a = -a
return a
@api.convert(recursive=True)
def test_method(self, x, s, a):
while tf.reduce_sum(x) > s:
x //= self.called_member(a)
return x
tc = TestClass()
with self.cached_session() as sess:
x = tc.test_method(
constant_op.constant([2, 4]), constant_op.constant(1),
constant_op.constant(-2))
self.assertListEqual([0, 1], self.evaluate(x).tolist())
@test_util.run_deprecated_v1
def test_decorator_not_recursive(self):
class TestClass(object):
def called_member(self, a):
return tf.negative(a)
@api.convert(recursive=False)
def test_method(self, x, s, a):
while tf.reduce_sum(x) > s:
x //= self.called_member(a)
return x
tc = TestClass()
with self.cached_session() as sess:
x = tc.test_method(
constant_op.constant([2, 4]), constant_op.constant(1),
constant_op.constant(-2))
self.assertListEqual([0, 1], self.evaluate(x).tolist())
@test_util.run_deprecated_v1
def test_convert_then_do_not_convert(self):
class TestClass(object):
@api.do_not_convert
def called_member(self, a):
return tf.negative(a)
@api.convert(recursive=True)
def test_method(self, x, s, a):
while tf.reduce_sum(x) > s:
x //= self.called_member(a)
return x
tc = TestClass()
x = tc.test_method(
constant_op.constant((2, 4)), constant_op.constant(1),
constant_op.constant(-2))
self.assertAllEqual((0, 1), self.evaluate(x))
@test_util.run_deprecated_v1
def test_decorator_calls_decorated(self):
class TestClass(object):
@api.convert()
def called_member(self, a):
if a < 0:
a = -a
return a
@api.convert(recursive=True)
def test_method(self, x, s, a):
while tf.reduce_sum(x) > s:
x //= self.called_member(a)
return x
tc = TestClass()
with self.cached_session() as sess:
x = tc.test_method(
constant_op.constant([2, 4]), constant_op.constant(1),
constant_op.constant(-2))
self.assertListEqual([0, 1], self.evaluate(x).tolist())
def test_decorator_preserves_argspec(self):
class TestClass(object):
def test_method(self, a):
if a < 0:
a = -a
return a
test_method_converted = api.convert()(test_method)
tc = TestClass()
self.assertListEqual(
list(tf_inspect.getfullargspec(tc.test_method)),
list(tf_inspect.getfullargspec(tc.test_method_converted)))
def test_do_not_convert_argspec(self):
class TestClass(object):
def test_method(self, x, y):
z = x + y
return z
test_method_whitelisted = api.do_not_convert(test_method)
tc = TestClass()
self.assertTrue(tf_inspect.ismethod(tc.test_method_whitelisted))
# Because the wrapped function is not generated, we can't preserve its
# arg spec.
self.assertEqual((),
tuple(function_utils.fn_args(tc.test_method_whitelisted)))
def test_do_not_convert_callable_object(self):
class TestClass(object):
def __call__(self):
return 1
tc = TestClass()
self.assertEqual(1, api.do_not_convert(tc)())
@test_util.run_deprecated_v1
def test_convert_call_site_decorator(self):
class TestClass(object):
def called_member(self, a):
if a < 0:
a = -a
return a
@api.convert(recursive=True)
def test_method(self, x, s, a):
while tf.reduce_sum(x) > s:
x //= api.converted_call(self.called_member,
converter.ConversionOptions(recursive=True),
(a,), {})
return x
tc = TestClass()
x = tc.test_method(
constant_op.constant([2, 4]), constant_op.constant(1),
constant_op.constant(-2))
self.assertListEqual([0, 1], self.evaluate(x).tolist())
def test_converted_call_builtin(self):
x = api.converted_call(range, converter.ConversionOptions(recursive=True),
(3,), {})
self.assertEqual((0, 1, 2), tuple(x))
x = api.converted_call(re.compile,
converter.ConversionOptions(recursive=True),
('mnas_v4_a.*\\/.*(weights|kernel):0$',), {})
self.assertIsNotNone(x.match('mnas_v4_a/weights:0'))
def test_converted_call_function(self):
def test_fn(x):
if x < 0:
return -x
return x
x = api.converted_call(test_fn, converter.ConversionOptions(recursive=True),
(constant_op.constant(-1),), {})
self.assertEqual(1, self.evaluate(x))
@test_util.run_v1_only('b/120545219')
def test_converted_call_functools_partial(self):
def test_fn(x, y, z):
if x < 0:
return -x, -y, -z
return x, y, z
x = api.converted_call(
functools.partial(test_fn, constant_op.constant(-1), z=-3),
converter.ConversionOptions(recursive=True),
(constant_op.constant(-2),), {})
self.assertEqual((1, 2, 3), self.evaluate(x))
x = api.converted_call(
functools.partial(
functools.partial(test_fn, constant_op.constant(-1)), z=-3),
converter.ConversionOptions(recursive=True),
(constant_op.constant(-2),), {})
self.assertEqual((1, 2, 3), self.evaluate(x))
def test_converted_call_method(self):
class TestClass(object):
def __init__(self, x):
self.x = x
def test_method(self):
if self.x < 0:
return -self.x
return self.x
tc = TestClass(constant_op.constant(-1))
x = api.converted_call(tc.test_method,
converter.ConversionOptions(recursive=True), (), {})
self.assertEqual(1, self.evaluate(x))
def test_converted_call_synthetic_method(self):
class TestClass(object):
def __init__(self, x):
self.x = x
def test_function(self):
if self.x < 0:
return -self.x
return self.x
tc = TestClass(constant_op.constant(-1))
test_method = types.MethodType(test_function, tc)
x = api.converted_call(test_method,
converter.ConversionOptions(recursive=True), (), {})
self.assertEqual(1, self.evaluate(x))
def test_converted_call_method_wrapper(self):
class TestClass(object):
def foo(self):
pass
tc = TestClass()
# `method.__get__()` returns a so-called method-wrapper.
wrapper = api.converted_call(tc.foo.__get__,
converter.ConversionOptions(recursive=True),
(tc,), {})
self.assertEqual(wrapper, tc.foo)
def test_converted_call_method_as_object_attribute(self):
class AnotherClass(object):
def __init__(self):
self.another_class_attr = constant_op.constant(1)
def method(self):
if self.another_class_attr > 0:
return self.another_class_attr + 1
return self.another_class_attr + 10
class TestClass(object):
def __init__(self, another_obj_method):
self.another_obj_method = another_obj_method
obj = AnotherClass()
tc = TestClass(obj.method)
x = api.converted_call(tc.another_obj_method,
converter.ConversionOptions(recursive=True), (), {})
self.assertEqual(self.evaluate(x), 2)
def test_converted_call_method_converts_recursively(self):
class TestClass(object):
def __init__(self, x):
self.x = x
def other_method(self):
if self.x < 0:
return -self.x
return self.x
def test_method(self):
return self.other_method()
tc = TestClass(constant_op.constant(-1))
x = api.converted_call(tc.test_method,
converter.ConversionOptions(recursive=True), (), {})
self.assertEqual(1, self.evaluate(x))
def test_converted_call_method_by_class(self):
class TestClass(object):
def __init__(self, x):
self.x = x
def test_method(self):
if self.x < 0:
return -self.x
return self.x
tc = TestClass(constant_op.constant(-1))
x = api.converted_call(TestClass.test_method,
converter.ConversionOptions(recursive=True), (tc,),
{})
self.assertEqual(1, self.evaluate(x))
def test_converted_call_callable_object(self):
class TestClass(object):
def __init__(self, x):
self.x = x
def __call__(self):
if self.x < 0:
return -self.x
return self.x
tc = TestClass(constant_op.constant(-1))
x = api.converted_call(tc, converter.ConversionOptions(recursive=True), (),
{})
self.assertEqual(1, self.evaluate(x))
def test_converted_call_callable_metaclass(self):
class TestMetaclass(type):
x = constant_op.constant(-1)
def __call__(cls):
if cls.x < 0:
cls.x = -cls.x
return cls
tc = TestMetaclass('TestClass', (), {})
# This functools.partial will hide the class form the constructor
# check. Not ideal. See b/120224672.
tc = functools.partial(tc)
converted_tc = api.converted_call(
tc, converter.ConversionOptions(recursive=True), (), {})
self.assertIsInstance(converted_tc, TestMetaclass)
self.assertEqual(1, self.evaluate(converted_tc.x))
@test_util.run_deprecated_v1
def test_converted_call_constructor(self):
class TestClass(object):
def __init__(self, x):
self.x = x
def test_method(self):
if self.x < 0:
return -self.x
return self.x
tc = api.converted_call(TestClass,
converter.ConversionOptions(recursive=True),
(constant_op.constant(-1),), {})
# tc is still a TestClass - constructors are whitelisted.
# TODO(b/124016764): Support this use case.
# The error below is specific to the `if` statement not being converted.
with self.assertRaises(TypeError):
tc.test_method()
def test_converted_call_mangled_properties(self):
class TestClass(object):
def __init__(self, x):
self.__private = x
def test_method(self):
if self.__private < 0:
return self.__private
return self.__private
tc = TestClass(constant_op.constant(-1))
# The error below is specific to the `if` statement not being converted.
with self.assertRaisesRegex(NotImplementedError, 'Mangled names'):
api.converted_call(tc.test_method,
converter.ConversionOptions(recursive=True), (), {})
tc.test_method()
def test_converted_call_already_converted(self):
def f(x):
return x == 0
x = api.converted_call(f, converter.ConversionOptions(recursive=True),
(constant_op.constant(0),), {})
self.assertTrue(self.evaluate(x))
converted_f = api.to_graph(
f, experimental_optional_features=converter.Feature.ALL)
x = api.converted_call(converted_f,
converter.ConversionOptions(recursive=True),
(constant_op.constant(0),), {})
self.assertTrue(self.evaluate(x))
def test_converted_call_then_already_converted_dynamic(self):
@api.convert()
def g(x):
if x > 0:
return x
else:
return -x
def f(g, x):
return g(x)
x = api.converted_call(f, converter.ConversionOptions(recursive=True),
(g, constant_op.constant(1)), {})
self.assertEqual(self.evaluate(x), 1)
def test_converted_call_forced_when_explicitly_whitelisted(self):
@api.do_not_convert()
def f(x):
return x + 1
x = api.converted_call(
f, converter.ConversionOptions(recursive=True, user_requested=True),
(constant_op.constant(0),), {})
self.assertTrue(self.evaluate(x))
converted_f = api.to_graph(
f, experimental_optional_features=converter.Feature.ALL)
x = api.converted_call(converted_f,
converter.ConversionOptions(recursive=True), (0,),
{})
self.assertEqual(x, 1)
@test_util.run_deprecated_v1
def test_converted_call_no_user_code(self):
def f(x):
return len(x)
opts = converter.ConversionOptions(internal_convert_user_code=False)
# f should not be converted, causing len to error out.
with self.assertRaisesRegexp(Exception, 'len is not well defined'):
api.converted_call(f, opts, (constant_op.constant([0]),), {})
# len on the other hand should work fine.
x = api.converted_call(len, opts, (constant_op.constant([0]),), {})
# The constant has static shape so the result is a primitive not a Tensor.
self.assertEqual(x, 1)
def test_converted_call_no_kwargs_allowed(self):
def f(*args):
# Note: np.broadcast rejects any **kwargs, even *{}
return np.broadcast(args[:1])
opts = converter.ConversionOptions(internal_convert_user_code=False)
self.assertIsNotNone(api.converted_call(f, opts, (1, 2, 3, 4), None))
def test_converted_call_whitelisted_method(self):
opts = converter.ConversionOptions(recursive=True)
model = sequential.Sequential([core.Dense(2)])
x = api.converted_call(model.call, opts, (constant_op.constant([[0.0]]),),
{'training': True})
self.evaluate(variables.global_variables_initializer())
self.assertAllEqual([[0.0, 0.0]], self.evaluate(x))
def test_converted_call_whitelisted_method_via_owner(self):
opts = converter.ConversionOptions(recursive=True)
model = sequential.Sequential([core.Dense(2)])
x = api.converted_call(model.call, opts, (constant_op.constant([[0.0]]),),
{'training': True})
self.evaluate(variables.global_variables_initializer())
self.assertAllEqual([[0.0, 0.0]], self.evaluate(x))
def test_converted_call_numpy(self):
opts = converter.ConversionOptions(recursive=True)
x = api.converted_call(np.arange, opts, (5,), {})
self.assertAllEqual(x, list(range(5)))
def test_converted_call_tf_op_forced(self):
# TODO(mdan): Add the missing level of support to LOGICAL_EXPRESSIONS.
opts = converter.ConversionOptions(
user_requested=True, optional_features=None)
x = api.converted_call(gen_math_ops.add, opts, (1, 1), {})
self.assertAllEqual(self.evaluate(x), 2)
def test_converted_call_exec_generated_code(self):
temp_mod = imp.new_module('test_module')
dynamic_code = """
def foo(x):
return x + 1
"""
exec(textwrap.dedent(dynamic_code), temp_mod.__dict__) # pylint:disable=exec-used
opts = converter.ConversionOptions(optional_features=None)
x = api.converted_call(temp_mod.foo, opts, (1,), {})
self.assertAllEqual(x, 2)
def test_converted_call_namedtuple(self):
opts = converter.ConversionOptions(recursive=True)
x = api.converted_call(collections.namedtuple, opts,
('TestNamedtuple', ('a', 'b')), {})
self.assertTrue(inspect_utils.isnamedtuple(x))
def test_converted_call_namedtuple_via_collections(self):
opts = converter.ConversionOptions(recursive=True)
x = api.converted_call(collections.namedtuple, opts,
('TestNamedtuple', ('a', 'b')), {})
self.assertTrue(inspect_utils.isnamedtuple(x))
def test_converted_call_namedtuple_subclass_bound_method(self):
class TestClass(collections.namedtuple('TestNamedtuple', ('a', 'b'))):
def test_method(self, x):
while tf.reduce_sum(x) > self.a:
x //= self.b
return x
opts = converter.ConversionOptions(recursive=True)
obj = TestClass(5, 2)
x = api.converted_call(obj.test_method, opts,
(constant_op.constant([2, 4]),), {})
self.assertAllEqual(self.evaluate(x), [1, 2])
def test_converted_call_namedtuple_method(self):
class TestClass(collections.namedtuple('TestNamedtuple', ('a', 'b'))):
pass
opts = converter.ConversionOptions(recursive=True)
obj = TestClass(5, 2)
# _asdict is a documented method of namedtuple.
x = api.converted_call(obj._asdict, opts, (), {})
self.assertDictEqual(x, {'a': 5, 'b': 2})
def test_converted_call_namedtuple_subclass_unbound_method(self):
class TestClass(collections.namedtuple('TestNamedtuple', ('a', 'b'))):
def test_method(self, x):
while tf.reduce_sum(x) > self.a:
x //= self.b
return x
opts = converter.ConversionOptions(recursive=True)
obj = TestClass(5, 2)
x = api.converted_call(TestClass.test_method, opts,
(obj, constant_op.constant([2, 4])), {})
self.assertAllEqual(self.evaluate(x), [1, 2])
def test_converted_call_lambda(self):
opts = converter.ConversionOptions(recursive=True)
l = lambda x: x == 0
x = api.converted_call(l, opts, (constant_op.constant(0),), {})
self.evaluate(variables.global_variables_initializer())
self.assertAllEqual(True, self.evaluate(x))
def test_converted_call_defun_object_method(self):
opts = converter.ConversionOptions(recursive=True)
# pylint:disable=method-hidden
class TestClass(object):
def method(self):
return 1
def prepare(self):
self.method = function.defun(self.method)
# pylint:enable=method-hidden
tc = TestClass()
tc.prepare()
x = api.converted_call(tc.method, opts, (), {})
self.assertAllEqual(1, self.evaluate(x))
def test_converted_call_through_tf_dataset(self):
def other_fn(x):
if x > 0:
return x
return -x
def f():
return dataset_ops.Dataset.range(-3, 3).map(other_fn)
# Dataset iteration only works inside tf.
@def_function.function
def graph_fn():
opts = converter.ConversionOptions(recursive=True)
ds = api.converted_call(f, opts, (), {})
itr = iter(ds)
return next(itr), next(itr), next(itr)
self.assertAllEqual(self.evaluate(graph_fn()), (3, 2, 1))
def assertNoMemoryLeaks(self, f):
object_ids_before = {id(o) for o in gc.get_objects()}
f()
gc.collect()
objects_after = tuple(
o for o in gc.get_objects() if id(o) not in object_ids_before)
self.assertEmpty(
tuple(o for o in objects_after if isinstance(o, TestResource)))
def test_converted_call_no_leaks_via_closure(self):
def test_fn():
res = TestResource()
def f(y):
return res.x + y
opts = converter.ConversionOptions(recursive=True)
api.converted_call(f, opts, (1,), {})
self.assertNoMemoryLeaks(test_fn)
def test_converted_call_no_leaks_via_inner_function_closure(self):
def test_fn():
res = TestResource()
def f(y):
def inner_f():
return res.x + y
return inner_f
opts = converter.ConversionOptions(recursive=True)
api.converted_call(f, opts, (1,), {})()
self.assertNoMemoryLeaks(test_fn)
def test_context_tracking_direct_calls(self):
@api.do_not_convert()
def unconverted_fn():
self.assertEqual(ag_ctx.control_status_ctx().status,
ag_ctx.Status.DISABLED)
@api.convert()
def converted_fn():
self.assertEqual(ag_ctx.control_status_ctx().status,
ag_ctx.Status.ENABLED)
unconverted_fn()
self.assertEqual(ag_ctx.control_status_ctx().status,
ag_ctx.Status.ENABLED)
self.assertEqual(ag_ctx.control_status_ctx().status,
ag_ctx.Status.UNSPECIFIED)
converted_fn()
self.assertEqual(ag_ctx.control_status_ctx().status,
ag_ctx.Status.UNSPECIFIED)
@api.call_with_unspecified_conversion_status
def unspecified_fn():
self.assertEqual(ag_ctx.control_status_ctx().status,
ag_ctx.Status.UNSPECIFIED)
unspecified_fn()
def test_to_graph_basic(self):
def test_fn(x, s):
while tf.reduce_sum(x) > s:
x //= 2
return x
compiled_fn = api.to_graph(test_fn)
with tf.Graph().as_default():
x = compiled_fn(constant_op.constant((4, 8)), 4)
self.assertAllEqual(self.evaluate(x), (1, 2))
@test_util.run_deprecated_v1
def test_to_graph_with_defaults(self):
foo = 4
def test_fn(x, s=foo):
while tf.reduce_sum(x) > s:
x //= 2
return x
compiled_fn = api.to_graph(test_fn)
with self.cached_session() as sess:
x = compiled_fn(constant_op.constant([4, 8]))
self.assertListEqual([1, 2], self.evaluate(x).tolist())
def test_to_graph_with_globals(self):
def test_fn(x):
global global_n
global_n = x + global_n
return global_n
converted_fn = api.to_graph(test_fn)
prev_val = global_n
converted_fn(10)
self.assertGreater(global_n, prev_val)
def test_to_graph_with_kwargs_clashing_converted_call(self):
def called_fn(**kwargs):
return kwargs['f'] + kwargs['owner']
def test_fn():
# These arg names intentionally match converted_call's
return called_fn(f=1, owner=2)
compiled_fn = api.to_graph(test_fn)
self.assertEqual(compiled_fn(), 3)
def test_to_graph_with_kwargs_clashing_unconverted_call(self):
@api.do_not_convert
def called_fn(**kwargs):
return kwargs['f'] + kwargs['owner']
def test_fn():
# These arg names intentionally match _call_unconverted's
return called_fn(f=1, owner=2)
compiled_fn = api.to_graph(test_fn)
self.assertEqual(compiled_fn(), 3)
def test_to_graph_caching(self):
def test_fn(x):
if x > 0:
return x
else:
return -x
converted_functions = tuple(api.to_graph(test_fn) for _ in (-1, 0, 1))
# All outputs are from the same module. We can't use __module__ because
# that's reset when we instantiate the function (see conversion.py).
# TODO(mdan): Can and should we overwrite __module__ instead?
module_names = frozenset(f.ag_module for f in converted_functions)
self.assertEqual(len(module_names), 1)
self.assertNotIn('__main__', module_names)
self.assertEqual(len(frozenset(id(f) for f in converted_functions)), 3)
def test_to_graph_caching_different_options(self):
def called_fn():
pass
def test_fn():
return called_fn()
converted_recursive = api.to_graph(test_fn, recursive=True)
converted_non_recursive = api.to_graph(test_fn, recursive=False)
self.assertNotEqual(converted_recursive.ag_module,
converted_non_recursive.ag_module)
self.assertRegex(tf_inspect.getsource(converted_recursive),
'FunctionScope(.*recursive=True.*)')
self.assertRegex(tf_inspect.getsource(converted_non_recursive),
'FunctionScope(.*recursive=False.*)')
def test_to_graph_preserves_bindings(self):
y = 3
def test_fn():
return y
converted = api.to_graph(test_fn)
self.assertEqual(converted(), 3)
y = 7
self.assertEqual(converted(), 7)
def test_to_graph_source_map(self):
def test_fn(y):
return y**2
self.assertTrue(hasattr(api.to_graph(test_fn), 'ag_source_map'))
def test_to_graph_sets_conversion_context(self):
def g():
self.assertEqual(ag_ctx.control_status_ctx().status,
ag_ctx.Status.ENABLED)
return 0
# Note: the autograph=False sets the contect to Status.DISABLED. The test
# verifies that to_graph overrides that.
@def_function.function(autograph=False)
def f():
converted_g = api.to_graph(g)
converted_g()
f()
def test_to_code_basic(self):
def test_fn(x, s):
while tf.reduce_sum(x) > s:
x /= 2
return x
# Just check that the output is parseable Python code.
self.assertIsNotNone(parser.parse_str(api.to_code(test_fn)))
def test_to_code_with_wrapped_function(self):
@def_function.function
def test_fn(x, s):
while tf.reduce_sum(x) > s:
x /= 2
return x
with self.assertRaisesRegex(Exception, 'try passing.*python_function'):
api.to_code(test_fn)
def test_tf_convert_direct(self):
def f():
if tf.reduce_sum([1, 2]) > 0:
return -1
return 1
# Note: the autograph setting of tf.function has nothing to do with the
# test case. We just disable it to avoid confusion.
@def_function.function(autograph=False)
def test_fn(ctx):
return api.tf_convert(f, ctx)()
self.assertEqual(
self.evaluate(
test_fn(ag_ctx.ControlStatusCtx(status=ag_ctx.Status.ENABLED))), -1)
with self.assertRaisesRegex(TypeError, 'tf.Tensor.*bool'):
# The code in `f` is only valid with AutoGraph.
test_fn(ag_ctx.ControlStatusCtx(status=ag_ctx.Status.DISABLED))
def test_tf_convert_unspecified_not_converted_by_default(self):
def f():
self.assertEqual(ag_ctx.control_status_ctx().status,
ag_ctx.Status.UNSPECIFIED)
if tf.reduce_sum([1, 2]) > 0:
return -1
return 1
@def_function.function
def test_fn(ctx):
return api.tf_convert(f, ctx, convert_by_default=False)()
with self.assertRaisesRegex(TypeError, 'tf.Tensor.*bool'):
# The code in `f` is only valid with AutoGraph.
test_fn(ag_ctx.ControlStatusCtx(status=ag_ctx.Status.UNSPECIFIED))
def test_tf_convert_whitelisted_method(self):
model = sequential.Sequential([core.Dense(2)])
converted_call = api.tf_convert(
model.call, ag_ctx.ControlStatusCtx(status=ag_ctx.Status.ENABLED))
_, converted_target = tf_decorator.unwrap(converted_call)
self.assertIs(converted_target.__func__, model.call.__func__)
def test_tf_convert_wrapped(self):
def f():
if tf.reduce_sum([1, 2]) > 0:
return -1
return 1
@functools.wraps(f)
def wrapper(*args, **kwargs):
return wrapper.__wrapped__(*args, **kwargs)
decorated_f = tf_decorator.make_decorator(f, wrapper)
# Note: the autograph setting of tf has nothing to do with the
# test case. We just disable it to avoid confusion.
@def_function.function(autograph=False)
def test_fn(ctx):
return api.tf_convert(decorated_f, ctx)()
self.assertEqual(
self.evaluate(
test_fn(ag_ctx.ControlStatusCtx(status=ag_ctx.Status.ENABLED))), -1)
# tf_convert mutates the decorator, so we need to create a new one for
# another test.
decorated_f = tf_decorator.make_decorator(f, wrapper)
with self.assertRaisesRegex(TypeError, 'tf.Tensor.*bool'):
# The code in `f` is only valid with AutoGraph.
test_fn(ag_ctx.ControlStatusCtx(status=ag_ctx.Status.DISABLED))
def test_super_with_one_arg(self):
test_case_self = self
class TestBase(object):
def plus_three(self, x):
return x + 3
class TestSubclass(TestBase):
def plus_three(self, x):
test_case_self.fail('This should never be called.')
def one_arg(self, x):
test_base_unbound = super(TestSubclass)
test_base = test_base_unbound.__get__(self, TestSubclass)
return test_base.plus_three(x)
tc = api.converted_call(TestSubclass,
converter.ConversionOptions(recursive=True), (), {})
self.assertEqual(5, tc.one_arg(2))
def test_super_with_two_args(self):
test_case_self = self
class TestBase(object):
def plus_three(self, x):
return x + 3
class TestSubclass(TestBase):
def plus_three(self, x):
test_case_self.fail('This should never be called.')
def two_args(self, x):
return super(TestSubclass, self).plus_three(x)
tc = api.converted_call(TestSubclass,
converter.ConversionOptions(recursive=True), (), {})
self.assertEqual(5, tc.two_args(2))
if __name__ == '__main__':
  # Fail loudly on any conversion error instead of silently falling back
  # to running the original Python code.
  os.environ['AUTOGRAPH_STRICT_CONVERSION'] = '1'
  test.main()
|
chemelnucfin/tensorflow
|
tensorflow/python/autograph/impl/api_test.py
|
Python
|
apache-2.0
| 30,339 | 0.008141 |
import os
from conan.tools.files.files import save_toolchain_args
from conan.tools.gnu import Autotools
from conans.test.utils.mocks import ConanFileMock
from conans.test.utils.test_files import temp_folder
def test_source_folder_works():
    """configure() must resolve the configure script against the source
    folder, honoring an optional ``build_script_folder`` subdirectory."""
    folder = temp_folder()
    os.chdir(folder)
    # Persist the toolchain arguments that Autotools reads back at
    # configure time from the install folder.
    save_toolchain_args({
        "configure_args": "-foo bar",
        "make_args": ""}
    )
    conanfile = ConanFileMock()
    conanfile.folders.set_base_install(folder)
    sources = "/path/to/sources"
    conanfile.folders.set_base_source(sources)
    autotools = Autotools(conanfile)
    # Case 1: configure script lives in a subfolder of the source tree.
    autotools.configure(build_script_folder="subfolder")
    assert conanfile.command.replace("\\", "/") == '"/path/to/sources/subfolder/configure" -foo bar'
    # Case 2 (default): configure script at the source root.
    autotools.configure()
    assert conanfile.command.replace("\\", "/") == '"/path/to/sources/configure" -foo bar'
|
conan-io/conan
|
conans/test/unittests/tools/gnu/autotools_test.py
|
Python
|
mit
| 857 | 0.002334 |
# Minimal example control script (Python 2): registers a single action
# that performs no work on start or stop.
from controlscript import *

print "This is a simple control script. It just does nothing and exits successfully."
print "Start parameter is %s, additional parameters are %s" % (start, arguments)


class DoNothing(ControlAction):
    """Control action that does nothing on start and nothing on stop."""

    def __init__(self):
        ControlAction.__init__(self, "Do nothing")

    def start(self):
        # No-op start hook.
        print "Do nothing on start"
        print

    def stop(self):
        # No-op stop hook.
        print "Do nothing on stop"
        print


# Run the control script with the single no-op action.
ControlScript([
    DoNothing()
])
|
remybaranx/qtaste
|
Testbeds/ControlScripts/do_nothing.py
|
Python
|
gpl-3.0
| 507 | 0.027613 |
import re
import Queue
import HTMLParser
import dateutil.parser as parser
class TwitterData:
    """Parse raw tweet dicts into flat records and buffer them in queues.

    Producer side of a producer/consumer pipeline (Python 2: ``Queue``,
    ``HTMLParser``): ``enqueue_tweet_data`` splits one tweet into tweet /
    text / user records plus hashtag and URL records, each ordered by the
    corresponding ``*_fields_list`` below.
    """

    # Twitter Datum properties: column orders of the emitted records.
    tweet_fields_list = ['id', 'user_id', 'in_reply_to_status_id', 'in_reply_to_user_id', 'favorited', 'retweeted', 'retweet_count', 'lang', 'created_at']
    tweet_text_fields_list = ['tweet_id', 'user_id', 'text', 'geo_lat', 'geo_long', 'place_full_name', 'place_id']
    tweet_url_fields_list = ['tweet_id', 'user_id', 'progressive', 'url']
    tweet_hashtag_fields_list = ['tweet_id', 'user_id', 'hashtag_id']
    user_fields_list = ['id', 'screen_name', 'name', 'verified', 'protected', 'followers_count', 'friends_count', 'statuses_count', 'favourites_count', 'location', 'utc_offset', 'time_zone', 'geo_enabled', 'lang', 'description', 'url', 'created_at']

    # Utils for cleaning: strip astral-plane characters (emoji etc.) and
    # accept only purely alphanumeric hashtags.
    highpoints = re.compile(u'[\U00010000-\U0010ffff]')
    alphanum = re.compile(u'^[\w]+$')

    def __init__(self, buffer_size):
        """Create the record queues.

        *buffer_size* bounds the tweet/text/user queues; hashtag and URL
        queues are unbounded.
        """
        # Queue of Twitter data records
        self.tweets_queue = Queue.Queue(buffer_size)
        self.tweet_texts_queue = Queue.Queue(buffer_size)
        self.users_queue = Queue.Queue(buffer_size)
        self.hashtags_queue = Queue.Queue()
        self.urls_queue = Queue.Queue()

    def contains_fields(self, data_array, fields_list, skip_list=[]):
        """Return True if every field of *fields_list* (not in *skip_list*)
        is present in *data_array*."""
        # NOTE(review): mutable default for skip_list - harmless here since
        # it is never mutated, but a tuple default would be safer.
        for field in fields_list:
            if not field in data_array and not field in skip_list:
                return False
        return True

    def parse_tweet_basic_infos(self, tweet, tweet_fields_list):
        """Return the basic tweet record, ordered by *tweet_fields_list*."""
        tweet_record = []
        user_data = tweet['user']
        user_id = user_data['id']
        for field in tweet_fields_list:
            if field == 'user_id':
                # Taken from the embedded user object, not the tweet itself.
                tweet_record.append(user_id)
            elif field == 'created_at':
                # Normalize to 'YYYY-MM-DD HH:MM:SS' (strip the UTC offset).
                datetime = parser.parse(tweet['created_at'])
                datetime = datetime.isoformat(' ')[:-6]
                tweet_record.append(datetime)
            elif field in tweet:
                # Falsy values (None, 0, '') are coerced to 0.
                if not tweet[field]:
                    value = 0
                else:
                    value = tweet[field]
                tweet_record.append(value)
        return tweet_record

    def parse_tweet_text_infos(self, tweet, tweet_text_fields_list):
        """Return the text/geo/place record, ordered by *tweet_text_fields_list*."""
        tweet_text_record = []
        user_data = tweet['user']
        user_id = user_data['id']
        html_parser = HTMLParser.HTMLParser()
        for field in tweet_text_fields_list:
            if field == 'tweet_id':
                tweet_text_record.append(tweet['id'])
            elif field == 'user_id':
                tweet_text_record.append(user_id)
            elif field == 'text':
                if not tweet['text']:
                    value = ''
                else:
                    # Trim, drop astral-plane chars, unescape HTML entities.
                    value = tweet['text'].strip()
                    value = self.highpoints.sub(u'', value)
                    value = html_parser.unescape(value)
                tweet_text_record.append(value)
            elif field == 'geo_lat':
                if not tweet['coordinates']:
                    tweet_text_record.append(0)
                else:
                    tweet_text_record.append(tweet['coordinates']['coordinates'][0])
            elif field == 'geo_long':
                if not tweet['coordinates']:
                    tweet_text_record.append(0)
                else:
                    tweet_text_record.append(tweet['coordinates']['coordinates'][1])
            elif field == 'place_full_name':
                if not tweet['place']:
                    value = ''
                else:
                    value = tweet['place']['full_name'].strip()
                    value = self.highpoints.sub(u'', value)
                    value = html_parser.unescape(value)
                tweet_text_record.append(value)
            elif field == 'place_id':
                # http://api.twitter.com/1/geo/id/6b9ed4869788d40e.json
                if not tweet['place']:
                    tweet_text_record.append('')
                else:
                    tweet_text_record.append(tweet['place']['id'])
            elif field in tweet:
                if not tweet[field]:
                    value = 0
                else:
                    value = tweet[field]
                tweet_text_record.append(value)
        return tweet_text_record

    def parse_user_infos(self, user_data, user_fields_list):
        """Return the user record, or None if a count field is missing/negative."""
        user_record = []
        #user_id = user_data['id']
        html_parser = HTMLParser.HTMLParser()
        for field in user_fields_list:
            if field == 'created_at':
                datetime = parser.parse(user_data['created_at'])
                datetime = datetime.isoformat(' ')[:-6]
                user_record.append(datetime)
            elif field == 'lang':
                if not user_data['lang']:
                    value = 'NN'
                else:
                    #TODO: lang codes are longer than 2
                    value = user_data['lang'][:2]
                user_record.append(value)
            elif field == 'utc_offset':
                if not user_data['utc_offset']:
                    user_record.append(0)
                else:
                    user_record.append(user_data['utc_offset'])
            elif field == 'url':
                if not user_data['url']:
                    user_record.append('')
                else:
                    # Truncated to fit the destination column width.
                    value = user_data['url'][:159]
                    user_record.append(value)
            elif field in ['description', 'name', 'location']:
                if not user_data[field]:
                    value = ''
                else:
                    value = user_data[field].strip()
                    value = self.highpoints.sub(u'', value)
                    value = html_parser.unescape(value)
                user_record.append(value)
            elif field in ['followers_count', 'friends_count', 'statuses_count', 'favourites_count']:
                value = user_data[field]
                if value is None or value < 0:
                    # Invalid count: reject the whole user record.
                    return None
                user_record.append(value)
            elif field in ['verified', 'protected', 'geo_enabled']:
                user_record.append(user_data[field])
            elif field in user_data:
                if not user_data[field]:
                    value = ''
                else:
                    value = user_data[field]
                user_record.append(value)
        return user_record

    def enqueue_tweet_data(self, tweet):
        """Parse *tweet* and push all derived records onto the queues.

        Returns False (enqueuing nothing) if any of the three main records
        fails to parse; otherwise enqueues tweet, text, user, URL and
        deduplicated hashtag records and returns True.
        """
        tweet_record = []
        tweet_text_record = []
        user_record = []
        user_data = tweet['user']
        user_id = user_data['id']
        tweet_record = self.parse_tweet_basic_infos(tweet, self.tweet_fields_list)
        if tweet_record is None:
            #logger.error("Problem parsing tweet {0} ".format(tweet['id']))
            return False
        tweet_text_record = self.parse_tweet_text_infos(tweet, self.tweet_text_fields_list)
        if tweet_text_record is None:
            #logger.error("Problem parsing text for tweet {0} ".format(tweet['id']))
            return False
        user_record = self.parse_user_infos(user_data, self.user_fields_list)
        if user_record is None:
            # logger.info("Problem parsing user {0} for tweet {1} ".format(user_id, tweet['id']))
            return False
        #Enqueue
        self.tweets_queue.put(tweet_record)
        self.tweet_texts_queue.put(tweet_text_record)
        self.users_queue.put(user_record)
        #To avoid duplicates
        tweet_inserted_hashtags = []
        if len(tweet['entities']) > 0:
            if len(tweet['entities']['urls']) > 0:
                url_count = 0
                for url in tweet['entities']['urls']:
                    url_count = url_count + 1
                    self.urls_queue.put([tweet['id'], user_id, url_count, url['expanded_url']])
            if len(tweet['entities']['hashtags']) > 0:
                for hash in tweet['entities']['hashtags']:
                    # Lower-cased, emoji-stripped, alphanumeric-only hashtags.
                    hash_text = self.highpoints.sub(u'', hash['text'])
                    hash_text = hash_text.lower()
                    valid_hashtag = self.alphanum.match(hash_text)
                    if valid_hashtag and hash_text not in tweet_inserted_hashtags:
                        # Partition key: ordinal of the first character.
                        partition = ord(hash_text[0])
                        self.hashtags_queue.put([hash_text, partition, tweet['id'], user_id])
                        tweet_inserted_hashtags.append(hash_text)
        return True
|
kuzeko/Twitter-Importer
|
twitter_helper/twitter_data.py
|
Python
|
mit
| 8,560 | 0.002921 |
from __future__ import absolute_import
import os
from celery import Celery
from django.conf import settings
from celery.schedules import crontab
from sendit.settings import (
INSTALLED_APPS,
BROKER_URL
)
# Make Django settings importable before the Celery app is configured.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'sendit.settings')

app = Celery('sendit',broker=BROKER_URL)

# Using a string here means the worker will not have to
# pickle the object when using Windows.
app.config_from_object('django.conf:settings')
# Discover tasks.py modules across all installed Django apps.
app.autodiscover_tasks(lambda: INSTALLED_APPS)
|
pydicom/sendit
|
sendit/celery.py
|
Python
|
mit
| 512 | 0.001953 |
# encoding: utf-8
from django.db import models
from gemeinde import settings
from mezzanine.generic.fields import KeywordsField
from mezzanine.pages.models import Page
from pytz import timezone
# from django.utils.timezone import activate
# activate()
class Gottesdienst(models.Model):
    u"""Represents one church service on a given day.

    Special features: communion, baptisms, church cafe, faith talk.
    """
    datum = models.DateTimeField()
    dauer = models.IntegerField('Dauer in Minuten', default=60)
    titel = models.CharField(max_length=200, blank=True)
    # Free-text preacher name; used as fallback when no FK is chosen.
    prediger = models.CharField(max_length=50, blank=True,
        help_text='Kann leer bleiben. '
        'Wenn Prediger-Auswahl leer bleibt, hier Prediger eintragen.')
    prediger_key = models.ForeignKey('Prediger', blank=True, null=True,
                                     verbose_name="Prediger-Auswahl")
    keywords = KeywordsField(verbose_name=u"Stichwörter")
    freitext = models.TextField(null=True, blank=True)
    predigttext_stelle = models.CharField("Bibelstelle des Predigttexts",
                                          max_length=50, blank=True)
    predigttext = models.TextField(null=True, blank=True)
    ort = models.CharField('Ort (wenn nicht Thomaskirche)', max_length=50,
                           blank=True)

    class Meta:
        verbose_name = u'Gottesdienst'
        verbose_name_plural = u'Gottesdienste'
        get_latest_by = 'datum'

    def __unicode__(self):
        # Display in the configured local time zone, not UTC.
        return (u'{} am {:%d.%m.%Y um %H:%M} Uhr'
                .format(self.titel or u'Gottesdienst',
                        self.datum.astimezone(timezone(settings.TIME_ZONE))))

    def save(self, *args, **kwargs):
        # Keep the denormalized text field in sync with the FK selection.
        if self.prediger_key:
            self.prediger = unicode(self.prediger_key)
        super(Gottesdienst, self).save(*args, **kwargs)

    def prediger_name(self):
        """Return the preacher's surname, falling back to the free-text field."""
        return (self.prediger_key.nachname if self.prediger_key
                else self.prediger)
class Prediger(models.Model):
    """A preacher who holds church services."""
    nachname = models.CharField(max_length=50)
    vorname = models.CharField(max_length=50, blank=True)
    titel = models.CharField(max_length=10, blank=True)

    class Meta:
        verbose_name = u'Prediger'
        verbose_name_plural = u'Prediger'

    def __unicode__(self):
        # "Title Firstname Lastname", skipping empty parts.
        return " ".join([p for p in
                         (self.titel, self.vorname, self.nachname) if p])
class Gottesdienste(Page):
    """Mezzanine CMS page type for listing church services."""

    class Meta:
        verbose_name = 'Gottesdienst-Seite'
        verbose_name_plural = 'Gottesdienst-Seiten'

    # def __unicode__(self):
    #     pass
|
jammon/gemeinde
|
gottesdienste/models.py
|
Python
|
mit
| 2,564 | 0.007419 |
from django.http import HttpResponse
from django.template import Template
def admin_required_view(request):
    """Render a trivial page whose text depends on whether the user is staff."""
    body = 'You are an admin' if request.user.is_staff else 'Access denied'
    return HttpResponse(Template(body).render({}))
|
bfirsh/pytest_django
|
tests/views.py
|
Python
|
bsd-3-clause
| 270 | 0.003704 |
#!/usr/bin/env python
"""
Copyright (c) 2013-2014 Ben Croston
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
"""This test suite assumes the following circuit is connected:
GND_PIN = 6
LED_PIN = 12 (with resistor to 0v)
SWITCH_PIN = 18 (with 0.1 uF capacitor around switch) to 0v
LOOP_IN = 16 connected with 1K resistor to LOOP_OUT
LOOP_OUT = 22
"""
import sys
import warnings
import time
from threading import Timer
import RPi.GPIO as GPIO
if sys.version[:3] == '2.6':
import unittest2 as unittest
else:
import unittest
GND_PIN = 6
LED_PIN = 12
LED_PIN_BCM = 18
SWITCH_PIN = 18
LOOP_IN = 16
LOOP_OUT = 22
# Test starts with 'AAA' so that it is run first
class TestAAASetup(unittest.TestCase):
    """Sanity checks for setmode()/setup() error handling and warnings."""

    def runTest(self):
        # Test mode not set (BOARD or BCM) exception
        with self.assertRaises(RuntimeError) as e:
            GPIO.setup(LED_PIN, GPIO.OUT)
        self.assertEqual(str(e.exception), 'Please set pin numbering mode using GPIO.setmode(GPIO.BOARD) or GPIO.setmode(GPIO.BCM)')

        GPIO.setmode(GPIO.BOARD)

        # Test not set as OUTPUT message
        with self.assertRaises(RuntimeError) as e:
            GPIO.output(LED_PIN, GPIO.HIGH)
        self.assertEqual(str(e.exception), 'The GPIO channel has not been set up as an OUTPUT')

        GPIO.setup(LED_PIN, GPIO.IN)

        # Test setup(..., pull_up_down=GPIO.HIGH) raises exception
        with self.assertRaises(ValueError):
            GPIO.setup(LED_PIN, GPIO.IN, pull_up_down=GPIO.HIGH)

        # Test 'already in use' warning: export + drive the pin via sysfs so
        # RPi.GPIO sees it as busy.
        GPIO.cleanup()
        with open('/sys/class/gpio/export','wb') as f:
            f.write(str(LED_PIN_BCM).encode())
        with open('/sys/class/gpio/gpio%s/direction'%LED_PIN_BCM,'wb') as f:
            f.write(b'out')
        with open('/sys/class/gpio/gpio%s/value'%LED_PIN_BCM,'wb') as f:
            f.write(b'1')
        with warnings.catch_warnings(record=True) as w:
            GPIO.setup(LED_PIN, GPIO.OUT)   # generate 'already in use' warning
            self.assertEqual(w[0].category, RuntimeWarning)
        with open('/sys/class/gpio/unexport','wb') as f:
            f.write(str(LED_PIN_BCM).encode())
        GPIO.cleanup()

        # test initial value of high reads back as high
        GPIO.setup(LED_PIN, GPIO.OUT, initial=GPIO.HIGH)
        self.assertEqual(GPIO.input(LED_PIN), GPIO.HIGH)
        GPIO.cleanup()

        # test initial value of low reads back as low
        GPIO.setup(LED_PIN, GPIO.OUT, initial=GPIO.LOW)
        self.assertEqual(GPIO.input(LED_PIN), GPIO.LOW)
        GPIO.cleanup()
class TestInputOutput(unittest.TestCase):
    """Basic read/write behaviour on real pins (uses the loopback wire)."""

    def test_outputread(self):
        """Test that an output() can be input()"""
        GPIO.setup(LED_PIN, GPIO.OUT)
        GPIO.output(LED_PIN, GPIO.HIGH)
        self.assertEqual(GPIO.input(LED_PIN), GPIO.HIGH)
        GPIO.output(LED_PIN, GPIO.LOW)
        self.assertEqual(GPIO.input(LED_PIN), GPIO.LOW)
        GPIO.cleanup()

    def test_loopback(self):
        """Test output loops back to another input"""
        GPIO.setup(LOOP_IN, GPIO.IN, pull_up_down=GPIO.PUD_OFF)
        GPIO.setup(LOOP_OUT, GPIO.OUT, initial=GPIO.LOW)
        self.assertEqual(GPIO.input(LOOP_IN), GPIO.LOW)
        GPIO.output(LOOP_OUT, GPIO.HIGH)
        self.assertEqual(GPIO.input(LOOP_IN), GPIO.HIGH)
        GPIO.cleanup()

    def test_output_on_input(self):
        """Test output() can not be done on input"""
        GPIO.setup(SWITCH_PIN, GPIO.IN)
        with self.assertRaises(RuntimeError):
            GPIO.output(SWITCH_PIN, GPIO.LOW)
        GPIO.cleanup()
class TestSoftPWM(unittest.TestCase):
    """Interactive software-PWM test: requires a human to watch the LED."""

    def runTest(self):
        GPIO.setup(LED_PIN, GPIO.OUT)
        pwm = GPIO.PWM(LED_PIN, 50)
        pwm.start(100)
        print "\nPWM tests"
        response = raw_input('Is the LED on (y/n) ? ').upper()
        self.assertEqual(response,'Y')
        pwm.start(0)
        response = raw_input('Is the LED off (y/n) ? ').upper()
        self.assertEqual(response,'Y')
        print "LED Brighten/fade test..."
        # Ramp the duty cycle 0->100->0 three times.
        for i in range(0,3):
            for x in range(0,101,5):
                pwm.ChangeDutyCycle(x)
                time.sleep(0.1)
            for x in range(100,-1,-5):
                pwm.ChangeDutyCycle(x)
                time.sleep(0.1)
        pwm.stop()
        response = raw_input('Did it work (y/n) ? ').upper()
        self.assertEqual(response,'Y')
        GPIO.cleanup()
class TestSetWarnings(unittest.TestCase):
    """setwarnings(False) must suppress, setwarnings(True) must emit."""

    def test_alreadyinuse(self):
        """Test 'already in use' warning"""
        # With warnings disabled: no RuntimeWarning expected.
        GPIO.setwarnings(False)
        with open('/sys/class/gpio/export','wb') as f:
            f.write(str(LED_PIN_BCM).encode())
        with open('/sys/class/gpio/gpio%s/direction'%LED_PIN_BCM,'wb') as f:
            f.write(b'out')
        with open('/sys/class/gpio/gpio%s/value'%LED_PIN_BCM,'wb') as f:
            f.write(b'1')
        with warnings.catch_warnings(record=True) as w:
            GPIO.setup(LED_PIN, GPIO.OUT)   # generate 'already in use' warning
            self.assertEqual(len(w),0)      # should be no warnings
        with open('/sys/class/gpio/unexport','wb') as f:
            f.write(str(LED_PIN_BCM).encode())
        GPIO.cleanup()

        # With warnings enabled: RuntimeWarning expected.
        GPIO.setwarnings(True)
        with open('/sys/class/gpio/export','wb') as f:
            f.write(str(LED_PIN_BCM).encode())
        with open('/sys/class/gpio/gpio%s/direction'%LED_PIN_BCM,'wb') as f:
            f.write(b'out')
        with open('/sys/class/gpio/gpio%s/value'%LED_PIN_BCM,'wb') as f:
            f.write(b'1')
        with warnings.catch_warnings(record=True) as w:
            GPIO.setup(LED_PIN, GPIO.OUT)   # generate 'already in use' warning
            self.assertEqual(w[0].category, RuntimeWarning)
        with open('/sys/class/gpio/unexport','wb') as f:
            f.write(str(LED_PIN_BCM).encode())
        GPIO.cleanup()

    def test_cleanupwarning(self):
        """Test initial GPIO.cleanup() produces warning"""
        # A second cleanup() (nothing left to clean) warns only when enabled.
        GPIO.setwarnings(False)
        GPIO.setup(SWITCH_PIN, GPIO.IN)
        with warnings.catch_warnings(record=True) as w:
            GPIO.cleanup()
            self.assertEqual(len(w),0)      # no warnings
            GPIO.cleanup()
            self.assertEqual(len(w),0)      # no warnings

        GPIO.setwarnings(True)
        GPIO.setup(SWITCH_PIN, GPIO.IN)
        with warnings.catch_warnings(record=True) as w:
            GPIO.cleanup()
            self.assertEqual(len(w),0)      # no warnings
            GPIO.cleanup()
            self.assertEqual(w[0].category, RuntimeWarning)   # a warning
class TestVersions(unittest.TestCase):
    """Interactive checks of the detected board revision and library version."""

    def test_rpi_revision(self):
        # Map the numeric revision constant to a human-readable label.
        if GPIO.RPI_REVISION == 0:
            revision = 'Compute Module'
        elif GPIO.RPI_REVISION == 1:
            revision = 'revision 1'
        elif GPIO.RPI_REVISION == 2:
            revision = 'revision 2'
        elif GPIO.RPI_REVISION == 3:
            revision = 'Model B+'
        else:
            revision = '**undetected**'
        response = raw_input('\nThis board appears to be a %s - is this correct (y/n) ? '%revision).upper()
        self.assertEqual(response, 'Y')

    def test_gpio_version(self):
        response = raw_input('\nRPi.GPIO version %s - is this correct (y/n) ? '%GPIO.VERSION).upper()
        self.assertEqual(response, 'Y')
class TestGPIOFunction(unittest.TestCase):
    """gpio_function() must report the configured direction in both
    BCM and BOARD numbering modes."""

    def runTest(self):
        GPIO.setmode(GPIO.BCM)
        GPIO.setup(LED_PIN_BCM, GPIO.IN)
        self.assertEqual(GPIO.gpio_function(LED_PIN_BCM), GPIO.IN)
        GPIO.setup(LED_PIN_BCM, GPIO.OUT)
        self.assertEqual(GPIO.gpio_function(LED_PIN_BCM), GPIO.OUT)

        GPIO.setmode(GPIO.BOARD)
        GPIO.setup(LED_PIN, GPIO.IN)
        self.assertEqual(GPIO.gpio_function(LED_PIN), GPIO.IN)
        GPIO.setup(LED_PIN, GPIO.OUT)
        self.assertEqual(GPIO.gpio_function(LED_PIN), GPIO.OUT)

    def tearDown(self):
        GPIO.cleanup()
class TestSwitchBounce(unittest.TestCase):
    """Interactive debounce tests: a human presses the physical switch."""

    def __init__(self, *a, **k):
        unittest.TestCase.__init__(self, *a, **k)
        self.switchcount = 0

    def cb(self,chan):
        # Event callback: count each (debounced) press.
        self.switchcount += 1
        print 'Button press',self.switchcount

    def setUp(self):
        GPIO.setup(SWITCH_PIN, GPIO.IN, pull_up_down=GPIO.PUD_UP)

    def test_switchbounce(self):
        self.switchcount = 0
        print "\nSwitch bounce test.  Press switch at least 10 times and count..."
        GPIO.add_event_detect(SWITCH_PIN, GPIO.FALLING, callback=self.cb, bouncetime=200)
        while self.switchcount < 10:
            time.sleep(1)
        GPIO.remove_event_detect(SWITCH_PIN)

    def test_event_detected(self):
        self.switchcount = 0
        print "\nGPIO.event_detected() switch bounce test.  Press switch at least 10 times and count..."
        GPIO.add_event_detect(SWITCH_PIN, GPIO.FALLING, bouncetime=200)
        while self.switchcount < 10:
            if GPIO.event_detected(SWITCH_PIN):
                self.switchcount += 1
                print 'Button press',self.switchcount
        GPIO.remove_event_detect(SWITCH_PIN)

    def tearDown(self):
        GPIO.cleanup()
class TestEdgeDetection(unittest.TestCase):
    """Edge detection tests driven through the LOOP_OUT -> LOOP_IN wire."""

    def setUp(self):
        GPIO.setup(LOOP_IN, GPIO.IN)
        GPIO.setup(LOOP_OUT, GPIO.OUT)

    def testWaitForEdgeWithCallback(self):
        def cb():
            raise Exception("Callback should not be called")
        def makehigh():
            GPIO.output(LOOP_OUT, GPIO.HIGH)

        GPIO.output(LOOP_OUT, GPIO.LOW)
        t = Timer(0.1, makehigh)

        GPIO.add_event_detect(LOOP_IN, GPIO.RISING)
        t.start()
        GPIO.wait_for_edge(LOOP_IN, GPIO.RISING)

        # Mixing wait_for_edge with a registered callback must raise.
        GPIO.output(LOOP_OUT, GPIO.LOW)
        GPIO.add_event_callback(LOOP_IN, callback=cb)
        with self.assertRaises(RuntimeError):
            GPIO.wait_for_edge(LOOP_IN, GPIO.RISING)
        GPIO.remove_event_detect(LOOP_IN)

    def testWaitForEventSwitchbounce(self):
        # Simulate two bursts of contact bounce, 0.2 s apart; with a 100 ms
        # bouncetime the two wait_for_edge calls must span >= 0.2 s.
        def bounce():
            GPIO.output(LOOP_OUT, GPIO.HIGH)
            time.sleep(0.01)
            GPIO.output(LOOP_OUT, GPIO.LOW)
            time.sleep(0.01)
            GPIO.output(LOOP_OUT, GPIO.HIGH)
            time.sleep(0.01)
            GPIO.output(LOOP_OUT, GPIO.LOW)
            time.sleep(0.2)
            GPIO.output(LOOP_OUT, GPIO.HIGH)
            time.sleep(0.01)
            GPIO.output(LOOP_OUT, GPIO.LOW)
            time.sleep(0.01)
            GPIO.output(LOOP_OUT, GPIO.HIGH)
            time.sleep(0.01)
            GPIO.output(LOOP_OUT, GPIO.LOW)

        GPIO.output(LOOP_OUT, GPIO.LOW)
        t1 = Timer(0.1, bounce)
        t1.start()

        starttime = time.time()
        GPIO.wait_for_edge(LOOP_IN, GPIO.RISING, bouncetime=100)
        GPIO.wait_for_edge(LOOP_IN, GPIO.RISING, bouncetime=100)
        finishtime = time.time()
        self.assertGreater(finishtime-starttime, 0.2)

    def testInvalidBouncetime(self):
        # Negative bouncetimes are rejected; conflicting bouncetimes too.
        with self.assertRaises(ValueError):
            GPIO.add_event_detect(LOOP_IN, GPIO.RISING, bouncetime=-1)
        with self.assertRaises(ValueError):
            GPIO.wait_for_edge(LOOP_IN, GPIO.RISING, bouncetime=-1)
        GPIO.add_event_detect(LOOP_IN, GPIO.RISING, bouncetime=123)
        with self.assertRaises(RuntimeError):
            GPIO.wait_for_edge(LOOP_IN, GPIO.RISING, bouncetime=321)
        GPIO.remove_event_detect(LOOP_IN)

    def testAlreadyAdded(self):
        GPIO.add_event_detect(LOOP_IN, GPIO.RISING)
        with self.assertRaises(RuntimeError):
            GPIO.add_event_detect(LOOP_IN, GPIO.RISING)
        with self.assertRaises(RuntimeError):
            GPIO.wait_for_edge(LOOP_IN, GPIO.FALLING)
        GPIO.remove_event_detect(LOOP_IN)

    def testHighLowEvent(self):
        # Only edge constants are valid event types.
        with self.assertRaises(ValueError):
            GPIO.add_event_detect(LOOP_IN, GPIO.LOW)
        with self.assertRaises(ValueError):
            GPIO.add_event_detect(LOOP_IN, GPIO.HIGH)

    def testFallingEventDetected(self):
        GPIO.output(LOOP_OUT, GPIO.HIGH)
        GPIO.add_event_detect(LOOP_IN, GPIO.FALLING)
        time.sleep(0.01)
        self.assertEqual(GPIO.event_detected(LOOP_IN), False)
        GPIO.output(LOOP_OUT, GPIO.LOW)
        time.sleep(0.01)
        self.assertEqual(GPIO.event_detected(LOOP_IN), True)
        GPIO.output(LOOP_OUT, GPIO.HIGH)
        time.sleep(0.01)
        self.assertEqual(GPIO.event_detected(LOOP_IN), False)
        GPIO.remove_event_detect(LOOP_IN)

    def testRisingEventDetected(self):
        GPIO.output(LOOP_OUT, GPIO.LOW)
        GPIO.add_event_detect(LOOP_IN, GPIO.RISING)
        time.sleep(0.01)
        self.assertEqual(GPIO.event_detected(LOOP_IN), False)
        GPIO.output(LOOP_OUT, GPIO.HIGH)
        time.sleep(0.01)
        self.assertEqual(GPIO.event_detected(LOOP_IN), True)
        GPIO.output(LOOP_OUT, GPIO.LOW)
        time.sleep(0.01)
        self.assertEqual(GPIO.event_detected(LOOP_IN), False)
        GPIO.remove_event_detect(LOOP_IN)

    def testBothEventDetected(self):
        GPIO.output(LOOP_OUT, GPIO.LOW)
        GPIO.add_event_detect(LOOP_IN, GPIO.BOTH)
        time.sleep(0.01)
        self.assertEqual(GPIO.event_detected(LOOP_IN), False)
        GPIO.output(LOOP_OUT, GPIO.HIGH)
        time.sleep(0.01)
        self.assertEqual(GPIO.event_detected(LOOP_IN), True)
        self.assertEqual(GPIO.event_detected(LOOP_IN), False)
        GPIO.output(LOOP_OUT, GPIO.LOW)
        time.sleep(0.01)
        self.assertEqual(GPIO.event_detected(LOOP_IN), True)
        GPIO.remove_event_detect(LOOP_IN)

    def testWaitForRising(self):
        def makehigh():
            GPIO.output(LOOP_OUT, GPIO.HIGH)
        GPIO.output(LOOP_OUT, GPIO.LOW)
        t = Timer(0.1, makehigh)
        t.start()
        GPIO.wait_for_edge(LOOP_IN, GPIO.RISING)

    def testWaitForFalling(self):
        def makelow():
            GPIO.output(LOOP_OUT, GPIO.LOW)
        GPIO.output(LOOP_OUT, GPIO.HIGH)
        t = Timer(0.1, makelow)
        t.start()
        GPIO.wait_for_edge(LOOP_IN, GPIO.FALLING)

    def testExceptionInCallback(self):
        # An exception raised inside a callback must not kill the poll thread.
        self.run_cb = False
        def cb(channel):
            with self.assertRaises(ZeroDivisionError):
                self.run_cb = True
                a = 1/0
        GPIO.output(LOOP_OUT, GPIO.LOW)
        GPIO.add_event_detect(LOOP_IN, GPIO.RISING, callback=cb)
        time.sleep(0.01)
        GPIO.output(LOOP_OUT, GPIO.HIGH)
        time.sleep(0.01)
        self.assertEqual(self.run_cb, True)
        GPIO.remove_event_detect(LOOP_IN)

    def testAddEventCallback(self):
        def cb(channel):
            self.callback_count += 1

        # falling test
        self.callback_count = 0
        GPIO.output(LOOP_OUT, GPIO.HIGH)
        GPIO.add_event_detect(LOOP_IN, GPIO.FALLING)
        GPIO.add_event_callback(LOOP_IN, cb)
        time.sleep(0.01)
        for i in range(2048):
            GPIO.output(LOOP_OUT, GPIO.LOW)
            time.sleep(0.001)
            GPIO.output(LOOP_OUT, GPIO.HIGH)
            time.sleep(0.001)
        GPIO.remove_event_detect(LOOP_IN)
        self.assertEqual(self.callback_count, 2048)

        # rising test
        self.callback_count = 0
        GPIO.output(LOOP_OUT, GPIO.LOW)
        GPIO.add_event_detect(LOOP_IN, GPIO.RISING, callback=cb)
        time.sleep(0.01)
        for i in range(2048):
            GPIO.output(LOOP_OUT, GPIO.HIGH)
            time.sleep(0.001)
            GPIO.output(LOOP_OUT, GPIO.LOW)
            time.sleep(0.001)
        GPIO.remove_event_detect(LOOP_IN)
        self.assertEqual(self.callback_count, 2048)

        # both test (each cycle fires twice)
        self.callback_count = 0
        GPIO.output(LOOP_OUT, GPIO.LOW)
        GPIO.add_event_detect(LOOP_IN, GPIO.BOTH, callback=cb)
        time.sleep(0.01)
        for i in range(2048):
            GPIO.output(LOOP_OUT, GPIO.HIGH)
            time.sleep(0.001)
            GPIO.output(LOOP_OUT, GPIO.LOW)
            time.sleep(0.001)
        GPIO.remove_event_detect(LOOP_IN)
        self.assertEqual(self.callback_count, 4096)

    def testEventOnOutput(self):
        # Edge detection on an output channel must raise.
        with self.assertRaises(RuntimeError):
            GPIO.add_event_detect(LOOP_OUT, GPIO.FALLING)

    def tearDown(self):
        GPIO.cleanup()
class TestCleanup(unittest.TestCase):
    """cleanup() must reset all channels, or a single one when given."""

    def test_cleanall(self):
        GPIO.setup(LOOP_OUT, GPIO.OUT)
        GPIO.setup(LED_PIN, GPIO.OUT)
        self.assertEqual(GPIO.gpio_function(LOOP_OUT), GPIO.OUT)
        self.assertEqual(GPIO.gpio_function(LED_PIN), GPIO.OUT)
        GPIO.cleanup()
        # Both channels back to input after a full cleanup.
        self.assertEqual(GPIO.gpio_function(LOOP_OUT), GPIO.IN)
        self.assertEqual(GPIO.gpio_function(LED_PIN), GPIO.IN)

    def test_cleanone(self):
        GPIO.setup(LOOP_OUT, GPIO.OUT)
        GPIO.setup(LED_PIN, GPIO.OUT)
        self.assertEqual(GPIO.gpio_function(LOOP_OUT), GPIO.OUT)
        self.assertEqual(GPIO.gpio_function(LED_PIN), GPIO.OUT)
        # Per-channel cleanup leaves the other channel untouched.
        GPIO.cleanup(LOOP_OUT)
        self.assertEqual(GPIO.gpio_function(LOOP_OUT), GPIO.IN)
        self.assertEqual(GPIO.gpio_function(LED_PIN), GPIO.OUT)
        GPIO.cleanup(LED_PIN)
        self.assertEqual(GPIO.gpio_function(LOOP_OUT), GPIO.IN)
        self.assertEqual(GPIO.gpio_function(LED_PIN), GPIO.IN)
#def test_suite():
#    suite = unittest.TestLoader().loadTestsFromModule()
#    return suite

if __name__ == '__main__':
    # Run the whole hardware test suite.
    unittest.main()
|
NeoBelerophon/Arietta.GPIO
|
test/test.py
|
Python
|
mit
| 18,250 | 0.003671 |
from setuptools import setup, find_packages
model_blocks = __import__('model_blocks')
readme_file = 'README.rst'
try:
long_description = open(readme_file).read()
except IOError, err:
sys.stderr.write("[ERROR] Cannot find file specified as "
"``long_description`` (%s)\n" % readme_file)
sys.exit(1)
setup(name='django-model-blocks',
version='0.8.9',
description=('Simple filters and tags for generic Django '
'model template partials'),
long_description=long_description,
zip_safe=False,
author='Mjumbe Wawatu Ukweli',
author_email='mjumbewu@kwawatu.com',
url='https://github.com/mjumbewu/django-model-blocks/',
download_url='https://github.com/mjumbewu/django-model-blocks/downloads',
packages = find_packages(exclude=['example_project', 'example_project.*']),
include_package_data=True,
install_requires = [
'Django>=1.2.1',
],
obsoletes = [
'model_filters',
],
provides = [
'model_blocks',
],
classifiers = ['Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities'],
)
|
mjumbewu/django-model-blocks
|
setup.py
|
Python
|
bsd-3-clause
| 1,497 | 0.008684 |
"""gui systems to manage actions
"""
import os
from sftoolbox.content import ActionContent, PanelContent
from sftoolboxqt import qtgui, qtcore
from sftoolboxqt.tree import PanelsModel, PanelsTreeWidget
class ActionsTreeWidget(qtgui.QTreeWidget):
    """Tree widget holding actions, with custom drag support."""

    def startDrag(self, dropAction):
        """Begin a drag carrying a custom 'application/x-item' MIME payload."""
        # create mime data object
        mime = qtcore.QMimeData()
        mime.setData('application/x-item', '???')
        # start drag
        drag = qtgui.QDrag(self)
        drag.setMimeData(mime)
        # drag.start(qtcore.Qt.CopyAction)
        # drag.start(qtcore.Qt.CopyAction)
        drag.exec_(dropAction, qtcore.Qt.MoveAction)
class PanelsWidget(qtgui.QWidget):
    """Browser widget for a project's panels, backed by a PanelsModel."""

    def __init__(self, project=None, parent=None):
        """Construct the browser for the given *project* (may be None)."""
        super(PanelsWidget, self).__init__(parent=parent)
        self.setWindowTitle('Panels Browser')
        self._project = project
        self._tree_model = PanelsModel(project)
        self._tree = self._create_panels_tree_widget(self._tree_model)

        layout = qtgui.QVBoxLayout()
        layout.addWidget(self._tree)
        self.setLayout(layout)

    def _create_panels_tree_widget(self, model):
        """Return a configured tree widget bound to *model*."""
        tree = PanelsTreeWidget()
        tree.setModel(model)
        tree.setSortingEnabled(True)
        tree.setDragEnabled(True)
        tree.setAcceptDrops(True)
        return tree

    @property
    def project(self):
        # Current project shown by this widget.
        return self._project

    @project.setter
    def project(self, value):
        # Re-point both the widget and the underlying model.
        self._project = value
        self._tree_model.project = value
class ActionsWidget(qtgui.QWidget):
    """Browser widget listing all actions of a project in a sortable tree."""

    def _create_actions_tree_widget(self):
        """Return a configured tree widget that will contain the actions."""
        tree = ActionsTreeWidget()
        tree.setHeaderLabels(['Action', 'IDName', 'Tags'])
        tree.setSortingEnabled(True)
        tree.setDragEnabled(True)
        return tree

    def __init__(self, project=None, parent=None):
        """Construct the browser for the given *project* (may be None)."""
        super(ActionsWidget, self).__init__(parent=parent)
        self.setWindowTitle('Actions Browser')
        self._project = project

        self._tree_widget = self._create_actions_tree_widget()

        layout = qtgui.QVBoxLayout()
        # Bug fix: the tree widget was accidentally added to the layout twice.
        layout.addWidget(self._tree_widget)
        self.setLayout(layout)

        self._refresh_content()

    @property
    def project(self):
        # Current project shown by this widget.
        return self._project

    @project.setter
    def project(self, value):
        self._project = value
        self._refresh_content()

    def _handle_item_double_clicked(self, item):
        """Run the action attached to the double-clicked tree item."""
        item.action.run()

    def _refresh_content(self):
        """Rebuild the tree from the current project's actions."""
        self._tree_widget.clear()
        self._tree_widget.itemDoubleClicked.connect(
            self._handle_item_double_clicked)

        if not self.project:
            return

        for action in self.project.actions:
            item = qtgui.QTreeWidgetItem()
            icon_filepath = action.absolute_icon_filepath
            if icon_filepath and os.path.exists(icon_filepath):
                item.setIcon(0, qtgui.QIcon(icon_filepath))
            item.setText(0, action.human_label)
            item.setText(1, action.idname)
            item.setText(2, ', '.join(map(str, action.tags)))
            # Keep a back-reference so double-click can run the action.
            item.action = action
            self._tree_widget.addTopLevelItem(item)
class EditorWidget(qtgui.QWidget):
    """Editor combining a panels browser and an actions browser side by side."""

    def __init__(self, project=None, parent=None):
        """Construct the editor for the given *project* (may be None)."""
        super(EditorWidget, self).__init__(parent=parent)
        self.setWindowTitle('Editor')
        # Bug fix: _project was never initialized, so reading the ``project``
        # property before assigning it raised AttributeError.
        self._project = project
        self._actions_widget = ActionsWidget(project)
        self._panels_widget = PanelsWidget(project)

        layout = qtgui.QHBoxLayout()
        splitter = qtgui.QSplitter(qtcore.Qt.Horizontal)
        splitter.addWidget(self._panels_widget)
        splitter.addWidget(self._actions_widget)
        layout.addWidget(splitter)
        self.setLayout(layout)

    @property
    def project(self):
        # Current project edited by this widget.
        return self._project

    @project.setter
    def project(self, value):
        # Propagate the project to both child browsers.
        self._project = value
        self._actions_widget.project = value
        self._panels_widget.project = value

    def sizeHint(self):
        """Default window size for the editor."""
        return qtcore.QSize(900, 800)
|
svenfraeys/sftoolbox
|
sftoolboxqt/editor.py
|
Python
|
mit
| 4,569 | 0 |
# WSGI entry point: servers (gunicorn, uwsgi, ...) import this module
# and call the ``app`` callable.
from app import setup

app = setup()
|
OniOni/ril
|
wsgi.py
|
Python
|
apache-2.0
| 37 | 0 |
"""
strategies
~~~~~~~~~~
Various strategies.
:copyright: (c) 2013 by Matthias Hummel and Kristoffer Kleine.
:license: BSD, see LICENSE for more details.
"""
from graph import (State, make_walkable, split_into_extended_islands,
split_into_subgraphs, make_flooded)
def get_direction(current, target):
    """Return the compass move ('WEST'/'EAST'/'NORTH'/'SOUTH') from the
    *current* node to an adjacent *target* node, or 'CURRENT' when the
    target is not one unit away on either axis.

    The horizontal axis wins: a diagonal unit step reports its x component.
    """
    dx = target.x - current.x
    dy = target.y - current.y
    if dx == -1:
        return 'WEST'
    if dx == 1:
        return 'EAST'
    if dy == -1:
        return 'NORTH'
    if dy == 1:
        return 'SOUTH'
    return 'CURRENT'
class Strategy(object):
    """Base class for game strategies: accumulates (command, x, y) actions
    and tracks the agent's assumed position while planning."""

    def __init__(self, debug=False, round_=0):
        self.actions = []              # pending actions for this turn
        self.position = (0, 0)         # planned position after queued moves
        self.floodlevel = 0
        self.extended_islands = None   # cache of split_into_extended_islands
        self.debug = debug
        self.round_ = round_

    def do(self, cmd, direction, x=None, y=None):
        """Queue an action; a GO also advances the planned position."""
        self.actions.append((cmd + ' ' + direction, x, y))
        if cmd == 'GO':
            self.position = (x, y)

    def commit(self):
        """Return the queued actions and reset the queue."""
        actions = self.actions
        self.actions = []
        return actions

    def get_actions(self, graph, position):
        # Subclasses implement the per-turn planning here.
        raise NotImplementedError()

    def split_graph_into_extended_islands(self, graph):
        self.extended_islands = split_into_extended_islands(graph)

    def dry_one_if_possible(self, graph):
        """Dry the current field or a flooded neighbor; return True if an
        action was queued."""
        def _set_dry_state(node):
            # Mark the node as re-dried in the graph and in every cached
            # extended island that contains it.
            graph.get_node(node.x, node.y).state = State.redry
            for island in self.extended_islands:
                n = island.get_node(node.x, node.y)
                if n is not None:
                    n.state = State.redry

        current_node = graph.get_node(*self.position)
        if current_node.state == State.flooded:
            self.do('DRY', 'CURRENT', current_node.x, current_node.y)
            _set_dry_state(current_node)
            return True

        for node in current_node.neighbors:
            if node.state == State.flooded:
                direction = get_direction(current_node, node)
                self.do('DRY', direction, node.x, node.y)
                _set_dry_state(node)
                return True
        return False

    def go(self, graph, start, target):
        """Queue one step along the shortest path from *start* to *target*."""
        next_node = graph.get_next_node_on_path_to(start, target)
        if next_node is not None:
            direction = get_direction(start, next_node)
            self.do('GO', direction, next_node.x, next_node.y)

    def find_target(self, graph):
        """Return the middle node of the most valuable reachable island."""
        current_node = graph.get_node(*self.position)
        target_island = None
        for island in self.extended_islands:
            other_node = graph.get_node(island.nodes[0].x, island.nodes[0].y)
            if not graph.is_reachable(current_node, other_node):
                continue
            if target_island is None:
                target_island = island
            else:
                if island.calculate_island_value() > target_island.calculate_island_value():
                    target_island = island
        target = target_island.get_middle()
        return graph.get_node(target.x, target.y)
class DryMaxStrategy(Strategy):
    """
    Dries as many fields as possible
    """

    def get_actions(self, graph, position):
        """Dry every reachable flooded field, then spend leftover actions
        walking towards the nearest water."""
        self.position = position
        current_node = graph.get_node(*self.position)
        # First spend as many of the three actions as possible on drying.
        while len(self.actions) < 3 and self.dry_one_if_possible(graph):
            pass
        # Use the remaining actions to move towards flooded fields.
        while len(self.actions) < 3:
            if len(current_node.neighbors) == 0:
                self.do('GO', 'CURRENT')
                continue
            next_node = min(current_node.neighbors, key=lambda n: n.distance_to_flooded)
            direction = get_direction(current_node, next_node)
            # NOTE(review): do() is called without coordinates here, so the
            # base class records position (None, None); position is not read
            # again this round, but confirm this is intended.
            self.do('GO', direction)
            current_node = next_node
        return self.commit()
class MetaStrategy(Strategy):
    """
    Evaluates the current situation and chooses the right strategy
    """

    def evaluate_mode(self, walkable, position):
        """Classify the situation: 'FARMING' when we stand on the best
        reachable island, 'MOVING' when it lies elsewhere, 'DRYMAX' when no
        island is reachable at all."""
        current_node = walkable.get_node(*position)
        target_island = None
        for island in self.extended_islands:
            other_node = walkable.get_node(island.nodes[0].x, island.nodes[0].y)
            if not walkable.is_reachable(current_node, other_node): continue
            if target_island is None:
                target_island = island
            elif island.calculate_island_value() > target_island.calculate_island_value():
                target_island = island
        if target_island is None:
            return 'DRYMAX'
        if target_island.get_node(*position) is not None:
            return 'FARMING'
        return 'MOVING'

    def get_actions(self, graph, position):
        """Delegate to the concrete strategy for the detected mode.

        Returns (actions, mode) so callers can log the chosen mode.
        """
        walkable = make_walkable(graph)
        self.split_graph_into_extended_islands(walkable)
        mode = self.evaluate_mode(walkable, position)
        if mode == 'MOVING':
            strategy = MovingStrategy(self.debug, self.round_)
        elif mode == 'FARMING':
            strategy = FarmingStrategy(self.debug, self.round_)
        elif mode == 'DRYMAX':
            strategy = DryMaxStrategy(self.debug, self.round_)
        # Share the already-computed islands with the delegate.
        strategy.extended_islands = self.extended_islands
        return strategy.get_actions(walkable, position), mode
class MovingStrategy(Strategy):
    """
    This Strategy moves the bot towards more safe places on the island while
    drying fields on its way there.
    """

    def get_actions(self, graph, position):
        self.position = position
        moved = False
        # Dry when possible; otherwise step towards the target island.
        # Once a move happened we allow a third action in this loop.
        while len(self.actions) < 2 or (moved and len(self.actions) < 3):
            if not self.dry_one_if_possible(graph):
                moved = True
                current_node = graph.get_node(*self.position)
                target = self.find_target(graph)
                self.go(graph, current_node, target)
        if len(self.actions) < 3:
            current_node = graph.get_node(*self.position)
            target = self.find_target(graph)
            if target == current_node: # we already are at our destination, go towards water
                graph.calculate_distance_to_flooded()
                target = min(current_node.neighbors, key=lambda n: n.distance_to_flooded)
                self.go(graph, current_node, target)
            else:
                self.go(graph, current_node, target)
        return self.commit()
class FarmingStrategy(Strategy):
    """
    This Strategy aims to dry as many fields as possible.
    """

    def get_nearest_node(self, walkable, island, current_node):
        """
        Returns a (node, distance) tuple for the island node closest to
        *current_node*, or (None, -1) when no node is within range.
        """
        x, y = current_node.x, current_node.y
        nearest_node = (None, -1)
        for node in island.nodes:
            # Ignore nodes further than 10 fields away (manhattan distance).
            if abs(node.x - x) + abs(node.y - y) > 10:
                continue
            node = walkable.get_node(node.x, node.y)
            distance = walkable.get_distance_between(current_node, node)
            if nearest_node[0] is None or distance < nearest_node[1]:
                nearest_node = (node, distance)
        return nearest_node

    def calculate_distance_value(self, walkable, island, current_node):
        """
        Calculates a value of an island consisting of the normal island value
        minus the distance times 4. This way islands which are near by get a
        better overall value.
        """
        nearest_node, distance = self.get_nearest_node(walkable, island, current_node)
        # Remember the entry node per island for go_towards_best_flooded().
        self.nearest_nodes[island] = nearest_node
        if nearest_node is None:
            distance = 1000  # effectively disqualifies unreachable islands
        return island.calculate_island_value() - (distance*4)

    def get_best_island(self, walkable, current_node):
        """
        Returns a flooded island (which is not really an island) which has the
        most fields that can be dried, but is not too far away.
        """
        best_island = (None, -1)
        for island in self.flooded_islands:
            node = walkable.get_node(island.nodes[0].x, island.nodes[0].y)
            if not walkable.is_reachable(current_node, node):
                continue
            value = self.calculate_distance_value(walkable, island, current_node)
            if best_island[0] is None or value > best_island[1]:
                best_island = (island, value)
        return best_island[0]

    def go_towards_best_flooded(self, walkable):
        """
        Goes one field in the direction where a maximum number of fields can
        be dried, but does not take too far away fields into account.

        Returns True when a GO action was queued, False otherwise.
        """
        self.nearest_nodes = {}
        current_node = walkable.get_node(*self.position)
        best_island = self.get_best_island(walkable, current_node)
        if best_island is None:
            return False
        nearest_node = self.nearest_nodes[best_island]
        if nearest_node is None:
            return False
        target = walkable.get_node(nearest_node.x, nearest_node.y)
        next_node = walkable.get_next_node_on_path_to(current_node, target)
        if next_node is None:
            return False
        # Bug fix: actually queue the move. Previously the path was computed
        # but no GO action was recorded, so this method returned True without
        # adding an action and get_actions() could loop forever.
        direction = get_direction(current_node, next_node)
        self.do('GO', direction, next_node.x, next_node.y)
        return True

    def get_actions(self, graph, position):
        self.flooded_islands = None
        self.position = position
        while len(self.actions) < 3:
            dried = False
            while len(self.actions) < 3 and self.dry_one_if_possible(graph):
                dried = True
            # Recompute the flooded sub-graphs after drying changed the map.
            if dried or self.flooded_islands is None:
                self.flooded_islands = split_into_subgraphs(make_flooded(graph))
            if len(self.actions) == 3:
                return self.commit()
            did_succeed = self.go_towards_best_flooded(graph)
            if not did_succeed:
                # Nothing dryable nearby: head for water or the best island.
                current_node = graph.get_node(*self.position)
                if current_node.distance_to_flooded == -1:
                    target = self.find_target(graph)
                    self.go(graph, current_node, target)
                else:
                    target = min(current_node.neighbors, key=lambda n: n.distance_to_flooded)
                    self.go(graph, current_node, target)
        return self.commit()
|
kkris/wheatley
|
strategies.py
|
Python
|
bsd-3-clause
| 10,513 | 0.003044 |
# -*- coding: utf-8 -*-
"""
Created on Sat Jul 26 23:01:44 2014
@author: Jose Capriles
"""
import pygame, Buttons
from pygame.locals import *
#Initialize pygame
# Must run before any pygame display/font calls in the classes below.
pygame.init()
class Button_Test:
    """Manual test harness for Buttons.Button: opens a window with a single
    button and prints a confirmation when it is clicked. Quit with the
    window close button or any key."""

    def __init__(self):
        self.loopFlag = True  # main loop keeps running while True
        # NOTE(review): running the event loop from the constructor blocks
        # until the window is closed.
        self.main()

    #Create a display
    def display(self):
        self.screen = pygame.display.set_mode((650,370),0,32)
        pygame.display.set_caption("Button.py TEST")

    #Update the display and show the button
    def update_display(self):
        self.screen.fill((30,144,255))
        self.Button1.update()
        pygame.display.flip()

    #Run the loop
    def main(self):
        self.display()
        self.Button1 = Buttons.Button(self.screen, color = (107,142,35), x = 225, y = 135, length = 200, height = 100, width = 0, text ="Button Test", text_color = (255,255,255), font_size=25, fade_on = False)
        while self.loopFlag:
            self.update_display()
            for event in pygame.event.get():
                if event.type == pygame.QUIT:
                    self.loopFlag = False
                if event.type == KEYDOWN:
                    self.loopFlag=False
                if event.type == MOUSEBUTTONDOWN:
                    # pressed() hit-tests the current mouse position.
                    if self.Button1.pressed(pygame.mouse.get_pos()):
                        print "Test Passed!"

if __name__ == '__main__':
    obj = Button_Test()
|
jrcapriles/armSimulator
|
ButtonTest.py
|
Python
|
mit
| 1,443 | 0.028413 |
# -*- coding: utf-8 -*-
from enigma import eEnv
from Components.SystemInfo import SystemInfo
from Components.config import config
from os import path, listdir
import xml.etree.cElementTree
from Plugins.Extensions.OpenWebif.__init__ import _
def addCollapsedMenu(name):
    """Remember *name* in the persisted pipe-separated collapsed-menu list."""
    entry = config.OpenWebif.webcache.collapsedmenus
    tags = entry.value.split("|")
    if name not in tags:
        tags.append(name)
    entry.value = "|".join(tags).strip("|")
    entry.save()
    return {"result": True}
def removeCollapsedMenu(name):
    """Drop *name* from the persisted pipe-separated collapsed-menu list."""
    entry = config.OpenWebif.webcache.collapsedmenus
    tags = entry.value.split("|")
    if name in tags:
        tags.remove(name)
    entry.value = "|".join(tags).strip("|")
    entry.save()
    return {"result": True}
def getCollapsedMenus():
    """List the menu names the user has collapsed in the web UI."""
    collapsed = config.OpenWebif.webcache.collapsedmenus.value.split("|")
    return {"result": True, "collapsed": collapsed}
def setRemoteGrabScreenshot(value):
    """Persist the 'remote grab screenshot' preference."""
    entry = config.OpenWebif.webcache.remotegrabscreenshot
    entry.value = value
    entry.save()
    return {"result": True}
def getRemoteGrabScreenshot():
    """Report the stored 'remote grab screenshot' preference."""
    current = config.OpenWebif.webcache.remotegrabscreenshot.value
    return {"result": True, "remotegrabscreenshot": current}
def setZapStream(value):
    """Persist the 'zap before streaming' preference."""
    entry = config.OpenWebif.webcache.zapstream
    entry.value = value
    entry.save()
    return {"result": True}
def getZapStream():
    """Report the stored 'zap before streaming' preference."""
    current = config.OpenWebif.webcache.zapstream.value
    return {"result": True, "zapstream": current}
def getJsonFromConfig(cnf):
    """Describe a Components.config element as a JSON-serialisable dict
    (widget type, choices/limits and current value) for the web UI.

    Classes are detected by name rather than isinstance -- keep this in
    sync with the config classes in Components.config.
    """
    if cnf.__class__.__name__ == "ConfigSelection" or cnf.__class__.__name__ == "ConfigSelectionNumber":
        if type(cnf.choices.choices) == dict:
            # choices is a {value: description} mapping
            choices = []
            for choice in cnf.choices.choices:
                choices.append((choice, _(cnf.choices.choices[choice])))
        elif type(cnf.choices.choices[0]) == tuple:
            # choices is a list of (value, description) pairs
            choices = []
            for choice_tuple in cnf.choices.choices:
                choices.append((choice_tuple[0], _(choice_tuple[1])))
        else:
            # choices is a plain list of values
            choices = []
            for choice in cnf.choices.choices:
                choices.append((choice, _(choice)))
        return {
            "result": True,
            "type": "select",
            "choices": choices,
            "current": cnf.value
        }
    elif cnf.__class__.__name__ == "ConfigBoolean" or cnf.__class__.__name__ == "ConfigEnableDisable" or cnf.__class__.__name__ == "ConfigYesNo":
        return {
            "result": True,
            "type": "checkbox",
            "current": cnf.value
        }
    elif cnf.__class__.__name__ == "ConfigSet":
        return {
            "result": True,
            "type": "multicheckbox",
            "choices": cnf.choices.choices,
            "current": cnf.value
        }
    elif cnf.__class__.__name__ == "ConfigNumber":
        return {
            "result": True,
            "type": "number",
            "current": cnf.value
        }
    elif cnf.__class__.__name__ == "ConfigInteger":
        return {
            "result": True,
            "type": "number",
            "current": cnf.value,
            # limits is a list of (min, max) pairs; only the first is exposed
            "limits": (cnf.limits[0][0], cnf.limits[0][1])
        }
    print "[OpenWebif] Unknown class ", cnf.__class__.__name__
    return {
        "result": False,
        "type": "unknown"
    }
def saveConfig(path, value):
    """Set the config element at dotted *path* (e.g. 'config.usage.x') to
    *value*, coercing the string to the element's native type, then persist.

    Returns {"result": bool}.
    NOTE(review): *path* is eval()'d -- this must never receive untrusted
    input; confirm the callers restrict it to known config paths.
    """
    try:
        cnf = eval(path)
        if cnf.__class__.__name__ == "ConfigBoolean" or cnf.__class__.__name__ == "ConfigEnableDisable" or cnf.__class__.__name__ == "ConfigYesNo":
            cnf.value = value == "true"
        elif cnf.__class__.__name__ == "ConfigSet":
            # Toggle membership of the given int in the stored set.
            values = cnf.value
            if int(value) in values:
                values.remove(int(value))
            else:
                values.append(int(value))
            cnf.value = values
        elif cnf.__class__.__name__ == "ConfigNumber":
            cnf.value = int(value)
        elif cnf.__class__.__name__ == "ConfigInteger":
            # Clamp into the element's (min, max) limits.
            cnf_min = int(cnf.limits[0][0])
            cnf_max = int(cnf.limits[0][1])
            cnf_value = int(value)
            if cnf_value < cnf_min:
                cnf_value = cnf_min
            elif cnf_value > cnf_max:
                cnf_value = cnf_max
            cnf.value = cnf_value
        else:
            cnf.value = value
        cnf.save()
    except Exception, e:
        print "[OpenWebif] ", e
        return {
            "result": False
        }
    return {
        "result": True
    }
def getConfigs(key):
    """Return the config entries of setup section *key* as JSON-ready dicts.

    Parses the setup XML files on first use. Entries whose config element
    cannot be evaluated or translated are skipped silently.
    """
    configs = []
    title = None
    if not len(configfiles.sections):
        # Bug fix: ConfigFiles has no getConfigs() method, so the previous
        # call `configfiles.getConfigs()` raised AttributeError whenever this
        # ran before getConfigsSections(). parseConfigFiles() is the method
        # that populates .sections and .section_config.
        configfiles.parseConfigFiles()
    if key in configfiles.section_config:
        title, config_entries = configfiles.section_config[key]
        if config_entries:
            for entry in config_entries:
                try:
                    data = getJsonFromConfig(eval(entry.text or ""))
                    text = _(entry.get("text", ""))
                    if "limits" in data:
                        text = "%s (%d - %d)" % (text, data["limits"][0], data["limits"][1])
                    configs.append({
                        "description": text,
                        "path": entry.text or "",
                        "data": data
                    })
                except Exception:
                    # Broken entries (bad eval / missing attributes) are skipped.
                    pass
    return {
        "result": True,
        "configs": configs,
        "title": title
    }
def getConfigsSections():
    """Return every known setup section, parsing the setup XML on first use."""
    if not len(configfiles.sections):
        configfiles.parseConfigFiles()
    return {"result": True, "sections": configfiles.sections}
def privSettingValues(prefix, top, result):
    """Recursively flatten a nested saved-value mapping into *result* as
    (dotted-name, value) pairs; tuple leaves contribute their first item."""
    for key, val in top.items():
        dotted = "%s.%s" % (prefix, key)
        if isinstance(val, dict):
            privSettingValues(dotted, val, result)
        else:
            leaf = val[0] if isinstance(val, tuple) else val
            result.append((dotted, leaf))
def getSettings():
    """Flatten the entire saved configuration into (name, value) pairs."""
    configkeyval = []
    privSettingValues("config", config.saved_value, configkeyval)
    return {"result": True, "settings": configkeyval}
class ConfigFiles:
    """Finds and parses the enigma2 setup.xml files, keeping the sections
    that may be exposed through OpenWebif."""

    def __init__(self):
        self.setupfiles = []      # existing setup.xml paths
        self.sections = []        # [{'key':..., 'description':...}], sorted by description
        self.section_config = {}  # section key -> (title, [item XML elements])
        self.allowedsections = ["usage", "recording", "subtitlesetup", "autolanguagesetup", "avsetup", "harddisk", "keyboard", "timezone"]
        self.getConfigFiles()

    def getConfigFiles(self):
        """Collect paths of every existing setup.xml (core enigma2 + plugins)."""
        setupfiles = [eEnv.resolve('${datadir}/enigma2/setup.xml')]
        locations = ('SystemPlugins', 'Extensions')
        libdir = eEnv.resolve('${libdir}')
        # NOTE(review): listdir() raises when a plugin directory is missing
        # -- presumably both locations always exist on target images.
        for location in locations:
            plugins = listdir(('%s/enigma2/python/Plugins/%s' % (libdir,location)))
            for plugin in plugins:
                setupfiles.append(('%s/enigma2/python/Plugins/%s/%s/setup.xml' % (libdir, location, plugin)))
        for setupfile in setupfiles:
            if path.exists(setupfile):
                self.setupfiles.append(setupfile)

    def parseConfigFiles(self):
        """Parse all collected setup.xml files, filling self.sections and
        self.section_config with the allowed (or explicitly marked) sections."""
        sections = []
        for setupfile in self.setupfiles:
            # print "[OpenWebif] loading configuration file :", setupfile
            setupfile = file(setupfile, 'r')
            setupdom = xml.etree.cElementTree.parse(setupfile)
            setupfile.close()
            xmldata = setupdom.getroot()
            for section in xmldata.findall("setup"):
                configs = []
                key = section.get("key")
                if key not in self.allowedsections:
                    # Sections can opt in via showOpenWebIF="1".
                    showOpenWebIF = section.get("showOpenWebIF")
                    if showOpenWebIF == "1":
                        self.allowedsections.append(key)
                    else:
                        continue
                # print "[OpenWebif] loading configuration section :", key
                for entry in section:
                    if entry.tag == "item":
                        # Skip items whose hardware requirement is not met or
                        # whose setup level exceeds the user's setting.
                        requires = entry.get("requires")
                        if requires and not SystemInfo.get(requires, False):
                            continue;
                        if int(entry.get("level", 0)) > config.usage.setup_level.index:
                            continue
                        configs.append(entry)
                if len(configs):
                    sections.append({
                        "key": key,
                        "description": _(section.get("title"))
                    })
                    title = _(section.get("title", ""))
                    self.section_config[key] = (title, configs)
        sections = sorted(sections, key=lambda k: k['description'])
        self.sections = sections
configfiles = ConfigFiles()
|
MOA-2011/enigma2-plugin-extensions-openwebif
|
plugin/controllers/models/config.py
|
Python
|
gpl-2.0
| 7,243 | 0.034102 |
from django.conf.urls import url
from . import views
# URL routes for the news app: list view at /news/, detail at /news/<pk>/.
urlpatterns = [
    # url(r'^news/([\w-]+)$', views.ListView.as_view(), name='list'),
    url(r'^news/$', views.ListView.as_view(), name='list_all'),
    url(r'^news/(?P<pk>[0-9]+)/$', views.DetailView.as_view(), name='detail'),
]
|
Zex0n/django-simple-cms
|
news/urls.py
|
Python
|
mit
| 286 | 0 |
#!/usr/bin/env python
if __name__ == '__main__':
    # Monkey-patch the stdlib for gevent cooperative I/O before any other
    # module imports sockets (only when run as a script, not when imported).
    from gevent import monkey
    monkey.patch_all()
import redis
import json
import pprint
import argparse
import mturk_vision
import base64
import uuid
import pickle
import time
import databases
from hadoop_parse import scrape_hadoop_jobs
class UnauthorizedException(Exception):
    """User is not authorized to make this call (wrong owner or id clash)"""
class NotFoundException(Exception):
    """Task was not found (missing lock key or wrong task type)"""
class Jobs(object):
    """Task registry and work-queue layer on top of redis.

    Tasks live as redis hashes under 'task:<id>' with a companion lock key
    'lock:<id>' and a per-owner set 'owner:<owner>'. Work items are pickled
    dicts on 'queue:<name>' lists. Columns whose name starts with '_' are
    secret and stripped from public views.
    """

    def __init__(self, host, port, db, annotation_redis_host, annotation_redis_port):
        # Saved so __reduce__ can rebuild an equivalent instance on unpickle.
        self.args = (host, port, db, annotation_redis_host, annotation_redis_port)
        self.redis_host = host
        self.redis_port = port
        self.db = redis.StrictRedis(host=host, port=port, db=db)
        self._owner_prefix = 'owner:'
        self._task_prefix = 'task:'
        self._lock_prefix = 'lock:'
        self.annotation_redis_host = annotation_redis_host
        self.annotation_redis_port = annotation_redis_port
        # Hadoop jobs already seen in a terminal state (completed/failed),
        # used so update_hadoop_jobs can skip re-scraping them.
        self.hadoop_completed_jobs_cache = set()

    def __reduce__(self):
        return (Jobs, self.args)

    def add_task(self, type, owner, params, secret_params):
        """Register a new task of *type* for *owner*; returns the task id.

        Raises UnauthorizedException if the freshly generated id is already
        locked (practically never happens with uuid4 ids).
        """
        task = base64.urlsafe_b64encode(uuid.uuid4().bytes)[:-2]
        data = {'owner': owner, '_params': json.dumps(secret_params),
                'params': json.dumps(params), 'type': type, 'startTime': str(time.time())}
        if not self.db.set(self._lock_prefix + task, '', nx=True):
            raise UnauthorizedException
        # TODO: Do these atomically
        self.db.hmset(self._task_prefix + task, data)
        self.db.sadd(self._owner_prefix + owner, task)
        return task

    def _check_owner(self, task, owner):
        # Guard: only the task owner may proceed.
        if self.db.hget(self._task_prefix + task, 'owner') != owner:
            raise UnauthorizedException

    def _get_task_type(self, task):
        # (A dead hgetall whose result was immediately overwritten has been
        # removed here.)
        out = self.db.hget(self._task_prefix + task, 'type')
        if out is None:
            raise NotFoundException
        return out

    def _check_type(self, task, type):
        if self._get_task_type(task) != type:
            raise NotFoundException

    def _exists(self, task):
        if not self.db.exists(self._lock_prefix + task):
            raise NotFoundException

    def get_task(self, task, owner):
        """Return the public columns of *task* ('_'-prefixed ones removed)."""
        self._exists(task)
        self._check_owner(task, owner)
        out = self.db.hgetall(self._task_prefix + task)
        out = dict((k, v) for k, v in out.items() if not k.startswith('_'))
        return out

    def get_task_secret(self, task, owner):
        """Return the decoded secret parameters of *task*."""
        self._exists(task)
        self._check_owner(task, owner)
        return json.loads(self.db.hget(self._task_prefix + task, '_params'))

    def delete_task(self, task, owner, **kw):
        """Remove *task*; annotation tasks also get their manager destroyed
        (requires kw['data_connection'])."""
        self._exists(task)
        self._check_owner(task, owner)
        task_type = self._get_task_type(task)
        if task_type == 'annotation':
            manager = self.get_annotation_manager(task, data_connection=kw['data_connection'])
        # TODO: Do these atomically
        self.db.delete(self._task_prefix + task, self._lock_prefix + task)
        self.db.srem(self._owner_prefix + owner, task)
        if task_type == 'annotation':
            manager.destroy()  # TODO: MTurk specific
        # TODO: For Hadoop jobs kill the task if it is running
        # TODO: For worker/crawl/model jobs kill the worker process or send it a signal

    def update_task(self, row, columns):
        self.db.hmset(self._task_prefix + row, columns)

    def update_hadoop_jobs(self, hadoop_jobtracker):
        """Scrape the hadoop jobtracker and sync columns of 'process' tasks."""
        for row, columns in scrape_hadoop_jobs(hadoop_jobtracker, self.hadoop_completed_jobs_cache).items():
            # NOTE: We do this at this point as a job may not exist but is finished completed/failed in hadoop
            if columns.get('status', '') in ('completed', 'failed'):
                self.hadoop_completed_jobs_cache.add(row)
            try:
                self._exists(row)
                self._check_type(row, 'process')
            except NotFoundException:
                continue
            # TODO: Need to do this atomically with the exists check
            self.update_task(row, columns)

    def get_tasks(self, owner):
        """Return {task_id: public columns} for every task of *owner*."""
        outs = {}
        for job_key in self.db.smembers(self._owner_prefix + owner):
            # TODO: Error check if something gets removed while we are accumulating
            task = self._task_prefix + job_key
            if self.db.hget(task, 'owner') == owner:
                out = self.db.hgetall(task)
                out = dict((k, v) for k, v in out.items() if not k.startswith('_'))
                outs[task.split(':', 1)[1]] = out
        return outs

    def get_annotation_manager(self, task, data_connection, sync=False):
        """Build the mturk_vision manager for an annotation *task*."""
        self._exists(task)
        self._check_type(task, 'annotation')
        data = self.db.hgetall(self._task_prefix + task)
        p = json.loads(data['params'])
        ps = json.loads(data['_params'])
        p['sync'] = sync
        p['secret'] = str(ps['secret'])
        p['redis_address'] = self.annotation_redis_host
        p['redis_port'] = int(self.annotation_redis_port)
        p['task_key'] = task
        # TODO: Currently only compatible with thrift based datastores
        if data_connection:
            data_connection = data_connection._thrift
        return mturk_vision.manager(data=str(ps['data']), data_connection=data_connection, **p)

    def get_annotation_manager_check(self, task, owner, data_connection):
        """Like get_annotation_manager but verifies ownership first."""
        self._exists(task)
        self._check_type(task, 'annotation')
        self._check_owner(task, owner)
        return self.get_annotation_manager(task, data_connection)

    def add_work(self, front, queue, **kw):
        """Push a pickled work item onto 'queue:<queue>' (front when *front*)."""
        push = self.db.lpush if front else self.db.rpush
        push('queue:' + queue, pickle.dumps(kw, -1))

    def get_work(self, queues, timeout=0):
        """Blocking-pop one work item from the given queues.

        Returns (queue_name, work_dict) or None on timeout.
        NOTE: items are unpickled -- queues must only be fed by trusted
        producers.
        """
        out = self.db.brpop(['queue:' + x for x in queues], timeout=timeout)
        if not out:
            return
        # Bug fix: strip the 'queue:' namespace prefix to recover the bare
        # queue name. The old slice out[0][:len('queue:')] kept the prefix
        # and discarded the actual name, so every caller saw 'queue:'.
        queue = out[0][len('queue:'):]
        data = pickle.loads(out[1])
        print('Processing job from [%s][%s]' % (queue, data['func']))
        pprint.pprint(data['method_args'])
        return queue, data
def main():
    """Command line driver: inspect the job DB, wipe it, or run a worker."""
    def _get_all_tasks(jobs):
        # Dump every task hash straight from redis (bypasses ownership checks).
        outs = []
        for job_key in jobs.db.keys('task:*'):
            out = jobs.db.hgetall(job_key)
            outs.append(out)
        return outs

    def _info(args, jobs):
        pprint.pprint(_get_all_tasks(jobs))

    def _destroy(args, jobs):
        jobs.db.flushall()

    def job_worker(db, func, method_args, method_kwargs):
        # Dispatch a queued work item onto the database object.
        getattr(db, func)(*method_args, **method_kwargs)

    def _work(args, jobs):
        # Worker loop: process queued jobs until the code base changes on disk.
        if args.raven:
            import raven
            RAVEN = raven.Client(args.raven)
        else:
            RAVEN = None
        import gevent_inotifyx as inotifyx
        fd = inotifyx.init()
        # NOTE: .git/logs/HEAD is the last thing updated after a git pull/merge
        inotifyx.add_watch(fd, '../.git/logs/HEAD', inotifyx.IN_MODIFY)
        inotifyx.add_watch(fd, '.reloader', inotifyx.IN_MODIFY | inotifyx.IN_ATTRIB)
        db = THRIFT_CONSTRUCTOR()
        while 1:
            try:
                work = jobs.get_work(args.queues, timeout=5)
                if work:
                    # Requeue onto an 'old' queue first so the job is
                    # retrievable even if processing crashes.
                    jobs.add_work(True, 'old' + work[0], **work[1])
                    job_worker(db=db, **work[1])
                if inotifyx.get_events(fd, 0):
                    print('Shutting down due to new update')
                    break
            except:
                # Report to Sentry if configured, then re-raise.
                if RAVEN:
                    RAVEN.captureException()
                raise
    parser = argparse.ArgumentParser(description='Picarus job operations')
    parser.add_argument('--redis_host', help='Redis Host', default='localhost')
    parser.add_argument('--redis_port', type=int, help='Redis Port', default=6379)
    parser.add_argument('--raven', help='URL to the Raven/Sentry logging server')
    parser.add_argument('--annotations_redis_host', help='Annotations Host', default='localhost')
    parser.add_argument('--annotations_redis_port', type=int, help='Annotations Port', default=6380)
    parser.add_argument('--thrift_server', default='localhost')
    parser.add_argument('--thrift_port', default='9090')
    parser.add_argument('--database', choices=['hbase', 'hbasehadoop', 'redis'], default='hbasehadoop', help='Select which database to use as our backend. Those ending in hadoop use it for job processing.')
    subparsers = parser.add_subparsers(help='Commands')
    subparser = subparsers.add_parser('info', help='Display info about jobs')
    subparser.set_defaults(func=_info)
    subparser = subparsers.add_parser('destroy', help='Delete everything in the jobs DB')
    subparser.set_defaults(func=_destroy)
    subparser = subparsers.add_parser('work', help='Do background work')
    # NOTE(review): 'queues' is added to the top-level parser rather than the
    # 'work' subparser, so every subcommand requires queue arguments -- confirm.
    parser.add_argument('queues', nargs='+', help='Queues to do work on')
    subparser.set_defaults(func=_work)
    args = parser.parse_args()
    jobs = Jobs(args.redis_host, args.redis_port, 3,
                args.annotations_redis_host, args.annotations_redis_port)

    def THRIFT_CONSTRUCTOR():
        # Deferred so the worker builds its DB connection only when needed.
        return databases.factory(args.database, True, jobs,
                                 thrift_server=args.thrift_server, thrift_port=args.thrift_port,
                                 redis_host=args.redis_host, redis_port=args.redis_port)
    args.func(args, jobs)

if __name__ == '__main__':
    main()
|
bwhite/picarus
|
server/jobs.py
|
Python
|
apache-2.0
| 9,515 | 0.002207 |
"""Points and related utilities
"""
from ctypes import c_double
from ctypes import cast, POINTER
from shapely.coords import required
from shapely.geos import lgeos, DimensionError
from shapely.geometry.base import BaseGeometry
from shapely.geometry.proxy import CachingGeometryProxy
__all__ = ['Point', 'asPoint']
class Point(BaseGeometry):
    """
    A zero dimensional feature

    A point has zero length and zero area.

    Attributes
    ----------
    x, y, z : float
        Coordinate values

    Example
    -------
      >>> p = Point(1.0, -1.0)
      >>> print p
      POINT (1.0000000000000000 -1.0000000000000000)
      >>> p.y
      -1.0
      >>> p.x
      1.0
    """

    def __init__(self, *args):
        """
        Parameters
        ----------
        There are 2 cases:

        1) 1 parameter: this must satisfy the numpy array protocol.
        2) 2 or more parameters: x, y, z : float
            Easting, northing, and elevation.
        """
        BaseGeometry.__init__(self)
        if len(args) > 0:
            self._set_coords(*args)

    # Coordinate getters and setters

    @property
    def x(self):
        """Return x coordinate."""
        return self.coords[0][0]

    @property
    def y(self):
        """Return y coordinate."""
        return self.coords[0][1]

    @property
    def z(self):
        """Return z coordinate."""
        if self._ndim != 3:
            raise DimensionError("This point has no z coordinate.")
        return self.coords[0][2]

    @property
    def __geo_interface__(self):
        # GeoJSON-like mapping of this geometry.
        return {
            'type': 'Point',
            'coordinates': self.coords[0]
        }

    @property
    def ctypes(self):
        # Lazily build and cache a ctypes double array of the coordinates.
        if not self._ctypes_data:
            array_type = c_double * self._ndim
            array = array_type()
            xy = self.coords[0]
            array[0] = xy[0]
            array[1] = xy[1]
            if self._ndim == 3:
                array[2] = xy[2]
            self._ctypes_data = array
        return self._ctypes_data

    def array_interface(self):
        """Provide the Numpy array protocol."""
        ai = self.array_interface_base
        ai.update({'shape': (self._ndim,)})
        return ai

    __array_interface__ = property(array_interface)

    @property
    def bounds(self):
        # Degenerate bounding box: (minx, miny, maxx, maxy) all at the point.
        xy = self.coords[0]
        return (xy[0], xy[1], xy[0], xy[1])

    # Coordinate access

    def _set_coords(self, *args):
        # Replace the underlying GEOS geometry with new coordinates; accepts
        # either one sequence/array argument or separate x, y[, z] floats.
        self.empty()
        if len(args) == 1:
            self._geom, self._ndim = geos_point_from_py(args[0])
        else:
            self._geom, self._ndim = geos_point_from_py(tuple(args))

    coords = property(BaseGeometry._get_coords, _set_coords)

    @property
    def xy(self):
        """Separate arrays of X and Y coordinate values

        Example:

          >>> x, y = Point(0, 0).xy
          >>> list(x)
          [0.0]
          >>> list(y)
          [0.0]
        """
        return self.coords.xy
class PointAdapter(CachingGeometryProxy, Point):
    """Point-like read-only proxy over an external coordinate source."""

    _owned = False  # the adapted context owns its data, not this object

    def __init__(self, context):
        self.context = context
        self.factory = geos_point_from_py

    @property
    def _ndim(self):
        # Dimension count, derived from the context without copying it.
        try:
            # From array protocol
            array = self.context.__array_interface__
            n = array['shape'][0]
            assert n == 2 or n == 3
            return n
        except AttributeError:
            # Fall back on list
            return len(self.context)

    @property
    def __array_interface__(self):
        """Provide the Numpy array protocol."""
        try:
            return self.context.__array_interface__
        except AttributeError:
            # Context does not speak the array protocol; synthesize one.
            return self.array_interface()

    _get_coords = BaseGeometry._get_coords

    def _set_coords(self, ob):
        # Adapters are read-only views over their context.
        raise NotImplementedError("Adapters can not modify their sources")

    coords = property(_get_coords)
def asPoint(context):
    """Adapt an object (coordinate pair or array-protocol object) to the
    Point interface without copying its data."""
    return PointAdapter(context)
def geos_point_from_py(ob, update_geom=None, update_ndim=0):
    """Create a GEOS geom from an object that is a coordinate sequence
    or that provides the array interface.

    Returns the GEOS geometry and the number of its dimensions; when
    *update_geom* is given, its coordinate sequence is updated in place
    and None is returned instead.
    """
    # If numpy is present, we use numpy.require to ensure that we have a
    # C-continguous array that owns its data. View data will be copied.
    ob = required(ob)
    try:
        # From array protocol
        array = ob.__array_interface__
        assert len(array['shape']) == 1
        n = array['shape'][0]
        assert n == 2 or n == 3
        dz = None
        da = array['data']
        if type(da) == type((0,)):
            # 'data' is a (pointer, read-only flag) tuple: read the doubles
            # directly from memory via ctypes.
            cdata = da[0]
            # If we had numpy, we would do
            # from numpy.ctypeslib import as_ctypes
            # cp = as_ctypes(ob) - check that code?
            cp = cast(cdata, POINTER(c_double))
            dx = c_double(cp[0])
            dy = c_double(cp[1])
            if n == 3:
                dz = c_double(cp[2])
                # NOTE(review): 'ndim' is assigned here but never used; the
                # dimension actually returned below is 'n'.
                ndim = 3
        else:
            # 'data' is a buffer-like sequence of float values.
            dx, dy = da[0:2]
            if n == 3:
                dz = da[2]
                ndim = 3
    except AttributeError:
        # Fall back on the case of Python sequence data
        # Accept either (x, y) or [(x, y)]
        if type(ob[0]) == type(tuple()):
            coords = ob[0]
        else:
            coords = ob
        n = len(coords)
        dx = c_double(coords[0])
        dy = c_double(coords[1])
        dz = None
        if n == 3:
            dz = c_double(coords[2])
    if update_geom:
        # Reuse the coordinate sequence of an existing geometry.
        cs = lgeos.GEOSGeom_getCoordSeq(update_geom)
        if n != update_ndim:
            raise ValueError(
                "Wrong coordinate dimensions; this geometry has dimensions: %d" \
                % update_ndim)
    else:
        cs = lgeos.GEOSCoordSeq_create(1, n)
    # Because of a bug in the GEOS C API, always set X before Y
    lgeos.GEOSCoordSeq_setX(cs, 0, dx)
    lgeos.GEOSCoordSeq_setY(cs, 0, dy)
    if n == 3:
        lgeos.GEOSCoordSeq_setZ(cs, 0, dz)
    if update_geom:
        return None
    else:
        return lgeos.GEOSGeom_createPoint(cs), n
def update_point_from_py(geom, ob):
    """Mutate the existing Point *geom* in place from python object *ob*."""
    geos_point_from_py(ob, geom._geom, geom._ndim)
# Test runner
def _test():
    """Run this module's doctests."""
    import doctest
    doctest.testmod()

if __name__ == "__main__":
    _test()
|
GbalsaC/bitnamiP
|
venv/lib/python2.7/site-packages/shapely/geometry/point.py
|
Python
|
agpl-3.0
| 6,390 | 0.002817 |
# Simple script to run required operations to
# 1. Download FASTAs from database
# 2. Copy FASTAs to nextflu directory
# 3. Download titer tables from database
# 4. Copy titer tables to nextflu directory
# Run from base fauna directory with python flu/download_all.py
# Assumes that nextflu/, nextflu-cdc/ and nextflu-cdc-fra/ are
# sister directories to fauna/
import os, subprocess
import argparse

# Command-line interface: which virus/lineages/segments to download and
# whether to pull sequences and/or titers (and from which sources/passages).
parser = argparse.ArgumentParser()
parser.add_argument('--virus', default="flu", help="virus to download; default is flu")
parser.add_argument('--flu_lineages', default=["h3n2", "h1n1pdm", "vic", "yam"], nargs='+', type = str, help ="seasonal flu lineages to download, options are h3n2, h1n1pdm, vic and yam")
parser.add_argument('--segments', type=str, default=['ha', 'na'], nargs='+', help="specify segment(s) to download")
parser.add_argument('--sequences', default=False, action="store_true", help="download sequences from vdb")
parser.add_argument('--titers', default=False, action="store_true", help="download titers from tdb")
parser.add_argument('--titers_sources', default=["base", "crick", "cdc", "niid", "vidrl"], nargs='+', type = str, help ="titer sources to download, options are base, cdc, crick, niid and vidrl")
parser.add_argument('--titers_passages', default=["egg", "cell"], nargs='+', type = str, help ="titer passage types to download, options are egg and cell")
def _concatenate_titer_files(lineages, sources, label, assay, passage):
    """Concatenate per-source titer TSVs into one combined file per lineage.

    For each lineage, every existing
    data/<lineage>_<source>_<assay>_<passage>_titers.tsv for the given
    sources is `cat`-ed into data/<lineage>_<label>_<assay>_<passage>_titers.tsv.
    Lineages with no matching source files produce no output file.
    """
    for lineage in lineages:
        out = 'data/%s_%s_%s_%s_titers.tsv' % (lineage, label, assay, passage)
        hi_titers = []
        for source in sources:
            hi_titers_file = 'data/%s_%s_%s_%s_titers.tsv' % (lineage, source, assay, passage)
            if os.path.isfile(hi_titers_file):
                hi_titers.append(hi_titers_file)
        if len(hi_titers) > 0:
            with open(out, 'w+') as f:
                call = ['cat'] + hi_titers
                # print(call) is valid in both Python 2 and 3, matching the
                # print(call) style used in the rest of this script.
                print(call)
                subprocess.call(call, stdout=f)

def concatenate_titers(params, passage, assay):
    """Build the combined 'who' and 'public' titer files for each flu lineage.

    'who' concatenates all requested titer sources; 'public' concatenates
    only the freely shareable base and CDC sources.
    """
    # All requested sources -> *_who_* files:
    _concatenate_titer_files(params.flu_lineages, params.titers_sources,
                             'who', assay, passage)
    # Public subset -> *_public_* files:
    _concatenate_titer_files(params.flu_lineages, ["base", "cdc"],
                             'public', assay, passage)
if __name__=="__main__":
    params = parser.parse_args()
    if params.virus == "flu":
        # Download FASTAs from database
        if params.sequences:
            segments = params.segments
            for segment in segments:
                for lineage in params.flu_lineages:
                    call = "python vdb/flu_download.py -db vdb -v flu --select locus:%s lineage:seasonal_%s --fstem %s_%s --resolve_method split_passage"%(segment.upper(), lineage, lineage, segment)
                    print(call)
                    os.system(call)
        if params.titers:
            # download titers
            for source in params.titers_sources:
                if source == "base":
                    for lineage in params.flu_lineages:
                        call = "python tdb/download.py -db tdb -v flu --subtype %s --select assay_type:hi --fstem %s_base_hi_cell"%(lineage, lineage)
                        print(call)
                        os.system(call)
                if source in ["cdc", "crick", "niid", "vidrl"]:
                    for passage in params.titers_passages:
                        for lineage in params.flu_lineages:
                            call = "python tdb/download.py -db %s_tdb -v flu --subtype %s --select assay_type:hi serum_passage_category:%s --fstem %s_%s_hi_%s"%(source, lineage, passage, lineage, source, passage)
                            print(call)
                            os.system(call)
                        # FRA titers are only downloaded for h3n2:
                        lineage = 'h3n2'
                        call = "python tdb/download.py -db %s_tdb -v flu --subtype %s --select assay_type:fra serum_passage_category:%s --fstem %s_%s_fra_%s"%(source, lineage, passage, lineage, source, passage)
                        print(call)
                        os.system(call)
                        # CDC additionally provides human-serum titers:
                        if source == "cdc":
                            for lineage in params.flu_lineages:
                                call = "python tdb/download.py -db %s_tdb -v flu --subtype %s --select assay_type:hi serum_host:human --fstem %s_%s_hi_%s_human"%(source, lineage, lineage, source, passage)
                                print(call)
                                os.system(call)
                            lineage = 'h3n2'
                            call = "python tdb/download.py -db %s_tdb -v flu --subtype %s --select assay_type:fra serum_host:human --fstem %s_%s_fra_%s_human"%(source, lineage, lineage, source, passage)
                            print(call)
                            os.system(call)
            # concatenate to create default HI strain TSVs for each subtype
            concatenate_titers(params, "cell", "hi")
            concatenate_titers(params, "cell", "fra")
            concatenate_titers(params, "egg", "hi")
            concatenate_titers(params, "egg", "fra")
    elif params.virus == "ebola":
        call = "python vdb/ebola_download.py -db vdb -v ebola --fstem ebola"
        print(call)
        os.system(call)
    elif params.virus == "dengue":
        # Download all serotypes together.
        call = "python vdb/dengue_download.py"
        print(call)
        os.system(call)
        # Download individual serotypes.
        serotypes = [1, 2, 3, 4]
        for serotype in serotypes:
            call = "python vdb/dengue_download.py --select serotype:%i" % serotype
            print(call)
            os.system(call)
        # Download titers.
        if params.titers:
            call = "python tdb/download.py -db tdb -v dengue --fstem dengue"
            print(call)
            os.system(call)
    elif params.virus == "zika":
        call = "python vdb/zika_download.py -db vdb -v zika --fstem zika"
        print(call)
        os.system(call)
    elif params.virus == "mumps":
        call = "python vdb/mumps_download.py -db vdb -v mumps --fstem mumps --resolve_method choose_genbank"
        print(call)
        os.system(call)
    elif params.virus == "h7n9" or params.virus == "avian":
        # One download per genome segment:
        os.system("python vdb/h7n9_download.py -db vdb -v h7n9 --select locus:PB2 --fstem h7n9_pb2")
        os.system("python vdb/h7n9_download.py -db vdb -v h7n9 --select locus:PB1 --fstem h7n9_pb1")
        os.system("python vdb/h7n9_download.py -db vdb -v h7n9 --select locus:PA --fstem h7n9_pa")
        os.system("python vdb/h7n9_download.py -db vdb -v h7n9 --select locus:HA --fstem h7n9_ha")
        os.system("python vdb/h7n9_download.py -db vdb -v h7n9 --select locus:NP --fstem h7n9_np")
        os.system("python vdb/h7n9_download.py -db vdb -v h7n9 --select locus:NA --fstem h7n9_na")
        os.system("python vdb/h7n9_download.py -db vdb -v h7n9 --select locus:MP --fstem h7n9_mp")
        os.system("python vdb/h7n9_download.py -db vdb -v h7n9 --select locus:NS --fstem h7n9_ns")
    else:
        # Unknown virus: report and exit with a non-zero status.
        print("%s is an invalid virus type.\nValid viruses are flu, ebola, dengue, zika, mumps, h7n9, and avian."%(params.virus))
        sys.exit(2)
|
blab/nextstrain-db
|
download_all.py
|
Python
|
agpl-3.0
| 7,407 | 0.006615 |
# Kingsoft Antivirus
# CVE-NOMATCH
import logging
log = logging.getLogger("Thug")
def SetUninstallName(self, arg):
    """Emulated ActiveX method; logs a heap-overflow exploitation attempt
    when called with an oversized argument.
    """
    # Arguments of 900 characters or less are considered benign:
    if len(arg) <= 900:
        return

    log.ThugLogging.log_exploit_event(self._window.url,
                                      "Kingsoft AntiVirus ActiveX",
                                      "SetUninstallName Heap Overflow")
|
tweemeterjop/thug
|
thug/ActiveX/modules/Kingsoft.py
|
Python
|
gpl-2.0
| 350 | 0 |
# -*- coding: utf-8 -*-
from openerp import http
# class PayrollCancel(http.Controller):
# @http.route('/payroll_cancel/payroll_cancel/', auth='public')
# def index(self, **kw):
# return "Hello, world"
# @http.route('/payroll_cancel/payroll_cancel/objects/', auth='public')
# def list(self, **kw):
# return http.request.render('payroll_cancel.listing', {
# 'root': '/payroll_cancel/payroll_cancel',
# 'objects': http.request.env['payroll_cancel.payroll_cancel'].search([]),
# })
# @http.route('/payroll_cancel/payroll_cancel/objects/<model("payroll_cancel.payroll_cancel"):obj>/', auth='public')
# def object(self, obj, **kw):
# return http.request.render('payroll_cancel.object', {
# 'object': obj
# })
|
suhe/odoo
|
addons/payroll_cancel/controllers/controllers.py
|
Python
|
gpl-3.0
| 806 | 0.003722 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import SUPERUSER_ID
from openerp.osv import osv, fields
from openerp.tools import html2plaintext
class note_stage(osv.osv):
    """ Category of Note """
    _name = "note.stage"
    _description = "Note Stage"
    _columns = {
        # Stage label shown to the user:
        'name': fields.char('Stage Name', translate=True, required=True),
        'sequence': fields.integer('Sequence', help="Used to order the note stages"),
        # Stages are private: each stage belongs to exactly one user and is
        # removed with that user (ondelete='cascade'):
        'user_id': fields.many2one('res.users', 'Owner', help="Owner of the note stage.", required=True, ondelete='cascade'),
        # Whether the stage's column starts out folded in the kanban view:
        'fold': fields.boolean('Folded by Default'),
    }
    _order = 'sequence asc'
    _defaults = {
        'fold': 0,
        # New stages default to being owned by the creating user:
        'user_id': lambda self, cr, uid, ctx: uid,
        'sequence' : 1,
    }
class note_tag(osv.osv):
    """ Tag that can be attached to notes (see note.note's tag_ids). """
    _name = "note.tag"
    _description = "Note Tag"
    _columns = {
        'name' : fields.char('Tag Name', required=True),
    }
class note_note(osv.osv):
    """ Note: a user's memo, organized into per-user stages. """
    _name = 'note.note'
    _inherit = ['mail.thread']
    _description = "Note"

    #writing method (no modification of values)
    def name_create(self, cr, uid, name, context=None):
        """ Quick-create a note from a display name; the name becomes the
            memo content.
        """
        rec_id = self.create(cr, uid, {'memo': name}, context=context)
        return self.name_get(cr, uid, [rec_id], context)[0]

    #read the first line (convert html into text)
    def _get_note_first_line(self, cr, uid, ids, name="", args={}, context=None):
        """ Compute the note summary: the first line of the memo, converted
            from HTML to plain text (with '*' markers stripped).
        """
        res = {}
        for note in self.browse(cr, uid, ids, context=context):
            res[note.id] = (note.memo and html2plaintext(note.memo) or "").strip().replace('*','').split("\n")[0]
        return res

    def onclick_note_is_done(self, cr, uid, ids, context=None):
        """ Close the notes, recording today as the done date. """
        return self.write(cr, uid, ids, {'open': False, 'date_done': fields.date.today()}, context=context)

    def onclick_note_not_done(self, cr, uid, ids, context=None):
        """ Re-open the notes. """
        return self.write(cr, uid, ids, {'open': True}, context=context)

    #return the default stage for the uid user
    def _get_default_stage_id(self, cr, uid, context=None):
        """ Return the first stage owned by the current user, or False. """
        ids = self.pool.get('note.stage').search(cr, uid, [('user_id','=',uid)], context=context)
        return ids and ids[0] or False

    def _set_stage_per_user(self, cr, uid, id, name, value, args=None, context=None):
        """ Set the current user's stage on the note while preserving the
            stages chosen by other users.
        """
        note = self.browse(cr, uid, id, context=context)
        if not value:
            return False
        stage_ids = [value] + [stage.id for stage in note.stage_ids if stage.user_id.id != uid]
        return self.write(cr, uid, [id], {'stage_ids': [(6, 0, set(stage_ids))]}, context=context)

    def _get_stage_per_user(self, cr, uid, ids, name, args, context=None):
        """ Return, for each note, the stage that belongs to the current
            user (False if the user has not placed the note in a stage).
        """
        result = dict.fromkeys(ids, False)
        for record in self.browse(cr, uid, ids, context=context):
            for stage in record.stage_ids:
                if stage.user_id.id == uid:
                    result[record.id] = stage.id
        return result

    _columns = {
        # Computed plain-text summary of the memo:
        'name': fields.function(_get_note_first_line,
            string='Note Summary',
            type='text', store=True),
        'user_id': fields.many2one('res.users', 'Owner'),
        'memo': fields.html('Note Content'),
        'sequence': fields.integer('Sequence'),
        # Per-user view of stage_ids (each user sees only their own stage):
        'stage_id': fields.function(_get_stage_per_user,
            fnct_inv=_set_stage_per_user,
            string='Stage',
            type='many2one',
            relation='note.stage'),
        'stage_ids': fields.many2many('note.stage','note_stage_rel','note_id','stage_id','Stages of Users'),
        'open': fields.boolean('Active', track_visibility='onchange'),
        'date_done': fields.date('Date done'),
        'color': fields.integer('Color Index'),
        'tag_ids' : fields.many2many('note.tag','note_tags_rel','note_id','tag_id','Tags'),
    }
    _defaults = {
        'user_id': lambda self, cr, uid, ctx=None: uid,
        'open' : 1,
        'stage_id' : _get_default_stage_id,
    }
    _order = 'sequence'

    def read_group(self, cr, uid, domain, fields, groupby, offset=0, limit=None, context=None, orderby=False, lazy=True):
        """ Override grouping by 'stage_id' so each user sees their own
            stage columns, with stage-less notes merged into the first
            column (or shown in a catch-all column when the user has no
            stages). All other groupings defer to the standard behavior.
        """
        if groupby and groupby[0] == "stage_id":
            #search all stages
            current_stage_ids = self.pool.get('note.stage').search(cr, uid, [('user_id','=',uid)], context=context)
            if current_stage_ids: #if the user has some stages
                stages = self.pool['note.stage'].browse(cr, uid, current_stage_ids, context=context)
                result = [{ #notes by stage for stages user
                    '__context': {'group_by': groupby[1:]},
                    '__domain': domain + [('stage_ids.id', '=', stage.id)],
                    'stage_id': (stage.id, stage.name),
                    'stage_id_count': self.search(cr, uid, domain + [('stage_ids', '=', stage.id)], context=context, count=True),
                    '__fold': stage.fold,
                } for stage in stages]

                #notes without user's stage
                nb_notes_ws = self.search(cr, uid, domain + [('stage_ids', 'not in', current_stage_ids)], context=context, count=True)
                if nb_notes_ws:
                    # add notes to the first column if it's the first stage
                    dom_not_in = ('stage_ids', 'not in', current_stage_ids)
                    if result and result[0]['stage_id'][0] == current_stage_ids[0]:
                        dom_in = result[0]['__domain'].pop()
                        result[0]['__domain'] = domain + ['|', dom_in, dom_not_in]
                        result[0]['stage_id_count'] += nb_notes_ws
                    else:
                        # add the first stage column
                        result = [{
                            '__context': {'group_by': groupby[1:]},
                            '__domain': domain + [dom_not_in],
                            'stage_id': (stages[0].id, stages[0].name),
                            'stage_id_count': nb_notes_ws,
                            '__fold': stages[0].name,
                        }] + result
            else: # if stage_ids is empty
                #notes without user's stage
                nb_notes_ws = self.search(cr, uid, domain, context=context, count=True)
                if nb_notes_ws:
                    result = [{ #notes for unknown stage
                        '__context': {'group_by': groupby[1:]},
                        '__domain': domain,
                        'stage_id': False,
                        'stage_id_count': nb_notes_ws,
                    }]
                else:
                    result = []
            return result
        else:
            # BUG FIX: the previous code called
            #   super(note_note, self).read_group(self, cr, uid, ...)
            # passing `self` twice, which shifted every positional argument
            # by one and broke any non-stage grouping.
            return super(note_note, self).read_group(cr, uid, domain, fields, groupby,
                offset=offset, limit=limit, context=context, orderby=orderby, lazy=lazy)
#upgrade config setting page to configure pad, fancy and tags mode
class note_base_config_settings(osv.osv_memory):
    """ Extends the general settings wizard with note-related options. """
    _inherit = 'base.config.settings'
    _columns = {
        # 'module_' prefix: checking this installs the note_pad module:
        'module_note_pad': fields.boolean('Use collaborative pads (etherpad)'),
        # 'group_' prefix: toggles membership of the implied group below:
        'group_note_fancy': fields.boolean('Use fancy layouts for notes', implied_group='note.group_note_fancy'),
    }
class res_users(osv.Model):
    _name = 'res.users'
    _inherit = ['res.users']

    def create(self, cr, uid, data, context=None):
        """ Create the user, then give employees their own copy of the five
            default note stages.
        """
        user_id = super(res_users, self).create(cr, uid, data, context=context)

        # Only internal users (employees) get personal note stages:
        if not self.has_group(cr, user_id, 'base.group_user'):
            return user_id

        stage_pool = self.pool['note.stage']
        model_data = self.pool['ir.model.data']
        for index in range(5):
            xmlid = 'note_stage_%02d' % (index,)
            try:
                _model, stage_id = model_data.get_object_reference(
                    cr, SUPERUSER_ID, 'note', xmlid)
            except ValueError:
                # The template stage record does not exist; skip it.
                continue
            stage_pool.copy(cr, SUPERUSER_ID, stage_id,
                            default={'user_id': user_id}, context=context)
        return user_id
|
diogocs1/comps
|
web/addons/note/note.py
|
Python
|
apache-2.0
| 8,900 | 0.01 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-09-17 15:22
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """ Adds the GeneExpression model, attaching expression data to
        reference_data.GeneInfo records.
    """

    dependencies = [
        ('reference_data', '0007_auto_20180809_2053'),
    ]

    operations = [
        migrations.CreateModel(
            name='GeneExpression',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # Expression values are stored as free text, not a JSON field:
                ('expression_values', models.TextField(blank=True, null=True)),
                # Deleting a gene deletes its expression record:
                ('gene', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='reference_data.GeneInfo')),
            ],
        ),
    ]
|
macarthur-lab/seqr
|
reference_data/migrations/0008_geneexpression.py
|
Python
|
agpl-3.0
| 724 | 0.002762 |
#-------------------------------------------------------------------------------
#
# Copyright (c) 2005, Enthought, Inc.
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in enthought/LICENSE.txt and may be redistributed only
# under the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
#
# Thanks for using Enthought open source!
#
# Author: David C. Morrill
# Date: 10/18/2005
#
#-------------------------------------------------------------------------------
""" Pyface 'DockSizer' support.
This package implements the sizer associated with a Pyface DockWindow
component. The sizer manages the layout of the DockWindow child controls
and the notebook tabs and dragbars associated with the DockWindow.
"""
#-------------------------------------------------------------------------------
# Imports:
#-------------------------------------------------------------------------------
import wx, sys
from enthought.traits.api \
import HasPrivateTraits, Instance, Str, Int, List, Enum, Tuple, Any, \
Range, Property, Callable, Constant, Event, Undefined, Bool, \
cached_property
from enthought.traits.ui.dock_window_theme \
import dock_window_theme
from enthought.traits.ui.wx.helper \
import BufferDC
from enthought.pyface.api import SystemMetrics
from enthought.pyface.image_resource \
import ImageResource
from enthought.util.wx.drag_and_drop \
import PythonDropSource
from enthought.pyface.timer.api \
import do_later, do_after
from idockable \
import IDockable
from ifeature_tool \
import IFeatureTool
# Define version dependent values:
wx_26 = (wx.__version__[:3] == '2.6')
is_mac = (sys.platform == 'darwin')
#-------------------------------------------------------------------------------
# Constants:
#-------------------------------------------------------------------------------
# Standard font text height:
text_dy = 13
# Maximum allowed length of a tab label:
MaxTabLength = 30
# Size of a drag bar (in pixels):
DragBarSize = 14
# Images sizes (in pixels):
CloseTabSize = 10
CloseDragSize = 7
# Tab drawing states:
TabInactive = 0
TabActive = 1
TabHover = 2
NormalStates = ( TabInactive, TabActive )
NotActiveStates = ( TabInactive, TabHover )
# Feature overlay colors:
FeatureBrushColor = ( 255, 255, 255 )
FeaturePenColor = ( 92, 92, 92 )
# Color used to update the screen while dragging a splitter bar:
DragColor = ( 96, 96, 96 )
# Color used to update the screen while showing a docking operation in progress:
DockColorBrush = ( 255, 0, 0, 96 )
# Drop Info kinds:
DOCK_TOP = 0
DOCK_BOTTOM = 1
DOCK_LEFT = 2
DOCK_RIGHT = 3
DOCK_TAB = 4
DOCK_TABADD = 5
DOCK_BAR = 6
DOCK_NONE = 7
DOCK_SPLITTER = 8
DOCK_EXPORT = 9
# Splitter states:
SPLIT_VLEFT = 0
SPLIT_VMIDDLE = 1
SPLIT_VRIGHT = 2
SPLIT_HTOP = 3
SPLIT_HMIDDLE = 4
SPLIT_HBOTTOM = 5
# Empty clipping area:
no_clip = ( 0, 0, 0, 0 )
# Valid sequence types:
SequenceType = ( list, tuple )
# Tab scrolling directions:
SCROLL_LEFT = 1
SCROLL_RIGHT = 2
SCROLL_TO = 3
# Feature modes:
FEATURE_NONE = -1 # Has no features
FEATURE_NORMAL = 0 # Has normal features
FEATURE_CHANGED = 1 # Has changed or new features
FEATURE_DROP = 2 # Has drag data compatible drop features
FEATURE_DISABLED = 3 # Has feature icon, but is currently disabled
FEATURE_VISIBLE = 4 # Has visible features (mouseover mode)
FEATURE_DROP_VISIBLE = 5 # Has visible drop features (mouseover mode)
FEATURE_PRE_NORMAL = 6 # Has normal features (but has not been drawn yet)
FEATURE_EXTERNAL_DRAG = 256 # A drag started in another DockWindow is active
# Feature sets:
NO_FEATURE_ICON = ( FEATURE_NONE, FEATURE_DISABLED, FEATURE_VISIBLE,
FEATURE_DROP_VISIBLE )
FEATURES_VISIBLE = ( FEATURE_VISIBLE, FEATURE_DROP_VISIBLE )
FEATURE_END_DROP = ( FEATURE_DROP, FEATURE_VISIBLE, FEATURE_DROP_VISIBLE )
NORMAL_FEATURES = ( FEATURE_NORMAL, FEATURE_DISABLED )
#-------------------------------------------------------------------------------
# Global data:
#-------------------------------------------------------------------------------
# Standard font used by the DockWindow:
standard_font = None
# The list of available DockWindowFeatures:
features = []
#-------------------------------------------------------------------------------
# Trait definitions:
#-------------------------------------------------------------------------------
# Bounds (i.e. x, y, dx, dy):
Bounds = Tuple( Int, Int, Int, Int )
# Docking drag bar style:
DockStyle = Enum( 'horizontal', 'vertical', 'tab', 'fixed' )
#-------------------------------------------------------------------------------
# Adds a new DockWindowFeature class to the list of available features:
#-------------------------------------------------------------------------------
def add_feature ( feature_class ):
    """ Adds a new DockWindowFeature class to the list of available features.

        Returns True if the class was newly registered, and False if it was
        already present.
    """
    global features

    if feature_class in features:
        return False

    features.append( feature_class )

    # Mark the feature class as having been installed (state 0 means it
    # has not been installed yet):
    if feature_class.state == 0:
        feature_class.state = 1

    return True
#-------------------------------------------------------------------------------
# Sets the standard font to use for a specified device context:
#-------------------------------------------------------------------------------
def set_standard_font ( dc ):
    """ Sets the standard font to use for a specified device context.

        Returns the device context so the call can be chained.
    """
    global standard_font

    # Look up the system GUI font once and cache it at module scope:
    if standard_font is None:
        standard_font = wx.SystemSettings_GetFont( wx.SYS_DEFAULT_GUI_FONT )

    dc.SetFont( standard_font )

    return dc
#-------------------------------------------------------------------------------
# Clears a window to the standard background color:
#-------------------------------------------------------------------------------
def clear_window ( window ):
    """ Fills the entire client area of *window* with the standard dialog
        background color.
    """
    # The system metrics color components appear to be 0.0-1.0 floats;
    # scale them to the 0-255 range wx.Colour expects:
    bg = SystemMetrics().dialog_background_color
    brush_color = wx.Colour( bg[0] * 255, bg[1] * 255, bg[2] * 255 )

    width, height = window.GetSizeTuple()

    dc = wx.PaintDC( window )
    dc.SetBrush( wx.Brush( brush_color, wx.SOLID ) )
    dc.SetPen( wx.TRANSPARENT_PEN )
    dc.DrawRectangle( 0, 0, width, height )
#-------------------------------------------------------------------------------
# Gets a temporary device context for a specified window to draw in:
#-------------------------------------------------------------------------------
def get_dc ( window ):
    """ Gets a temporary device context for a specified window to draw in.

        Returns a ( dc, x, y ) tuple; ( x, y ) is the offset to add to
        window-relative coordinates before drawing into *dc*.
    """
    if is_mac:
        # On the Mac, draw through a ClientDC clipped to the portion of the
        # window that is actually visible inside all of its ancestors:
        dc = wx.ClientDC( window )
        x, y = window.GetPositionTuple()
        dx, dy = window.GetSizeTuple()
        while True:
            window = window.GetParent()
            if window is None:
                break
            xw, yw = window.GetPositionTuple()
            dxw, dyw = window.GetSizeTuple()
            # Shrink the clip size by however much each ancestor cuts off:
            dx, dy = min( dx, dxw - x ), min( dy, dyw - y )
            x += xw
            y += yw
        dc.SetClippingRegion( 0, 0, dx, dy )
        # ClientDC coordinates are already window-relative, so no offset:
        return ( dc, 0, 0 )

    # On other platforms, draw directly on the screen DC, offset by the
    # window's screen position:
    x, y = window.ClientToScreenXY( 0, 0 )
    return ( wx.ScreenDC(), x, y )
#-------------------------------------------------------------------------------
# 'DockImages' class:
#-------------------------------------------------------------------------------
class DockImages ( HasPrivateTraits ):
    """ Cache of the bitmaps used to draw DockWindow tabs, drag bars,
        splitters and feature indicators. A single shared instance is
        created at module scope (see below).
    """

    #---------------------------------------------------------------------------
    #  Trait definitions:
    #---------------------------------------------------------------------------

    # Image for closing a tab:
    close_tab = Instance( ImageResource, ImageResource( 'close_tab' ) )

    # Image for closing a drag bar:
    close_drag = Instance( ImageResource, ImageResource( 'close_drag' ) )

    def __init__ ( self, **traits ):
        """ Initializes the object.
        """
        super( DockImages, self ).__init__( **traits )
        # Bitmap creation is deferred to init(), which requires the GUI
        # toolkit to be up and running:
        self._lazy_init_done = False

    def init ( self ):
        """ Initializes the parts of the object that depend on the toolkit
            selection. Safe to call more than once; only the first call
            does any work.
        """
        # See if it has already been done.
        if self._lazy_init_done:
            return
        self._lazy_init_done = True

        self._close_tab = self.close_tab.create_image().ConvertToBitmap()
        self._close_drag = self.close_drag.create_image().ConvertToBitmap()

        # Splitter images, indexed by the SPLIT_* state constants:
        self._splitter_images = [
            ImageResource( name ).create_image().ConvertToBitmap()
            for name in [ 'sv_left', 'sv_middle', 'sv_right',
                          'sh_top', 'sh_middle', 'sh_bottom' ]
        ]

        self._tab_scroller_images = [
            ImageResource( name ).create_image().ConvertToBitmap()
            for name in [ 'tab_scroll_l', 'tab_scroll_r', 'tab_scroll_lr' ]
        ]
        self._tab_scroller_dx = self._tab_scroller_images[0].GetWidth()
        self._tab_scroller_dy = self._tab_scroller_images[0].GetHeight()

        # Feature icons: indices 0-3 are the tab variants, 4-7 the drag bar
        # variants (see get_feature_image):
        self._feature_images = [
            ImageResource( name ).create_image().ConvertToBitmap()
            for name in [ 'tab_feature_normal', 'tab_feature_changed',
                          'tab_feature_drop', 'tab_feature_disabled',
                          'bar_feature_normal', 'bar_feature_changed',
                          'bar_feature_drop', 'bar_feature_disabled' ]
        ]
        self._tab_feature_width = self._feature_images[0].GetWidth()
        self._tab_feature_height = self._feature_images[0].GetHeight()
        # NOTE(review): index 3 is 'tab_feature_disabled', yet it is used
        # for the *bar* feature size here, and get_feature_image offsets bar
        # lookups by +3 rather than +4 — looks like an off-by-one; confirm
        # against the callers before changing.
        self._bar_feature_width = self._feature_images[3].GetWidth()
        self._bar_feature_height = self._feature_images[3].GetHeight()

    def get_splitter_image ( self, state ):
        """ Returns the splitter image to use for a specified splitter state.
        """
        return self._splitter_images[ state ]

    def get_feature_image ( self, state, is_tab = True ):
        """ Returns the feature image to use for a specified feature state.
        """
        if is_tab:
            return self._feature_images[ state ]

        return self._feature_images[ state + 3 ]

# Creates a singleton instance of the class:
DockImages = DockImages()
#-------------------------------------------------------------------------------
# 'DockItem' class:
#-------------------------------------------------------------------------------
class DockItem ( HasPrivateTraits ):
#---------------------------------------------------------------------------
# Trait definitions:
#---------------------------------------------------------------------------
# The parent of this item:
parent = Any
# The DockWindow that owns this item:
owner = Property( depends_on = 'parent' )
# Bounds of the item:
bounds = Bounds
# Current width of the item:
width = Int( -1 )
# Current height of the item:
height = Int( -1 )
# Bounds of the item's drag bar or tab:
drag_bounds = Bounds
# The current tab state:
tab_state = Any
# The tab displayable version of the control's UI name:
tab_name = Property( depends_on = 'name' )
# Width of the item's tab:
tab_width = Property( depends_on = 'control, tab_state, tab_name' )
# The DockWindowTheme for this item's DockWindow:
theme = Property
# The theme for the current tab state:
tab_theme = Property
# The current feature mode:
feature_mode = Enum( FEATURE_NONE, FEATURE_NORMAL, FEATURE_CHANGED,
FEATURE_DROP, FEATURE_VISIBLE, FEATURE_DROP_VISIBLE,
FEATURE_DISABLED, FEATURE_PRE_NORMAL )
# The position where the feature popup should appear:
feature_popup_position = Property
# The list of features for this item:
features = List
# The list of drag data compatible drop features for this item:
drop_features = List
# Current active set of features:
active_features = Property
# The name of this item (implemented in subclasses):
# name = Str
# The control associated with this item (implemented in subclasses):
# control = Instance( wx.Control )
#---------------------------------------------------------------------------
# Implementation of the 'owner' property:
#---------------------------------------------------------------------------
def __init__(self, **kw):
    """ Initializes the object; all traits are forwarded to the base class. """
    super(DockItem, self).__init__(**kw)
@cached_property
def _get_owner ( self ):
    """ Returns the DockWindow owning this item (None if it has no parent). """
    parent = self.parent
    if parent is None:
        return None

    return parent.owner
#---------------------------------------------------------------------------
# Implementation of the 'tab_name' property:
#---------------------------------------------------------------------------
@cached_property
def _get_tab_name ( self ):
    """ Returns the tab label, eliding the middle of over-long names. """
    name = self.name
    if len( name ) <= MaxTabLength:
        return name

    # Keep the head of the name plus its last 20 characters, joined by an
    # ellipsis:
    return '%s...%s' % ( name[ : MaxTabLength - 23 ], name[ -20: ] )
#---------------------------------------------------------------------------
# Implementation of the 'tab_width' property:
#---------------------------------------------------------------------------
@cached_property
def _get_tab_width ( self ):
    """ Computes the pixel width needed to draw this item's tab. """
    if self.control is None:
        return 0

    # NOTE: side effect — marks the item as being rendered as a tab; this
    # flag is later read by get_cursor, dock_info_at and the mouse handlers:
    self._is_tab = True

    # Calculate the size needed by the theme and margins:
    theme = self.tab_theme
    tw = (theme.image_slice.xleft + theme.image_slice.xright +
          theme.content.left + theme.content.right)

    # Add feature marker width:
    if self.feature_mode != FEATURE_NONE:
        tw += DockImages._tab_feature_width + 3

    # Add text width:
    dc = set_standard_font( wx.ClientDC( self.control ) )
    tw += dc.GetTextExtent( self.tab_name )[0]

    # Add custom image width:
    image = self.get_image()
    if image is not None:
        tw += (image.GetWidth() + 3)

    # Add close button width:
    if self.closeable:
        tw += (CloseTabSize + 6)

    # Return the computed width:
    return tw
#---------------------------------------------------------------------------
# Implementation of the 'theme' property:
#---------------------------------------------------------------------------
def _get_theme ( self ):
    """ Returns the DockWindowTheme for this item's DockWindow. """
    # Fall back to the global theme when the item has no control yet:
    if self.control is None:
        return dock_window_theme()

    return self.control.GetParent().owner.theme
#---------------------------------------------------------------------------
# Implementation of the 'tab_theme' property:
#---------------------------------------------------------------------------
def _get_tab_theme ( self ):
    """ Returns the theme matching the current tab state. """
    theme = self.theme
    if self.tab_state == TabInactive:
        return theme.tab_inactive

    if self.tab_state == TabActive:
        return theme.tab_active

    # Any other state is treated as 'hover':
    return theme.tab_hover
#---------------------------------------------------------------------------
# Implementation of the 'active_features' property:
#---------------------------------------------------------------------------
def _get_active_features ( self ):
    """ Returns the feature set currently in effect: the drag-compatible
        drop features when any exist, otherwise the normal features.
    """
    return self.drop_features if len( self.drop_features ) > 0 else self.features
#---------------------------------------------------------------------------
# Implementation of the 'feature_popup_position' property:
#---------------------------------------------------------------------------
def _get_feature_popup_position ( self ):
    """ Returns the point where the feature popup should appear. """
    # Anchor the popup just inside the top-left corner of the drag area:
    x, y, dx, dy = self.drag_bounds
    return wx.Point( x + 5, y + 3 )
#---------------------------------------------------------------------------
# Returns whether or not the item is at a specified window position:
#---------------------------------------------------------------------------
def is_at ( self, x, y, bounds = None ):
    """ Returns whether or not the item is at a specified window position.

        Tests against *bounds* if given, otherwise against the item's own
        bounds.
    """
    if bounds is None:
        bounds = self.bounds

    bx, by, bdx, bdy = bounds

    # Half-open rectangle test: [bx, bx+bdx) x [by, by+bdy):
    return (bx <= x < (bx + bdx)) and (by <= y < (by + bdy))
#---------------------------------------------------------------------------
# Returns whether or not an event is within a specified bounds:
#---------------------------------------------------------------------------
def is_in ( self, event, x, y, dx, dy ):
    """ Returns whether or not an event occurred within a specified bounds. """
    ex, ey = event.GetX(), event.GetY()

    return (x <= ex < (x + dx)) and (y <= ey < (y + dy))
#---------------------------------------------------------------------------
# Sets the control's drag bounds:
#---------------------------------------------------------------------------
def set_drag_bounds ( self, x, y, dx, dy ):
    """ Sets the control's drag bounds, clipping the width so the drag
        area never extends past the right edge of the item's bounds.
    """
    bx, by, bdx, bdy = self.bounds
    right = min( x + dx, bx + bdx )
    self.drag_bounds = ( x, y, right - x, dy )
#---------------------------------------------------------------------------
# Gets the cursor to use when the mouse is over the item:
#---------------------------------------------------------------------------
def get_cursor ( self, event ):
    """ Gets the cursor to use when the mouse is over the item: a hand for
        drag bars and for a tab's close button, an arrow elsewhere on a tab.
    """
    if (not self._is_tab) or self._is_in_close( event ):
        return wx.CURSOR_HAND

    return wx.CURSOR_ARROW
#---------------------------------------------------------------------------
# Gets the DockInfo object for a specified window position:
#---------------------------------------------------------------------------
def dock_info_at ( self, x, y, tdx, is_control ):
    """ Gets the DockInfo object for a specified window position.

        *tdx* is the width used for the prospective tab's bounds, and
        *is_control* selects between adding to an existing tab group
        (DOCK_TABADD) and inserting a new tab (DOCK_TAB). Returns None
        when ( x, y ) is not over this item's drag area.
    """
    if self.is_at( x, y, self.drag_bounds ):
        x, y, dx, dy = self.drag_bounds
        control = self
        if self._is_tab:
            if is_control:
                kind = DOCK_TABADD
                tab_bounds = ( x, y, dx, dy )
            else:
                # Center the new tab on the drop position:
                kind = DOCK_TAB
                tab_bounds = ( x - (tdx / 2), y, tdx, dy )
        else:
            if is_control:
                kind = DOCK_TABADD
                tab_bounds = ( x, y, self.tab_width, dy )
            else:
                # Place the new tab just after this item's tab:
                kind = DOCK_TAB
                control = None
                tab_bounds = ( x + self.tab_width, y, tdx, dy )

        return DockInfo( kind = kind,
                         tab_bounds = tab_bounds,
                         region = self.parent,
                         control = control )

    return None
#---------------------------------------------------------------------------
# Prepares for drawing into a device context:
#---------------------------------------------------------------------------
def begin_draw ( self, dc, ox = 0, oy = 0 ):
    """ Prepares for drawing into a device context.

        Saves the current clipping region (restored by end_draw) and clips
        drawing to this item's bounds, offset by ( ox, oy ).
    """
    self._save_clip = dc.GetClippingBox()
    x, y, dx, dy = self.bounds
    dc.SetClippingRegion( x + ox, y + oy, dx, dy )
#---------------------------------------------------------------------------
# Terminates drawing into a device context:
#---------------------------------------------------------------------------
def end_draw ( self, dc ):
    """ Terminates drawing into a device context, restoring the clipping
        region saved by begin_draw (if it was non-empty).
    """
    dc.DestroyClippingRegion()
    if self._save_clip != no_clip:
        dc.SetClippingRegion( *self._save_clip )

    self._save_clip = None
#---------------------------------------------------------------------------
# Handles the left mouse button being pressed:
#---------------------------------------------------------------------------
def mouse_down ( self, event ):
""" Handles the left mouse button being pressed.
"""
self._xy = ( event.GetX(), event.GetY() )
self._closing = self._is_in_close( event )
self._dragging = False
#---------------------------------------------------------------------------
# Handles the left mouse button being released:
#---------------------------------------------------------------------------
    def mouse_up ( self, event ):
        """ Handles the left mouse button being released.

            Depending on the state recorded by 'mouse_down'/'mouse_move', this
            completes a pending 'close', finishes a drag and drop docking
            maneuver, or treats the release as a notebook tab selection.
        """
        # Handle the user closing a control:
        if self._closing:
            if self._is_in_close( event ):
                self.close()
        # Handle the completion of a dragging operation:
        elif self._dragging:
            window = event.GetEventObject()
            # Consume the pending DockInfo and remove the drag feedback:
            dock_info, self._dock_info = self._dock_info, None
            self.mark_bounds( False )
            control = self
            # Check to see if the user is attempting to drag an entire notebook
            # region:
            if event.AltDown():
                control = self.parent
                # If the parent is not a notebook, then use the parent's parent:
                if (isinstance( control, DockRegion ) and
                    (not control.is_notebook)):
                    control = control.parent
                # Make sure the target is not contained within the notebook
                # group we are trying to move:
                region = dock_info.region
                while region is not None:
                    if region is control:
                        # If it is, the operation is invalid, abort:
                        return
                    region = region.parent
            # Check to see if the user is attempting to copy the control:
            elif event.ControlDown():
                owner = window.owner
                # Ask the handler for the control to dock (which may be a
                # copy of this one):
                control = owner.handler.dock_control_for(
                              *(owner.handler_args + ( window, control )) )
            # Complete the docking maneuver:
            dock_info.dock( control, window )
        # Handle the user clicking on a notebook tab to select it:
        elif (self._is_tab and
              self.is_at( event.GetX(), event.GetY(), self.drag_bounds )):
            self.parent.tab_clicked( self )
#---------------------------------------------------------------------------
# Handles the mouse moving while the left mouse button is pressed:
#---------------------------------------------------------------------------
    def mouse_move ( self, event ):
        """ Handles the mouse moving while the left mouse button is pressed.

            Once the pointer has moved far enough from the 'mouse_down'
            position, this enters 'drag mode' and then continuously tracks the
            docking target under the pointer, drawing drag feedback and
            updating the cursor as it goes.
        """
        # Exit if control is 'fixed' or a 'close' is pending:
        if self._closing or self.locked or (self.style == 'fixed'):
            return
        window = event.GetEventObject()
        # Check to see if we are in 'drag mode' yet:
        if not self._dragging:
            x, y = self._xy
            # Require a minimum (Manhattan) distance before starting a drag,
            # so that simple clicks are not treated as drags:
            if (abs( x - event.GetX() ) + abs( y - event.GetY() )) < 3:
                return
            self._dragging = True
            self._dock_info = no_dock_info
            self._dock_size = self.tab_width
            self.mark_bounds( True )
        # Get the window and DockInfo object associated with the event:
        cur_dock_info = self._dock_info
        self._dock_info = dock_info = \
            window.GetSizer().DockInfoAt( event.GetX(), event.GetY(),
                                          self._dock_size, event.ShiftDown() )
        # If the DockInfo has not changed, then no update is needed:
        if ((cur_dock_info.kind == dock_info.kind) and
            (cur_dock_info.region is dock_info.region) and
            (cur_dock_info.bounds == dock_info.bounds) and
            (cur_dock_info.tab_bounds == dock_info.tab_bounds)):
            return
        # Make sure the new DockInfo is legal (e.g. not a no-op drop back
        # onto this control's own position):
        region = self.parent
        if ((not event.ControlDown()) and
            (dock_info.region is region) and
            ((len( region.contents ) <= 1) or
             (DOCK_TAB <= dock_info.kind <= DOCK_BAR) and
             (dock_info.control is self))):
            self._dock_info = no_dock_info
            window.owner.set_cursor( wx.CURSOR_SIZING )
            return
        # Draw the new region:
        dock_info.draw( window, self._drag_bitmap )
        # If this is the start of an export (i.e. drag and drop) request:
        if ((dock_info.kind == DOCK_EXPORT) and
            (self.export != '') and
            (self.dockable is not None)):
            # Begin the drag and drop operation:
            self.mark_bounds( False )
            window.owner.set_cursor( wx.CURSOR_ARROW )
            window.owner.release_mouse()
            try:
                window._dragging = True
                # If the drop was not accepted anywhere, ask the handler to
                # re-open the view locally:
                if (PythonDropSource( window, self ).result in
                    ( wx.DragNone, wx.DragCancel )):
                    window.owner.handler.open_view_for( self )
            finally:
                window._dragging = False
        else:
            # Update the mouse pointer as required:
            cursor = wx.CURSOR_SIZING
            if dock_info.kind == DOCK_BAR:
                cursor = wx.CURSOR_HAND
            window.owner.set_cursor( cursor )
#---------------------------------------------------------------------------
# Handles the mouse hovering over the item:
#---------------------------------------------------------------------------
def hover_enter ( self, event ):
""" Handles the mouse hovering over the item.
"""
if self._is_tab and (self.tab_state != TabActive):
self._redraw_tab( TabHover )
#---------------------------------------------------------------------------
# Handles the mouse exiting from hovering over the item:
#---------------------------------------------------------------------------
def hover_exit ( self, event ):
""" Handles the mouse exiting from hovering over the item.
"""
if self._is_tab and (self.tab_state != TabActive):
self._redraw_tab( TabInactive )
#---------------------------------------------------------------------------
# Marks/Unmarks the bounds of the bounding DockWindow:
#---------------------------------------------------------------------------
    def mark_bounds ( self, begin ):
        """ Marks/Unmarks the bounds of the bounding DockWindow.

            When *begin* is True, a snapshot of the window contents is
            captured into '_drag_bitmap', tinted, and blitted back to give
            visual feedback that a drag is in progress; when False, the
            snapshot is released and the window is refreshed to remove the
            feedback.
        """
        window = self.control.GetParent()
        if begin:
            dc, x, y = get_dc( window )
            dx, dy = window.GetSize()
            dc2 = wx.MemoryDC()
            self._drag_bitmap = wx.EmptyBitmap( dx, dy )
            dc2.SelectObject( self._drag_bitmap )
            dc2.Blit( 0, 0, dx, dy, dc, x, y )
            try:
                # Tint the snapshot with a translucent highlight; wx.GCDC may
                # not be available on all builds, hence the AttributeError
                # guard:
                dc3 = wx.GCDC( dc2 )
                dc3.SetBrush( wx.Brush( wx.Colour( 158, 166, 255, 64 ) ) )
                dc3.SetPen( wx.TRANSPARENT_PEN )
                dc3.DrawRectangle( 0, 0, dx, dy )
            except AttributeError:
                pass
            dc.Blit( x, y, dx, dy, dc2, 0, 0 )
        else:
            self._drag_bitmap = None
            if is_mac:
                # On OS X the entire top-level window must be refreshed to
                # remove the feedback:
                top_level_window_for( window ).Refresh()
            else:
                window.Refresh()
def get_bg_color(self):
""" Gets the background color
"""
color = SystemMetrics().dialog_background_color
return wx.Colour( color[0]*255, color[1]*255, color[2]*255 )
#---------------------------------------------------------------------------
# Fills a specified region with the control's background color:
#---------------------------------------------------------------------------
def fill_bg_color ( self, dc, x, y, dx, dy ):
""" Fills a specified region with the control's background color.
"""
dc.SetPen( wx.TRANSPARENT_PEN )
dc.SetBrush( wx.Brush( self.get_bg_color() ) )
dc.DrawRectangle( x, y, dx, dy )
#---------------------------------------------------------------------------
# Draws a notebook tab:
#---------------------------------------------------------------------------
    def draw_tab ( self, dc, state ):
        global text_dy
        # NOTE(review): the following string was meant to be the method's
        # docstring, but because the 'global' statement precedes it, Python
        # does not treat it as one:
        """ Draws a notebook tab.
        """
        x0, y0, dx, dy = self.drag_bounds
        # Derive the tab color from the dialog background: inactive tabs are
        # drawn slightly darker, hover tabs slightly lighter:
        tab_color = self.get_bg_color()
        if state == TabActive:
            pass
        elif state == TabInactive:
            r,g,b = tab_color.Get()
            tab_color.Set(max(0, r-20), max(0, g-20), max(0, b-20))
        else:
            r,g,b = tab_color.Get()
            tab_color.Set(min(255, r+20), min(255, g+20), min(255, b+20))
        self._is_tab = True
        self.tab_state = state
        theme = self.tab_theme
        # NOTE(review): 'slice' shadows the builtin of the same name:
        slice = theme.image_slice
        # Draw into an off-screen buffer to reduce flicker:
        bdc = BufferDC( dc, dx, dy )
        self.fill_bg_color(bdc, 0, 0, dx, dy)
        if state == TabActive:
            # fill the tab bg with the desired color
            brush = wx.Brush(tab_color)
            bdc.SetBrush(brush)
            bdc.SetPen(wx.TRANSPARENT_PEN)
            bdc.DrawRectangle(0, 0, dx, dy)
            # Draw the left, top, and right side of a rectange around the tab
            pen = wx.Pen(wx.SystemSettings_GetColour(wx.SYS_COLOUR_BTNSHADOW))
            bdc.SetPen(pen)
            bdc.DrawLine(0,dy,0,0) #up
            bdc.DrawLine(0,0,dx,0) #right
            bdc.DrawLine(dx-1,0,dx-1,dy) #down
            pen = wx.Pen(wx.SystemSettings_GetColour(wx.SYS_COLOUR_BTNHIGHLIGHT))
            bdc.SetPen(pen)
            bdc.DrawLine(1,dy,1,1)
            bdc.DrawLine(1,1,dx-2,1)
            bdc.DrawLine(dx-2,1,dx-2,dy)
        else:
            # fill the tab bg with the desired color (inactive/hover tabs are
            # drawn 3 pixels shorter than the active tab):
            brush = wx.Brush(tab_color)
            bdc.SetBrush(brush)
            bdc.SetPen(wx.TRANSPARENT_PEN)
            bdc.DrawRectangle(0, 3, dx, dy)
            # Draw the left, top, and right side of a rectange around the tab
            pen = wx.Pen(wx.SystemSettings_GetColour(wx.SYS_COLOUR_BTNSHADOW))
            bdc.SetPen(pen)
            bdc.DrawLine(0,dy,0,3)
            bdc.DrawLine(0,3,dx-1,3)
            bdc.DrawLine(dx-1,3,dx-1,dy)
        # Compute the initial drawing position. Note that this assignment
        # also updates the module-level 'text_dy', which other tab layout
        # calculations (e.g. '_close_bounds') rely on:
        name = self.tab_name
        tdx, text_dy = dc.GetTextExtent( name )
        tc = theme.content
        ox, oy = theme.label.left, theme.label.top
        y = (oy + ((dy + slice.xtop + tc.top - slice.xbottom - tc.bottom -
                    text_dy) / 2))
        x = ox + slice.xleft + tc.left
        mode = self.feature_mode
        if mode == FEATURE_PRE_NORMAL:
            mode = self.set_feature_mode( False )
        # Draw the feature 'trigger' icon (if necessary):
        if mode != FEATURE_NONE:
            if mode not in FEATURES_VISIBLE:
                bdc.DrawBitmap( DockImages.get_feature_image( mode ), x, y,
                                True )
            x += (DockImages._tab_feature_width + 3)
        # Draw the image (if necessary):
        image = self.get_image()
        if image is not None:
            bdc.DrawBitmap( image, x, y, True )
            x += (image.GetWidth() + 3)
        # Draw the text label:
        bdc.DrawText( name, x, y + 1 )
        # Draw the close button (if necessary):
        if self.closeable:
            bdc.DrawBitmap( DockImages._close_tab, x + tdx + 5, y + 2, True )
        # Copy the buffer to the display:
        bdc.copy( x0, y0 )
#---------------------------------------------------------------------------
# Draws a fixed drag bar:
#---------------------------------------------------------------------------
    def draw_fixed ( self, dc ):
        """ Draws a fixed drag bar.

            A 'fixed' item has no visible drag bar, so there is nothing to
            draw.
        """
        pass
#---------------------------------------------------------------------------
# Draws a horizontal drag bar:
#---------------------------------------------------------------------------
def draw_horizontal ( self, dc ):
""" Draws a horizontal drag bar.
"""
self._is_tab = False
x, y, dx, dy = self.drag_bounds
self.fill_bg_color( dc, x, y, dx, dy )
pen = wx.Pen(wx.SystemSettings_GetColour(wx.SYS_COLOUR_BTNHILIGHT))
dc.SetPen(pen)
dc.DrawLine(x, y, x+dx, y)
dc.DrawLine(x, y+2, x+dx, y+2)
#---------------------------------------------------------------------------
# Draws a vertical drag bar:
#---------------------------------------------------------------------------
def draw_vertical ( self, dc ):
""" Draws a vertical drag bar.
"""
self._is_tab = False
x, y, dx, dy = self.drag_bounds
self.fill_bg_color( dc, x, y, dx, dy )
pen = wx.Pen(wx.SystemSettings_GetColour(wx.SYS_COLOUR_BTNHILIGHT))
dc.SetPen(pen)
dc.DrawLine(x, y, x, y+dy)
dc.DrawLine(x+2, y, x+2, y+dy)
#---------------------------------------------------------------------------
# Redraws the control's tab:
#---------------------------------------------------------------------------
def _redraw_tab ( self, state = None ):
if state is None:
state = self.tab_state
region = self.parent
if region is not None:
dc = set_standard_font( wx.ClientDC( self.control.GetParent() ) )
if region.is_notebook:
dc.SetClippingRegion( *region._tab_clip_bounds )
self.draw_tab( dc, state )
dc.DestroyClippingRegion()
else:
self.draw_tab( dc, state )
#---------------------------------------------------------------------------
# Redraws the control's drag bar:
#---------------------------------------------------------------------------
def _redraw_bar ( self ):
dc = wx.ClientDC( self.control )
getattr( self, 'draw_' + self.style )( dc )
#---------------------------------------------------------------------------
# Redraws the control's tab or bar:
#---------------------------------------------------------------------------
def _redraw_control ( self ):
if self._is_tab:
self._redraw_tab()
else:
self._redraw_bar()
#---------------------------------------------------------------------------
# Returns the bounds of the close button (if any):
#---------------------------------------------------------------------------
    def _close_bounds ( self ):
        """ Returns the bounds ( x, y, dx, dy ) of the tab's close button, or
            ( 0, 0, 0, 0 ) if the control has no close button.

            Relies on the module-level 'text_dy' computed by 'draw_tab'.
        """
        global text_dy
        if self.closeable and self._is_tab:
            x, y, dx, dy = self.drag_bounds
            theme = self.tab_theme
            # NOTE(review): 'slice' shadows the builtin of the same name:
            slice = theme.image_slice
            tc = theme.content
            ox, oy = theme.label.left, theme.label.top
            # fixme: x calculation seems to be off by -1...
            return ( x + dx + ox - slice.xright - tc.right - CloseTabSize,
                     y + oy + ((dy + slice.xtop + tc.top - slice.xbottom -
                                tc.bottom - text_dy) / 2) + 3,
                     CloseTabSize, CloseTabSize )
        return ( 0, 0, 0, 0 )
#---------------------------------------------------------------------------
# Returns whether a specified window position is over the close button:
#---------------------------------------------------------------------------
def _is_in_close ( self, event ):
return self.is_in( event, *self._close_bounds() )
#---------------------------------------------------------------------------
# Sets/Returns the 'normal' feature mode for the control based on the
# number of currently active features:
#---------------------------------------------------------------------------
    def set_feature_mode ( self, changed = True ):
        """ Sets (and returns) the 'normal' feature mode for the control based
            on the currently active features.

            When *changed* is True, the mode is set to FEATURE_CHANGED if any
            feature has a bitmap to display.
        """
        if (not changed) or (self.feature_mode != FEATURE_PRE_NORMAL):
            # Prefer drop features when there are any; otherwise use the
            # normal feature list:
            mode = FEATURE_DROP
            features = self.drop_features
            if len( features ) == 0:
                mode = FEATURE_NORMAL
                features = self.features
            for feature in features:
                if feature.bitmap is not None:
                    if changed:
                        self.feature_mode = FEATURE_CHANGED
                    else:
                        self.feature_mode = mode
                    break
            else:
                # 'for ... else' runs only when no 'break' occurred, i.e. no
                # feature has anything to display:
                self.feature_mode = FEATURE_DISABLED
        return self.feature_mode
#---------------------------------------------------------------------------
# Returns whether or not a specified window position is over the feature
# 'trigger' icon, and if so, triggers display of the feature icons:
#---------------------------------------------------------------------------
    def feature_activate ( self, event, drag_object = Undefined ):
        """ Returns whether or not the specified window position is over the
            feature 'trigger' icon, and if so, schedules display of the
            feature icons.

            Relies on the module-level 'text_dy' computed by 'draw_tab'.
        """
        global text_dy
        if (self.feature_mode in NO_FEATURE_ICON) or (not self._is_tab):
            return False
        # In 'drag' mode, we may get the same coordinate over and over again.
        # We don't want to restart the timer, so exit now:
        exy = ( event.GetX(), event.GetY() )
        if self._feature_popup_xy == exy:
            return True
        # Compute the bounds of the feature 'trigger' icon (this mirrors the
        # layout arithmetic in 'draw_tab'):
        x, y, dx, dy = self.drag_bounds
        idx = DockImages._tab_feature_width
        idy = DockImages._tab_feature_height
        theme = self.tab_theme
        # NOTE(review): 'slice' shadows the builtin of the same name:
        slice = theme.image_slice
        tc = theme.content
        ox, oy = theme.label.left, theme.label.top
        y += (oy + ((dy + slice.xtop + tc.top - slice.xbottom - tc.bottom -
                     text_dy) / 2))
        x += ox + slice.xleft + tc.left
        result = self.is_in( event, x, y, idx, idy )
        # If the pointer is over the feature 'trigger' icon, save the event for
        # the popup processing:
        if result:
            # If this is part of a drag operation, prepare for drag mode:
            if drag_object is not Undefined:
                self.pre_drag( drag_object, FEATURE_EXTERNAL_DRAG )
            # Schedule the popup for later:
            self._feature_popup_xy = exy
            do_after( 100, self._feature_popup )
        return result
#---------------------------------------------------------------------------
# Resets any pending feature popup:
#---------------------------------------------------------------------------
    def reset_feature_popup ( self ):
        """ Resets any pending feature popup so that it will not be displayed
            when the popup timer fires.
        """
        self._feature_popup_xy = None
#---------------------------------------------------------------------------
# Pops up the current features if a feature popup is still pending:
#---------------------------------------------------------------------------
def _feature_popup ( self ):
if self._feature_popup_xy is not None:
# Set the new feature mode:
if self.feature_mode == FEATURE_DROP:
self.feature_mode = FEATURE_DROP_VISIBLE
else:
self.feature_mode = FEATURE_VISIBLE
self.owner.feature_bar_popup( self )
self._feature_popup_xy = None
else:
self.post_drag( FEATURE_EXTERNAL_DRAG )
#---------------------------------------------------------------------------
# Finishes the processing of a feature popup:
#---------------------------------------------------------------------------
def feature_bar_closed ( self ):
if self.feature_mode == FEATURE_DROP_VISIBLE:
self.feature_mode = FEATURE_DROP
else:
self.feature_mode = FEATURE_NORMAL
do_later( self._redraw_control )
#---------------------------------------------------------------------------
# Handles all pre-processing before a feature is dragged:
#---------------------------------------------------------------------------
def pre_drag_all ( self, object ):
""" Prepare all DockControls in the associated DockWindow for being
dragged over.
"""
for control in self.dock_controls:
control.pre_drag( object )
self.pre_drag( object )
    def pre_drag ( self, object, tag = 0 ):
        """ Prepare this DockControl for being dragged over.

            Determines which features (if any) can accept a drop of *object*
            and updates the feature mode accordingly. The previous feature
            mode, combined with *tag*, is saved in '_feature_mode' so that
            'post_drag' can restore it later.
        """
        if (self.visible and
            (self.feature_mode != FEATURE_NONE) and
            (self._feature_mode is None)):
            if isinstance( object, IFeatureTool ):
                if (object.feature_can_drop_on( self.object ) or
                    object.feature_can_drop_on_dock_control( self )):
                    # Deferred import to avoid a circular dependency:
                    from feature_tool import FeatureTool
                    self.drop_features = [
                        FeatureTool( dock_control = self ) ]
            else:
                # Only features that accept the object and have something to
                # display qualify as drop targets:
                self.drop_features = [ f for f in self.features
                                       if f.can_drop( object ) and
                                          (f.bitmap is not None) ]
            self._feature_mode = self.feature_mode + tag
            if len( self.drop_features ) > 0:
                self.feature_mode = FEATURE_DROP
            else:
                self.feature_mode = FEATURE_DISABLED
            self._redraw_control()
#---------------------------------------------------------------------------
# Handles all post-processing after a feature has been dragged:
#---------------------------------------------------------------------------
def post_drag_all ( self ):
""" Restore all DockControls in the associated DockWindow after a drag
operation is completed.
"""
for control in self.dock_controls:
control.post_drag()
self.post_drag()
    def post_drag ( self, tag = 0 ):
        """ Restore this DockControl after a drag operation is completed.

            Only controls whose saved '_feature_mode' matches *tag* (or every
            control when *tag* is 0) are restored.
        """
        if ((self._feature_mode is None) or (tag == 0) or
            ((self._feature_mode & tag) != 0)):
            self.drop_features = []
            if self.feature_mode != FEATURE_NONE:
                if self._feature_mode is not None:
                    # Restore the saved feature mode, stripping the tag bits:
                    self.feature_mode = self._feature_mode & (~tag)
                    self._feature_mode = None
                else:
                    self.set_feature_mode( False )
                self._redraw_control()
#-------------------------------------------------------------------------------
# 'DockSplitter' class:
#-------------------------------------------------------------------------------
class DockSplitter ( DockItem ):
    """ A draggable splitter bar separating two adjacent items within its
        parent region, allowing the user to adjust how the available space is
        divided between them. Clicking the splitter's 'hot spot' image
        collapses/expands the split.
    """
    #---------------------------------------------------------------------------
    # Trait definitions:
    #---------------------------------------------------------------------------
    # Style of the splitter bar:
    style = Enum( 'horizontal', 'vertical' )
    # Index of the splitter within its parent:
    index = Int
    # Current state of the splitter (i.e. its position relative to the things
    # it splits):
    state = Property
    #---------------------------------------------------------------------------
    # Override the definition of the inherited 'theme' property:
    #---------------------------------------------------------------------------
    def _get_theme ( self ):
        """ Returns the theme of the owning DockWindow.
        """
        return self.parent.control.GetParent().owner.theme
    #---------------------------------------------------------------------------
    # Draws the contents of the splitter:
    #---------------------------------------------------------------------------
    def draw ( self, dc ):
        """ Draws the contents of the splitter.

            Also records the bounds of the collapse/expand 'hot spot' image in
            '_hot_spot' for use by 'get_cursor' and the mouse handlers.
        """
        # While a non-live drag is in progress, keep drawing the splitter at
        # its original position:
        if (self._live_drag is False) and (self._first_bounds is not None):
            x, y, dx, dy = self._first_bounds
        else:
            x, y, dx, dy = self.bounds
        image = DockImages.get_splitter_image( self.state )
        idx, idy = image.GetWidth(), image.GetHeight()
        self.fill_bg_color( dc, x, y, dx, dy )
        if self.style == 'horizontal':
            # Draw a line the same color as the system button shadow, which
            # should be a darkish color in the users color scheme
            pen = wx.Pen(wx.SystemSettings_GetColour(wx.SYS_COLOUR_BTNSHADOW))
            dc.SetPen(pen)
            dc.DrawLine(x+idx+1,y+dy/2,x+dx-2,y+dy/2)
            iy = y+2
            ix = x
            # sets the hittable area for changing the cursor to be the height of
            # the image
            dx = idx
        else:
            # Draw a line the same color as the system button shadow, which
            # should be a darkish color in the users color scheme
            pen = wx.Pen(wx.SystemSettings_GetColour(wx.SYS_COLOUR_BTNSHADOW))
            dc.SetPen(pen)
            dc.DrawLine(x+dx/2,y+idy+1,x+dx/2,y+dy-2)
            iy = y
            ix = x + 2
            # sets the hittable area for changing the cursor to be the width of
            # the image
            dy = idy
        dc.DrawBitmap( image, ix, iy, True )
        self._hot_spot = ( x, y, dx, dy )
    #---------------------------------------------------------------------------
    # Gets the cursor to use when the mouse is over the splitter bar:
    #---------------------------------------------------------------------------
    def get_cursor ( self, event ):
        """ Gets the cursor to use when the mouse is over the splitter bar.
        """
        # Over the collapse/expand 'hot spot' (or before the first draw),
        # show the normal arrow cursor:
        if (self._hot_spot is None) or self.is_in( event, *self._hot_spot ):
            return wx.CURSOR_ARROW
        if self.style == 'horizontal':
            return wx.CURSOR_SIZENS
        return wx.CURSOR_SIZEWE
    #---------------------------------------------------------------------------
    # Returns a copy of the splitter 'structure', minus the actual content:
    #---------------------------------------------------------------------------
    def get_structure ( self ):
        """ Returns a copy of the splitter 'structure', minus the actual
            content.
        """
        return self.clone_traits( [ '_last_bounds' ] )
    #---------------------------------------------------------------------------
    # Handles the left mouse button being pressed:
    #---------------------------------------------------------------------------
    def mouse_down ( self, event ):
        """ Handles the left mouse button being pressed.

            Holding the Control key selects 'live drag' mode (the layout is
            updated continuously while dragging).
        """
        self._live_drag = event.ControlDown()
        # A press over the 'hot spot' is a pending collapse/expand click, not
        # the start of a drag:
        self._click_pending = ((self._hot_spot is not None) and
                               self.is_in( event, *self._hot_spot ))
        if not self._click_pending:
            self._xy = ( event.GetX(), event.GetY() )
            self._max_bounds = self.parent.get_splitter_bounds( self )
            self._first_bounds = self.bounds
            if not self._live_drag:
                self._draw_bounds( event, self.bounds )
    #---------------------------------------------------------------------------
    # Handles the left mouse button being released:
    #---------------------------------------------------------------------------
    def mouse_up ( self, event ):
        """ Handles the left mouse button being released.

            Either completes a pending 'hot spot' click (collapse/expand), or
            finishes a drag by committing the new splitter position.
        """
        if self._click_pending:
            hx, hy, hdx, hdy = self._hot_spot
            if not self.is_in( event, hx, hy, hdx, hdy ):
                return
            # The half of the 'hot spot' that was clicked determines the
            # collapse direction:
            if self.style == 'horizontal':
                if event.GetX() < (hx + (hdx / 2)):
                    self.collapse(True)
                else:
                    self.collapse(False)
            else:
                if event.GetY() < (hy + (hdy / 2)):
                    self.collapse(True)
                else:
                    self.collapse(False)
        else:
            self._last_bounds, self._first_bounds = self._first_bounds, None
            if not self._live_drag:
                # Erase the drag feedback rectangle:
                self._draw_bounds( event )
            self.parent.update_splitter( self, event.GetEventObject() )
    #---------------------------------------------------------------------------
    # Handles the mouse moving while the left mouse button is pressed:
    #---------------------------------------------------------------------------
    def mouse_move ( self, event ):
        """ Handles the mouse moving while the left mouse button is pressed.
        """
        if not self._click_pending:
            x, y, dx, dy = self._first_bounds
            mx, my, mdx, mdy = self._max_bounds
            # Track the mouse along the splitter's axis, clamped to the legal
            # range supplied by the parent:
            if self.style == 'horizontal':
                y = y + event.GetY() - self._xy[1]
                y = min( max( y, my ), my + mdy - dy )
            else:
                x = x + event.GetX() - self._xy[0]
                x = min( max( x, mx ), mx + mdx - dx )
            bounds = ( x, y, dx, dy )
            if bounds != self.bounds:
                self.bounds = bounds
                if self._live_drag:
                    self.parent.update_splitter( self, event.GetEventObject() )
                else:
                    self._draw_bounds( event, bounds )
    #---------------------------------------------------------------------------
    # Collapse/expands a splitter
    #---------------------------------------------------------------------------
    def collapse ( self, forward ):
        """ Move the splitter has far as possible in one direction. 'forward'
            is a boolean: True=right/down, False=left/up.
            If the splitter is already collapsed, restores it to its previous
            position.
        """
        is_horizontal = (self.style == 'horizontal')
        x, y, dx, dy = self.bounds
        if self._last_bounds is not None:
            if is_horizontal:
                y = self._last_bounds[1]
            else:
                x = self._last_bounds[0]
        state = self.state
        contents = self.parent.visible_contents
        ix1, iy1, idx1, idy1 = contents[ self.index ].bounds
        ix2, iy2, idx2, idy2 = contents[ self.index + 1 ].bounds
        if is_horizontal:
            if state != SPLIT_HMIDDLE:
                # Already collapsed: restore the saved position, falling back
                # to the midpoint when the saved position is unusable:
                if ((y == self.bounds[1]) or
                    (y < iy1) or
                    ((y + dy) > (iy2 + idy2))):
                    y = (iy1 + iy2 + idy2 - dy) / 2
            else:
                # Save the current position, then collapse in the requested
                # direction:
                self._last_bounds = self.bounds
                if forward:
                    y = iy1
                else:
                    y = iy2 + idy2 - dy
        elif state != SPLIT_VMIDDLE:
            if ((x == self.bounds[0]) or
                (x < ix1) or
                ((x + dx) > (ix2 + idx2))):
                x = (ix1 + ix2 + idx2 - dx) / 2
        else:
            self._last_bounds = self.bounds
            if forward:
                x = ix2 + idx2 - dx
            else:
                x = ix1
        self.bounds = ( x, y, dx, dy )
    #---------------------------------------------------------------------------
    # Handles the mouse hovering over the item:
    #---------------------------------------------------------------------------
    def hover_enter ( self, event ):
        """ Handles the mouse hovering over the item.

            Splitters provide no hover feedback.
        """
        pass
    #---------------------------------------------------------------------------
    # Handles the mouse exiting from hovering over the item:
    #---------------------------------------------------------------------------
    def hover_exit ( self, event ):
        """ Handles the mouse exiting from hovering over the item.

            Splitters provide no hover feedback.
        """
        pass
    #---------------------------------------------------------------------------
    # Draws the splitter bar in a new position while it is being dragged:
    #---------------------------------------------------------------------------
    def _draw_bounds ( self, event, bounds = None ):
        """ Draws the splitter bar in a new position while it is being dragged.

            Drawing uses XOR mode, so drawing the same rectangle twice erases
            it; calling with *bounds* = None just erases the rectangle drawn
            by the previous call (saved in '_bounds').
        """
        # Set up the drawing environment:
        window = event.GetEventObject()
        dc, x0, y0 = get_dc( window )
        dc.SetLogicalFunction( wx.XOR )
        dc.SetPen( wx.TRANSPARENT_PEN )
        dc.SetBrush( wx.Brush( wx.Colour( *DragColor ), wx.SOLID ) )
        is_horizontal = (self.style == 'horizontal')
        nx = ox = None
        # Draw the new bounds (if any), deflated along the drag axis:
        if bounds is not None:
            ax = ay = adx = ady = 0
            nx, ny, ndx, ndy = bounds
            if is_horizontal:
                ady = (ndy - 6)
                ay = ady / 2
            else:
                adx = (ndx - 6)
                ax = adx / 2
            nx += ax
            ny += ay
            ndx -= adx
            ndy -= ady
        if self._bounds is not None:
            ax = ay = adx = ady = 0
            ox, oy, odx, ody = self._bounds
            if is_horizontal:
                ady = (ody - 6)
                ay = ady / 2
            else:
                adx = (odx - 6)
                ax = adx / 2
            ox += ax
            oy += ay
            odx -= adx
            ody -= ady
        if nx is not None:
            # Trim the new rectangle against the old one so the overlapping
            # area is not XOR'ed twice (which would cancel it out):
            tx, ty, tdx, tdy = nx, ny, ndx, ndy
            if ox is not None:
                if is_horizontal:
                    yoy = oy - ty
                    if 0 <= yoy < tdy:
                        tdy = yoy
                    elif -ody < yoy <= 0:
                        ty = oy + ody
                        tdy = tdy - ody - yoy
                else:
                    xox = ox - tx
                    if 0 <= xox < tdx:
                        tdx = xox
                    elif -odx < xox <= 0:
                        tx = ox + odx
                        tdx = tdx - odx - xox
            dc.DrawRectangle( tx + x0, ty + y0, tdx, tdy )
        # Erase the old bounds (if any):
        if ox is not None:
            if nx is not None:
                if is_horizontal:
                    yoy = ny - oy
                    if 0 <= yoy < ody:
                        ody = yoy
                    elif -ndy < yoy <= 0:
                        oy = ny + ndy
                        ody = ody - ndy - yoy
                else:
                    xox = nx - ox
                    if 0 <= xox < odx:
                        odx = xox
                    elif -ndx < xox <= 0:
                        ox = nx + ndx
                        odx = odx - ndx - xox
            dc.DrawRectangle( ox + x0, oy + y0, odx, ody )
            if is_mac:
                window.Refresh(rect=wx.Rect(ox + x0, oy + y0, odx, ody))
        # Save the new bounds for the next call:
        self._bounds = bounds
    #---------------------------------------------------------------------------
    # Implementation of the 'state' property:
    #---------------------------------------------------------------------------
    def _get_state ( self ):
        """ Returns the splitter's position relative to the two items it
            separates (e.g. SPLIT_HTOP when a horizontal splitter is flush
            against the item above it).
        """
        contents = self.parent.contents
        x, y, dx, dy = self.bounds
        ix1, iy1, idx1, idy1 = contents[ self.index ].bounds
        ix2, iy2, idx2, idy2 = contents[ self.index + 1 ].bounds
        if self.style == 'horizontal':
            if y == iy1:
                return SPLIT_HTOP
            if (y + dy) == (iy2 + idy2):
                return SPLIT_HBOTTOM
            return SPLIT_HMIDDLE
        else:
            if x == ix1:
                return SPLIT_VLEFT
            if (x + dx) == (ix2 + idx2):
                return SPLIT_VRIGHT
            return SPLIT_VMIDDLE
#-------------------------------------------------------------------------------
# 'DockControl' class:
#-------------------------------------------------------------------------------
class DockControl ( DockItem ):
    """ A DockItem that manages a single wx window within a DockWindow,
        providing its drag bar/notebook tab, visibility, docking and feature
        behavior.
    """
    #---------------------------------------------------------------------------
    # Trait definitions:
    #---------------------------------------------------------------------------
    # The control this object describes:
    control = Instance( wx.Window, allow_none = True )
    # The number of global DockWindowFeature's that were available the last
    # the time the feature set was checked:
    num_features = Int
    # A feature associated with the DockControl has been changed:
    feature_changed = Event
    # The image to display for this control:
    image = Instance( ImageResource, allow_none = True )
    # The UI name of this control:
    name = Str
    # Has the user set the name of the control?
    user_name = Bool( False )
    # The object (if any) associated with this control:
    object = Property
    # The id of this control:
    id = Str
    # Style of drag bar/tab:
    style = DockStyle
    # Has the user set the style for this control:
    user_style = Bool( False )
    # Category of control when it is dragged out of the DockWindow:
    export = Str
    # Is the control visible?
    visible = Bool( True )
    # Is the control's drag bar locked?
    locked = Bool( False )
    # Can the control be resized?
    resizable = Bool( True )
    # Can the control be closed?
    closeable = Bool( False )
    # Function to call when a DockControl is requesting to be closed:
    on_close = Callable
    # (Optional) object that allows the control to be docked with a different
    # DockWindow:
    dockable = Instance( IDockable, allow_none = True )
    # List of all other DockControl's in the same DockWindow:
    dock_controls = Property
    # Event fired when the control's notebook tab is activated by the user:
    activated = Event
#---------------------------------------------------------------------------
# Calculates the minimum size of the control:
#---------------------------------------------------------------------------
def calc_min ( self, use_size = False ):
""" Calculates the minimum size of the control.
"""
self.check_features()
dx, dy = self.width, self.height
if self.control is not None:
if wx_26:
size = self.control.GetBestFittingSize()
else:
size = self.control.GetEffectiveMinSize()
dx = size.GetWidth()
dy = size.GetHeight()
if self.width < 0:
self.width, self.height = dx, dy
if use_size and (self.width >= 0):
return ( self.width, self.height )
return ( dx, dy )
#---------------------------------------------------------------------------
# Layout the contents of the control based on the specified bounds:
#---------------------------------------------------------------------------
    def recalc_sizes ( self, x, y, dx, dy ):
        """ Layout the contents of the control based on the specified bounds.
        """
        self.width = dx = max( 0, dx )
        self.height = dy = max( 0, dy )
        self.bounds = ( x, y, dx, dy )
        # Note: All we really want to do is the 'SetDimensions' call, but the
        # other code is needed for Linux/GTK which will not correctly process
        # the SetDimensions call if the min size is larger than the specified
        # size. So we temporarily set its min size to (0,0), do the
        # SetDimensions, then restore the original min size. The restore is
        # necessary so that DockWindow itself will correctly draw the 'drag'
        # box when performing a docking maneuver...
        control = self.control
        min_size = control.GetMinSize()
        control.SetMinSize( wx.Size( 0, 0 ) )
        control.SetDimensions( x, y, dx, dy )
        control.SetMinSize( min_size )
#---------------------------------------------------------------------------
# Checks to make sure that all applicable DockWindowFeatures have been
# applied:
#---------------------------------------------------------------------------
    def check_features ( self ):
        """ Checks to make sure that all applicable DockWindowFeatures have been
            applied.

            Any feature classes registered globally since the last check are
            given a chance to create feature instances for this control.
        """
        global features
        mode = self.feature_mode
        n = len( features )
        if ((self.num_features < n) and
            (self.control is not None) and
            isinstance( self.control.GetParent().GetSizer(), DockSizer )):
            # Only process feature classes added since the last check:
            for i in range( self.num_features, n ):
                feature_class = features[i]
                feature = feature_class.new_feature_for( self )
                if feature is not None:
                    # A feature factory may return a single feature or a
                    # sequence of features:
                    if not isinstance( feature, SequenceType ):
                        feature = [ feature ]
                    self.features.extend( list( feature ) )
                    if mode == FEATURE_NONE:
                        self.feature_mode = FEATURE_PRE_NORMAL
                    if feature_class.state != 1:
                        # The feature class is currently disabled, so disable
                        # the new feature instances too:
                        for item in feature:
                            item.disable()
                    else:
                        # Invalidate the cached tab width, since a new feature
                        # icon may change it:
                        self._tab_width = None
                        if mode in NORMAL_FEATURES:
                            self.set_feature_mode()
            self.num_features = n
#---------------------------------------------------------------------------
# Sets the visibility of the control:
#---------------------------------------------------------------------------
def set_visibility ( self, visible ):
""" Sets the visibility of the control.
"""
if self.control is not None:
self.control.Show( visible )
#---------------------------------------------------------------------------
# Returns all DockControl objects contained in the control:
#---------------------------------------------------------------------------
def get_controls ( self, visible_only = True ):
""" Returns all DockControl objects contained in the control.
"""
if visible_only and (not self.visible):
return []
return [ self ]
#---------------------------------------------------------------------------
# Gets the image (if any) associated with the control:
#---------------------------------------------------------------------------
def get_image ( self ):
""" Gets the image (if any) associated with the control.
"""
if self._image is None:
if self.image is not None:
self._image = self.image.create_image().ConvertToBitmap()
return self._image
#---------------------------------------------------------------------------
# Hides or shows the control:
#---------------------------------------------------------------------------
def show ( self, visible = True, layout = True ):
""" Hides or shows the control.
"""
if visible != self.visible:
self.visible = visible
self._layout( layout )
#---------------------------------------------------------------------------
# Activates a control (i.e. makes it the active page within its containing
# notebook):
#---------------------------------------------------------------------------
def activate ( self, layout = True ):
""" Activates a control (i.e. makes it the active page within its
containing notebook).
"""
if self.parent is not None:
self.parent.activate( self, layout )
#---------------------------------------------------------------------------
# Closes the control:
#---------------------------------------------------------------------------
    def close ( self, layout = True, force = False ):
        """ Closes the control.

            layout: if True, the containing window is re-laid out afterwards.
            force:  passed to the close handler, which may use it to bypass
                    any 'OK to close?' user interaction.

            Note the precedence: an 'on_close' handler wins over an
            IDockable's 'dockable_close'; only when neither exists is the
            widget destroyed directly here (handlers are expected to take
            care of widget destruction themselves).
        """
        control = self.control
        if control is not None:
            # Grab the parent window now, since 'self.control' is reset below:
            window = control.GetParent()
            if self.on_close is not None:
                # Ask the handler if it is OK to close the control:
                if self.on_close( self, force ) is False:
                    # If not OK to close it, we're done:
                    return
            elif self.dockable is not None:
                # Ask the IDockable handler if it is OK to close the control:
                if self.dockable.dockable_close( self, force ) is False:
                    # If not OK to close it, we're done:
                    return
            else:
                # No close handler, just destroy the widget ourselves:
                control.Destroy()
            # Reset all features:
            self.reset_features()
            # Remove the DockControl from the sizer:
            self.parent.remove( self )
            # Mark the DockControl as closed (i.e. has no associated widget or
            # parent):
            self.control = self.parent = None
            # If a screen update is requested, lay everything out again now:
            if layout:
                window.Layout()
                window.Refresh()
#---------------------------------------------------------------------------
# Returns the object at a specified window position:
#---------------------------------------------------------------------------
def object_at ( self, x, y ):
""" Returns the object at a specified window position.
"""
return None
#---------------------------------------------------------------------------
# Returns a copy of the control 'structure', minus the actual content:
#---------------------------------------------------------------------------
    def get_structure ( self ):
        """ Returns a copy of the control 'structure', minus the actual content.

            Clones only the layout-describing traits (identity, style, size,
            visibility flags); the wx widget and parent links are not copied,
            so the result is suitable for persisting a layout.
        """
        return self.clone_traits( [
            'id', 'name', 'user_name', 'style', 'user_style', 'visible',
            'locked', 'closeable', 'resizable', 'width', 'height'
        ] )
#---------------------------------------------------------------------------
# Toggles the 'lock' status of the control:
#---------------------------------------------------------------------------
def toggle_lock ( self ):
""" Toggles the 'lock' status of the control.
"""
self.locked = not self.locked
#---------------------------------------------------------------------------
# Prints the contents of the control:
#---------------------------------------------------------------------------
    def dump ( self, indent ):
        """ Prints the contents of the control.

            Writes a one-record summary (identity, style, flags and size) to
            stdout, indented by 'indent' spaces; continuation lines are
            aligned under the opening 'Control(' text.
        """
        print ('%sControl( %08X, name = %s, id = %s,\n%s'
               'style = %s, locked = %s,\n%s'
               'closeable = %s, resizable = %s, visible = %s\n%s'
               'width = %d, height = %d )' % (
               ' ' * indent, id( self ), self.name, self.id,
               ' ' * (indent + 9), self.style, self.locked,
               ' ' * (indent + 9), self.closeable, self.resizable, self.visible,
               ' ' * (indent + 9), self.width, self.height ))
#---------------------------------------------------------------------------
# Draws the contents of the control:
#---------------------------------------------------------------------------
    def draw ( self, dc ):
        """ Draws the contents of the control.

            Intentionally a no-op: the wrapped wx widget paints itself, so
            only the tab/drag-bar chrome (handled elsewhere) needs drawing.
        """
        pass
#---------------------------------------------------------------------------
# Sets a new name for the control:
#---------------------------------------------------------------------------
def set_name ( self, name, layout = True ):
""" Sets a new name for the control.
"""
if name != self.name:
self.name = name
self._layout( layout )
#---------------------------------------------------------------------------
# Resets the state of the tab:
#---------------------------------------------------------------------------
    def reset_tab ( self ):
        """ Resets the state of the tab.

            Discards all attached features and schedules a re-layout so the
            tab is rebuilt from scratch.
        """
        self.reset_features()
        self._layout()
#---------------------------------------------------------------------------
# Resets all currently defined features:
#---------------------------------------------------------------------------
def reset_features ( self ):
""" Resets all currently defined features.
"""
for feature in self.features:
feature.dispose()
self.features = []
self.num_features = 0
#---------------------------------------------------------------------------
# Forces the containing DockWindow to be laid out:
#---------------------------------------------------------------------------
    def _layout ( self, layout = True ):
        """ Forces the containing DockWindow to be laid out.

            The update is deferred via 'do_later' so that multiple changes in
            one event cycle coalesce into a single layout pass; skipped when
            the widget does not exist yet.
        """
        if layout and (self.control is not None):
            do_later( self.control.GetParent().owner.update_layout )
#---------------------------------------------------------------------------
# Handles the 'activated' event being fired:
#---------------------------------------------------------------------------
def _activated_fired(self):
""" Notifies the active dockable that the control's tab is being
activated.
"""
if self.dockable is not None:
self.dockable.dockable_tab_activated(self, True)
#---------------------------------------------------------------------------
# Handles the 'feature_changed' trait being changed:
#---------------------------------------------------------------------------
    def _feature_changed ( self ):
        """ Handles the 'feature_changed' trait being changed.

            Re-evaluates which feature mode the tab should currently be in.
        """
        self.set_feature_mode()
#---------------------------------------------------------------------------
# Handles the 'control' trait being changed:
#---------------------------------------------------------------------------
    def _control_changed ( self, old, new ):
        """ Handles the 'control' trait being changed.

            Maintains the back-pointer ('_dock_control') stored on the wx
            widget so the widget can find its DockControl, and invalidates
            the cached tab width.
        """
        self._tab_width = None
        if old is not None:
            # Unlink the old widget from this DockControl:
            old._dock_control = None
        if new is not None:
            new._dock_control = self
            self.reset_tab()
#---------------------------------------------------------------------------
# Handles the 'name' trait being changed:
#---------------------------------------------------------------------------
def _name_changed ( self ):
""" Handles the 'name' trait being changed.
"""
self._tab_width = self._tab_name = None
#---------------------------------------------------------------------------
# Handles the 'style' trait being changed:
#---------------------------------------------------------------------------
    def _style_changed ( self ):
        """ Handles the 'style' trait being changed.

            Invalidates the parent region's cached notebook status, since the
            style determines whether the region renders as a notebook.
        """
        if self.parent is not None:
            self.parent._is_notebook = None
#---------------------------------------------------------------------------
# Handles the 'image' trait being changed:
#---------------------------------------------------------------------------
    def _image_changed ( self ):
        """ Handles the 'image' trait being changed.

            Drops the cached bitmap so 'get_image' rebuilds it on demand.
        """
        self._image = None
#---------------------------------------------------------------------------
# Handles the 'visible' trait being changed:
#---------------------------------------------------------------------------
    def _visible_changed ( self ):
        """ Handles the 'visible' trait being changed.

            Lets the containing region update which of its pages is shown.
        """
        if self.parent is not None:
            self.parent.show_hide( self )
#---------------------------------------------------------------------------
# Handles the 'dockable' trait being changed:
#---------------------------------------------------------------------------
def _dockable_changed ( self, dockable ):
""" Handles the 'dockable' trait being changed.
"""
if dockable is not None:
dockable.dockable_bind( self )
#---------------------------------------------------------------------------
# Implementation of the 'object' property:
#---------------------------------------------------------------------------
def _get_object ( self ):
return getattr( self.control, '_object', None )
#---------------------------------------------------------------------------
# Implementation of the DockControl's property:
#---------------------------------------------------------------------------
def _get_dock_controls ( self ):
# Get all of the DockControls in the parent DockSizer:
controls = self.control.GetParent().GetSizer().GetContents(
).get_controls( False )
# Remove ourself from the list:
try:
controls.remove( self )
except:
pass
return controls
#-------------------------------------------------------------------------------
# 'DockGroup' class:
#-------------------------------------------------------------------------------
class DockGroup ( DockItem ):
    """ Base class for composite dock items that hold other DockItem objects
        in their 'contents' list. Most of its traits are properties derived
        from (usually the first of) the contained items.
    """
    #---------------------------------------------------------------------------
    #  Trait definitions:
    #---------------------------------------------------------------------------
    # The contents of the group:
    contents = List
    # The UI name of this group (derived from the contained controls):
    name = Property
    # Style of drag bar/tab (derived from the first contained item):
    style = Property
    # Are the contents of the group resizable?
    resizable = Property
    # Category of control when it is dragged out of the DockWindow (groups
    # are never exportable, hence the empty constant):
    export = Constant( '' )
    # Is the group visible (i.e. is at least one contained item visible)?
    visible = Property
    # Content items which are visible:
    visible_contents = Property
    # Can the control be closed (i.e. are all contained items closeable)?
    closeable = Property
    # The (wx) control associated with this group:
    control = Property
    # Is the group locked?
    locked = Property
    # Has the initial layout been performed?
    initialized = Bool( False )
    #---------------------------------------------------------------------------
    #  Implementation of the 'name' property:
    #---------------------------------------------------------------------------
    def _get_name ( self ):
        """ Returns the group's display name: the first visible control's
            name, suffixed with the control count when there is more than one.
        """
        controls = self.get_controls()
        n = len( controls )
        if n == 0:
            return ''
        if n == 1:
            return controls[0].name
        return '%s [%d]' % ( controls[0].name, n )
    #---------------------------------------------------------------------------
    #  Implementation of the 'visible' property:
    #---------------------------------------------------------------------------
    def _get_visible ( self ):
        """ The group is visible if any contained item is visible. """
        for item in self.contents:
            if item.visible:
                return True
        return False
    #---------------------------------------------------------------------------
    #  Implementation of the 'visible_contents' property:
    #---------------------------------------------------------------------------
    def _get_visible_contents ( self ):
        """ Returns only the contained items that are currently visible. """
        return [ item for item in self.contents if item.visible ]
    #---------------------------------------------------------------------------
    #  Implementation of the 'closeable' property:
    #---------------------------------------------------------------------------
    def _get_closeable ( self ):
        """ The group is closeable only if every contained item is. """
        for item in self.contents:
            if not item.closeable:
                return False
        return True
    #---------------------------------------------------------------------------
    #  Implementation of the 'style' property:
    #---------------------------------------------------------------------------
    def _get_style ( self ):
        """ Returns the drag bar/tab style, taken from the first item. """
        # Make sure there is at least one item in the group:
        if len( self.contents ) > 0:
            # Return the first item's style:
            return self.contents[0].style
        # Otherwise, return a default style for an empty group:
        return 'horizontal'
    #---------------------------------------------------------------------------
    #  Implementation of the 'resizable' property:
    #---------------------------------------------------------------------------
    def _get_resizable ( self ):
        """ The group is resizable if any contained control is; the result is
            cached in '_resizable' after the first computation.
        """
        if self._resizable is None:
            self._resizable = False
            for control in self.get_controls():
                if control.resizable:
                    self._resizable = True
                    break
        return self._resizable
    #---------------------------------------------------------------------------
    #  Implementation of the 'control' property:
    #---------------------------------------------------------------------------
    def _get_control ( self ):
        """ Returns the first contained item's control (or None if empty). """
        if len( self.contents ) == 0:
            return None
        return self.contents[0].control
    #---------------------------------------------------------------------------
    #  Implementation of the 'locked' property:
    #---------------------------------------------------------------------------
    def _get_locked ( self ):
        """ Returns the 'locked' state of the first contained item. """
        return self.contents[0].locked
    #---------------------------------------------------------------------------
    #  Handles 'initialized' being changed:
    #---------------------------------------------------------------------------
    def _initialized_changed( self ):
        """ Handles 'initialized' being changed.

            Propagates the flag recursively to all contained sub-groups.
        """
        for item in self.contents:
            if isinstance( item, DockGroup ):
                item.initialized = self.initialized
    #---------------------------------------------------------------------------
    #  Hides or shows the contents of the group:
    #---------------------------------------------------------------------------
    def show ( self, visible = True, layout = True ):
        """ Hides or shows the contents of the group.

            Each child is toggled without an individual re-layout; a single
            layout/refresh of the containing window is done at the end.
        """
        for item in self.contents:
            item.show( visible, False )
        if layout:
            window = self.control.GetParent()
            window.Layout()
            window.Refresh()
    #---------------------------------------------------------------------------
    #  Replaces a specified DockControl by another:
    #---------------------------------------------------------------------------
    def replace_control ( self, old, new ):
        """ Replaces a specified DockControl by another.

            Searches recursively through sub-groups; returns True once the
            replacement has been made, False when 'old' was not found.
        """
        for i, item in enumerate( self.contents ):
            if isinstance( item, DockControl ):
                if item is old:
                    self.contents[i] = new
                    new.parent = self
                    return True
            elif item.replace_control( old, new ):
                return True
        return False
    #---------------------------------------------------------------------------
    #  Returns all DockControl objects contained in the group:
    #---------------------------------------------------------------------------
    def get_controls ( self, visible_only = True ):
        """ Returns all DockControl objects contained in the group.

            Recursively flattens the contained items into a single list.
        """
        if visible_only:
            contents = self.visible_contents
        else:
            contents = self.contents
        result = []
        for item in contents:
            result.extend( item.get_controls( visible_only ) )
        return result
    #---------------------------------------------------------------------------
    #  Gets the image (if any) associated with the group:
    #---------------------------------------------------------------------------
    def get_image ( self ):
        """ Gets the image (if any) associated with the group.

            Delegates to the first contained item.
        """
        if len( self.contents ) == 0:
            return None
        return self.contents[0].get_image()
    #---------------------------------------------------------------------------
    #  Gets the cursor to use when the mouse is over the item:
    #---------------------------------------------------------------------------
    def get_cursor ( self, event ):
        """ Gets the cursor to use when the mouse is over the item.

            Groups always show the plain arrow cursor.
        """
        return wx.CURSOR_ARROW
    #---------------------------------------------------------------------------
    #  Toggles the 'lock' status of every control in the group:
    #---------------------------------------------------------------------------
    def toggle_lock ( self ):
        """ Toggles the 'lock' status of every control in the group.
        """
        for item in self.contents:
            item.toggle_lock()
    #---------------------------------------------------------------------------
    #  Closes the group:
    #---------------------------------------------------------------------------
    def close ( self, layout = True, force = False ):
        """ Closes the control.

            Closes every contained item (iterating over a copy, since closing
            mutates 'contents'), then optionally re-lays out the window.
        """
        # NOTE(review): 'self.control' is already a wx widget (see
        # _get_control), so the extra '.control' below looks suspicious —
        # confirm against the contained item types before changing.
        window = self.control.control.GetParent()
        for item in self.contents[:]:
            item.close( False, force = force )
        if layout:
            window.Layout()
            window.Refresh()
#-------------------------------------------------------------------------------
# 'DockRegion' class:
#-------------------------------------------------------------------------------
class DockRegion ( DockGroup ):
    """ A group of dock items shown either as a tabbed notebook (when it
        contains several items) or as a single item with a drag bar.
    """
    #---------------------------------------------------------------------------
    #  Trait definitions:
    #---------------------------------------------------------------------------
    # Index of the currently active 'contents' DockControl:
    active = Int
    # Is the region drawn as a notebook or not:
    is_notebook = Property
    # Index of the tab scroll image to use (-1 = No tab scroll):
    tab_scroll_index = Int( -1 )
    # The index of the current leftmost visible tab:
    left_tab = Int
    # The current maximum value for 'left_tab':
    max_tab = Int
    # Contents have been modified property:
    modified = Property
#---------------------------------------------------------------------------
# Calculates the minimum size of the region:
#---------------------------------------------------------------------------
    def calc_min ( self, use_size = False ):
        """ Calculates the minimum size of the region.

            In notebook mode the minimum is the largest page plus the space
            needed for the tab row and theme borders; in drag-bar mode it is
            the single item's minimum plus the drag bar. Also updates the
            region's 'width'/'height' if not yet set, and returns the
            ( dx, dy ) tuple.
        """
        tab_dx = tdx = tdy = 0
        contents = self.visible_contents
        theme = self.theme
        if self.is_notebook:
            for item in contents:
                dx, dy = item.calc_min( use_size )
                tdx = max( tdx, dx )
                tdy = max( tdy, dy )
                tab_dx += item.tab_width
            tis = theme.tab.image_slice
            tc = theme.tab.content
            # Content must be at least as wide as the widest page or the
            # total tab row, plus the theme's horizontal borders:
            tdx = max( tdx, tab_dx ) + (tis.xleft + tis.xright +
                                        tc.left + tc.right)
            # Add the tab row height and the theme's vertical borders:
            tdy += (theme.tab_active.image_slice.dy +
                    tis.xtop + tis.xbottom + tc.top + tc.bottom)
        elif len( contents ) > 0:
            item = contents[0]
            tdx, tdy = item.calc_min( use_size )
            # An unlocked item needs room for its drag bar:
            if not item.locked:
                if item.style == 'horizontal':
                    tdy += theme.horizontal_drag.image_slice.dy
                elif item.style == 'vertical':
                    tdx += theme.vertical_drag.image_slice.dx
        # Initialize the region size from the computed minimum (width < 0
        # means the size has not been established yet):
        if self.width < 0:
            self.width = tdx
            self.height = tdy
        return ( tdx, tdy )
#---------------------------------------------------------------------------
# Layout the contents of the region based on the specified bounds:
#---------------------------------------------------------------------------
    def recalc_sizes ( self, x, y, dx, dy ):
        """ Layout the contents of the region based on the specified bounds.

            In notebook mode, positions every page in the content area and
            assigns each tab its drag bounds, enabling tab scrolling when the
            tabs do not all fit. In drag-bar mode, reserves space for the
            single item's drag bar. Finally updates child visibility.
        """
        self.width = dx = max( 0, dx )
        self.height = dy = max( 0, dy )
        self.bounds = ( x, y, dx, dy )
        theme = self.theme
        contents = self.visible_contents
        if self.is_notebook:
            tis = theme.tab.image_slice
            tc = theme.tab.content
            th = theme.tab_active.image_slice.dy
            # Layout the region out as a notebook:
            x += tis.xleft + tc.left
            tx0 = tx = x + theme.tab.label.left
            dx -= (tis.xleft + tis.xright + tc.left + tc.right)
            ady = dy - th
            dy = ady - tis.xtop - tis.xbottom - tc.top - tc.bottom
            iy = y + tis.xtop + tc.top
            # The tab row goes either above or below the page content,
            # depending on the theme:
            if theme.tabs_at_top:
                iy += th
            else:
                y += ady
            # Position every page in the content area and lay the tabs out
            # left to right:
            for item in contents:
                item.recalc_sizes( x, iy, dx, dy )
                tdx = item.tab_width
                item.set_drag_bounds( tx, y, tdx, th )
                tx += tdx
            # Calculate the default tab clipping bounds:
            cdx = dx + tc.left + tc.right
            self._tab_clip_bounds = ( tx0, y, cdx, th )
            # Do we need to enable tab scrolling?
            xr = tx0 + cdx
            if tx > xr:
                # Scrolling needed, calculate maximum tab index for scrolling:
                self.max_tab = 1
                n = len( contents ) - 1
                xr -= DockImages._tab_scroller_dx
                # Walk backwards to find the leftmost tab index from which
                # the remaining tabs still fit in the clip area:
                for i in range( n, -1, -1 ):
                    xr -= contents[i].tab_width
                    if xr < tx0:
                        self.max_tab = min( i + 1, n )
                        break
                # Set the new leftmost tab index:
                self.left_tab = min( self.left_tab, self.max_tab )
                # Determine which tab scroll image to use:
                self.tab_scroll_index = ((self.left_tab < self.max_tab) +
                                         (2 * (self.left_tab > 0))) - 1
                # Now adjust each tab's bounds accordingly:
                if self.left_tab > 0:
                    adx = contents[ self.left_tab ].drag_bounds[0] - tx0
                    for item in contents:
                        dbx, dby, dbdx, dbdy = item.drag_bounds
                        item.set_drag_bounds( dbx - adx, dby, item.tab_width,
                                              dbdy )
                # Exclude the scroll buttons from the tab clipping region:
                self._tab_clip_bounds = ( tx0, y, cdx -
                                          DockImages._tab_scroller_dx, th )
            else:
                self.tab_scroll_index = -1
                self.left_tab = 0
        else:
            # Lay the region out as a drag bar:
            item = contents[0]
            drag_bounds = ( 0, 0, 0, 0 )
            if not item.locked:
                if item.style == 'horizontal':
                    db_dy = theme.horizontal_drag.image_slice.dy
                    drag_bounds = ( x, y, dx, db_dy )
                    y += db_dy
                    dy -= db_dy
                elif item.style == 'vertical':
                    db_dx = theme.vertical_drag.image_slice.dx
                    drag_bounds = ( x, y, db_dx, dy )
                    x += db_dx
                    dx -= db_dx
            item.recalc_sizes( x, y, dx, dy )
            item.set_drag_bounds( *drag_bounds )
        # Make sure all of the contained controls have the right visibility:
        self._set_visibility()
#---------------------------------------------------------------------------
# Adds a new control before or after a specified control:
#---------------------------------------------------------------------------
def add ( self, control, before = None, after = None, activate = True ):
""" Adds a new control before a specified control.
"""
contents = self.contents
if control.parent is self:
contents.remove( control )
if before is None:
if after is None:
i = len( contents )
else:
i = contents.index( after ) + 1
else:
i = contents.index( before )
contents.insert( i, control )
if activate:
self.active = i
#---------------------------------------------------------------------------
# Removes a specified item:
#---------------------------------------------------------------------------
    def remove ( self, item ):
        """ Removes a specified item.

            Single-child groups are flattened in place of the removal; the
            'active' index is kept consistent; and the region removes itself
            from its parent when it becomes empty or redundant.
        """
        contents = self.contents
        i = contents.index( item )
        # A group with a single child collapses to that child instead of
        # being deleted outright:
        if isinstance( item, DockGroup ) and (len( item.contents ) == 1):
            item = item.contents[0]
            if isinstance( item, DockRegion ):
                contents[ i: i + 1 ] = item.contents[:]
            else:
                contents[ i ] = item
        else:
            del contents[ i ]
        # Change the active selection only if 'item' is in closing mode,
        # or was dragged to a new location.
        # If this entire dock region is being closed, then all contained
        # dock items will be removed and we do not want to change 'active'
        # selection.
        if item._closing or item._dragging:
            if (self.active > i) or (self.active >= len( contents )):
                self.active -= 1
            # If the active item was removed, then 'active' stays
            # unchanged, but it reflects the index of the next page in
            # the dock region. Since _active_changed won't be fired now,
            # we fire the 'activated' event on the next page.
            elif (i == self.active):
                control = self.contents[ i ]
                if isinstance( control, DockControl ):
                    control.activated = True
        # An empty region, or a region nested directly in another region
        # with only one child left, is itself removed from its parent:
        if self.parent is not None:
            if len( contents ) == 0:
                self.parent.remove( self )
            elif ((len( contents ) == 1) and
                  isinstance( self.parent, DockRegion )):
                self.parent.remove( self )
#---------------------------------------------------------------------------
# Returns a copy of the region 'structure', minus the actual content:
#---------------------------------------------------------------------------
    def get_structure ( self ):
        """ Returns a copy of the region 'structure', minus the actual content.

            Clones the layout traits and recursively captures the structure
            of each contained item (uses the legacy Traits '.set()' API).
        """
        return self.clone_traits( [ 'active', 'width', 'height' ] ).set(
               contents = [ item.get_structure() for item in self.contents ] )
#---------------------------------------------------------------------------
# Toggles the 'lock' status of every control in the group:
#---------------------------------------------------------------------------
    def toggle_lock ( self ):
        """ Toggles the 'lock' status of every control in the group.

            Also invalidates the cached notebook status, since locking can
            change whether the region renders as a notebook.
        """
        super( DockRegion, self ).toggle_lock()
        self._is_notebook = None
#---------------------------------------------------------------------------
# Draws the contents of the region:
#---------------------------------------------------------------------------
    def draw ( self, dc ):
        """ Draws the contents of the region.

            In notebook mode, paints the background, notebook chrome, scroll
            buttons and all tabs (inactive first, then the active tab on
            top); in drag-bar mode, paints the single item's drag bar.
            Finally asks each visible child to draw itself.
        """
        if self._visible is not False:
            self.begin_draw( dc )
            if self.is_notebook:
                # fixme: There seems to be a case where 'draw' is called before
                # 'recalc_sizes' (which defines '_tab_clip_bounds'), so we need
                # to check to make sure it is defined. If not, it seems safe to
                # exit immediately, since in all known cases, the bounds are
                # ( 0, 0, 0, 0 ), so there is nothing to draw anyways. The
                # question is why 'recalc_sizes' is not being called first.
                if self._tab_clip_bounds is None:
                    self.end_draw( dc )
                    return
                self.fill_bg_color( dc, *self.bounds )
                if self.active >= len(self.contents):
                    # on some platforms, if the active tab was destroyed
                    # the new active tab may not have been set yet
                    self.active = len(self.contents) - 1
                self._draw_notebook( dc )
                active = self.active
                # Draw the scroll buttons (if necessary):
                x, y, dx, dy = self._tab_clip_bounds
                index = self.tab_scroll_index
                if index >= 0:
                    dc.DrawBitmap( DockImages._tab_scroller_images[ index ],
                                   x + dx, y + 2, True )
                # Draw all the inactive tabs first:
                dc.SetClippingRegion( x, y, dx, dy )
                last_inactive = -1
                for i, item in enumerate( self.contents ):
                    if (i != active) and item.visible:
                        last_inactive = i
                        state = item.tab_state
                        if state not in NotActiveStates:
                            state = TabInactive
                        item.draw_tab( dc, state )
                # Draw the active tab last:
                self.contents[ active ].draw_tab( dc, TabActive )
                # If the last inactive tab drawn is also the rightmost tab and
                # the theme has a 'tab right edge' image, draw the image just
                # to the right of the last tab:
                if last_inactive > active:
                    if item.tab_state == TabInactive:
                        bitmap = self.theme.tab_inactive_edge_bitmap
                    else:
                        bitmap = self.theme.tab_hover_edge_bitmap
                    if bitmap is not None:
                        x, y, dx, dy = item.drag_bounds
                        dc.DrawBitmap( bitmap, x + dx, y, True )
            else:
                item = self.visible_contents[0]
                # Dispatch to 'draw_horizontal' or 'draw_vertical' depending
                # on the item's style:
                if not item.locked:
                    getattr( item, 'draw_' + item.style )( dc )
            self.end_draw( dc )
            # Draw each of the items contained in the region:
            for item in self.contents:
                if item.visible:
                    item.draw( dc )
#---------------------------------------------------------------------------
# Returns the object at a specified window position:
#---------------------------------------------------------------------------
    def object_at ( self, x, y ):
        """ Returns the object at a specified window position.

            Returns the region itself when the point is over the tab scroll
            buttons, a child item when the point is over that item's tab or
            drag bar (or deeper inside it), or None.
        """
        if (self._visible is not False) and self.is_at( x, y ):
            # The scroll buttons sit just to the right of the tab clip area:
            if self.is_notebook and (self.tab_scroll_index >= 0):
                cx, cy, cdx, cdy = self._tab_clip_bounds
                if self.is_at( x, y, ( cx + cdx, cy + 2,
                                       DockImages._tab_scroller_dx,
                                       DockImages._tab_scroller_dy ) ):
                    return self
            for item in self.visible_contents:
                if item.is_at( x, y, item.drag_bounds ):
                    return item
                object = item.object_at( x, y )
                if object is not None:
                    return object
        return None
#---------------------------------------------------------------------------
# Gets the DockInfo object for a specified window position:
#---------------------------------------------------------------------------
    def dock_info_at ( self, x, y, tdx, is_control ):
        """ Gets the DockInfo object for a specified window position.

            Checks, in order: the region's own drag bar, each child's drag
            area, the empty space to the right of the tabs (dock as new tab),
            and finally the closest edge of the region (dock left/right/
            top/bottom). 'tdx' is the width of the tab being dragged.
        """
        # Check to see if the point is in our drag bar:
        info = super( DockRegion, self ).dock_info_at( x, y, tdx, is_control )
        if info is not None:
            return info
        # If we are not visible, or the point is not contained in us, give up:
        if (self._visible is False) or (not self.is_at( x, y )):
            return None
        # Check to see if the point is in the drag bars of any controls:
        contents = self.visible_contents
        for item in contents:
            object = item.dock_info_at( x, y, tdx, is_control )
            if object is not None:
                return object
        # If we are in 'notebook mode' check to see if the point is in the
        # empty region outside of any tabs:
        lx, ty, dx, dy = self.bounds
        if self.is_notebook:
            item = contents[-1]
            ix, iy, idx, idy = item.drag_bounds
            if (x > (ix + idx)) and (iy <= y < (iy + idy)):
                return DockInfo( kind = DOCK_TAB,
                                 tab_bounds = ( ix + idx, iy, tdx, idy ),
                                 region = self )
        # Otherwise, figure out which edge the point is closest to, and
        # return a DockInfo object describing that edge:
        left = x - lx
        right = lx + dx - 1 - x
        top = y - ty
        bottom = ty + dy - 1 - y
        choice = min( left, right, top, bottom )
        # One third of the region is highlighted as the docking target
        # (integer division under Python 2; the file uses Python 2 syntax):
        mdx = dx / 3
        mdy = dy / 3
        if choice == left:
            return DockInfo( kind = DOCK_LEFT,
                             bounds = ( lx, ty, mdx, dy ),
                             region = self )
        if choice == right:
            return DockInfo( kind = DOCK_RIGHT,
                             bounds = ( lx + dx - mdx, ty, mdx, dy ),
                             region = self )
        if choice == top:
            return DockInfo( kind = DOCK_TOP,
                             bounds = ( lx, ty, dx, mdy ),
                             region = self )
        return DockInfo( kind = DOCK_BOTTOM,
                         bounds = ( lx, ty + dy - mdy, dx, mdy ),
                         region = self )
#---------------------------------------------------------------------------
# Handles a contained notebook tab being clicked:
#---------------------------------------------------------------------------
    def tab_clicked ( self, control ):
        """ Handles a contained notebook tab being clicked.

            Makes the clicked page the active one (re-laying out the tabs and
            repainting the notebook when the selection actually changed) and
            fires the control's 'activated' event.
        """
        # Find the page that was clicked and mark it as active:
        i = self.contents.index( control )
        if i != self.active:
            self.active = i
            # Recalculate the tab layout:
            self.recalc_sizes( *self.bounds )
            # Force the notebook to be redrawn:
            control.control.GetParent().RefreshRect( wx.Rect( *self.bounds ) )
        # Fire the 'activated' event on the control:
        if isinstance( control, DockControl ):
            control.activated = True
#---------------------------------------------------------------------------
# Handles the user clicking an active scroll button:
#---------------------------------------------------------------------------
    def scroll ( self, type, left_tab = 0 ):
        """ Handles the user clicking an active scroll button.

            'type' is SCROLL_LEFT, SCROLL_RIGHT or SCROLL_TO; for SCROLL_TO,
            'left_tab' gives the tab index to scroll to directly. Shifts
            every tab's drag bounds by the scroll distance and refreshes the
            tab row.
        """
        if type == SCROLL_LEFT:
            left_tab = min( self.left_tab + 1, self.max_tab )
        elif type == SCROLL_RIGHT:
            left_tab = max( self.left_tab - 1, 0 )
        if left_tab != self.left_tab:
            # Calculate the amount we need to adjust each tab by:
            contents = self.visible_contents
            adx = (contents[ left_tab ].drag_bounds[0] -
                   contents[ self.left_tab ].drag_bounds[0])
            # Set the new leftmost tab index:
            self.left_tab = left_tab
            # Determine which tab scroll image to use:
            self.tab_scroll_index = ((left_tab < self.max_tab) +
                                     (2 * (left_tab > 0))) - 1
            # Now adjust each tab's bounds accordingly:
            for item in contents:
                dbx, dby, dbdx, dbdy = item.drag_bounds
                item.set_drag_bounds( dbx - adx, dby, item.tab_width, dbdy )
            # Finally, force a redraw of the affected part of the window
            # (including the scroll buttons to the right of the clip area):
            x, y, dx, dy = self._tab_clip_bounds
            item.control.GetParent().RefreshRect(
                wx.Rect( x, y, dx + DockImages._tab_scroller_dx, dy ) )
#---------------------------------------------------------------------------
# Handles the left mouse button being pressed:
#---------------------------------------------------------------------------
    def mouse_down ( self, event ):
        """ Handles the left mouse button being pressed.

            Remembers which scroll button (if any) was pressed, so that
            'mouse_up' can verify the release happened on the same button.
        """
        self._scroll = self._get_scroll_button( event )
#---------------------------------------------------------------------------
# Handles the left mouse button being released:
#---------------------------------------------------------------------------
def mouse_up ( self, event ):
""" Handles the left mouse button being released.
"""
if ((self._scroll is not None) and
(self._scroll == self._get_scroll_button( event ))):
self.scroll( self._scroll )
else:
super( DockRegion, self ).mouse_up( event )
#---------------------------------------------------------------------------
# Handles the mouse moving while the left mouse button is pressed:
#---------------------------------------------------------------------------
    def mouse_move ( self, event ):
        """ Handles the mouse moving while the left mouse button is pressed.

            Deliberately a no-op — presumably to suppress the inherited drag
            behavior for the region itself (TODO: confirm against the base
            class's mouse_move).
        """
        pass
#---------------------------------------------------------------------------
# Sets the visibility of the region:
#---------------------------------------------------------------------------
def set_visibility ( self, visible ):
""" Sets the visibility of the region.
"""
self._visible = visible
active = self.active
for i, item in enumerate( self.contents ):
item.set_visibility( visible and (i == active) )
    #---------------------------------------------------------------------------
    #  Activates a specified control (i.e. makes it the current notebook tab):
    #---------------------------------------------------------------------------

    def activate ( self, control, layout = True ):
        """ Activates a specified control (i.e. makes it the current notebook
            tab).

            If *layout* is True, a full layout update of the owning window is
            scheduled (deferred via 'do_later'); otherwise only this region's
            bounds are redrawn.
        """
        if control.visible and self.is_notebook:
            active = self.contents.index( control )
            if active != self.active:
                self.active = active
                self.make_active_tab_visible()
                window = control.control.GetParent()
                if layout:
                    # Defer the layout until the current event completes:
                    do_later( window.owner.update_layout )
                else:
                    window.RefreshRect( wx.Rect( *self.bounds ) )
            else:
                # Already the active tab; just fire the activated event for
                # the control:
                if isinstance( control, DockControl ):
                    control.activated = True
    #---------------------------------------------------------------------------
    #  Makes sure the active control's tab is completely visible (if possible):
    #---------------------------------------------------------------------------

    def make_active_tab_visible ( self ):
        """ Makes sure the active control's tab is completely visible (if
            possible), scrolling the tab row when it is not.
        """
        active = self.active
        if active < self.left_tab:
            # The active tab is scrolled off to the left; bring it into view:
            self.scroll( SCROLL_TO, active )
        else:
            x, y, dx, dy = self.contents[ active ].drag_bounds
            # If the tab's bottom-right corner is clipped, scroll so the tab
            # (bounded by the right-most displayable tab) becomes visible:
            if not self.is_at( x + dx - 1, y + dy - 1, self._tab_clip_bounds ):
                self.scroll( SCROLL_TO, min( active, self.max_tab ) )
#---------------------------------------------------------------------------
# Handles a contained DockControl item being hidden or shown:
#---------------------------------------------------------------------------
def show_hide ( self, control ):
""" Handles a contained DockControl item being hidden or shown.
"""
i = self.contents.index( control )
if i == self.active:
self._update_active()
elif (self.active < 0) and control.visible:
self.active = i
self._is_notebook = None
    #---------------------------------------------------------------------------
    #  Prints the contents of the region:
    #---------------------------------------------------------------------------

    def dump ( self, indent ):
        """ Prints the contents of the region (debugging aid).

            *indent* is the number of spaces used to indent the output.
        """
        print '%sRegion( %08X, active = %s, width = %d, height = %d )' % (
              ' ' * indent, id( self ), self.active, self.width, self.height )
        for item in self.contents:
            item.dump( indent + 3 )
    #---------------------------------------------------------------------------
    #  Returns which scroll button (if any) the pointer is currently over:
    #---------------------------------------------------------------------------

    def _get_scroll_button ( self, event ):
        """ Returns which scroll button (if any) the pointer is currently over.

            Returns SCROLL_LEFT, SCROLL_RIGHT, or None when the pointer is not
            within the scroller image at the right edge of the tab row.
        """
        x, y, dx, dy = self._tab_clip_bounds
        if self.is_in( event, x + dx, y + 2, DockImages._tab_scroller_dx,
                       DockImages._tab_scroller_dy ):
            # The scroller image is split in half: the left half scrolls left,
            # the right half scrolls right:
            if (event.GetX() - (x + dx)) < (DockImages._tab_scroller_dx / 2):
                return SCROLL_LEFT

            return SCROLL_RIGHT

        return None
#---------------------------------------------------------------------------
# Updates the currently active page after a change:
#---------------------------------------------------------------------------
def _update_active ( self, active = None ):
""" Updates the currently active page after a change.
"""
if active is None:
active = self.active
contents = self.contents
for i in (range( active, len( contents ) ) +
range( active - 1, -1, -1 )):
if contents[ i ].visible:
self.active = i
return
self.active = -1
    #---------------------------------------------------------------------------
    #  Handles the 'active' trait being changed:
    #---------------------------------------------------------------------------

    def _active_changed ( self, old, new ):
        """ Handles the 'active' trait being changed.

            Updates page visibility and tab visual state, then notifies the
            old page's dockable of deactivation and the new page's control of
            activation.
        """
        self._set_visibility()

        # Set the correct tab state for each tab:
        for i, item in enumerate( self.contents ):
            item.tab_state = NormalStates[ i == new ]

        n = len( self.contents )
        if 0 <= old < n:
            # Notify the previously active dockable that the control's tab is
            # being deactivated:
            control = self.contents[ old ]
            if (isinstance( control, DockControl ) and
                (control.dockable is not None)):
                control.dockable.dockable_tab_activated( control, False )

        if 0 <= new < n:
            # Notify the new dockable that the control's tab is being
            # activated:
            control = self.contents[ new ]
            if isinstance( control, DockControl ):
                control.activated = True
#---------------------------------------------------------------------------
# Handles the 'contents' trait being changed:
#---------------------------------------------------------------------------
def _contents_changed ( self ):
""" Handles the 'contents' trait being changed.
"""
self._is_notebook = None
for item in self.contents:
item.parent = self
self.calc_min( True )
self.modified = True
def _contents_items_changed ( self, event ):
""" Handles the 'contents' trait being changed.
"""
self._is_notebook = None
for item in event.added:
item.parent = self
self.calc_min( True )
self.modified = True
#---------------------------------------------------------------------------
# Set the proper visiblity for all contained controls:
#---------------------------------------------------------------------------
def _set_visibility ( self ):
""" Set the proper visiblity for all contained controls.
"""
active = self.active
for i, item in enumerate( self.contents ):
item.set_visibility( i == active )
#---------------------------------------------------------------------------
# Implementation of the 'modified' property:
#---------------------------------------------------------------------------
def _set_modified ( self, value ):
if self.parent is not None:
self.parent.modified = True
#---------------------------------------------------------------------------
# Implementation of the 'is_notebook' property:
#---------------------------------------------------------------------------
def _get_is_notebook ( self ):
if self._is_notebook is None:
contents = self.visible_contents
n = len( contents )
self._is_notebook = (n > 1)
if n == 1:
self._is_notebook = (contents[0].style == 'tab')
return self._is_notebook
    #---------------------------------------------------------------------------
    #  Draws the notebook body:
    #---------------------------------------------------------------------------

    def _draw_notebook ( self, dc ):
        """ Draws the notebook body: the background, a frame below the tab
            row, a highlight line, and an erased gap under the active tab so
            it appears joined to the page area.
        """
        theme = self.theme
        # Height of the tab row, taken from the active-tab theme image:
        tab_height = theme.tab_active.image_slice.dy
        x, y, dx, dy = self.bounds
        self.fill_bg_color( dc, x, y, dx, dy )

        # Draws a box around the frame containing the tab contents, starting
        # below the tab
        pen = wx.Pen(wx.SystemSettings_GetColour(wx.SYS_COLOUR_BTNSHADOW))
        dc.SetPen(pen)
        dc.DrawRectangle(x, y+tab_height, dx, dy-tab_height)

        # draw highlight
        pen = wx.Pen(wx.SystemSettings_GetColour(wx.SYS_COLOUR_BTNHIGHLIGHT))
        dc.SetPen(pen)
        dc.DrawLine(x+1, y+tab_height+1, x+dx-1, y+tab_height+1)

        # Erases the line under the active tab. After the loop, (x0, x1)
        # spans the active tab's width:
        x0 = x + self.tab_theme.label.left
        x1 = x0
        for i in range(self.active+1):
            x0 = x1 + 1
            x1 += self.contents[i].tab_width

        dc.SetPen(wx.Pen(self.get_bg_color()))
        dc.DrawLine(x0, y+tab_height, x1, y+tab_height)
        dc.DrawLine(x0, y+tab_height+1, x1, y+tab_height+1)
#-------------------------------------------------------------------------------
# 'DockSection' class:
#-------------------------------------------------------------------------------
class DockSection ( DockGroup ):
#---------------------------------------------------------------------------
# Trait definitions:
#---------------------------------------------------------------------------
# Is this a row (or a column)?
is_row = Bool( True )
# Bounds of any splitter bars associated with the region:
splitters = List( DockSplitter )
# The DockWindow that owns this section (set on top level section only):
dock_window = Instance( 'enthought.pyface.dock.dock_window.DockWindow' )
# Contents of the section have been modified property:
modified = Property
#---------------------------------------------------------------------------
# Re-implementation of the 'owner' property:
#---------------------------------------------------------------------------
@cached_property
def _get_owner ( self ):
if self.dock_window is not None:
return self.dock_window
if self.parent is None:
return None
return self.parent.owner
    #---------------------------------------------------------------------------
    #  Calculates the minimum size of the section:
    #---------------------------------------------------------------------------

    def calc_min ( self, use_size = False ):
        """ Calculates the minimum size of the section.

            Sums the minimum sizes of the visible contents along the section's
            major axis (row: widths; column: heights) and takes the maximum
            along the minor axis, adding space for splitter bars (resizable)
            or fixed 3-pixel gaps (non-resizable).
        """
        tdx = tdy = 0
        contents = self.visible_contents
        n = len( contents )

        if self.is_row:
            # allow 10 pixels for the splitter
            sdx = 10

            for item in contents:
                dx, dy = item.calc_min( use_size )
                tdx += dx
                tdy = max( tdy, dy )

            if self.resizable:
                tdx += ((n - 1) * sdx)
            else:
                tdx += ((n + 1) * 3)
                tdy += 6
        else:
            # allow 10 pixels for the splitter
            sdy = 10

            for item in contents:
                dx, dy = item.calc_min( use_size )
                tdx = max( tdx, dx )
                tdy += dy

            if self.resizable:
                tdy += ((n - 1) * sdy)
            else:
                tdx += 6
                tdy += ((n + 1) * 3)

        # First call establishes the section's width/height:
        if self.width < 0:
            self.width = tdx
            self.height = tdy

        return ( tdx, tdy )
    #---------------------------------------------------------------------------
    #  Perform initial layout of the section based on the specified bounds:
    #---------------------------------------------------------------------------

    def initial_recalc_sizes ( self, x, y, dx, dy ):
        """ Layout the contents of the section based on the specified bounds.

            Used for the first layout pass only: sizes are taken from the
            dockables' requested element sizes (values >= 1 are pixels, values
            in (0, 1) are fractions of the available space, -1 means no
            request) and any remaining space is shared out.
        """
        self.width = dx = max( 0, dx )
        self.height = dy = max( 0, dy )
        self.bounds = ( x, y, dx, dy )

        # If none of the contents are resizable, use the fixed layout method
        if not self.resizable:
            self.recalc_sizes_fixed( x, y, dx, dy )
            return

        contents = self.visible_contents
        n = len( contents ) - 1
        splitters = []

        # Find out how much space is available.
        splitter_size = 10
        sizes = []
        if self.is_row:
            total = dx - (n * splitter_size)
        else:
            total = dy - (n * splitter_size)

        # Get requested sizes from the items.
        for item in contents:
            size = -1.0
            for dock_control in item.get_controls():
                dockable = dock_control.dockable
                if dockable is not None and dockable.element is not None:
                    if self.is_row:
                        size = max( size, dockable.element.width )
                    else:
                        size = max( size, dockable.element.height )
            sizes.append( size )

        # Allocate requested space.
        avail = total
        remain = 0
        for i, sz in enumerate( sizes ):
            if avail <= 0:
                break

            if sz >= 0:
                if sz >= 1:
                    # Pixel request, capped by what is still available:
                    sz = min( sz, avail )
                else:
                    # Fractional request, relative to the total space:
                    sz *= total

                sz = int( sz )
                sizes[i] = sz
                avail -= sz
            else:
                remain += 1

        # Allocate the remainder to those parts that didn't request a width.
        if remain > 0:
            remain = int( avail / remain )

            for i, sz in enumerate( sizes ):
                if sz < 0:
                    sizes[i] = remain

        # If all requested a width, allocate the remainder to the last item.
        else:
            sizes[-1] += avail

        # Resize contents and add splitters
        if self.is_row:
            for i, item in enumerate( contents ):
                idx = int( sizes[i] )
                item.recalc_sizes( x, y, idx, dy )
                x += idx
                if i < n:
                    splitters.append(
                        DockSplitter( bounds = ( x, y, splitter_size, dy ),
                                      style = 'vertical',
                                      parent = self,
                                      index = i ) )
                    x += splitter_size
        else:
            for i, item in enumerate( contents ):
                idy = int( sizes[i] )
                item.recalc_sizes( x, y, dx, idy )
                y += idy
                if i < n:
                    splitters.append(
                        DockSplitter( bounds = ( x, y, dx, splitter_size ),
                                      style = 'horizontal',
                                      parent = self,
                                      index = i ) )
                    y += splitter_size

        # Preserve the current internal '_last_bounds' for all splitters if
        # possible:
        cur_splitters = self.splitters
        for i in range( min( len( splitters ), len( cur_splitters ) ) ):
            splitters[i]._last_bounds = cur_splitters[i]._last_bounds

        # Save the new set of splitter bars:
        self.splitters = splitters

        # Set the visibility for all contained items:
        self._set_visibility()
    #---------------------------------------------------------------------------
    #  Layout the contents of the section based on the specified bounds:
    #---------------------------------------------------------------------------

    def recalc_sizes ( self, x, y, dx, dy ):
        """ Layout the contents of the section based on the specified bounds.

            After the initial pass, any size change is distributed among the
            items proportionally to their current sizes, with the last item
            absorbing any rounding remainder.
        """
        # Check if we need to perform initial layout
        if not self.initialized:
            self.initial_recalc_sizes( x, y, dx, dy )
            self.initialized = True
            return

        self.width = dx = max( 0, dx )
        self.height = dy = max( 0, dy )
        self.bounds = ( x, y, dx, dy )

        # If none of the contents are resizable, use the fixed layout method:
        if not self.resizable:
            self.recalc_sizes_fixed( x, y, dx, dy )
            return

        contents = self.visible_contents
        n = len( contents ) - 1
        splitters = []

        # Perform a horizontal layout:
        if self.is_row:
            # allow 10 pixels for the splitter
            sdx = 10
            dx -= (n * sdx)
            cdx = 0

            # Calculate the current and minimum width:
            for item in contents:
                cdx += item.width
            cdx = max( 1, cdx )

            # Calculate the delta between the current and new width:
            delta = remaining = dx - cdx

            # Allocate the change (plus or minus) proportionally based on each
            # item's current size:
            for i, item in enumerate( contents ):
                if i < n:
                    idx = int( round( float( item.width * delta ) / cdx ) )
                else:
                    # Last item absorbs the rounding remainder:
                    idx = remaining
                remaining -= idx
                idx += item.width
                item.recalc_sizes( x, y, idx, dy )
                x += idx

                # Define the splitter bar between adjacent items:
                if i < n:
                    splitters.append(
                        DockSplitter( bounds = ( x, y, sdx, dy ),
                                      style = 'vertical',
                                      parent = self,
                                      index = i ) )
                x += sdx

        # Perform a vertical layout:
        else:
            # allow 10 pixels for the splitter
            sdy = 10
            dy -= (n * sdy)
            cdy = 0

            # Calculate the current and minimum height:
            for item in contents:
                cdy += item.height
            cdy = max( 1, cdy )

            # Calculate the delta between the current and new height:
            delta = remaining = dy - cdy

            # Allocate the change (plus or minus) proportionally based on each
            # item's current size:
            for i, item in enumerate( contents ):
                if i < n:
                    idy = int( round( float( item.height * delta ) / cdy ) )
                else:
                    # Last item absorbs the rounding remainder:
                    idy = remaining
                remaining -= idy
                idy += item.height
                item.recalc_sizes( x, y, dx, idy )
                y += idy

                # Define the splitter bar between adjacent items:
                if i < n:
                    splitters.append(
                        DockSplitter( bounds = ( x, y, dx, sdy ),
                                      style = 'horizontal',
                                      parent = self,
                                      index = i ) )
                y += sdy

        # Preserve the current internal '_last_bounds' for all splitters if
        # possible:
        cur_splitters = self.splitters
        for i in range( min( len( splitters ), len( cur_splitters ) ) ):
            splitters[i]._last_bounds = cur_splitters[i]._last_bounds

        # Save the new set of splitter bars:
        self.splitters = splitters

        # Set the visibility for all contained items:
        self._set_visibility()
    #---------------------------------------------------------------------------
    #  Layout the contents of the section based on the specified bounds using
    #  the minimum requested size for each item:
    #---------------------------------------------------------------------------

    def recalc_sizes_fixed ( self, x, y, dx, dy ):
        """ Layout the contents of the section based on the specified bounds
            using the minimum requested size for each item.

            No splitter bars are created; items are packed with 3-pixel gaps
            until the available space along the major axis runs out.
        """
        self.splitters = []

        x += 3
        y += 3
        dx = max( 0, dx - 3 )
        dy = max( 0, dy - 3 )

        # Perform a horizontal layout:
        if self.is_row:
            # Allocate the space for each item based on its minimum size until
            # the space runs out:
            for item in self.visible_contents:
                idx, idy = item.calc_min()
                idx = min( dx, idx )
                idy = min( dy, idy )
                dx = max( 0, dx - idx - 3 )
                item.recalc_sizes( x, y, idx, idy )
                x += idx + 3

        # Perform a vertical layout:
        else:
            # Allocate the space for each item based on its minimum size until
            # the space runs out:
            for item in self.visible_contents:
                idx, idy = item.calc_min()
                idx = min( dx, idx )
                idy = min( dy, idy )
                dy = max( 0, dy - idy - 3 )
                item.recalc_sizes( x, y, idx, idy )
                y += idy + 3

        # Set the visibility for all contained items:
        self._set_visibility()
#---------------------------------------------------------------------------
# Draws the contents of the section:
#---------------------------------------------------------------------------
def draw ( self, dc ):
""" Draws the contents of the section.
"""
if self._visible is not False:
contents = self.visible_contents
x, y, dx, dy = self.bounds
self.fill_bg_color( dc, x, y, dx, dy )
for item in contents:
item.draw( dc )
self.begin_draw( dc )
for item in self.splitters:
item.draw( dc )
self.end_draw( dc )
#---------------------------------------------------------------------------
# Returns the object at a specified window position:
#---------------------------------------------------------------------------
def object_at ( self, x, y, force = False ):
""" Returns the object at a specified window position.
"""
if self._visible is not False:
for item in self.splitters:
if item.is_at( x, y ):
return item
for item in self.visible_contents:
object = item.object_at( x, y )
if object is not None:
return object
if force and self.is_at( x, y ):
return self
return None
    #---------------------------------------------------------------------------
    #  Gets the DockInfo object for a specified window position:
    #---------------------------------------------------------------------------

    def dock_info_at ( self, x, y, tdx, is_control, force = False ):
        """ Gets the DockInfo object for a specified window position.

            Delegates to the drag bar, splitters and children first; when
            *force* is True and nothing matched, classifies the point by the
            nearest section edge (or as an export drag when far outside).
        """
        # Check to see if the point is in our drag bar:
        info = super( DockSection, self ).dock_info_at( x, y, tdx, is_control )
        if info is not None:
            return info

        if self._visible is False:
            return None

        for item in self.splitters:
            if item.is_at( x, y ):
                return DockInfo( kind = DOCK_SPLITTER )

        for item in self.visible_contents:
            object = item.dock_info_at( x, y, tdx, is_control )
            if object is not None:
                return object

        # Check to see if we must return a DockInfo object:
        if not force:
            return None

        # Otherwise, figure out which edge the point is closest to, and
        # return a DockInfo object describing that edge:
        lx, ty, dx, dy = self.bounds
        left = lx - x
        right = x - lx - dx + 1
        top = ty - y
        bottom = y - ty - dy + 1

        # If the point is way outside of the section, mark it is a drag and
        # drop candidate:
        if max( left, right, top, bottom ) > 20:
            return DockInfo( kind = DOCK_EXPORT )

        left = abs( left )
        right = abs( right )
        top = abs( top )
        bottom = abs( bottom )
        choice = min( left, right, top, bottom )
        # Each edge's docking zone is one third of the section's extent:
        mdx = dx / 3
        mdy = dy / 3

        if choice == left:
            return DockInfo( kind = DOCK_LEFT,
                             bounds = ( lx, ty, mdx, dy ) )

        if choice == right:
            return DockInfo( kind = DOCK_RIGHT,
                             bounds = ( lx + dx - mdx, ty, mdx, dy ) )

        if choice == top:
            return DockInfo( kind = DOCK_TOP,
                             bounds = ( lx, ty, dx, mdy ) )

        return DockInfo( kind = DOCK_BOTTOM,
                         bounds = ( lx, ty + dy - mdy, dx, mdy ) )
    #---------------------------------------------------------------------------
    #  Adds a control to the section at the edge of the region specified:
    #---------------------------------------------------------------------------

    def add ( self, control, region, kind ):
        """ Adds a control to the section at the edge of the region specified.

            Along the section's major axis the control is inserted as a new
            sibling; across the major axis the target region is wrapped in a
            new perpendicular sub-section containing both.
        """
        contents = self.contents
        new_region = control
        if not isinstance( control, DockRegion ):
            new_region = DockRegion( contents = [ control ] )
        i = contents.index( region )
        if self.is_row:
            if (kind == DOCK_TOP) or (kind == DOCK_BOTTOM):
                # Perpendicular dock: wrap target and new region in a column:
                if kind == DOCK_TOP:
                    new_contents = [ new_region, region ]
                else:
                    new_contents = [ region, new_region ]
                contents[ i ] = DockSection( is_row = False ).set(
                                    contents = new_contents )
            else:
                if new_region.parent is self:
                    # Re-docking within this section; the index must be
                    # recomputed after the removal:
                    contents.remove( new_region )
                    i = contents.index( region )
                if kind == DOCK_RIGHT:
                    i += 1
                contents.insert( i, new_region )
        else:
            if (kind == DOCK_LEFT) or (kind == DOCK_RIGHT):
                # Perpendicular dock: wrap target and new region in a row:
                if kind == DOCK_LEFT:
                    new_contents = [ new_region, region ]
                else:
                    new_contents = [ region, new_region ]
                contents[ i ] = DockSection( is_row = True ).set(
                                    contents = new_contents )
            else:
                if new_region.parent is self:
                    # Re-docking within this section; the index must be
                    # recomputed after the removal:
                    contents.remove( new_region )
                    i = contents.index( region )
                if kind == DOCK_BOTTOM:
                    i += 1
                contents.insert( i, new_region )
    #---------------------------------------------------------------------------
    #  Removes a specified region or section from the section:
    #---------------------------------------------------------------------------

    def remove ( self, item ):
        """ Removes a specified region or section from the section.

            A group with a single child is replaced by that child rather than
            removed. When the section itself becomes degenerate it is removed
            from its parent; an emptied top-level section notifies its
            DockWindow instead.
        """
        contents = self.contents
        if isinstance( item, DockGroup ) and (len( item.contents ) == 1):
            # Collapse the single-child group in place:
            contents[ contents.index( item ) ] = item.contents[0]
        else:
            contents.remove( item )

        if self.parent is not None:
            if len( contents ) <= 1:
                self.parent.remove( self )
        elif (len( contents ) == 0) and (self.dock_window is not None):
            self.dock_window.dock_window_empty()
#---------------------------------------------------------------------------
# Sets the visibility of the group:
#---------------------------------------------------------------------------
def set_visibility ( self, visible ):
""" Sets the visibility of the group.
"""
self._visible = visible
for item in self.contents:
item.set_visibility( visible )
#---------------------------------------------------------------------------
# Returns a copy of the section 'structure', minus the actual content:
#---------------------------------------------------------------------------
def get_structure ( self ):
""" Returns a copy of the section 'structure', minus the actual content.
"""
return self.clone_traits( [ 'is_row', 'width', 'height' ] ).set(
contents = [ item.get_structure() for item in self.contents ],
splitters = [ item.get_structure() for item in self.splitters ] )
#---------------------------------------------------------------------------
# Gets the maximum bounds that a splitter bar is allowed to be dragged:
#---------------------------------------------------------------------------
def get_splitter_bounds ( self, splitter ):
""" Gets the maximum bounds that a splitter bar is allowed to be dragged.
"""
x, y, dx, dy = splitter.bounds
i = self.splitters.index( splitter )
contents = self.visible_contents
item1 = contents[ i ]
item2 = contents[ i + 1 ]
bx, by, bdx, bdy = item2.bounds
if self.is_row:
x = item1.bounds[0]
dx = bx + bdx - x
else:
y = item1.bounds[1]
dy = by + bdy - y
return ( x, y, dx, dy )
#---------------------------------------------------------------------------
# Updates the affected regions when a splitter bar is released:
#---------------------------------------------------------------------------
def update_splitter ( self, splitter, window ):
""" Updates the affected regions when a splitter bar is released.
"""
x, y, dx, dy = splitter.bounds
i = self.splitters.index( splitter )
contents = self.visible_contents
item1 = contents[ i ]
item2 = contents[ i + 1 ]
ix1, iy1, idx1, idy1 = item1.bounds
ix2, iy2, idx2, idy2 = item2.bounds
window.Freeze()
if self.is_row:
item1.recalc_sizes( ix1, iy1, x - ix1, idy1 )
item2.recalc_sizes( x + dx, iy2, ix2 + idx2 - x - dx, idy2 )
else:
item1.recalc_sizes( ix1, iy1, idx1, y - iy1 )
item2.recalc_sizes( ix2, y + dy, idx2, iy2 + idy2 - y - dy )
window.Thaw()
if splitter.style == 'horizontal':
dx = 0
else:
dy = 0
window.RefreshRect( wx.Rect( ix1 - dx, iy1 - dy,
ix2 + idx2 - ix1 + 2 * dx, iy2 + idy2 - iy1 + 2 * dy ) )
    #---------------------------------------------------------------------------
    #  Prints the contents of the section:
    #---------------------------------------------------------------------------

    def dump ( self, indent = 0 ):
        """ Prints the contents of the section (debugging aid).

            *indent* is the number of spaces used to indent the output.
        """
        print '%sSection( %08X, is_row = %s, width = %d, height = %d )' % (
              ' ' * indent, id( self ), self.is_row, self.width, self.height )
        for item in self.contents:
            item.dump( indent + 3 )
#---------------------------------------------------------------------------
# Sets the correct visiblity for all contained items:
#---------------------------------------------------------------------------
def _set_visibility ( self ):
""" Sets the correct visiblity for all contained items.
"""
for item in self.contents:
item.set_visibility( item.visible )
#---------------------------------------------------------------------------
# Handles the 'contents' trait being changed:
#---------------------------------------------------------------------------
def _contents_changed ( self ):
""" Handles the 'contents' trait being changed.
"""
for item in self.contents:
item.parent = self
self.calc_min( True )
self.modified = True
def _contents_items_changed ( self, event ):
""" Handles the 'contents' trait being changed.
"""
for item in event.added:
item.parent = self
self.calc_min( True )
self.modified = True
#---------------------------------------------------------------------------
# Handles the 'splitters' trait being changed:
#---------------------------------------------------------------------------
def _splitters_changed ( self ):
""" Handles the 'splitters' trait being changed.
"""
for item in self.splitters:
item.parent = self
def _splitters_items_changed ( self, event ):
""" Handles the 'splitters' trait being changed.
"""
for item in event.added:
item.parent = self
#---------------------------------------------------------------------------
# Implementation of the 'modified' property:
#---------------------------------------------------------------------------
def _set_modified ( self, value ):
self._resizable = None
if self.parent is not None:
self.parent.modified = True
#-------------------------------------------------------------------------------
#  'DockInfo' class:
#-------------------------------------------------------------------------------

class DockInfo ( HasPrivateTraits ):
    """ Describes a potential docking operation: its kind (edge, tab,
        splitter, export, ...), the screen feedback bounds, and the target
        region/control.
    """

    #---------------------------------------------------------------------------
    #  Trait definitions:
    #---------------------------------------------------------------------------

    # Dock kind:
    kind = Range( DOCK_TOP, DOCK_EXPORT )

    # Dock bounds:
    bounds = Bounds

    # Tab bounds (if needed):
    tab_bounds = Bounds

    # Dock Region:
    region = Instance( DockRegion )

    # Dock Control:
    control = Instance( DockItem )

    def __init__(self, **kw):
        # Explicit pass-through to the HasPrivateTraits initializer:
        super(DockInfo, self).__init__(**kw)

    #---------------------------------------------------------------------------
    #  Draws the DockInfo on the display:
    #---------------------------------------------------------------------------

    def draw ( self, window, bitmap = None ):
        """ Draws the DockInfo on the display.

            Composites a translucent rounded rectangle (the drop-target
            feedback) over *bitmap* and blits the result onto *window*. The
            bitmap is cached so subsequent calls may omit it.
        """
        if DOCK_TOP <= self.kind <= DOCK_TABADD:
            if bitmap is None:
                bitmap = self._bitmap
                if bitmap is None:
                    return
            else:
                self._bitmap = bitmap

            sdc, bx, by = get_dc( window )
            bdc = wx.MemoryDC()
            bdc2 = wx.MemoryDC()
            bdx, bdy = bitmap.GetWidth(), bitmap.GetHeight()
            bitmap2 = wx.EmptyBitmap( bdx, bdy )
            bdc.SelectObject( bitmap )
            bdc2.SelectObject( bitmap2 )
            # Copy the saved background so repeated draws do not accumulate:
            bdc2.Blit( 0, 0, bdx, bdy, bdc, 0, 0 )

            try:
                bdc3 = wx.GCDC( bdc2 )
                bdc3.SetPen( wx.TRANSPARENT_PEN )
                bdc3.SetBrush( wx.Brush( wx.Colour( *DockColorBrush ) ) )
                x, y, dx, dy = self.bounds

                if DOCK_TAB <= self.kind <= DOCK_TABADD:
                    tx, ty, tdx, tdy = self.tab_bounds
                    bdc3.DrawRoundedRectangle( tx, ty, tdx, tdy, 4 )
                else:
                    bdc3.DrawRoundedRectangle( x, y, dx, dy, 8 )
            except Exception:
                # NOTE(review): drawing failures (e.g. no GCDC support) are
                # deliberately ignored; only the feedback overlay is lost.
                pass

            sdc.Blit( bx, by, bdx, bdy, bdc2, 0, 0 )

    #---------------------------------------------------------------------------
    #  Docks the specified control:
    #---------------------------------------------------------------------------

    def dock ( self, control, window ):
        """ Docks the specified control.

            Performs the structural surgery implied by 'kind': adding to a
            region as a tab, merging/wrapping sections for a tab-add, docking
            at a region edge, or docking at an edge of the top-level section.
            Finally removes the control from its old parent and re-lays out
            *window*.
        """
        the_control = control
        kind = self.kind
        if kind < DOCK_NONE:
            the_parent = control.parent
            region = self.region
            if (kind == DOCK_TAB) or (kind == DOCK_BAR):
                region.add( control, self.control )
            elif kind == DOCK_TABADD:
                item = self.control
                if isinstance( item, DockControl ):
                    # Wrap the existing control and the new one in a row
                    # section in place of the original control:
                    if isinstance( control, DockControl ):
                        control = DockRegion( contents = [ control ] )
                    i = region.contents.index( item )
                    region.contents[ i ] = item = DockSection(
                        contents = [ DockRegion( contents = [ item ] ),
                                     control ],
                        is_row = True )
                elif isinstance( item, DockSection ):
                    if (isinstance( control, DockSection ) and
                        (item.is_row == control.is_row)):
                        # Same orientation: merge the two sections' contents:
                        item.contents.extend( control.contents )
                    else:
                        if isinstance( control, DockControl ):
                            control = DockRegion( contents = [ control ] )
                        item.contents.append( control )
                else:
                    item.contents.append( control )
                region.active = region.contents.index( item )
            elif region is not None:
                region.parent.add( control, region, kind )
            else:
                # Dock at an edge of the top-level section:
                sizer = window.GetSizer()
                section = sizer._contents
                if ((section.is_row and
                     ((kind == DOCK_TOP) or (kind == DOCK_BOTTOM))) or
                    ((not section.is_row) and
                     ((kind == DOCK_LEFT) or (kind == DOCK_RIGHT)))):
                    # Requested edge is perpendicular to the top-level
                    # section's orientation; wrap it in a new section:
                    if len( section.contents ) > 0:
                        sizer._contents = section = DockSection(
                            is_row = not section.is_row ).set(
                            contents = [ section ] )
                if len( section.contents ) > 0:
                    i = 0
                    if (kind == DOCK_RIGHT) or (kind == DOCK_BOTTOM):
                        i = -1
                    section.add( control, section.contents[ i ], kind )
                else:
                    # The window was empty; the control becomes its contents:
                    section.is_row = not section.is_row
                    section.contents = [ DockRegion( contents = [ control ] ) ]
                    section = None

            if ((the_parent is not None) and
                (the_parent is not the_control.parent)):
                the_parent.remove( the_control )

            # Force the main window to be laid out and redrawn:
            window.Layout()
            window.Refresh()
# Create a reusable DockInfo indicating no information available:
no_dock_info = DockInfo( kind = DOCK_NONE )
#-------------------------------------------------------------------------------
#  'SetStructureHandler' class
#-------------------------------------------------------------------------------

class SetStructureHandler ( object ):
    """ Base class for handlers used when restoring a saved layout structure.
    """

    #---------------------------------------------------------------------------
    #  Resolves an unresolved DockControl id:
    #---------------------------------------------------------------------------

    def resolve_id ( self, id ):
        """ Resolves an unresolved DockControl id.

            Subclasses override this; the default resolves nothing.
        """
        return None

    #---------------------------------------------------------------------------
    #  Resolves extra, unused DockControls not referenced by the structure:
    #---------------------------------------------------------------------------

    def resolve_extras ( self, structure, extras ):
        """ Resolves extra, unused DockControls not referenced by the
            structure. The default behavior hides each extra control's window.
        """
        for extra in extras:
            window = extra.control
            if window is not None:
                window.Show( False )
#-------------------------------------------------------------------------------
# 'DockSizer' class:
#-------------------------------------------------------------------------------
class DockSizer ( wx.PySizer ):
    #---------------------------------------------------------------------------
    #  Initializes the object:
    #---------------------------------------------------------------------------

    def __init__ ( self, contents = None ):
        """ Initializes the sizer.

            *contents* may be a DockGroup, tuple, list or DockControl (see
            'SetContents'); when None the sizer starts empty.
        """
        super( DockSizer, self ).__init__()

        # Make sure the DockImages singleton has been initialized:
        DockImages.init()

        # Finish initializing the sizer itself:
        self._contents = self._structure = self._max_structure = None
        if contents is not None:
            self.SetContents( contents )
#---------------------------------------------------------------------------
# Calculates the minimum size needed by the sizer:
#---------------------------------------------------------------------------
def CalcMin ( self ):
if self._contents is None:
return wx.Size( 20, 20 )
dx, dy = self._contents.calc_min()
return wx.Size( dx, dy )
#---------------------------------------------------------------------------
# Layout the contents of the sizer based on the sizer's current size and
# position:
#---------------------------------------------------------------------------
def RecalcSizes ( self ):
""" Layout the contents of the sizer based on the sizer's current size
and position.
"""
if self._contents is None:
return
x, y = self.GetPositionTuple()
dx, dy = self.GetSizeTuple()
self._contents.recalc_sizes( x, y, dx, dy )
    #---------------------------------------------------------------------------
    #  Returns the current sizer contents:
    #---------------------------------------------------------------------------

    def GetContents ( self ):
        """ Returns the current sizer contents (the top-level DockGroup, or
            None when the sizer is empty).
        """
        return self._contents
    #---------------------------------------------------------------------------
    #  Initializes the layout of a DockWindow from a content list:
    #---------------------------------------------------------------------------

    def SetContents ( self, contents ):
        """ Initializes the layout of a DockWindow from a content list.

            Accepts a ready-made DockGroup, a tuple (converted to a
            DockRegion), a list (converted to a DockSection), or a single
            DockControl; raises TypeError otherwise.
        """
        if isinstance( contents, DockGroup ):
            self._contents = contents
        elif isinstance( contents, tuple ):
            self._contents = self._set_region( contents )
        elif isinstance( contents, list ):
            self._contents = self._set_section( contents, True )
        elif isinstance( contents, DockControl ):
            self._contents = self._set_section( [ contents ], True )
        else:
            raise TypeError

        # Set the owner DockWindow for the top-level group (if possible)
        # so that it can notify the owner when the DockWindow becomes empty:
        control = self._contents.control
        if control is not None:
            self._contents.dock_window = control.GetParent().owner

        # If no saved structure exists yet, save the current one:
        if self._structure is None:
            self._structure = self.GetStructure()
def _set_region ( self, contents ):
    """ Builds a DockRegion from a tuple-based content description. """
    def convert ( item ):
        # Tuples nest as regions, lists as (row) sections, and DockItems
        # are used directly; anything else is an error:
        if isinstance( item, tuple ):
            return self._set_region( item )
        if isinstance( item, list ):
            return self._set_section( item, True )
        if isinstance( item, DockItem ):
            return item
        raise TypeError

    return DockRegion( contents = [ convert( item ) for item in contents ] )
def _set_section ( self, contents, is_row ):
    """ Builds a DockSection (row or column) from a list-based content
        description.
    """
    def convert ( item ):
        if isinstance( item, tuple ):
            return self._set_region( item )
        if isinstance( item, list ):
            # Nested lists alternate between row and column orientation:
            return self._set_section( item, not is_row )
        if isinstance( item, DockControl ):
            # A bare control gets wrapped in its own region:
            return DockRegion( contents = [ item ] )
        raise TypeError

    return DockSection( is_row = is_row ).set(
               contents = [ convert( item ) for item in contents ] )
#---------------------------------------------------------------------------
#  Returns a copy of the layout 'structure', minus the actual content
#  (i.e. controls, splitters, bounds). This method is intended for use in
#  persisting the current user layout, so that it can be restored in a
#  future session:
#---------------------------------------------------------------------------

def GetStructure ( self ):
    """ Returns a copy of the layout 'structure', minus the actual content
        (i.e. controls, splitters, bounds). This method is intended for use
        in persisting the current user layout, so that it can be restored
        in a future session.
    """
    contents = self._contents
    if contents is None:
        # An empty sizer is represented by an empty section:
        return DockSection()
    return contents.get_structure()
#---------------------------------------------------------------------------
#  Takes a previously saved 'GetStructure' result and applies it to the
#  contents of the sizer in order to restore a previous layout using a
#  new set of controls:
#---------------------------------------------------------------------------

def SetStructure ( self, window, structure, handler = None ):
    """ Takes a previously saved 'GetStructure' result and applies it to the
        contents of the sizer in order to restore a previous layout using a
        new set of controls.

        *structure* must be a DockGroup (as returned by GetStructure);
        anything else is silently ignored.  *handler*, when supplied, is
        asked to resolve saved items with no matching current control
        (``resolve_id``) and to place current controls that have no saved
        counterpart (``resolve_extras``).
    """
    section = self._contents
    # Nothing to restore into (or invalid saved data): bail out silently.
    if (section is None) or (not isinstance( structure, DockGroup )):
        return

    # Make sure that DockSections, which have a separate layout algorithm
    # for the first layout, are set as initialized.
    structure.initialized = True

    # Save the current structure in case a 'ResetStructure' call is made
    # later:
    self._structure = self.GetStructure()

    extras = []

    # Create a mapping for all the DockControls in the new structure,
    # keyed by control id (duplicate ids are dropped from their parent):
    map = {}
    for control in structure.get_controls( False ):
        if control.id in map:
            control.parent.remove( control )
        else:
            map[ control.id ] = control

    # Try to map each current item into an equivalent item in the saved
    # preferences:
    for control in section.get_controls( False ):
        mapped_control = map.get( control.id )
        if mapped_control is not None:
            # Copy the saved geometry/state flags onto the live control:
            control.set( **mapped_control.get( 'visible', 'locked',
                             'closeable', 'resizable', 'width', 'height' ) )
            # User-assigned name/style overrides survive the restore:
            if mapped_control.user_name:
                control.name = mapped_control.name
            if mapped_control.user_style:
                control.style = mapped_control.style
            structure.replace_control( mapped_control, control )
            del map[ control.id ]
        else:
            extras.append( control )

    # Try to resolve all unused saved items:
    for id, item in map.items():
        # If there is a handler, see if it can resolve it:
        if handler is not None:
            control = handler.resolve_id( id )
            if control is not None:
                item.control = control
                continue

        # If nobody knows what it is, just remove it:
        item.parent.remove( item )

    # Check if there are any new items that we have never seen before:
    if len( extras ) > 0:
        if handler is not None:
            # Allow the handler to decide their fate:
            handler.resolve_extras( structure, extras )
        else:
            # Otherwise, add them to the top level as a new region (let the
            # user re-arrange them):
            structure.contents.append( DockRegion( contents = extras ) )

    # Finally, replace the original structure with the updated structure:
    self.SetContents( structure )
#---------------------------------------------------------------------------
#  Restores the previously saved structure (if any):
#---------------------------------------------------------------------------

def ResetStructure ( self, window ):
    """ Restores the most recently saved layout structure, if one exists. """
    saved = self._structure
    if saved is not None:
        self.SetStructure( window, saved )
#---------------------------------------------------------------------------
#  Toggles the current 'lock' setting of the contents:
#---------------------------------------------------------------------------

def ToggleLock ( self ):
    """ Toggles the current 'lock' setting of the contents (no-op when the
        sizer is empty).
    """
    contents = self._contents
    if contents is not None:
        contents.toggle_lock()
#---------------------------------------------------------------------------
#  Draws the contents of the sizer:
#---------------------------------------------------------------------------

def Draw ( self, window ):
    """ Draws the sizer's contents into *window*, or clears the window when
        there is nothing to draw.
    """
    contents = self._contents
    if contents is None:
        clear_window( window )
    else:
        contents.draw( set_standard_font( wx.PaintDC( window ) ) )
#---------------------------------------------------------------------------
#  Returns the object at a specified x, y position:
#---------------------------------------------------------------------------

def ObjectAt ( self, x, y, force = False ):
    """ Returns the dock object at the window position (x, y), or None when
        the sizer is empty.
    """
    contents = self._contents
    if contents is None:
        return None
    return contents.object_at( x, y, force )
#---------------------------------------------------------------------------
#  Gets a DockInfo object at a specified x, y position:
#---------------------------------------------------------------------------

def DockInfoAt ( self, x, y, size, is_control ):
    """ Returns a DockInfo object describing the dock target at (x, y),
        or the shared 'no_dock_info' sentinel when the sizer is empty.
    """
    contents = self._contents
    if contents is None:
        return no_dock_info
    return contents.dock_info_at( x, y, size, is_control, True )
#---------------------------------------------------------------------------
#  Minimizes/Maximizes a specified DockControl:
#---------------------------------------------------------------------------

def MinMax ( self, window, dock_control ):
    """ Maximizes *dock_control* (hiding all other controls) on the first
        call; restores the previous layout on the next call.
    """
    if self._max_structure is not None:
        # Already maximized: restore the saved layout instead:
        self.Reset( window )
        return

    # Save the current layout, then show only *dock_control*:
    self._max_structure = self.GetStructure()
    for control in self.GetContents().get_controls():
        control.visible = (control is dock_control)
#---------------------------------------------------------------------------
#  Resets the DockSizer to a known state:
#---------------------------------------------------------------------------

def Reset ( self, window ):
    """ Restores the layout saved by MinMax (if any) and clears the saved
        snapshot.
    """
    saved = self._max_structure
    if saved is not None:
        self.SetStructure( window, saved )
        self._max_structure = None
#---------------------------------------------------------------------------
#  Returns whether the sizer can be maximized now:
#---------------------------------------------------------------------------

def IsMaximizable ( self ):
    """ Returns True when no control is currently maximized (i.e. there is
        no saved MinMax snapshot), so a maximize operation is allowed.
    """
    return self._max_structure is None
def top_level_window_for ( control ):
    """ Returns the top-level (parent-less) ancestor of *control*.

        Walks the GetParent() chain until it reaches a widget whose parent
        is None; returns *control* itself when it already has no parent.
    """
    parent = control.GetParent()
    while parent is not None:
        control, parent = parent, parent.GetParent()
    return control
|
enthought/traitsgui
|
enthought/pyface/dock/dock_sizer.py
|
Python
|
bsd-3-clause
| 155,245 | 0.020265 |
from django.contrib import admin
from courses.models import Course, Instructor, Page, Enrollment
class CourseAdmin(admin.ModelAdmin):
    """ Admin configuration for Course objects. """

    # Columns shown in the admin change-list:
    list_display = ['title', 'instructor', 'language', 'popularity', 'is_public', 'deleted']
    # Auto-fill the slug from the title while typing in the admin form:
    prepopulated_fields = {
        'slug': ('title', )
    }

    def queryset(self, request):
        """ Returns the change-list queryset.

            Uses the model's `all_objects` manager, presumably so that
            soft-deleted courses are also visible in the admin (the model
            exposes a 'deleted' flag in list_display).
            NOTE(review): `queryset()` / `get_query_set()` are the
            pre-Django-1.6 spellings of `get_queryset()` — confirm against
            the project's Django version before upgrading.
        """
        qs = self.model.all_objects.get_query_set()
        # the following is needed from the superclass
        ordering = self.get_ordering(request)
        if ordering:
            qs = qs.order_by(*ordering)
        return qs
class InstructorAdmin(admin.ModelAdmin):
    """ Admin configuration for Instructor objects. """

    # Columns shown in the admin change-list:
    list_display = ['user', 'popularity']
class PageAdmin(admin.ModelAdmin):
    """ Admin configuration for Page objects (default ModelAdmin behavior). """
    pass
# Register the course-related models with the default admin site.
# Enrollment uses the stock ModelAdmin options.
admin.site.register(Course, CourseAdmin)
admin.site.register(Instructor, InstructorAdmin)
admin.site.register(Page, PageAdmin)
admin.site.register(Enrollment)
|
Uberlearner/uberlearner
|
uberlearner/courses/admin.py
|
Python
|
mit
| 848 | 0.008255 |
import parsley
# Parsley (OMeta) grammar for space-separated "key=value" option strings,
# e.g.:  name="bob" count=3 pattern=/a+b/
# The top-level `parse` rule returns a dict mapping identifiers to their
# parsed values (strings, regexes, numbers, or bare words).
_grammar = r"""
parse = pair:head (' '+ pair)*:tail -> dict([head] + tail)
pair = ident:i '=' value:v -> (i, v)
ident = <letter letterOrDigit*>
value = string | regex | number | word
string = '"' (escapedChar | ~'"' anything)*:c '"' -> ''.join(c)
| "'" (escapedChar | ~"'" anything)*:c "'" -> ''.join(c)
regex = '/' (escapedChar | ~'/' anything)*:c '/' -> '/' + ''.join(c) + '/'
word = <(~' ' anything)+>
# A number is optionally a negative sign, followed by an intPart, and then
# maybe a floatPart.
number = ('-' | -> ''):sign
( (intPart:i floatPart:f -> float(sign + i + f ))
| (intPart:i -> int(sign + i))
| (floatPart:f -> float(sign + '0' + f)))
digit = :x ?(x in '0123456789') -> x
digit1_9 = :x ?(x in '123456789') -> x
intPart = (digit1_9:first <digit+>:rest -> first + rest)
| digit
floatPart = <'.' digit+>
# This matches a *single* backslash, followed by something else, which it returns.
escapedChar = "\\\\" anything
"""

# Compile the grammar; no extra names are needed by the rule actions, so the
# bindings dict is empty.
learn_grammar = parsley.makeGrammar(_grammar, {})
|
hamperbot/factoids2
|
hamper_factoids/parser.py
|
Python
|
mpl-2.0
| 1,049 | 0.000953 |
#!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals

# Pelican publish-time settings.
# This file is only used if you use `make publish` or
# explicitly specify it as your config file.

import os
import sys
# Make the sibling pelicanconf module importable, then start from the
# development settings and override publish-specific values below:
sys.path.append(os.curdir)
from pelicanconf import *

# Absolute site URL used when generating the published output:
SITEURL = 'http://mpdev.mattew.se'
RELATIVE_URLS = False

# Enable Atom feed generation for the published site:
FEED_ALL_ATOM = 'feeds/all.atom.xml'
CATEGORY_FEED_ATOM = 'feeds/%s.atom.xml'

DELETE_OUTPUT_DIRECTORY = False

# Following items are often useful when publishing
#DISQUS_SITENAME = ""
#GOOGLE_ANALYTICS = ""
mattew/mattew.github.io-src
|
publishconf.py
|
Python
|
mit
| 531 | 0.00565 |
# Copyright (C) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import logging
from webkitpy.layout_tests.layout_package import json_results_generator
from webkitpy.layout_tests.models import test_expectations
from webkitpy.layout_tests.models import test_failures
class JSONLayoutResultsGenerator(json_results_generator.JSONResultsGeneratorBase):
    """A JSON results generator for layout tests.

    Collects per-test results, timings and expectation summaries and writes
    them out in the incremental results-JSON format (via the base class's
    generate_json_output()).
    """

    LAYOUT_TESTS_PATH = "LayoutTests"

    # Additional JSON fields.
    WONTFIX = "wontfixCounts"

    # Maps a test_expectations result type to the single-character code used
    # in the compact results JSON.
    FAILURE_TO_CHAR = {test_expectations.PASS: json_results_generator.JSONResultsGeneratorBase.PASS_RESULT,
                       test_expectations.SKIP: json_results_generator.JSONResultsGeneratorBase.SKIP_RESULT,
                       test_expectations.FAIL: "Y",
                       test_expectations.CRASH: "C",
                       test_expectations.TIMEOUT: "T",
                       test_expectations.IMAGE: "I",
                       test_expectations.TEXT: "F",
                       test_expectations.MISSING: "O",
                       test_expectations.AUDIO: "A",
                       test_expectations.IMAGE_PLUS_TEXT: "Z"}

    def __init__(self, port, builder_name, build_name, build_number,
                 results_file_base_path, builder_base_url,
                 test_timings, expectations, result_summary, all_tests,
                 test_results_server=None, test_type="", master_name=""):
        """Modifies the results.json file. Grabs it off the archive directory
        if it is not found locally.

        Args:
          result_summary: ResultsSummary object storing the summary of the test
              results.
        """
        super(JSONLayoutResultsGenerator, self).__init__(
            port, builder_name, build_name, build_number, results_file_base_path,
            builder_base_url, {}, port.repository_paths(),
            test_results_server, test_type, master_name)

        self._expectations = expectations

        self._result_summary = result_summary
        # Map of test name -> failure type for every failing test:
        self._failures = dict(
            (test_name, result_summary.results[test_name].type)
            for test_name in result_summary.failures)
        self._all_tests = all_tests
        # Map of test name -> run time in seconds (float):
        self._test_timings = dict(
            (test_tuple.test_name, test_tuple.test_run_time)
            for test_tuple in test_timings)

        self.generate_json_output()

    def _get_path_relative_to_layout_test_root(self, test):
        """Returns the path of the test relative to the layout test root.

        For example, for:
          src/third_party/WebKit/LayoutTests/fast/forms/foo.html
        We would return
          fast/forms/foo.html
        """
        # Fixed: the original used 'is' / 'is not' to compare ints, which
        # relies on CPython small-int caching and raises a SyntaxWarning on
        # modern Pythons; use equality instead.
        index = test.find(self.LAYOUT_TESTS_PATH)
        if index == -1:
            # Already a relative path.
            relativePath = test
        else:
            # Skip past "LayoutTests" and the path separator following it.
            relativePath = test[index + len(self.LAYOUT_TESTS_PATH) + 1:]

        # Make sure all paths are unix-style.
        return relativePath.replace('\\', '/')

    # override
    def _get_test_timing(self, test_name):
        """Returns the run time of *test_name* in whole seconds (0 if unknown)."""
        if test_name in self._test_timings:
            # Floor for now to get time in seconds.
            return int(self._test_timings[test_name])
        return 0

    # override
    def _get_failed_test_names(self):
        """Returns the set of names of all failing tests."""
        return set(self._failures.keys())

    # override
    def _get_modifier_char(self, test_name):
        """Returns the single-character result code for *test_name*."""
        if test_name not in self._all_tests:
            return self.NO_DATA_RESULT

        if test_name in self._failures:
            return self.FAILURE_TO_CHAR[self._failures[test_name]]

        return self.PASS_RESULT

    # override
    def _get_result_char(self, test_name):
        return self._get_modifier_char(test_name)

    # override
    def _insert_failure_summaries(self, results_for_builder):
        """Inserts the fixable / all-fixable / wontfix counts into the
        builder's results dictionary."""
        summary = self._result_summary

        self._insert_item_into_raw_list(results_for_builder,
            len((set(summary.failures.keys()) |
                summary.tests_by_expectation[test_expectations.SKIP]) &
                summary.tests_by_timeline[test_expectations.NOW]),
            self.FIXABLE_COUNT)
        self._insert_item_into_raw_list(results_for_builder,
            self._get_failure_summary_entry(test_expectations.NOW),
            self.FIXABLE)
        self._insert_item_into_raw_list(results_for_builder,
            len(self._expectations.get_tests_with_timeline(
                test_expectations.NOW)), self.ALL_FIXABLE_COUNT)
        self._insert_item_into_raw_list(results_for_builder,
            self._get_failure_summary_entry(test_expectations.WONTFIX),
            self.WONTFIX)

    # override
    def _normalize_results_json(self, test, test_name, tests):
        super(JSONLayoutResultsGenerator, self)._normalize_results_json(
            test, test_name, tests)

        # Remove tests that don't exist anymore.
        full_path = self._filesystem.join(self._port.layout_tests_dir(), test_name)
        full_path = self._filesystem.normpath(full_path)
        if not self._filesystem.exists(full_path):
            del tests[test_name]

    def _get_failure_summary_entry(self, timeline):
        """Creates a summary object to insert into the JSON.

        Args:
          timeline: current test_expectations timeline to build entry for
              (e.g., test_expectations.NOW, etc.)
        """
        entry = {}
        summary = self._result_summary
        timeline_tests = summary.tests_by_timeline[timeline]
        entry[self.SKIP_RESULT] = len(
            summary.tests_by_expectation[test_expectations.SKIP] &
            timeline_tests)
        entry[self.PASS_RESULT] = len(
            summary.tests_by_expectation[test_expectations.PASS] &
            timeline_tests)
        for failure_type in summary.tests_by_expectation.keys():
            if failure_type not in self.FAILURE_TO_CHAR:
                continue
            count = len(summary.tests_by_expectation[failure_type] &
                        timeline_tests)
            entry[self.FAILURE_TO_CHAR[failure_type]] = count
        return entry
|
cs-au-dk/Artemis
|
WebKit/Tools/Scripts/webkitpy/layout_tests/layout_package/json_layout_results_generator.py
|
Python
|
gpl-3.0
| 7,573 | 0.002377 |
# Auto-generated single-configuration test: builds one forecasting model for
# the ozone dataset with Logit transform, linear trend, day-of-month
# seasonality, and an SVR cycle/AR component.
import tests.model_control.test_ozone_custom_models_enabled as testmod


testmod.build_model( ['Logit'] , ['LinearTrend'] , ['Seasonal_DayOfMonth'] , ['SVR'] );
|
antoinecarme/pyaf
|
tests/model_control/detailed/transf_Logit/model_control_one_enabled_Logit_LinearTrend_Seasonal_DayOfMonth_SVR.py
|
Python
|
bsd-3-clause
| 160 | 0.05 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.