text | repo_name | path | language | license | size | score
---|---|---|---|---|---|---
stringlengths 6-947k | stringlengths 5-100 | stringlengths 4-231 | stringclasses 1 value | stringclasses 15 values | int64 6-947k | float64 0-0.34
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
class Migration(DataMigration):
def forwards(self, orm):
"Write your forwards methods here."
new_default = orm.Extension._meta.get_field_by_name('icon')[0].default
for ext in orm.Extension.objects.filter(icon=""):
ext.icon = new_default
ext.save()
def backwards(self, orm):
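# Intentionally a no-op: icons that were blank before the migration
# cannot be distinguished afterwards, so there is nothing to reverse.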
"Write your backwards methods here."
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'extensions.extension': {
'Meta': {'object_name': 'Extension'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'description': ('django.db.models.fields.TextField', [], {}),
'icon': ('django.db.models.fields.files.ImageField', [], {'default': "'/static/images/plugin.png'", 'max_length': '100', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'screenshot': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'blank': 'True'}),
'slug': ('autoslug.fields.AutoSlugField', [], {'unique_with': '()', 'max_length': '50', 'populate_from': 'None', 'db_index': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'uuid': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200', 'db_index': 'True'})
},
'extensions.extensionversion': {
'Meta': {'unique_together': "(('extension', 'version'),)", 'object_name': 'ExtensionVersion'},
'extension': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'versions'", 'to': "orm['extensions.Extension']"}),
'extra_json_fields': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'shell_versions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['extensions.ShellVersion']", 'symmetrical': 'False'}),
'source': ('django.db.models.fields.files.FileField', [], {'max_length': '223'}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {}),
'version': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'extensions.shellversion': {
'Meta': {'object_name': 'ShellVersion'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'major': ('django.db.models.fields.PositiveIntegerField', [], {}),
'minor': ('django.db.models.fields.PositiveIntegerField', [], {}),
'point': ('django.db.models.fields.IntegerField', [], {})
}
}
complete_apps = ['extensions']
| magcius/sweettooth | sweettooth/extensions/migrations/0008_new_icon_default.py | Python | agpl-3.0 | 6,118 | 0.007192 |
"""
Visualize possible stitches with the outcome of the validator.
"""
import math
import random
import matplotlib.pyplot as plt
import networkx as nx
import numpy as np
from mpl_toolkits.mplot3d import Axes3D
import stitcher
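# SPACE is the vertical offset used to elevate request nodes above the
# container in the 3d views; TYPE_FORMAT maps node types to matplotlib
# marker shapes.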
SPACE = 25
TYPE_FORMAT = {'a': '^', 'b': 's', 'c': 'v'}
def show(graphs, request, titles, prog='neato', size=None,
type_format=None, filename=None):
"""
Display the results using matplotlib.
"""
if not size:
size = _get_size(len(graphs))
fig, axarr = plt.subplots(size[0], size[1], figsize=(18, 10))
fig.set_facecolor('white')
x_val = 0
y_val = 0
index = 0
if size[0] == 1:
axarr = np.array(axarr).reshape((1, size[1]))
for candidate in graphs:
# axarr[x_val, y_val].axis('off')
axarr[x_val, y_val].xaxis.set_major_formatter(plt.NullFormatter())
axarr[x_val, y_val].yaxis.set_major_formatter(plt.NullFormatter())
axarr[x_val, y_val].xaxis.set_ticks([])
axarr[x_val, y_val].yaxis.set_ticks([])
axarr[x_val, y_val].set_title(titles[index])
# axarr[x_val, y_val].set_axis_bgcolor("white")
if not type_format:
type_format = TYPE_FORMAT
_plot_subplot(candidate, request.nodes(), prog, type_format,
axarr[x_val, y_val])
y_val += 1
if y_val > size[1] - 1:
y_val = 0
x_val += 1
index += 1
fig.tight_layout()
if filename is not None:
plt.savefig(filename)
else:
plt.show()
plt.close()
def _plot_subplot(graph, new_nodes, prog, type_format, axes):
"""
Plot a single candidate graph.
"""
pos = nx.nx_agraph.graphviz_layout(graph, prog=prog)
# draw the nodes
for node, values in graph.nodes(data=True):
shape = 'o'
if values[stitcher.TYPE_ATTR] in type_format:
shape = type_format[values[stitcher.TYPE_ATTR]]
color = 'g'
alpha = 0.8
if node in new_nodes:
color = 'b'
alpha = 0.2
elif 'rank' in values and values['rank'] > 7:
color = 'r'
elif 'rank' in values and values['rank'] < 7 and values['rank'] > 3:
color = 'y'
nx.draw_networkx_nodes(graph, pos, nodelist=[node], node_color=color,
node_shape=shape, alpha=alpha, ax=axes)
# draw the edges
dotted_line = []
normal_line = []
for src, trg in graph.edges():
if src in new_nodes and trg not in new_nodes:
dotted_line.append((src, trg))
else:
normal_line.append((src, trg))
nx.draw_networkx_edges(graph, pos, edgelist=dotted_line, style='dotted',
ax=axes)
nx.draw_networkx_edges(graph, pos, edgelist=normal_line, ax=axes)
# draw labels
nx.draw_networkx_labels(graph, pos, ax=axes)
def show_3d(graphs, request, titles, prog='neato', filename=None):
"""
Show the candidates in 3d - the request elevated above the container.
"""
fig = plt.figure(figsize=(18, 10))
fig.set_facecolor('white')
i = 0
size = _get_size(len(graphs))
for graph in graphs:
axes = fig.add_subplot(size[0], size[1], i+1,
projection=Axes3D.name)
axes.set_title(titles[i])
axes._axis3don = False
_plot_3d_subplot(graph, request, prog, axes)
i += 1
fig.tight_layout()
if filename is not None:
plt.savefig(filename)
else:
plt.show()
plt.close()
def _plot_3d_subplot(graph, request, prog, axes):
"""
Plot a single candidate graph in 3d.
"""
cache = {}
tmp = graph.copy()
for node in request.nodes():
tmp.remove_node(node)
pos = nx.nx_agraph.graphviz_layout(tmp, prog=prog)
# the container
for item in tmp.nodes():
axes.plot([pos[item][0]], [pos[item][1]], [0], linestyle="None",
marker="o", color='gray')
axes.text(pos[item][0], pos[item][1], 0, item)
for src, trg in tmp.edges():
axes.plot([pos[src][0], pos[trg][0]],
[pos[src][1], pos[trg][1]],
[0, 0], color='gray')
# the new nodes
for item in graph.nodes():
if item in request.nodes():
for nghb in graph.neighbors(item):
if nghb in tmp.nodes():
x_val = pos[nghb][0]
y_val = pos[nghb][1]
if (x_val, y_val) in list(cache.values()):
x_val = pos[nghb][0] + random.randint(10, SPACE)
y_val = pos[nghb][1] + random.randint(10, SPACE)
cache[item] = (x_val, y_val)
# edge
axes.plot([x_val, pos[nghb][0]],
[y_val, pos[nghb][1]],
[SPACE, 0], color='blue')
axes.plot([x_val], [y_val], [SPACE], linestyle="None", marker="o",
color='blue')
axes.text(x_val, y_val, SPACE, item)
for src, trg in request.edges():
if trg in cache and src in cache:
axes.plot([cache[src][0], cache[trg][0]],
[cache[src][1], cache[trg][1]],
[SPACE, SPACE], color='blue')
def _get_size(n_items):
"""
Calculate the size of the subplot layouts based on number of items.
"""
n_cols = math.ceil(math.sqrt(n_items))
n_rows = math.floor(math.sqrt(n_items))
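# e.g. 5 items: ceil(sqrt(5)) = 3 cols, floor(sqrt(5)) = 2 rows -> 2x3 grid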
if n_cols * n_rows < n_items:
n_cols += 1
return int(n_rows), int(n_cols)
| tmetsch/graph_stitcher | stitcher/vis.py | Python | mit | 5,618 | 0 |
from gpiozero import Button
from picamera import PiCamera
from datetime import datetime
from signal import pause
button = Button(2)
camera = PiCamera()
def capture():
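# Save a timestamped JPEG into the pi user's home directory.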
timestamp = datetime.now().isoformat()
camera.capture('/home/pi/{timestamp}.jpg'.format(timestamp=timestamp))
button.when_pressed = capture
pause()
| waveform80/gpio-zero | docs/examples/button_camera_1.py | Python | bsd-3-clause | 327 | 0.006116 |
""":mod:`earthreader.web.exceptions` --- Exceptions
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
"""
from flask import jsonify
from werkzeug.exceptions import HTTPException
class IteratorNotFound(ValueError):
"""Raised when the iterator does not exist"""
class JsonException(HTTPException):
"""Base exception to return json response when raised.
Exceptions inherit this class must declare `error` and `message`.
"""
def get_response(self, environ=None):
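# Every JsonException subclass is rendered as a 404 response with a JSON
# body carrying its error code and message.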
r = jsonify(error=self.error, message=self.message)
r.status_code = 404
return r
class InvalidCategoryID(ValueError, JsonException):
"""Raised when the category ID is not valid."""
error = 'category-id-invalid'
message = 'Given category id is not valid'
class FeedNotFound(ValueError, JsonException):
"""Raised when the feed is not reachable."""
error = 'feed-not-found'
message = 'The feed you request does not exist'
class EntryNotFound(ValueError, JsonException):
"""Raised when the entry is not reachable."""
error = 'entry-not-found'
message = 'The entry you request does not exist'
class WorkerNotRunning(ValueError, JsonException):
"""Raised when the worker thread is not running."""
error = 'worker-not-running'
message = 'The worker thread that crawls feeds in the background ' \
'is not running.'
| earthreader/web | earthreader/web/exceptions.py | Python | agpl-3.0 | 1,387 | 0 |
# -*- coding: utf-8 -*-
# <nbformat>3.0</nbformat>
# <headingcell level=2>
# Usage of IC 7433
# <codecell>
from __future__ import print_function
from BinPy import *
# <codecell>
# Usage of IC 7433:
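# (The 7433 is a quad 2-input NOR gate IC with open-collector outputs.)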
ic = IC_7433()
print(ic.__doc__)
# <codecell>
# The Pin configuration is:
inp = {2: 0, 3: 0, 5: 0, 6: 0, 7: 0, 8: 1, 9: 1, 11: 1, 12: 1, 14: 1}
# Pin initialization
# Powering up the IC - using -- ic.setIC({14: 1, 7: 0})
ic.setIC({14: 1, 7: 0})
# Setting the inputs of the ic
ic.setIC(inp)
# Draw the IC with the current configuration
ic.drawIC()
# <codecell>
# Run the IC with the current configuration using -- print(ic.run()) --
# Note that ic.run() returns a dict of pin configurations similar to the
# input dict
print(ic.run())
# <codecell>
# Setting the outputs to the current IC configuration using --
# ic.setIC(ic.run()) --
ic.setIC(ic.run())
# Draw the final configuration
ic.drawIC()
# <codecell>
# Setting the outputs to the current IC configuration using --
# ic.setIC(ic.run()) --
ic.setIC(ic.run())
# Draw the final configuration
ic.drawIC()
# Run the IC
print(ic.run())
# <codecell>
# Connector Outputs
c = Connector()
# Set the output connector to a particular pin of the ic
ic.setOutput(1, c)
print(c)
| daj0ker/BinPy | BinPy/examples/source/ic/Series_7400/IC7433.py | Python | bsd-3-clause | 1,247 | 0.001604 |
"""
Django settings for kore project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '9j++(0=dc&6w&113d4bofcjy1xy-pe$frla&=s*8w94=0ym0@&'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'grappelli',
'nested_admin',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.gis',
'raven.contrib.django.raven_compat',
'django_extensions',
'rest_framework',
'corsheaders',
'modeltranslation',
'leaflet',
'munigeo',
'schools',
'django_filters'
]
if DEBUG:
# INSTALLED_APPS.insert(0, 'devserver')
# INSTALLED_APPS.insert(0, 'debug_toolbar')
pass
MIDDLEWARE_CLASSES = (
'django.middleware.locale.LocaleMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'corsheaders.middleware.CorsMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'kore.urls'
WSGI_APPLICATION = 'kore.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.postgis',
'NAME': 'kore',
}
}
# Munigeo
# https://github.com/City-of-Helsinki/munigeo
PROJECTION_SRID = 3067
# If no country specified (for example through a REST API call), use this
# as default.
DEFAULT_COUNTRY = 'fi'
# The word used for municipality in the OCD identifiers in the default country.
DEFAULT_OCD_MUNICIPALITY = 'kunta'
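# Bounding box that imported geographic data is expected to fall within,
# expressed in PROJECTION_SRID coordinates.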
BOUNDING_BOX = [-548576, 6291456, 1548576, 8388608]
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
gettext = lambda s: s
LANGUAGES = (
('fi', gettext('Finnish')),
('sv', gettext('Swedish')),
('en', gettext('English')),
)
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, "var", "static")
LOCALE_PATH = os.path.join(BASE_DIR, "schools", "locale")
REST_FRAMEWORK = {
'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.LimitOffsetPagination',
'PAGE_SIZE': 20,
'MAX_PAGINATE_BY': 1000, # Maximum limit allowed when using `?page_size=xxx`.
'DEFAULT_FILTER_BACKENDS':
('rest_framework.filters.DjangoFilterBackend',),
'DEFAULT_RENDERER_CLASSES': (
'rest_framework.renderers.JSONRenderer',
'rest_framework.renderers.BrowsableAPIRenderer',
)
}
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
os.path.join(BASE_DIR, 'templates'),
],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.contrib.auth.context_processors.auth',
'django.template.context_processors.debug',
'django.template.context_processors.i18n',
'django.template.context_processors.media',
'django.template.context_processors.static',
'django.template.context_processors.tz',
'django.contrib.messages.context_processors.messages',
],
},
},
]
CORS_ORIGIN_ALLOW_ALL = True
# local_settings.py can be used to override environment-specific settings
# like database and email that differ between development and production.
try:
from local_settings import *
except ImportError:
pass
| City-of-Helsinki/kore | kore/settings.py | Python | agpl-3.0 | 4,485 | 0.000892 |
import _plotly_utils.basevalidators
class TextfontValidator(_plotly_utils.basevalidators.CompoundValidator):
def __init__(self, plotly_name="textfont", parent_name="funnelarea", **kwargs):
super(TextfontValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
data_class_str=kwargs.pop("data_class_str", "Textfont"),
data_docs=kwargs.pop(
"data_docs",
"""
color
colorsrc
Sets the source reference on Chart Studio Cloud
for color.
family
HTML font family - the typeface that will be
applied by the web browser. The web browser
will only be able to apply a font if it is
available on the system which it operates.
Provide multiple font families, separated by
commas, to indicate the preference in which to
apply fonts if they aren't available on the
system. The Chart Studio Cloud (at
https://chart-studio.plotly.com or on-premise)
generates images on a server, where only a
select number of fonts are installed and
supported. These include "Arial", "Balto",
"Courier New", "Droid Sans",, "Droid Serif",
"Droid Sans Mono", "Gravitas One", "Old
Standard TT", "Open Sans", "Overpass", "PT Sans
Narrow", "Raleway", "Times New Roman".
familysrc
Sets the source reference on Chart Studio Cloud
for family.
size
sizesrc
Sets the source reference on Chart Studio Cloud
for size.
""",
),
**kwargs
)
| plotly/python-api | packages/python/plotly/plotly/validators/funnelarea/_textfont.py | Python | mit | 1,867 | 0.000536 |
# Copyright 2012 Big Switch Networks, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Mandeep Dhami, Big Switch Networks, Inc.
# @author: Sumit Naiksatam, sumitnaiksatam@gmail.com, Big Switch Networks, Inc.
"""
Neutron REST Proxy Plug-in for Big Switch and FloodLight Controllers.
NeutronRestProxy provides a generic neutron plugin that translates all plugin
function calls to equivalent authenticated REST calls to a set of redundant
external network controllers. It also keeps persistent store for all neutron
state to allow for re-sync of the external controller(s), if required.
The local state on the plugin also allows for local response and fast-fail
semantics where it can be determined based on the local persistent store.
Network controller specific code is decoupled from this plugin and expected
to reside on the controller itself (via the REST interface).
This allows for:
- independent authentication and redundancy schemes between neutron and the
network controller
- independent upgrade/development cycles between neutron and the controller
as it limits the proxy code upgrade requirement to neutron release cycle
and the controller specific code upgrade requirement to controller code
- ability to sync the controller with neutron for independent recovery/reset
External REST API used by proxy is the same API as defined for neutron (JSON
subset) with some additional parameters (gateway on network-create and macaddr
on port-attach) on an additional PUT to do a bulk dump of all persistent data.
"""
import copy
import functools
import httplib
import re
import eventlet
from oslo.config import cfg
from sqlalchemy.orm import exc as sqlexc
from neutron.agent import securitygroups_rpc as sg_rpc
from neutron.api import extensions as neutron_extensions
from neutron.api.rpc.agentnotifiers import dhcp_rpc_agent_api
from neutron.common import constants as const
from neutron.common import exceptions
from neutron.common import rpc as n_rpc
from neutron.common import topics
from neutron.common import utils
from neutron import context as qcontext
from neutron.db import agents_db
from neutron.db import agentschedulers_db
from neutron.db import allowedaddresspairs_db as addr_pair_db
from neutron.db import api as db
from neutron.db import db_base_plugin_v2
from neutron.db import dhcp_rpc_base
from neutron.db import external_net_db
from neutron.db import extradhcpopt_db
from neutron.db import l3_db
from neutron.db import models_v2
from neutron.db import securitygroups_db as sg_db
from neutron.db import securitygroups_rpc_base as sg_rpc_base
from neutron.extensions import allowedaddresspairs as addr_pair
from neutron.extensions import external_net
from neutron.extensions import extra_dhcp_opt as edo_ext
from neutron.extensions import l3
from neutron.extensions import portbindings
from neutron import manager
from neutron.openstack.common import excutils
from neutron.openstack.common import importutils
from neutron.openstack.common import log as logging
from neutron.plugins.bigswitch import config as pl_config
from neutron.plugins.bigswitch.db import porttracker_db
from neutron.plugins.bigswitch import extensions
from neutron.plugins.bigswitch import routerrule_db
from neutron.plugins.bigswitch import servermanager
from neutron.plugins.bigswitch import version
LOG = logging.getLogger(__name__)
SYNTAX_ERROR_MESSAGE = _('Syntax error in server config file, aborting plugin')
METADATA_SERVER_IP = '169.254.169.254'
class AgentNotifierApi(n_rpc.RpcProxy,
sg_rpc.SecurityGroupAgentRpcApiMixin):
BASE_RPC_API_VERSION = '1.1'
def __init__(self, topic):
super(AgentNotifierApi, self).__init__(
topic=topic, default_version=self.BASE_RPC_API_VERSION)
self.topic_port_update = topics.get_topic_name(
topic, topics.PORT, topics.UPDATE)
def port_update(self, context, port):
self.fanout_cast(context,
self.make_msg('port_update',
port=port),
topic=self.topic_port_update)
class RestProxyCallbacks(n_rpc.RpcCallback,
sg_rpc_base.SecurityGroupServerRpcCallbackMixin,
dhcp_rpc_base.DhcpRpcCallbackMixin):
RPC_API_VERSION = '1.1'
def get_port_from_device(self, device):
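# The agent reports devices as 'tap' + a port-id prefix; strip the prefix
# so the (possibly truncated) id can be matched against the ports table.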
port_id = re.sub(r"^tap", "", device)
port = self.get_port_and_sgs(port_id)
if port:
port['device'] = device
return port
def get_port_and_sgs(self, port_id):
"""Get port from database with security group info."""
LOG.debug(_("get_port_and_sgs() called for port_id %s"), port_id)
session = db.get_session()
sg_binding_port = sg_db.SecurityGroupPortBinding.port_id
with session.begin(subtransactions=True):
query = session.query(
models_v2.Port,
sg_db.SecurityGroupPortBinding.security_group_id
)
query = query.outerjoin(sg_db.SecurityGroupPortBinding,
models_v2.Port.id == sg_binding_port)
query = query.filter(models_v2.Port.id.startswith(port_id))
port_and_sgs = query.all()
if not port_and_sgs:
return
port = port_and_sgs[0][0]
plugin = manager.NeutronManager.get_plugin()
port_dict = plugin._make_port_dict(port)
port_dict['security_groups'] = [
sg_id for port_, sg_id in port_and_sgs if sg_id]
port_dict['security_group_rules'] = []
port_dict['security_group_source_groups'] = []
port_dict['fixed_ips'] = [ip['ip_address']
for ip in port['fixed_ips']]
return port_dict
class NeutronRestProxyV2Base(db_base_plugin_v2.NeutronDbPluginV2,
external_net_db.External_net_db_mixin,
routerrule_db.RouterRule_db_mixin):
supported_extension_aliases = ["binding"]
servers = None
def _get_all_data(self, get_ports=True, get_floating_ips=True,
get_routers=True):
admin_context = qcontext.get_admin_context()
networks = []
# this method is used by the ML2 driver so it can't directly invoke
# the self.get_(ports|networks) methods
plugin = manager.NeutronManager.get_plugin()
all_networks = plugin.get_networks(admin_context) or []
for net in all_networks:
mapped_network = self._get_mapped_network_with_subnets(net)
flips_n_ports = mapped_network
if get_floating_ips:
flips_n_ports = self._get_network_with_floatingips(
mapped_network)
if get_ports:
ports = []
net_filter = {'network_id': [net.get('id')]}
net_ports = plugin.get_ports(admin_context,
filters=net_filter) or []
for port in net_ports:
mapped_port = self._map_state_and_status(port)
mapped_port['attachment'] = {
'id': port.get('device_id'),
'mac': port.get('mac_address'),
}
mapped_port = self._extend_port_dict_binding(admin_context,
mapped_port)
ports.append(mapped_port)
flips_n_ports['ports'] = ports
if flips_n_ports:
networks.append(flips_n_ports)
data = {'networks': networks}
if get_routers:
routers = []
all_routers = self.get_routers(admin_context) or []
for router in all_routers:
interfaces = []
mapped_router = self._map_state_and_status(router)
router_filter = {
'device_owner': [const.DEVICE_OWNER_ROUTER_INTF],
'device_id': [router.get('id')]
}
router_ports = self.get_ports(admin_context,
filters=router_filter) or []
for port in router_ports:
net_id = port.get('network_id')
subnet_id = port['fixed_ips'][0]['subnet_id']
intf_details = self._get_router_intf_details(admin_context,
net_id,
subnet_id)
interfaces.append(intf_details)
mapped_router['interfaces'] = interfaces
routers.append(mapped_router)
data.update({'routers': routers})
return data
def _send_all_data(self, send_ports=True, send_floating_ips=True,
send_routers=True, timeout=None,
triggered_by_tenant=None):
"""Pushes all data to network ctrl (networks/ports, ports/attachments).
This gives the controller an option to re-sync it's persistent store
with neutron's current view of that data.
"""
data = self._get_all_data(send_ports, send_floating_ips, send_routers)
data['triggered_by_tenant'] = triggered_by_tenant
errstr = _("Unable to update remote topology: %s")
return self.servers.rest_action('PUT', servermanager.TOPOLOGY_PATH,
data, errstr, timeout=timeout)
def _get_network_with_floatingips(self, network, context=None):
if context is None:
context = qcontext.get_admin_context()
net_id = network['id']
net_filter = {'floating_network_id': [net_id]}
fl_ips = self.get_floatingips(context,
filters=net_filter) or []
network['floatingips'] = fl_ips
return network
def _get_all_subnets_json_for_network(self, net_id, context=None):
if context is None:
context = qcontext.get_admin_context()
# start a sub-transaction to avoid breaking parent transactions
with context.session.begin(subtransactions=True):
subnets = self._get_subnets_by_network(context,
net_id)
subnets_details = []
if subnets:
for subnet in subnets:
subnet_dict = self._make_subnet_dict(subnet)
mapped_subnet = self._map_state_and_status(subnet_dict)
subnets_details.append(mapped_subnet)
return subnets_details
def _get_mapped_network_with_subnets(self, network, context=None):
# if context is not provided, admin context is used
if context is None:
context = qcontext.get_admin_context()
network = self._map_state_and_status(network)
subnets = self._get_all_subnets_json_for_network(network['id'],
context)
network['subnets'] = subnets
for subnet in (subnets or []):
if subnet['gateway_ip']:
# FIX: For backward compatibility with wire protocol
network['gateway'] = subnet['gateway_ip']
break
else:
network['gateway'] = ''
network[external_net.EXTERNAL] = self._network_is_external(
context, network['id'])
# include ML2 segmentation types
network['segmentation_types'] = getattr(self, "segmentation_types", "")
return network
def _send_create_network(self, network, context=None):
tenant_id = network['tenant_id']
mapped_network = self._get_mapped_network_with_subnets(network,
context)
self.servers.rest_create_network(tenant_id, mapped_network)
def _send_update_network(self, network, context=None):
net_id = network['id']
tenant_id = network['tenant_id']
mapped_network = self._get_mapped_network_with_subnets(network,
context)
net_fl_ips = self._get_network_with_floatingips(mapped_network,
context)
self.servers.rest_update_network(tenant_id, net_id, net_fl_ips)
def _send_delete_network(self, network, context=None):
net_id = network['id']
tenant_id = network['tenant_id']
self.servers.rest_delete_network(tenant_id, net_id)
def _map_state_and_status(self, resource):
resource = copy.copy(resource)
resource['state'] = ('UP' if resource.pop('admin_state_up',
True) else 'DOWN')
resource.pop('status', None)
return resource
def _warn_on_state_status(self, resource):
if resource.get('admin_state_up', True) is False:
LOG.warning(_("Setting admin_state_up=False is not supported "
"in this plugin version. Ignoring setting for "
"resource: %s"), resource)
if 'status' in resource:
if resource['status'] != const.NET_STATUS_ACTIVE:
LOG.warning(_("Operational status is internally set by the "
"plugin. Ignoring setting status=%s."),
resource['status'])
def _get_router_intf_details(self, context, intf_id, subnet_id):
# we will use the network id as interface's id
net_id = intf_id
network = self.get_network(context, net_id)
subnet = self.get_subnet(context, subnet_id)
mapped_network = self._get_mapped_network_with_subnets(network)
mapped_subnet = self._map_state_and_status(subnet)
data = {
'id': intf_id,
"network": mapped_network,
"subnet": mapped_subnet
}
return data
def _extend_port_dict_binding(self, context, port):
cfg_vif_type = cfg.CONF.NOVA.vif_type.lower()
if cfg_vif_type not in (portbindings.VIF_TYPE_OVS,
portbindings.VIF_TYPE_IVS):
LOG.warning(_("Unrecognized vif_type in configuration "
"[%s]. Defaulting to ovs."),
cfg_vif_type)
cfg_vif_type = portbindings.VIF_TYPE_OVS
# In ML2, the host_id is already populated
if portbindings.HOST_ID in port:
hostid = port[portbindings.HOST_ID]
else:
hostid = porttracker_db.get_port_hostid(context, port['id'])
if hostid:
port[portbindings.HOST_ID] = hostid
override = self._check_hostvif_override(hostid)
if override:
cfg_vif_type = override
port[portbindings.VIF_TYPE] = cfg_vif_type
port[portbindings.VIF_DETAILS] = {
# TODO(rkukura): Replace with new VIF security details
portbindings.CAP_PORT_FILTER:
'security-group' in self.supported_extension_aliases,
portbindings.OVS_HYBRID_PLUG: True
}
return port
def _check_hostvif_override(self, hostid):
for v in cfg.CONF.NOVA.vif_types:
if hostid in getattr(cfg.CONF.NOVA, "node_override_vif_" + v, []):
return v
return False
def _get_port_net_tenantid(self, context, port):
net = super(NeutronRestProxyV2Base,
self).get_network(context, port["network_id"])
return net['tenant_id']
def async_port_create(self, tenant_id, net_id, port):
try:
self.servers.rest_create_port(tenant_id, net_id, port)
except servermanager.RemoteRestError as e:
# 404 should never be received on a port create unless
# there are inconsistencies between the data in neutron
# and the data in the backend.
# Run a sync to get it consistent.
if (cfg.CONF.RESTPROXY.auto_sync_on_failure and
e.status == httplib.NOT_FOUND and
servermanager.NXNETWORK in e.reason):
LOG.error(_("Inconsistency with backend controller "
"triggering full synchronization."))
# args depend on if we are operating in ML2 driver
# or as the full plugin
topoargs = self.servers.get_topo_function_args
self._send_all_data(
send_ports=topoargs['get_ports'],
send_floating_ips=topoargs['get_floating_ips'],
send_routers=topoargs['get_routers'],
triggered_by_tenant=tenant_id
)
# If the full sync worked, the port will be created
# on the controller so it can be safely marked as active
else:
# Any errors that don't result in a successful auto-sync
# require that the port be placed into the error state.
LOG.error(
_("NeutronRestProxyV2: Unable to create port: %s"), e)
try:
self._set_port_status(port['id'], const.PORT_STATUS_ERROR)
except exceptions.PortNotFound:
# If port is already gone from DB and there was an error
# creating on the backend, everything is already consistent
pass
return
new_status = (const.PORT_STATUS_ACTIVE if port['state'] == 'UP'
else const.PORT_STATUS_DOWN)
try:
self._set_port_status(port['id'], new_status)
except exceptions.PortNotFound:
# This port was deleted before the create made it to the controller
# so it now needs to be deleted since the normal delete request
# would have deleted a non-existent port.
self.servers.rest_delete_port(tenant_id, net_id, port['id'])
# NOTE(kevinbenton): workaround for eventlet/mysql deadlock
@utils.synchronized('bsn-port-barrier')
def _set_port_status(self, port_id, status):
session = db.get_session()
try:
port = session.query(models_v2.Port).filter_by(id=port_id).one()
port['status'] = status
session.flush()
except sqlexc.NoResultFound:
raise exceptions.PortNotFound(port_id=port_id)
def put_context_in_serverpool(f):
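"""Decorator that stashes the request context in the server pool
before invoking the wrapped method, so REST calls made to the
backend controller during the request can reference it."""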
@functools.wraps(f)
def wrapper(self, context, *args, **kwargs):
self.servers.set_context(context)
return f(self, context, *args, **kwargs)
return wrapper
class NeutronRestProxyV2(NeutronRestProxyV2Base,
addr_pair_db.AllowedAddressPairsMixin,
extradhcpopt_db.ExtraDhcpOptMixin,
agentschedulers_db.DhcpAgentSchedulerDbMixin,
sg_rpc_base.SecurityGroupServerRpcMixin):
_supported_extension_aliases = ["external-net", "router", "binding",
"router_rules", "extra_dhcp_opt", "quotas",
"dhcp_agent_scheduler", "agent",
"security-group", "allowed-address-pairs"]
@property
def supported_extension_aliases(self):
if not hasattr(self, '_aliases'):
aliases = self._supported_extension_aliases[:]
sg_rpc.disable_security_group_extension_by_config(aliases)
self._aliases = aliases
return self._aliases
def __init__(self):
super(NeutronRestProxyV2, self).__init__()
LOG.info(_('NeutronRestProxy: Starting plugin. Version=%s'),
version.version_string_with_vcs())
pl_config.register_config()
self.evpool = eventlet.GreenPool(cfg.CONF.RESTPROXY.thread_pool_size)
# Include the Big Switch Extensions path in the api_extensions
neutron_extensions.append_api_extensions_path(extensions.__path__)
self.add_meta_server_route = cfg.CONF.RESTPROXY.add_meta_server_route
# init network ctrl connections
self.servers = servermanager.ServerPool()
self.servers.get_topo_function = self._get_all_data
self.servers.get_topo_function_args = {'get_ports': True,
'get_floating_ips': True,
'get_routers': True}
self.network_scheduler = importutils.import_object(
cfg.CONF.network_scheduler_driver
)
# setup rpc for security and DHCP agents
self._setup_rpc()
if cfg.CONF.RESTPROXY.sync_data:
self._send_all_data()
LOG.debug(_("NeutronRestProxyV2: initialization done"))
def _setup_rpc(self):
self.conn = n_rpc.create_connection(new=True)
self.topic = topics.PLUGIN
self.notifier = AgentNotifierApi(topics.AGENT)
# init dhcp agent support
self._dhcp_agent_notifier = dhcp_rpc_agent_api.DhcpAgentNotifyAPI()
self.agent_notifiers[const.AGENT_TYPE_DHCP] = (
self._dhcp_agent_notifier
)
self.endpoints = [RestProxyCallbacks(),
agents_db.AgentExtRpcCallback()]
self.conn.create_consumer(self.topic, self.endpoints,
fanout=False)
# Consume from all consumers in threads
self.conn.consume_in_threads()
@put_context_in_serverpool
def create_network(self, context, network):
"""Create a network.
Network represents an L2 network segment which can have a set of
subnets and ports associated with it.
:param context: neutron api request context
:param network: dictionary describing the network
:returns: a sequence of mappings with the following signature:
{
"id": UUID representing the network.
"name": Human-readable name identifying the network.
"tenant_id": Owner of network. NOTE: only admin user can specify
a tenant_id other than its own.
"admin_state_up": Sets admin state of network.
if down, network does not forward packets.
"status": Indicates whether network is currently operational
(values are "ACTIVE", "DOWN", "BUILD", and "ERROR")
"subnets": Subnets associated with this network.
}
:raises: RemoteRestError
"""
LOG.debug(_("NeutronRestProxyV2: create_network() called"))
self._warn_on_state_status(network['network'])
with context.session.begin(subtransactions=True):
self._ensure_default_security_group(
context,
network['network']["tenant_id"]
)
# create network in DB
new_net = super(NeutronRestProxyV2, self).create_network(context,
network)
self._process_l3_create(context, new_net, network['network'])
# create network on the network controller
self._send_create_network(new_net, context)
# return created network
return new_net
@put_context_in_serverpool
def update_network(self, context, net_id, network):
"""Updates the properties of a particular Virtual Network.
:param context: neutron api request context
:param net_id: uuid of the network to update
:param network: dictionary describing the updates
:returns: a sequence of mappings with the following signature:
{
"id": UUID representing the network.
"name": Human-readable name identifying the network.
"tenant_id": Owner of network. NOTE: only admin user can
specify a tenant_id other than its own.
"admin_state_up": Sets admin state of network.
if down, network does not forward packets.
"status": Indicates whether network is currently operational
(values are "ACTIVE", "DOWN", "BUILD", and "ERROR")
"subnets": Subnets associated with this network.
}
:raises: exceptions.NetworkNotFound
:raises: RemoteRestError
"""
LOG.debug(_("NeutronRestProxyV2.update_network() called"))
self._warn_on_state_status(network['network'])
session = context.session
with session.begin(subtransactions=True):
new_net = super(NeutronRestProxyV2, self).update_network(
context, net_id, network)
self._process_l3_update(context, new_net, network['network'])
# update network on network controller
self._send_update_network(new_net, context)
return new_net
# NOTE(kevinbenton): workaround for eventlet/mysql deadlock
@utils.synchronized('bsn-port-barrier')
@put_context_in_serverpool
def delete_network(self, context, net_id):
"""Delete a network.
:param context: neutron api request context
:param id: UUID representing the network to delete.
:returns: None
:raises: exceptions.NetworkInUse
:raises: exceptions.NetworkNotFound
:raises: RemoteRestError
"""
LOG.debug(_("NeutronRestProxyV2: delete_network() called"))
# Validate args
orig_net = super(NeutronRestProxyV2, self).get_network(context, net_id)
with context.session.begin(subtransactions=True):
self._process_l3_delete(context, net_id)
ret_val = super(NeutronRestProxyV2, self).delete_network(context,
net_id)
self._send_delete_network(orig_net, context)
return ret_val
@put_context_in_serverpool
def create_port(self, context, port):
"""Create a port, which is a connection point of a device
(e.g., a VM NIC) to attach to a L2 Neutron network.
:param context: neutron api request context
:param port: dictionary describing the port
:returns:
{
"id": uuid representing the port.
"network_id": uuid of network.
"tenant_id": tenant_id
"mac_address": mac address to use on this port.
"admin_state_up": Sets admin state of port. if down, port
does not forward packets.
"status": indicates whether port is currently operational
(limit values to "ACTIVE", "DOWN", "BUILD", and "ERROR")
"fixed_ips": list of subnet ID"s and IP addresses to be used on
this port
"device_id": identifies the device (e.g., virtual server) using
this port.
}
:raises: exceptions.NetworkNotFound
:raises: exceptions.StateInvalid
:raises: RemoteRestError
"""
LOG.debug(_("NeutronRestProxyV2: create_port() called"))
# Update DB in new session so exceptions rollback changes
with context.session.begin(subtransactions=True):
self._ensure_default_security_group_on_port(context, port)
sgids = self._get_security_groups_on_port(context, port)
# non-router port status is set to pending. it is then updated
# after the async rest call completes. router ports are synchronous
if port['port']['device_owner'] == l3_db.DEVICE_OWNER_ROUTER_INTF:
port['port']['status'] = const.PORT_STATUS_ACTIVE
else:
port['port']['status'] = const.PORT_STATUS_BUILD
dhcp_opts = port['port'].get(edo_ext.EXTRADHCPOPTS, [])
new_port = super(NeutronRestProxyV2, self).create_port(context,
port)
self._process_port_create_security_group(context, new_port, sgids)
if (portbindings.HOST_ID in port['port']
and 'id' in new_port):
host_id = port['port'][portbindings.HOST_ID]
porttracker_db.put_port_hostid(context, new_port['id'],
host_id)
new_port[addr_pair.ADDRESS_PAIRS] = (
self._process_create_allowed_address_pairs(
context, new_port,
port['port'].get(addr_pair.ADDRESS_PAIRS)))
self._process_port_create_extra_dhcp_opts(context, new_port,
dhcp_opts)
new_port = self._extend_port_dict_binding(context, new_port)
net = super(NeutronRestProxyV2,
self).get_network(context, new_port["network_id"])
if self.add_meta_server_route:
if new_port['device_owner'] == const.DEVICE_OWNER_DHCP:
destination = METADATA_SERVER_IP + '/32'
self._add_host_route(context, destination, new_port)
# create on network ctrl
mapped_port = self._map_state_and_status(new_port)
# ports have to be created synchronously when creating a router
# port since adding router interfaces is a multi-call process
if mapped_port['device_owner'] == l3_db.DEVICE_OWNER_ROUTER_INTF:
self.servers.rest_create_port(net["tenant_id"],
new_port["network_id"],
mapped_port)
else:
self.evpool.spawn_n(self.async_port_create, net["tenant_id"],
new_port["network_id"], mapped_port)
self.notify_security_groups_member_updated(context, new_port)
return new_port
def get_port(self, context, id, fields=None):
with context.session.begin(subtransactions=True):
port = super(NeutronRestProxyV2, self).get_port(context, id,
fields)
self._extend_port_dict_binding(context, port)
return self._fields(port, fields)
def get_ports(self, context, filters=None, fields=None):
with context.session.begin(subtransactions=True):
ports = super(NeutronRestProxyV2, self).get_ports(context, filters,
fields)
for port in ports:
self._extend_port_dict_binding(context, port)
return [self._fields(port, fields) for port in ports]
@put_context_in_serverpool
def update_port(self, context, port_id, port):
"""Update values of a port.
:param context: neutron api request context
:param id: UUID representing the port to update.
:param port: dictionary with keys indicating fields to update.
:returns: a mapping sequence with the following signature:
{
"id": uuid representing the port.
"network_id": uuid of network.
"tenant_id": tenant_id
"mac_address": mac address to use on this port.
"admin_state_up": sets admin state of port. if down, port
does not forward packets.
"status": indicates whether port is currently operational
(limit values to "ACTIVE", "DOWN", "BUILD", and "ERROR")
"fixed_ips": list of subnet ID's and IP addresses to be used on
this port
"device_id": identifies the device (e.g., virtual server) using
this port.
}
:raises: exceptions.StateInvalid
:raises: exceptions.PortNotFound
:raises: RemoteRestError
"""
LOG.debug(_("NeutronRestProxyV2: update_port() called"))
self._warn_on_state_status(port['port'])
# Validate Args
orig_port = super(NeutronRestProxyV2, self).get_port(context, port_id)
with context.session.begin(subtransactions=True):
# Update DB
new_port = super(NeutronRestProxyV2,
self).update_port(context, port_id, port)
ctrl_update_required = False
if addr_pair.ADDRESS_PAIRS in port['port']:
ctrl_update_required |= (
self.update_address_pairs_on_port(context, port_id, port,
orig_port, new_port))
self._update_extra_dhcp_opts_on_port(context, port_id, port,
new_port)
old_host_id = porttracker_db.get_port_hostid(context,
orig_port['id'])
if (portbindings.HOST_ID in port['port']
and 'id' in new_port):
host_id = port['port'][portbindings.HOST_ID]
porttracker_db.put_port_hostid(context, new_port['id'],
host_id)
if old_host_id != host_id:
ctrl_update_required = True
if (new_port.get("device_id") != orig_port.get("device_id") and
orig_port.get("device_id")):
ctrl_update_required = True
if ctrl_update_required:
# tenant_id must come from network in case network is shared
net_tenant_id = self._get_port_net_tenantid(context, new_port)
new_port = self._extend_port_dict_binding(context, new_port)
mapped_port = self._map_state_and_status(new_port)
self.servers.rest_update_port(net_tenant_id,
new_port["network_id"],
mapped_port)
agent_update_required = self.update_security_group_on_port(
context, port_id, port, orig_port, new_port)
agent_update_required |= self.is_security_group_member_updated(
context, orig_port, new_port)
# return new_port
return new_port
# NOTE(kevinbenton): workaround for eventlet/mysql deadlock
@utils.synchronized('bsn-port-barrier')
@put_context_in_serverpool
def delete_port(self, context, port_id, l3_port_check=True):
"""Delete a port.
:param context: neutron api request context
:param id: UUID representing the port to delete.
:raises: exceptions.PortInUse
:raises: exceptions.PortNotFound
:raises: exceptions.NetworkNotFound
:raises: RemoteRestError
"""
LOG.debug(_("NeutronRestProxyV2: delete_port() called"))
# if needed, check to see if this is a port owned by
# an l3-router. If so, we should prevent deletion.
if l3_port_check:
self.prevent_l3_port_deletion(context, port_id)
with context.session.begin(subtransactions=True):
router_ids = self.disassociate_floatingips(
context, port_id, do_notify=False)
self._delete_port_security_group_bindings(context, port_id)
port = super(NeutronRestProxyV2, self).get_port(context, port_id)
# Tenant ID must come from network in case the network is shared
tenid = self._get_port_net_tenantid(context, port)
self._delete_port(context, port_id)
self.servers.rest_delete_port(tenid, port['network_id'], port_id)
# now that we've left db transaction, we are safe to notify
self.notify_routers_updated(context, router_ids)
@put_context_in_serverpool
def create_subnet(self, context, subnet):
LOG.debug(_("NeutronRestProxyV2: create_subnet() called"))
self._warn_on_state_status(subnet['subnet'])
with context.session.begin(subtransactions=True):
# create subnet in DB
new_subnet = super(NeutronRestProxyV2,
self).create_subnet(context, subnet)
net_id = new_subnet['network_id']
orig_net = super(NeutronRestProxyV2,
self).get_network(context, net_id)
# update network on network controller
self._send_update_network(orig_net, context)
return new_subnet
@put_context_in_serverpool
def update_subnet(self, context, id, subnet):
LOG.debug(_("NeutronRestProxyV2: update_subnet() called"))
self._warn_on_state_status(subnet['subnet'])
with context.session.begin(subtransactions=True):
# update subnet in DB
new_subnet = super(NeutronRestProxyV2,
self).update_subnet(context, id, subnet)
net_id = new_subnet['network_id']
orig_net = super(NeutronRestProxyV2,
self).get_network(context, net_id)
# update network on network controller
self._send_update_network(orig_net, context)
return new_subnet
# NOTE(kevinbenton): workaround for eventlet/mysql deadlock
@utils.synchronized('bsn-port-barrier')
@put_context_in_serverpool
def delete_subnet(self, context, id):
LOG.debug(_("NeutronRestProxyV2: delete_subnet() called"))
orig_subnet = super(NeutronRestProxyV2, self).get_subnet(context, id)
net_id = orig_subnet['network_id']
with context.session.begin(subtransactions=True):
# delete subnet in DB
super(NeutronRestProxyV2, self).delete_subnet(context, id)
orig_net = super(NeutronRestProxyV2, self).get_network(context,
net_id)
# update network on network controller - exception will rollback
self._send_update_network(orig_net, context)
def _get_tenant_default_router_rules(self, tenant):
rules = cfg.CONF.ROUTER.tenant_default_router_rule
defaultset = []
tenantset = []
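# Each rule has the form tenantid:source:destination:action[:nexthops],
# where nexthops is a comma-separated list and a tenantid of '*' marks a
# rule belonging to the default set.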
for rule in rules:
items = rule.split(':')
if len(items) == 5:
(tenantid, source, destination, action, nexthops) = items
elif len(items) == 4:
(tenantid, source, destination, action) = items
nexthops = ''
else:
continue
parsedrule = {'source': source,
'destination': destination, 'action': action,
'nexthops': nexthops.split(',')}
if parsedrule['nexthops'][0] == '':
parsedrule['nexthops'] = []
if tenantid == '*':
defaultset.append(parsedrule)
if tenantid == tenant:
tenantset.append(parsedrule)
if tenantset:
return tenantset
return defaultset
@put_context_in_serverpool
def create_router(self, context, router):
LOG.debug(_("NeutronRestProxyV2: create_router() called"))
self._warn_on_state_status(router['router'])
tenant_id = self._get_tenant_id_for_create(context, router["router"])
# set default router rules
rules = self._get_tenant_default_router_rules(tenant_id)
router['router']['router_rules'] = rules
with context.session.begin(subtransactions=True):
# create router in DB
new_router = super(NeutronRestProxyV2, self).create_router(context,
router)
mapped_router = self._map_state_and_status(new_router)
self.servers.rest_create_router(tenant_id, mapped_router)
# return created router
return new_router
@put_context_in_serverpool
def update_router(self, context, router_id, router):
LOG.debug(_("NeutronRestProxyV2.update_router() called"))
self._warn_on_state_status(router['router'])
orig_router = super(NeutronRestProxyV2, self).get_router(context,
router_id)
tenant_id = orig_router["tenant_id"]
with context.session.begin(subtransactions=True):
new_router = super(NeutronRestProxyV2,
self).update_router(context, router_id, router)
router = self._map_state_and_status(new_router)
# update router on network controller
self.servers.rest_update_router(tenant_id, router, router_id)
# return updated router
return new_router
# NOTE(kevinbenton): workaround for eventlet/mysql deadlock.
# delete_router ends up calling _delete_port instead of delete_port.
@utils.synchronized('bsn-port-barrier')
@put_context_in_serverpool
def delete_router(self, context, router_id):
LOG.debug(_("NeutronRestProxyV2: delete_router() called"))
with context.session.begin(subtransactions=True):
orig_router = self._get_router(context, router_id)
tenant_id = orig_router["tenant_id"]
# Ensure that the router is not used
router_filter = {'router_id': [router_id]}
fips = self.get_floatingips_count(context.elevated(),
filters=router_filter)
if fips:
raise l3.RouterInUse(router_id=router_id)
device_owner = l3_db.DEVICE_OWNER_ROUTER_INTF
device_filter = {'device_id': [router_id],
'device_owner': [device_owner]}
ports = self.get_ports_count(context.elevated(),
filters=device_filter)
if ports:
raise l3.RouterInUse(router_id=router_id)
ret_val = super(NeutronRestProxyV2,
self).delete_router(context, router_id)
# delete from network ctrl
self.servers.rest_delete_router(tenant_id, router_id)
return ret_val
@put_context_in_serverpool
def add_router_interface(self, context, router_id, interface_info):
LOG.debug(_("NeutronRestProxyV2: add_router_interface() called"))
# Validate args
router = self._get_router(context, router_id)
tenant_id = router['tenant_id']
with context.session.begin(subtransactions=True):
# create interface in DB
new_intf_info = super(NeutronRestProxyV2,
self).add_router_interface(context,
router_id,
interface_info)
port = self._get_port(context, new_intf_info['port_id'])
net_id = port['network_id']
subnet_id = new_intf_info['subnet_id']
# we will use the port's network id as interface's id
interface_id = net_id
intf_details = self._get_router_intf_details(context,
interface_id,
subnet_id)
# create interface on the network controller
self.servers.rest_add_router_interface(tenant_id, router_id,
intf_details)
return new_intf_info
@put_context_in_serverpool
def remove_router_interface(self, context, router_id, interface_info):
LOG.debug(_("NeutronRestProxyV2: remove_router_interface() called"))
# Validate args
router = self._get_router(context, router_id)
tenant_id = router['tenant_id']
# we will first get the interface identifier before deleting in the DB
if not interface_info:
msg = _("Either subnet_id or port_id must be specified")
raise exceptions.BadRequest(resource='router', msg=msg)
if 'port_id' in interface_info:
port = self._get_port(context, interface_info['port_id'])
interface_id = port['network_id']
elif 'subnet_id' in interface_info:
subnet = self._get_subnet(context, interface_info['subnet_id'])
interface_id = subnet['network_id']
else:
msg = _("Either subnet_id or port_id must be specified")
raise exceptions.BadRequest(resource='router', msg=msg)
with context.session.begin(subtransactions=True):
# remove router in DB
del_ret = super(NeutronRestProxyV2,
self).remove_router_interface(context,
router_id,
interface_info)
# create router on the network controller
self.servers.rest_remove_router_interface(tenant_id, router_id,
interface_id)
return del_ret
@put_context_in_serverpool
def create_floatingip(self, context, floatingip):
LOG.debug(_("NeutronRestProxyV2: create_floatingip() called"))
with context.session.begin(subtransactions=True):
# create floatingip in DB
new_fl_ip = super(NeutronRestProxyV2,
self).create_floatingip(context, floatingip)
# create floatingip on the network controller
try:
if 'floatingip' in self.servers.get_capabilities():
self.servers.rest_create_floatingip(
new_fl_ip['tenant_id'], new_fl_ip)
else:
self._send_floatingip_update(context)
except servermanager.RemoteRestError as e:
with excutils.save_and_reraise_exception():
LOG.error(
_("NeutronRestProxyV2: Unable to create remote "
"floating IP: %s"), e)
# return created floating IP
return new_fl_ip
@put_context_in_serverpool
def update_floatingip(self, context, id, floatingip):
LOG.debug(_("NeutronRestProxyV2: update_floatingip() called"))
with context.session.begin(subtransactions=True):
# update floatingip in DB
new_fl_ip = super(NeutronRestProxyV2,
self).update_floatingip(context, id, floatingip)
# update network on network controller
if 'floatingip' in self.servers.get_capabilities():
self.servers.rest_update_floatingip(new_fl_ip['tenant_id'],
new_fl_ip, id)
else:
self._send_floatingip_update(context)
return new_fl_ip
@put_context_in_serverpool
def delete_floatingip(self, context, id):
LOG.debug(_("NeutronRestProxyV2: delete_floatingip() called"))
with context.session.begin(subtransactions=True):
# delete floating IP in DB
old_fip = super(NeutronRestProxyV2, self).get_floatingip(context,
id)
super(NeutronRestProxyV2, self).delete_floatingip(context, id)
# update network on network controller
if 'floatingip' in self.servers.get_capabilities():
self.servers.rest_delete_floatingip(old_fip['tenant_id'], id)
else:
self._send_floatingip_update(context)
@put_context_in_serverpool
def disassociate_floatingips(self, context, port_id, do_notify=True):
LOG.debug(_("NeutronRestProxyV2: disassociate_floatingips() called"))
router_ids = super(NeutronRestProxyV2, self).disassociate_floatingips(
context, port_id, do_notify=do_notify)
self._send_floatingip_update(context)
return router_ids
# overriding method from l3_db as original method calls
# self.delete_floatingip() which in turn calls self.delete_port() which
# is locked with 'bsn-port-barrier'
@put_context_in_serverpool
def delete_disassociated_floatingips(self, context, network_id):
query = self._model_query(context, l3_db.FloatingIP)
query = query.filter_by(floating_network_id=network_id,
fixed_port_id=None,
router_id=None)
for fip in query:
context.session.delete(fip)
self._delete_port(context.elevated(), fip['floating_port_id'])
def _send_floatingip_update(self, context):
try:
ext_net_id = self.get_external_network_id(context)
if ext_net_id:
# Use the elevated state of the context for the ext_net query
admin_context = context.elevated()
ext_net = super(NeutronRestProxyV2,
self).get_network(admin_context, ext_net_id)
# update external network on network controller
self._send_update_network(ext_net, admin_context)
except exceptions.TooManyExternalNetworks:
# get_external_network can raise errors when multiple external
# networks are detected, which isn't supported by the Plugin
LOG.error(_("NeutronRestProxyV2: too many external networks"))
def _add_host_route(self, context, destination, port):
subnet = {}
for fixed_ip in port['fixed_ips']:
subnet_id = fixed_ip['subnet_id']
nexthop = fixed_ip['ip_address']
subnet['host_routes'] = [{'destination': destination,
'nexthop': nexthop}]
updated_subnet = self.update_subnet(context,
subnet_id,
{'subnet': subnet})
payload = {'subnet': updated_subnet}
self._dhcp_agent_notifier.notify(context, payload,
'subnet.update.end')
LOG.debug(_("Adding host route: "))
LOG.debug(_("Destination:%(dst)s nexthop:%(next)s"),
{'dst': destination, 'next': nexthop})
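
# Illustrative sketch (not part of the original plugin): the floating-IP
# handlers above all gate on backend capabilities the same way. 'servers'
# stands in for self.servers, and the two callables stand in for the
# dedicated REST call and the full external-network resend fallback.
def _sync_floatingip(servers, rest_call, full_update):
    if 'floatingip' in servers.get_capabilities():
        rest_call()    # e.g. servers.rest_create_floatingip(tenant_id, fip)
    else:
        full_update()  # equivalent to self._send_floatingip_update(context)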
| shakamunyi/neutron-dvr | neutron/plugins/bigswitch/plugin.py | Python | apache-2.0 | 51,022 | 0.000098 |
from troposphere import Tags,FindInMap, Ref, Template, Parameter,ImportValue, Ref, Output
from troposphere.efs import FileSystem, MountTarget
from troposphere.ec2 import SecurityGroup, SecurityGroupRule, Instance, Subnet
from create import export_ref, import_ref
from create.network import AclFactory, assoc_nacl_subnet
def efs_setup(template, ops, app_cfn_options, stack_name, stack_setup):
# Variable Declarations
vpc_id=ops.get('vpc_id')
efs_sg = app_cfn_options.network_names['tcpstacks'][stack_name]['sg_name']
efs_acl = app_cfn_options.network_names['tcpstacks'][stack_name]['nacl_name']
    # Create EFS FileSystem
efs_fs=FileSystem(
title='{}{}'.format(ops.app_name, stack_name),
FileSystemTags=Tags(Name='{}-{}'.format(ops.app_name, stack_name))
)
template.add_resource(efs_fs)
export_ref(template, '{}{}{}'.format(ops.app_name,stack_name,"Endpoint"), value=Ref(efs_fs), desc="Endpoint for EFS FileSystem")
# EFS FS Security Groups
efs_security_group=SecurityGroup(
title=efs_sg,
GroupDescription='Allow Access',
VpcId=vpc_id,
Tags=Tags(Name=efs_sg)
)
template.add_resource(efs_security_group)
export_ref(template, efs_sg, value=Ref(efs_sg), desc="Export for EFS Security Group")
# Create Network ACL for EFS Stack
efs_nacl = AclFactory(
template,
name=efs_acl,
vpc_id=ops.vpc_id,
in_networks=[val for key, val in sorted(ops.app_networks.items())],
in_ports=stack_setup['ports'],
out_ports=ops.out_ports,
out_networks=[val for key, val in sorted(ops.app_networks.items())],
ssh_hosts=ops.get("deploy_hosts"),
)
export_ref(
template,
export_name=efs_acl,
value=Ref(efs_acl),
desc="{}{} stack".format("NetACL for", stack_name)
)
# Create Subnets for Mount Targets
for k, v in ops['tcpstacks']['EFS']['networks'].items():
efs_subnet=Subnet(
title='{}{}{}{}'.format(ops.app_name, stack_name, "MountTargetSubnet", k.split("-")[-1]),
AvailabilityZone=k,
CidrBlock=v,
VpcId=vpc_id,
Tags=Tags(Name='{}-{}-{}-{}'.format(ops.app_name, stack_name, "MountTargetSubnet", k.split("-")[-1]))
)
template.add_resource(efs_subnet)
assoc_name = '{}{}{}'.format(stack_name,"AclAssoc",k.split("-")[-1])
assoc_nacl_subnet(template, assoc_name, Ref(efs_acl), Ref(efs_subnet))
efs_mount_target=MountTarget(
title='{}{}{}'.format(ops.app_name, "EFSMountTarget", k.split("-")[-1]),
FileSystemId=Ref(efs_fs),
SecurityGroups=[Ref(efs_security_group)],
SubnetId=Ref(efs_subnet)
)
template.add_resource(efs_mount_target)
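
# Minimal standalone sketch (not part of this module): the core troposphere
# EFS pattern that efs_setup() wires together, without the project-specific
# ops/app_cfn_options helpers. The subnet and security-group IDs below are
# placeholder assumptions.
def minimal_efs_template(subnet_id="subnet-12345678", sg_id="sg-12345678"):
    t = Template()
    fs = t.add_resource(FileSystem(
        "DemoFileSystem",
        FileSystemTags=Tags(Name="demo-efs"),
    ))
    t.add_resource(MountTarget(
        "DemoMountTarget",
        FileSystemId=Ref(fs),
        SecurityGroups=[sg_id],
        SubnetId=subnet_id,
    ))
    return t.to_json()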
| gotropo/gotropo | create/efs.py | Python | gpl-3.0 | 2,807 | 0.006769 |
import os
import sys
import logging
from pyomo.environ import *
from pyomo.opt import TerminationCondition
import numpy as np
import pandas as pd
class CALVIN():
def __init__(self, linksfile, ic=None, log_name="calvin"):
"""
Initialize CALVIN model object.
:param linksfile: (string) CSV file containing network link information
:param ic: (dict) Initial storage conditions for surface reservoirs
only used for annual optimization
:param log_name: A name for a logger - will be used to keep logs from different model runs separate in files.
Defaults to "calvin", which results in a log file in the current working directory named "calvin.log".
You can change this each time you instantiate the CALVIN class if you want to output separate logs
for different runs. Otherwise, all results will be appended to the log file (not overwritten). If you
run multiple copies of CALVIN simultaneously, make sure to change this, or you could get errors writing
to the log file.
Do not provide a full path to a log file here because this value is also used in a way that is *not* a
file path. If being able to specify a full path is important for your workflow, please raise a GitHub
issue. It could be supported, but there is no need at this moment.
:returns: CALVIN model object
"""
# set up logging code
self.log = logging.getLogger(log_name)
if not self.log.hasHandlers(): # hasHandlers will only be True if someone already called CALVIN with the same log_name in the same session
self.log.setLevel("DEBUG")
screen_handler = logging.StreamHandler(sys.stdout)
screen_handler.setLevel(logging.INFO)
screen_formatter = logging.Formatter('%(levelname)s - %(message)s')
screen_handler.setFormatter(screen_formatter)
self.log.addHandler(screen_handler)
file_handler = logging.FileHandler("{}.log".format(log_name))
file_handler.setLevel(logging.DEBUG)
file_formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
file_handler.setFormatter(file_formatter)
self.log.addHandler(file_handler)
df = pd.read_csv(linksfile)
df['link'] = df.i.map(str) + '_' + df.j.map(str) + '_' + df.k.map(str)
df.set_index('link', inplace=True)
self.df = df
self.linksfile = os.path.splitext(linksfile)[0] # filename w/o extension
# self.T = len(self.df)
SR_stats = pd.read_csv('calvin/data/SR_stats.csv', index_col=0).to_dict()
self.min_storage = SR_stats['min']
self.max_storage = SR_stats['max']
if ic:
self.apply_ic(ic)
# a few network fixes to make things work
self.add_ag_region_sinks()
self.fix_hydropower_lbs()
self.nodes = pd.unique(df[['i','j']].values.ravel()).tolist()
self.links = list(zip(df.i,df.j,df.k))
self.networkcheck() # make sure things aren't broken
def apply_ic(self, ic):
"""
Set initial storage conditions.
:param ic: (dict) initial storage values
:returns: nothing, but modifies the model object
"""
for k in ic:
ix = (self.df.i.str.contains('INITIAL') &
self.df.j.str.contains(k))
self.df.loc[ix, ['lower_bound','upper_bound']] = ic[k]
def inflow_multiplier(self, x):
"""
Multiply all network inflows by a constant.
:param x: (float) value to multiply inflows
:returns: nothing, but modifies the model object
"""
ix = self.df.i.str.contains('INFLOW')
self.df.loc[ix, ['lower_bound','upper_bound']] *= x
def eop_constraint_multiplier(self, x):
"""
Set end-of-period storage constraints as a fraction of maximum
available storage. Needed for limited foresight (annual) optimization.
:param x: (float) fraction of maximum storage to set lower bound
:returns: nothing, but modifies the model object
"""
for k in self.max_storage:
ix = (self.df.i.str.contains(k) &
self.df.j.str.contains('FINAL'))
lb = self.min_storage[k] + (self.max_storage[k]-self.min_storage[k])*x
self.df.loc[ix,'lower_bound'] = lb
self.df.loc[ix,'upper_bound'] = self.max_storage[k]
def no_gw_overdraft(self):
"""
Impose constraints to prevent groundwater overdraft
(not currently implemented)
"""
pass
def networkcheck(self):
"""
Confirm constraint feasibility for the model object.
(No inputs or outputs)
:raises: ValueError when infeasibilities are identified.
"""
nodes = self.nodes
links = self.df.values
num_in = {n: 0 for n in nodes}
num_out = {n: 0 for n in nodes}
lb_in = {n: 0 for n in nodes}
lb_out = {n: 0 for n in nodes}
ub_in = {n: 0 for n in nodes}
ub_out = {n: 0 for n in nodes}
# loop over links
for l in links:
lb = float(l[5])
ub = float(l[6])
num_in[l[1]] += 1
lb_in[l[1]] += lb
ub_in[l[1]] += ub
num_out[l[0]] += 1
lb_out[l[0]] += lb
ub_out[l[0]] += ub
if lb > ub:
raise ValueError('lb > ub for link %s' % (l[0]+'-'+l[1]))
for n in nodes:
if num_in[n] == 0 and n not in ['SOURCE','SINK']:
raise ValueError('no incoming link for ' + n)
if num_out[n] == 0 and n not in ['SOURCE','SINK']:
raise ValueError('no outgoing link for ' + n)
if ub_in[n] < lb_out[n]:
raise ValueError('ub_in < lb_out for %s (%d < %d)' % (n, ub_in[n], lb_out[n]))
if lb_in[n] > ub_out[n]:
raise ValueError('lb_in > ub_out for %s (%d > %d)' % (n, lb_in[n], ub_out[n]))
def add_ag_region_sinks(self):
"""
Hack to get rid of surplus water at no cost from agricultural regions.
Called internally when model is initialized.
:returns: nothing, but modifies the model object
"""
df = self.df
links = df[df.i.str.contains('HSU') & ~df.j.str.contains('DBUG')].copy(deep=True)
if not links.empty:
maxub = links.upper_bound.max()
links.j = links.apply(lambda l: 'SINK.'+l.i.split('.')[1], axis=1)
links.cost = 0.0
links.amplitude = 1.0
links.lower_bound = 0.0
links.upper_bound = maxub
links['link'] = links.i.map(str) + '_' + links.j.map(str) + '_' + links.k.map(str)
links.set_index('link', inplace=True)
self.df = self.df.append(links.drop_duplicates())
def fix_hydropower_lbs(self):
"""
Hack to fix lower bound constraints on piecewise hydropower links.
Storage piecewise links > 0 should have 0.0 lower bound, and
the k=0 pieces should always have lb = dead pool.
:returns: nothing, but modifies the model object
"""
def get_lb(link):
if link.i.split('.')[0] == link.j.split('.')[0]:
if link.k > 0:
return 0.0
elif link.i.split('.')[0] in self.min_storage:
return min(self.min_storage[link.i.split('.')[0]], link.lower_bound)
return link.lower_bound
ix = (self.df.i.str.contains('SR_') & self.df.j.str.contains('SR_'))
self.df.loc[ix, 'lower_bound'] = self.df.loc[ix].apply(get_lb, axis=1)
def remove_debug_links(self):
"""
Remove debug links from model object.
:returns: dataframe of links, excluding debug links.
"""
df = self.df
ix = df.index[df.index.str.contains('DBUG')]
df.drop(ix, inplace=True, axis=0)
self.nodes = pd.unique(df[['i','j']].values.ravel()).tolist()
self.links = list(zip(df.i,df.j,df.k))
return df
def create_pyomo_model(self, debug_mode=False, debug_cost=2e7):
"""
Use link data to create Pyomo model (constraints and objective function)
But do not solve yet.
:param debug_mode: (boolean) Whether to run in debug mode.
Use when there may be infeasibilities in the network.
:param debug_cost: When in debug mode, assign this cost ($/AF) to flow on debug links.
This should be an arbitrarily high number.
:returns: nothing, but creates the model object (self.model)
"""
# work on a local copy of the dataframe
if not debug_mode and self.df.index.str.contains('DBUG').any():
# previously ran in debug mode, but now done
df = self.remove_debug_links()
df.to_csv(self.linksfile + '-final.csv')
else:
df = self.df
self.log.info('Creating Pyomo Model (debug=%s)' % debug_mode)
model = ConcreteModel()
model.N = Set(initialize=self.nodes)
model.k = Set(initialize=range(15))
model.A = Set(within=model.N*model.N*model.k,
initialize=self.links, ordered=True)
model.source = Param(initialize='SOURCE')
model.sink = Param(initialize='SINK')
def init_params(p):
if p == 'cost' and debug_mode:
return (lambda model,i,j,k: debug_cost
if ('DBUG' in str(i)+'_'+str(j))
else 1.0)
else:
return lambda model,i,j,k: df.loc[str(i)+'_'+str(j)+'_'+str(k)][p]
model.u = Param(model.A, initialize=init_params('upper_bound'), mutable=True)
model.l = Param(model.A, initialize=init_params('lower_bound'), mutable=True)
model.a = Param(model.A, initialize=init_params('amplitude'))
model.c = Param(model.A, initialize=init_params('cost'))
# The flow over each arc
model.X = Var(model.A, within=Reals)
# Minimize total cost
def obj_fxn(model):
return sum(model.c[i,j,k]*model.X[i,j,k] for (i,j,k) in model.A)
model.total = Objective(rule=obj_fxn, sense=minimize)
# Enforce an upper bound limit on the flow across each arc
def limit_rule_upper(model, i, j, k):
return model.X[i,j,k] <= model.u[i,j,k]
model.limit_upper = Constraint(model.A, rule=limit_rule_upper)
# Enforce a lower bound limit on the flow across each arc
def limit_rule_lower(model, i, j, k):
return model.X[i,j,k] >= model.l[i,j,k]
model.limit_lower = Constraint(model.A, rule=limit_rule_lower)
# To speed up creating the mass balance constraints, first
# create dictionaries of arcs_in and arcs_out of every node
# These are NOT Pyomo data, and Pyomo does not use "model._" at all
arcs_in = {}
arcs_out = {}
def arc_list_hack(model, i,j,k):
if j not in arcs_in:
arcs_in[j] = []
arcs_in[j].append((i,j,k))
if i not in arcs_out:
arcs_out[i] = []
arcs_out[i].append((i,j,k))
return [0]
model._ = Set(model.A, initialize=arc_list_hack)
# Enforce flow through each node (mass balance)
def flow_rule(model, node):
if node in [value(model.source), value(model.sink)]:
return Constraint.Skip
outflow = sum(model.X[i,j,k]/model.a[i,j,k] for i,j,k in arcs_out[node])
inflow = sum(model.X[i,j,k] for i,j,k in arcs_in[node])
return inflow == outflow
model.flow = Constraint(model.N, rule=flow_rule)
model.dual = Suffix(direction=Suffix.IMPORT)
self.model = model
def solve_pyomo_model(self, solver='glpk', nproc=1, debug_mode=False, maxiter=10):
"""
Solve Pyomo model (must be called after create_pyomo_model)
:param solver: (string) solver name. glpk, cplex, cbc, gurobi.
:param nproc: (int) number of processors. 1=serial.
:param debug_mode: (boolean) Whether to run in debug mode.
Use when there may be infeasibilities in the network.
:param maxiter: (int) maximum iterations for debug mode.
:returns: nothing, but assigns results to self.model.solutions.
:raises: RuntimeError, if problem is found to be infeasible.
"""
from pyomo.opt import SolverFactory
opt = SolverFactory(solver)
        if nproc > 1 and solver != 'glpk':
opt.options['threads'] = nproc
if debug_mode:
run_again = True
i = 0
vol_total = 0
while run_again and i < maxiter:
self.log.info('-----Solving Pyomo Model (debug=%s)' % debug_mode)
self.results = opt.solve(self.model)
self.log.info('Finished. Fixing debug flows...')
run_again,vol = self.fix_debug_flows()
i += 1
vol_total += vol
if run_again:
self.log.info(('Warning: Debug mode maximum iterations reached.'
' Will still try to solve without debug mode.'))
else:
self.log.info('All debug flows eliminated (iter=%d, vol=%0.2f)' % (i,vol_total))
else:
self.log.info('-----Solving Pyomo Model (debug=%s)' % debug_mode)
self.results = opt.solve(self.model, tee=False)
if self.results.solver.termination_condition == TerminationCondition.optimal:
self.log.info('Optimal Solution Found (debug=%s).' % debug_mode)
self.model.solutions.load_from(self.results)
else:
raise RuntimeError('Problem Infeasible. Run again starting from debug mode.')
def fix_debug_flows(self, tol=1e-7):
"""
Find infeasible constraints where debug flows occur.
Fix them by either raising the UB, or lowering the LB.
:param tol: (float) Tolerance to identify nonzero debug flows
:returns run_again: (boolean) whether debug mode needs to run again
:returns vol: (float) total volume of constraint changes
also modifies the model object.
"""
df, model = self.df, self.model
dbix = (df.i.str.contains('DBUGSRC') | df.j.str.contains('DBUGSNK'))
debuglinks = df[dbix].values
run_again = False
vol_total = 0
for dbl in debuglinks:
s = tuple(dbl[0:3])
if model.X[s].value > tol:
run_again = True
# if we need to get rid of extra water,
# raise some upper bounds (just do them all)
if 'DBUGSNK' in dbl[1]:
raiselinks = df[(df.i == dbl[0]) & ~ df.j.str.contains('DBUGSNK')].values
for l in raiselinks:
s2 = tuple(l[0:3])
iv = model.u[s2].value
v = model.X[s].value*1.2
model.u[s2].value += v
vol_total += v
self.log.info('%s UB raised by %0.2f (%0.2f%%)' % (l[0]+'_'+l[1], v, v*100/iv))
df.loc['_'.join(str(x) for x in l[0:3]), 'upper_bound'] = model.u[s2].value
# if we need to bring in extra water
# this is a much more common problem
# want to avoid reducing carryover requirements. look downstream instead.
max_depth = 10
if 'DBUGSRC' in dbl[0]:
vol_to_reduce = max(model.X[s].value*1.2, 0.5)
self.log.info('Volume to reduce: %.2e' % vol_to_reduce)
children = [dbl[1]]
for i in range(max_depth):
children += df[df.i.isin(children)
& ~ df.j.str.contains('DBUGSNK')].j.tolist()
children = set(children)
reducelinks = (df[df.i.isin(children)
& (df.lower_bound > 0)]
.sort_values(by='lower_bound', ascending=False).values)
if reducelinks.size == 0:
raise RuntimeError(('Not possible to reduce LB on links'
' with origin %s by volume %0.2f' %
(dbl[1],vol_to_reduce)))
for l in reducelinks:
s2 = tuple(l[0:3])
iv = model.l[s2].value
dl = model.dual[model.limit_lower[s2]] if s2 in model.limit_lower else 0.0
if iv > 0 and vol_to_reduce > 0 and dl > 1e6:
v = min(vol_to_reduce, iv)
# don't allow big reductions on carryover links
carryover = ['SR_', 'INITIAL', 'FINAL', 'GW_']
if any(c in l[0] for c in carryover) and any(c in l[1] for c in carryover):
v = min(v, max(25.0, 0.1*iv))
model.l[s2].value -= v
vol_to_reduce -= v
vol_total += v
self.log.info('%s LB reduced by %.2e (%0.2f%%). Dual=%.2e' % (l[0]+'_'+l[1], v, v*100/iv, dl))
df.loc['_'.join(str(x) for x in l[0:3]), 'lower_bound'] = model.l[s2].value
if vol_to_reduce == 0:
break
if vol_to_reduce > 0:
self.log.info('Debug -> %s: could not reduce full amount (%.2e left)' % (dbl[1],vol_to_reduce))
self.df, self.model = df, model
return run_again, vol_total
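
# Illustrative driver sketch (not part of this module): the calling sequence
# the docstrings above describe -- run once in debug mode to clear any
# infeasibilities, then rebuild and solve cleanly. The links file path is a
# hypothetical example.
if __name__ == '__main__':
    calvin = CALVIN('calvin/data/links.csv')
    calvin.create_pyomo_model(debug_mode=True)
    calvin.solve_pyomo_model(solver='glpk', debug_mode=True, maxiter=10)
    calvin.create_pyomo_model(debug_mode=False)  # also drops the debug links
    calvin.solve_pyomo_model(solver='glpk')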
| msdogan/pyvin | calvin/calvin.py | Python | mit | 16,224 | 0.013745 |
from collections import defaultdict
class CategoricalVariableEncoder(object):
def convert_categorical_variables(self, data_matrix, category_indices, category_value_mapping=None):
if len(category_indices) == 0:
return data_matrix
        if category_value_mapping is None:
category_value_mapping = self.get_category_value_mapping(data_matrix, category_indices)
for i in xrange(len(data_matrix)):
for category_index in category_indices:
current_data_value = data_matrix[i][category_index]
updated_data_value = category_value_mapping[category_index][current_data_value]
data_matrix[i][category_index] = updated_data_value
return data_matrix
def get_category_value_mapping(self, data_matrix, category_indices):
categories = self.get_category_values(data_matrix, category_indices)
category_value_mapping = {}
for category_index, values_set in categories.iteritems():
category_value_mapping[category_index] = self.create_value_map(values_set)
return category_value_mapping
def get_category_values(self, data_matrix, category_indices):
categories = {}
for category_index in category_indices:
categories[category_index] = set()
for i in xrange(len(data_matrix)):
for category_index in category_indices:
category_value = data_matrix[i][category_index]
categories[category_index].add(category_value)
return categories
@classmethod
    def create_value_map(cls, values_set):
sorted_values_set = sorted(values_set)
value_mapping = {}
for i in xrange(len(sorted_values_set)):
value_mapping[sorted_values_set[i]] = i
return value_mapping
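
# Usage sketch (not part of the original module): columns 1 and 2 hold
# categorical strings, which are replaced in place by integer codes assigned
# in sorted order of the observed values.
if __name__ == '__main__':
    encoder = CategoricalVariableEncoder()
    data = [[1.0, 'red', 'small'],
            [2.0, 'blue', 'large'],
            [3.0, 'red', 'large']]
    print(encoder.convert_categorical_variables(data, [1, 2]))
    # -> [[1.0, 1, 1], [2.0, 0, 0], [3.0, 1, 0]]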
| wangjohn/wallace | wallace/categorical_variable_encoder.py | Python | mit | 1,836 | 0.003268 |
from ai import action
class MoveAction(action.Action):
def __init__(self, performer, direction):
super().__init__(performer)
self.direction = direction
def prerequisite(self):
if not self.direction:
return False
return self.performer.can_move(*self.direction)
def perform(self):
self.performer.move(*self.direction)
| JoshuaSkelly/lunch-break-rl | ai/actions/moveaction.py | Python | mit | 385 | 0 |
from tornado.web import HTTPError
import datetime
import threading
from astral.api.client import TicketsAPI
from astral.api.handlers.base import BaseHandler
from astral.api.handlers.tickets import TicketsHandler
from astral.models import Ticket, Node, Stream, session
import logging
log = logging.getLogger(__name__)
class TicketHandler(BaseHandler):
def _load_ticket(self, stream_slug, destination_uuid):
stream = Stream.get_by(slug=stream_slug)
if not destination_uuid:
return Ticket.get_by(stream=stream, destination=Node.me())
node = Node.get_by(uuid=destination_uuid)
return Ticket.query.filter_by(stream=stream, destination=node).first()
def delete(self, stream_slug, destination_uuid=None):
"""Stop forwarding the stream to the requesting node."""
ticket = self._load_ticket(stream_slug, destination_uuid)
if not ticket:
raise HTTPError(404)
ticket.delete()
session.commit()
TicketDeletionPropagationThread(ticket, self.request).start()
def get(self, stream_slug, destination_uuid=None):
ticket = self._load_ticket(stream_slug, destination_uuid)
if ticket:
# TODO this block is somewhat duplicated from TicketsHandler.post,
# where we refresh an existing ticket.
if not ticket.source == Node.me():
log.info("Refreshing %s with the source", ticket)
ticket = TicketsHandler._request_stream_from_node(ticket.stream,
ticket.source, ticket.destination,
existing_ticket=ticket)
if ticket:
ticket.refreshed = datetime.datetime.now()
# In case we lost the tunnel, just make sure it exists
ticket.queue_tunnel_creation()
session.commit()
# TODO this is unideal, but we need to get the new port if it
# changed. combination of sleep and db flush seems to do it
# somewhat reliably, but it's still a race condition.
import time
time.sleep(0.5)
ticket = self._load_ticket(stream_slug, destination_uuid)
self.write({'ticket': ticket.to_dict()})
def put(self, stream_slug, destination_uuid=None):
"""Edit tickets, most likely just confirming them."""
ticket = self._load_ticket(stream_slug, destination_uuid)
if ticket:
ticket.confirmed = self.get_json_argument('confirmed')
if ticket.confirmed:
log.info("Confirmed %s", ticket)
session.commit()
class TicketDeletionPropagationThread(threading.Thread):
"""When a ticket is deleted, we may need to inform other nodes or find a
replacement for ourselves. We don't want to do this in-band with the delete
    request because it can cause deadlocking of API requests between nodes.
"""
def __init__(self, ticket, request):
super(TicketDeletionPropagationThread, self).__init__()
self.ticket = ticket
self.request = request
def run(self):
if self.ticket.confirmed and not self.ticket.source == Node.me():
if self.ticket.destination == Node.me():
if self.request.remote_ip == '127.0.0.1':
log.info("User is canceling %s -- must inform sender",
self.ticket)
TicketsAPI(self.ticket.source.uri()).cancel(
self.ticket.absolute_url())
else:
log.info("%s is being deleted, we need to find another for "
"ourselves", self.ticket)
try:
TicketsHandler.handle_ticket_request(self.ticket.stream,
self.ticket.destination)
except HTTPError, e:
log.warning("We lost %s and couldn't find a "
"replacement to failover -- our stream is "
"dead: %s", self.ticket, e)
elif self.request.remote_ip == self.ticket.source.ip_address:
log.info("%s is being deleted by the source, must inform the "
"target %s", self.ticket, self.ticket.destination)
TicketsAPI(self.ticket.destination.uri()).cancel(
self.ticket.absolute_url())
elif self.request.remote_ip == self.ticket.destination.ip_address:
log.info("%s is being deleted by the destination, must inform "
"the source %s", self.ticket, self.ticket.source)
TicketsAPI(self.ticket.source.uri()).cancel(
self.ticket.absolute_url())
| peplin/astral | astral/api/handlers/ticket.py | Python | mit | 4,822 | 0.002489 |
import tkinter.filedialog as tkFileDialog
import numpy as np
from numpy import sin,cos
import os
def InnerOrientation(mat1,mat2):
"""
mat1 为像素坐标,4*2,mat2为理论坐标4*2,
h0,h1,h2,k0,k1,k2,这六个参数由下列矩阵定义:
[x]=[h0]+[h1 h2] [i]
[y]=[k0]+[k1 k2] [j]
返回6个定向参数的齐次矩阵,x方向单位权方差,y方向单位权方差
[h1 h2 h0]
[k1 k2 k0]
[0 0 1 ]
"""
# mat1=np.matrix(mat1)
# mat2=np.matrix(mat2)
y=mat2.ravel()
y=y.T
xlist=[]
for i in range(int(y.size/2)):
x0=np.matrix([[1,mat1[i,0],mat1[i,1],0,0,0],[0,0,0,1,mat1[i,0],mat1[i,1]]])
xlist.append(x0)
x=np.vstack(xlist)
# print(x)
N=np.linalg.inv(x.T @ x)
beta=N @ x.T @ y
# print(beta)
r=(np.size(y)-6)
e=y-x@beta
ex=e[0::2]
ey=e[1::2]
sigmax=(np.linalg.norm(ex)/r)
sigmay=(np.linalg.norm(ey)/r)
# print(sigmax)
# print(sigmay)
return(np.matrix([[beta[1,0],beta[2,0],beta[0,0]],[beta[4,0],beta[5,0],beta[3,0]],[0,0,1]]),sigmax,sigmay)
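# Self-check sketch (not part of the original script): build four fiducial
# points from a known affine transform and confirm InnerOrientation recovers
# h0,h1,h2,k0,k1,k2; residual variances are zero for noise-free input.
def _demo_inner_orientation():
    pixel = np.matrix([[0.0, 0.0], [100.0, 0.0], [100.0, 100.0], [0.0, 100.0]])
    h0, h1, h2, k0, k1, k2 = 5.0, 0.02, 0.0, -3.0, 0.0, 0.02
    theory = np.hstack([h0 + h1 * pixel[:, 0] + h2 * pixel[:, 1],
                        k0 + k1 * pixel[:, 0] + k2 * pixel[:, 1]])
    M, sx, sy = InnerOrientation(pixel, theory)
    print(M)  # expect [[0.02 0. 5.], [0. 0.02 -3.], [0. 0. 1.]]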
def openkbfile():
#default_dir = r"C:\Users\lenovo\Desktop" # 设置默认打开目录
fname = tkFileDialog.askopenfilename(title=u"选择文件",filetypes=[("kb file", "*.kb"), ("all", "*.*")],initialdir=r"D:\学习\摄影测量\摄影测量实验数据-后方交会、前方交会")
f=open(fname,mode='r')
lines=f.readlines()
f.close()
mat=[]
for line in lines:
t=line.split()
mat.append([float(t[0]),float(t[1])])
#initialdir=(os.path.expanduser(default_dir))
    # print(fname)  # full path of the selected file
mat1=mat[0::2]
mat2=mat[1::2]
mat,sigmax2,sigmay2=InnerOrientation(np.matrix(mat1),np.matrix(mat2))
print(mat,sigmax2,sigmay2)
# def transform(mat,coormat):
# """
# mat: homogeneous matrix, as returned by InnerOrientation
# coormat: homogeneous coordinates: the third element of every column is 1 and each coordinate is a column vector; any number of columns is allowed.
# returns: the transformed coordinates
# """
# return mat@coormat
# def openaofile():
# fname = tkFileDialog.askopenfilename(title=u"Select file",filetypes=[("ao.txt file", "*.txt"), ("all", "*.*")],initialdir=r"D:\学习\摄影测量\摄影测量实验数据-后方交会、前方交会")
# f=open(fname,mode='r')
# lines=f.readlines()
# f.close()
# matimage=[]
# matground=[]
# for line in lines[1:]:
# t=line.split()
# matimage.append([float(t[0]),float(t[1])])
# matground.append([float(t[2]),float(t[3]),float(t[4])])
# return(np.matrix(matimage),np.matrix(matground))
# def resection():
# matimage,matground=openaofile()
# dist=np.linalg.norm(matimage[1]-matimage[0])
# Dist=np.linalg.norm(matground[1]-matground[0])
# matimage=matimage.T
# matground=matground.T
# n=dist.shape[0]
# assert n==5
# m=Dist/dist
# x0,y0,f=0,0,210.681  # all values in millimetres
# Xs0,Ys0,H=np.average(matground,axis=0)
# H+=m*f
# phi,omega,kappa=0,0,0
# R=np.zeros((3,3))
# R[0,0]=cos(phi)*cos(kappa)-sin(phi)*sin(omega)*sin(kappa)
# R[0,1]=-cos(phi)*sin(kappa)-sin(phi)*sin(omega)*cos(kappa)
# R[0,2]=-sin(phi)*cos(omega)
# R[1,0]=cos(omega)*sin(kappa)
# R[1,1]=cos(omega)*cos(kappa)
# R[1,2]=-sin(omega)
# R[2,0]=sin(phi)*cos(kappa)+cos(phi)*sin(omega)*sin(kappa)
# R[2,1]=-sin(phi)*sin(kappa)+cos(phi)*sin(omega)*cos(kappa)
# R[2,2]=cos(phi)*cos(omega)
# matimage1=np.zeros((2,5))
# S=np.matrix([Xs0,Ys0,H]).T
# Alist=[]
# Vlist=[]
# Llist=[]
# for i in range(5):
# u=matground[:,i]-S
# matimage1[0,i]=-f*np.dot(R[0],u)/np.dot(R[2],u)
# matimage1[1,i]=-f*np.dot(R[1],u)/np.dot(R[2],u)
# zba=np.dot(R[2],u)
# A=np.zeros(2,6)
# # A[0,0]=(R[0,0]*f+R[0,2]*matimage[])[]
if __name__=="__main__":
    openkbfile()
| YU6326/YU6326.github.io | code/photogrammetry/inner_orientation.py | Python | mit | 3,899 | 0.020569 |
# -*- coding: utf-8 -*-
#
# Copyright (C) Pootle contributors.
#
# This file is a part of the Pootle project. It is distributed under the GPL3
# or later license. See the LICENSE file for a copy of the license and the
# AUTHORS file for copyright and authorship information.
default_app_config = 'accounts.apps.AccountsConfig'
| ta2-1/pootle | pootle/apps/accounts/__init__.py | Python | gpl-3.0 | 328 | 0 |
import sys
from PIL import Image
img = Image.open(sys.argv[1])
width, height = img.size
xblock = 5
yblock = 5
w_width = width / xblock
w_height = height / yblock
blockmap = [(xb*w_width, yb*w_height, (xb+1)*w_width, (yb+1)*w_height)
for xb in xrange(xblock) for yb in xrange(yblock)]
newblockmap = list(blockmap)
newblockmap[0] = blockmap[14]
newblockmap[1] = blockmap[13]
newblockmap[2] = blockmap[12]
newblockmap[3] = blockmap[11]
newblockmap[4] = blockmap[10]
newblockmap[5] = blockmap[24]
newblockmap[6] = blockmap[23]
newblockmap[7] = blockmap[22]
newblockmap[8] = blockmap[21]
newblockmap[9] = blockmap[20]
newblockmap[10] = blockmap[4]
newblockmap[11] = blockmap[3]
newblockmap[12] = blockmap[2]
newblockmap[13] = blockmap[1]
newblockmap[14] = blockmap[0]
newblockmap[15] = blockmap[19]
newblockmap[16] = blockmap[18]
newblockmap[17] = blockmap[17]
newblockmap[18] = blockmap[16]
newblockmap[19] = blockmap[15]
newblockmap[20] = blockmap[9]
newblockmap[21] = blockmap[8]
newblockmap[22] = blockmap[7]
newblockmap[23] = blockmap[6]
newblockmap[24] = blockmap[5]
result = Image.new(img.mode, (width, height))
for box, sbox in zip(blockmap, newblockmap):
c = img.crop(sbox)
result.paste(c, box)
result.save(sys.argv[1])
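
# Equivalent formulation (sketch, not from the original script): the 25
# assignments above apply one fixed permutation of the 5x5 grid, so the same
# shuffle can be written as a lookup table:
# PERMUTATION = [14, 13, 12, 11, 10, 24, 23, 22, 21, 20,
#                4, 3, 2, 1, 0, 19, 18, 17, 16, 15,
#                9, 8, 7, 6, 5]
# newblockmap = [blockmap[i] for i in PERMUTATION]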
| BilalDev/HolyScrap | src/hsimage.py | Python | apache-2.0 | 1,247 | 0.000802 |
from django.db import models
from django.conf import settings
from django.dispatch import receiver
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class FoiSite(models.Model):
country_code = models.CharField(_('Country Code'), max_length=5)
country_name = models.CharField(_('Country Name'), max_length=255)
name = models.CharField(_('Name'), max_length=255)
url = models.CharField(_('URL'), max_length=255)
text = models.TextField(_('Text'), blank=True)
enabled = models.BooleanField(_('Enabled'), default=True)
class Meta:
verbose_name = _('FOI Site')
verbose_name_plural = _('FOI Sites')
def __str__(self):
return u'%s (%s)' % (self.name, self.country_name)
def save(self, *args, **kwargs):
self.country_code = self.country_code.upper()
super(FoiSite, self).save(*args, **kwargs)
try:
from django.contrib.gis.geoip import GeoIP
except ImportError:
GeoIP = None # noqa
class SiteAdvisor(object):
def __init__(self):
self.geoip = GeoIP()
self.sites = None
def update(self):
sites = FoiSite.objects.filter(enabled=True)
self.sites = dict([(f.country_code, f) for f in sites])
def refresh(self):
self.sites = None
def get_site(self, ip):
if self.sites is None:
self.update()
result = self.geoip.country(ip)
return self.sites.get(result['country_code'], None)
class DummyAdvisor(object):
def refresh(self):
pass
def get_site(self, ip):
pass
if GeoIP and getattr(settings, 'GEOIP_PATH', False):
    advisor = SiteAdvisor()
else:
advisor = DummyAdvisor()
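
# Typical call site (sketch, not from this module): a view would resolve the
# visitor's national FOI site once per request, e.g.
# site = advisor.get_site(request.META.get('REMOTE_ADDR'))
# if site is not None: redirect to site.url or display site.text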
@receiver(models.signals.post_save, sender=FoiSite,
dispatch_uid="foisite_saved")
def foisite_saved(instance=None, created=False, **kwargs):
advisor.refresh()
| LilithWittmann/froide | froide/foisite/models.py | Python | mit | 1,944 | 0.001029 |
import os
def remove_fname_extension(fname):
return os.path.splitext(fname)[0]
def change_fname_extension(fname, extension):
return remove_fname_extension(fname) + '.' + extension
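
# Quick usage sketch (not part of the original module):
# remove_fname_extension('data/report.txt') -> 'data/report'
# change_fname_extension('data/report.txt', 'pdf') -> 'data/report.pdf'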
def concat(path, fname):
    return path + '/' + fname
| matt77hias/FileUtils | src/name.py | Python | gpl-3.0 | 249 | 0.02008 |
#!/usr/bin/python
import psutil
import signal
#From https://github.com/getchar/rbb_article
target = "HelpfulAppStore"
# scan through processes
for proc in psutil.process_iter():
if proc.name() == target:
print(" match")
proc.send_signal(signal.SIGUSR1)
| jamesnw/HelpfulAppStoreBot | kill_bot.py | Python | gpl-2.0 | 277 | 0.00361 |
"""Layout provider for Ansible source."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
from ... import types as t
from . import (
ContentLayout,
LayoutProvider,
)
class AnsibleLayout(LayoutProvider):
"""Layout provider for Ansible source."""
@staticmethod
def is_content_root(path): # type: (str) -> bool
"""Return True if the given path is a content root for this provider."""
return os.path.exists(os.path.join(path, 'setup.py')) and os.path.exists(os.path.join(path, 'bin/ansible-test'))
def create(self, root, paths): # type: (str, t.List[str]) -> ContentLayout
"""Create a Layout using the given root and paths."""
plugin_paths = dict((p, os.path.join('lib/ansible/plugins', p)) for p in self.PLUGIN_TYPES)
plugin_paths.update(dict(
modules='lib/ansible/modules',
module_utils='lib/ansible/module_utils',
))
return ContentLayout(root,
paths,
plugin_paths=plugin_paths,
integration_path='test/integration',
unit_path='test/units',
unit_module_path='test/units/modules',
unit_module_utils_path='test/units/module_utils',
)
| amenonsen/ansible | test/lib/ansible_test/_internal/provider/layout/ansible.py | Python | gpl-3.0 | 1,396 | 0.002149 |
# Copyright 2012, Intel, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Client side of the volume RPC API.
"""
from oslo_config import cfg
from oslo_serialization import jsonutils
from cinder import quota
from cinder import rpc
from cinder.volume import utils
CONF = cfg.CONF
QUOTAS = quota.QUOTAS
class VolumeAPI(rpc.RPCAPI):
"""Client side of the volume rpc API.
API version history:
.. code-block:: none
1.0 - Initial version.
1.1 - Adds clone volume option to create_volume.
1.2 - Add publish_service_capabilities() method.
1.3 - Pass all image metadata (not just ID) in copy_volume_to_image.
1.4 - Add request_spec, filter_properties and
allow_reschedule arguments to create_volume().
1.5 - Add accept_transfer.
1.6 - Add extend_volume.
1.7 - Adds host_name parameter to attach_volume()
to allow attaching to host rather than instance.
1.8 - Add migrate_volume, rename_volume.
1.9 - Add new_user and new_project to accept_transfer.
1.10 - Add migrate_volume_completion, remove rename_volume.
1.11 - Adds mode parameter to attach_volume()
to support volume read-only attaching.
1.12 - Adds retype.
1.13 - Adds create_export.
1.14 - Adds reservation parameter to extend_volume().
1.15 - Adds manage_existing and unmanage_only flag to delete_volume.
1.16 - Removes create_export.
1.17 - Add replica option to create_volume, promote_replica and
sync_replica.
1.18 - Adds create_consistencygroup, delete_consistencygroup,
create_cgsnapshot, and delete_cgsnapshot. Also adds
the consistencygroup_id parameter in create_volume.
1.19 - Adds update_migrated_volume
1.20 - Adds support for sending objects over RPC in create_snapshot()
and delete_snapshot()
1.21 - Adds update_consistencygroup.
1.22 - Adds create_consistencygroup_from_src.
1.23 - Adds attachment_id to detach_volume.
1.24 - Removed duplicated parameters: snapshot_id, image_id,
source_volid, source_replicaid, consistencygroup_id and
cgsnapshot_id from create_volume. All off them are already
passed either in request_spec or available in the DB.
1.25 - Add source_cg to create_consistencygroup_from_src.
1.26 - Adds support for sending objects over RPC in
create_consistencygroup(), create_consistencygroup_from_src(),
update_consistencygroup() and delete_consistencygroup().
1.27 - Adds support for replication V2
1.28 - Adds manage_existing_snapshot
1.29 - Adds get_capabilities.
1.30 - Adds remove_export
1.31 - Updated: create_consistencygroup_from_src(), create_cgsnapshot()
and delete_cgsnapshot() to cast method only with necessary
args. Forwarding CGSnapshot object instead of CGSnapshot_id.
1.32 - Adds support for sending objects over RPC in create_volume().
1.33 - Adds support for sending objects over RPC in delete_volume().
1.34 - Adds support for sending objects over RPC in retype().
1.35 - Adds support for sending objects over RPC in extend_volume().
1.36 - Adds support for sending objects over RPC in migrate_volume(),
migrate_volume_completion(), and update_migrated_volume().
1.37 - Adds old_reservations parameter to retype to support quota
checks in the API.
1.38 - Scaling backup service, add get_backup_device() and
secure_file_operations_enabled()
1.39 - Update replication methods to reflect new backend rep strategy
1.40 - Add cascade option to delete_volume().
... Mitaka supports messaging version 1.40. Any changes to existing
methods in 1.x after that point should be done so that they can handle
the version_cap being set to 1.40.
2.0 - Remove 1.x compatibility
2.1 - Add get_manageable_volumes() and get_manageable_snapshots().
2.2 - Adds support for sending objects over RPC in manage_existing().
2.3 - Adds support for sending objects over RPC in
initialize_connection().
"""
RPC_API_VERSION = '2.3'
TOPIC = CONF.volume_topic
BINARY = 'cinder-volume'
def _compat_ver(self, current, *legacy):
versions = (current,) + legacy
for version in versions[:-1]:
if self.client.can_send_version(version):
return version
return versions[-1]
def _get_cctxt(self, host, version):
new_host = utils.get_volume_rpc_host(host)
return self.client.prepare(server=new_host, version=version)
def create_consistencygroup(self, ctxt, group, host):
cctxt = self._get_cctxt(host, '2.0')
cctxt.cast(ctxt, 'create_consistencygroup',
group=group)
def delete_consistencygroup(self, ctxt, group):
cctxt = self._get_cctxt(group.host, '2.0')
cctxt.cast(ctxt, 'delete_consistencygroup',
group=group)
def update_consistencygroup(self, ctxt, group, add_volumes=None,
remove_volumes=None):
cctxt = self._get_cctxt(group.host, '2.0')
cctxt.cast(ctxt, 'update_consistencygroup',
group=group,
add_volumes=add_volumes,
remove_volumes=remove_volumes)
def create_consistencygroup_from_src(self, ctxt, group, cgsnapshot=None,
source_cg=None):
cctxt = self._get_cctxt(group.host, '2.0')
cctxt.cast(ctxt, 'create_consistencygroup_from_src',
group=group,
cgsnapshot=cgsnapshot,
source_cg=source_cg)
def create_cgsnapshot(self, ctxt, cgsnapshot):
cctxt = self._get_cctxt(cgsnapshot.consistencygroup.host, '2.0')
cctxt.cast(ctxt, 'create_cgsnapshot', cgsnapshot=cgsnapshot)
def delete_cgsnapshot(self, ctxt, cgsnapshot):
cctxt = self._get_cctxt(cgsnapshot.consistencygroup.host, '2.0')
cctxt.cast(ctxt, 'delete_cgsnapshot', cgsnapshot=cgsnapshot)
def create_volume(self, ctxt, volume, host, request_spec,
filter_properties, allow_reschedule=True):
request_spec_p = jsonutils.to_primitive(request_spec)
cctxt = self._get_cctxt(host, '2.0')
cctxt.cast(ctxt, 'create_volume', volume_id=volume.id,
request_spec=request_spec_p,
filter_properties=filter_properties,
allow_reschedule=allow_reschedule, volume=volume)
def delete_volume(self, ctxt, volume, unmanage_only=False, cascade=False):
cctxt = self._get_cctxt(volume.host, '2.0')
cctxt.cast(ctxt, 'delete_volume', volume_id=volume.id,
unmanage_only=unmanage_only, volume=volume, cascade=cascade)
def create_snapshot(self, ctxt, volume, snapshot):
cctxt = self._get_cctxt(volume['host'], '2.0')
cctxt.cast(ctxt, 'create_snapshot', volume_id=volume['id'],
snapshot=snapshot)
def delete_snapshot(self, ctxt, snapshot, host, unmanage_only=False):
cctxt = self._get_cctxt(host, '2.0')
cctxt.cast(ctxt, 'delete_snapshot', snapshot=snapshot,
unmanage_only=unmanage_only)
def attach_volume(self, ctxt, volume, instance_uuid, host_name,
mountpoint, mode):
cctxt = self._get_cctxt(volume['host'], '2.0')
return cctxt.call(ctxt, 'attach_volume',
volume_id=volume['id'],
instance_uuid=instance_uuid,
host_name=host_name,
mountpoint=mountpoint,
mode=mode)
def detach_volume(self, ctxt, volume, attachment_id):
cctxt = self._get_cctxt(volume['host'], '2.0')
return cctxt.call(ctxt, 'detach_volume', volume_id=volume['id'],
attachment_id=attachment_id)
def copy_volume_to_image(self, ctxt, volume, image_meta):
cctxt = self._get_cctxt(volume['host'], '2.0')
cctxt.cast(ctxt, 'copy_volume_to_image', volume_id=volume['id'],
image_meta=image_meta)
def initialize_connection(self, ctxt, volume, connector):
version = self._compat_ver('2.3', '2.0')
msg_args = {'volume_id': volume.id, 'connector': connector,
'volume': volume}
if version == '2.0':
del msg_args['volume']
cctxt = self._get_cctxt(volume['host'], version=version)
return cctxt.call(ctxt, 'initialize_connection', **msg_args)
def terminate_connection(self, ctxt, volume, connector, force=False):
cctxt = self._get_cctxt(volume['host'], '2.0')
return cctxt.call(ctxt, 'terminate_connection', volume_id=volume['id'],
connector=connector, force=force)
def remove_export(self, ctxt, volume):
cctxt = self._get_cctxt(volume['host'], '2.0')
cctxt.cast(ctxt, 'remove_export', volume_id=volume['id'])
def publish_service_capabilities(self, ctxt):
cctxt = self.client.prepare(fanout=True, version='2.0')
cctxt.cast(ctxt, 'publish_service_capabilities')
def accept_transfer(self, ctxt, volume, new_user, new_project):
cctxt = self._get_cctxt(volume['host'], '2.0')
return cctxt.call(ctxt, 'accept_transfer', volume_id=volume['id'],
new_user=new_user, new_project=new_project)
def extend_volume(self, ctxt, volume, new_size, reservations):
cctxt = self._get_cctxt(volume.host, '2.0')
cctxt.cast(ctxt, 'extend_volume', volume_id=volume.id,
new_size=new_size, reservations=reservations, volume=volume)
def migrate_volume(self, ctxt, volume, dest_host, force_host_copy):
host_p = {'host': dest_host.host,
'capabilities': dest_host.capabilities}
cctxt = self._get_cctxt(volume.host, '2.0')
cctxt.cast(ctxt, 'migrate_volume', volume_id=volume.id, host=host_p,
force_host_copy=force_host_copy, volume=volume)
def migrate_volume_completion(self, ctxt, volume, new_volume, error):
cctxt = self._get_cctxt(volume.host, '2.0')
return cctxt.call(ctxt, 'migrate_volume_completion',
volume_id=volume.id, new_volume_id=new_volume.id,
error=error, volume=volume, new_volume=new_volume)
def retype(self, ctxt, volume, new_type_id, dest_host,
migration_policy='never', reservations=None,
old_reservations=None):
host_p = {'host': dest_host.host,
'capabilities': dest_host.capabilities}
cctxt = self._get_cctxt(volume.host, '2.0')
cctxt.cast(ctxt, 'retype', volume_id=volume.id,
new_type_id=new_type_id, host=host_p,
migration_policy=migration_policy,
reservations=reservations, volume=volume,
old_reservations=old_reservations)
def manage_existing(self, ctxt, volume, ref):
msg_args = {
'volume_id': volume.id, 'ref': ref, 'volume': volume,
}
version = '2.2'
if not self.client.can_send_version('2.2'):
version = '2.0'
msg_args.pop('volume')
cctxt = self._get_cctxt(volume.host, version)
cctxt.cast(ctxt, 'manage_existing', **msg_args)
def promote_replica(self, ctxt, volume):
cctxt = self._get_cctxt(volume['host'], '2.0')
cctxt.cast(ctxt, 'promote_replica', volume_id=volume['id'])
def reenable_replication(self, ctxt, volume):
cctxt = self._get_cctxt(volume['host'], '2.0')
cctxt.cast(ctxt, 'reenable_replication', volume_id=volume['id'])
def update_migrated_volume(self, ctxt, volume, new_volume,
original_volume_status):
cctxt = self._get_cctxt(new_volume['host'], '2.0')
cctxt.call(ctxt,
'update_migrated_volume',
volume=volume,
new_volume=new_volume,
volume_status=original_volume_status)
def freeze_host(self, ctxt, host):
"""Set backend host to frozen."""
cctxt = self._get_cctxt(host, '2.0')
return cctxt.call(ctxt, 'freeze_host')
def thaw_host(self, ctxt, host):
"""Clear the frozen setting on a backend host."""
cctxt = self._get_cctxt(host, '2.0')
return cctxt.call(ctxt, 'thaw_host')
def failover_host(self, ctxt, host, secondary_backend_id=None):
"""Failover host to the specified backend_id (secondary). """
cctxt = self._get_cctxt(host, '2.0')
cctxt.cast(ctxt, 'failover_host',
secondary_backend_id=secondary_backend_id)
def manage_existing_snapshot(self, ctxt, snapshot, ref, host):
cctxt = self._get_cctxt(host, '2.0')
cctxt.cast(ctxt, 'manage_existing_snapshot',
snapshot=snapshot,
ref=ref)
def get_capabilities(self, ctxt, host, discover):
cctxt = self._get_cctxt(host, '2.0')
return cctxt.call(ctxt, 'get_capabilities', discover=discover)
def get_backup_device(self, ctxt, backup, volume):
cctxt = self._get_cctxt(volume.host, '2.0')
return cctxt.call(ctxt, 'get_backup_device', backup=backup)
def secure_file_operations_enabled(self, ctxt, volume):
cctxt = self._get_cctxt(volume.host, '2.0')
return cctxt.call(ctxt, 'secure_file_operations_enabled',
volume=volume)
def get_manageable_volumes(self, ctxt, host, marker, limit, offset,
sort_keys, sort_dirs):
cctxt = self._get_cctxt(host, '2.1')
return cctxt.call(ctxt, 'get_manageable_volumes', marker=marker,
limit=limit, offset=offset, sort_keys=sort_keys,
sort_dirs=sort_dirs)
def get_manageable_snapshots(self, ctxt, host, marker, limit, offset,
sort_keys, sort_dirs):
cctxt = self._get_cctxt(host, '2.1')
return cctxt.call(ctxt, 'get_manageable_snapshots', marker=marker,
limit=limit, offset=offset, sort_keys=sort_keys,
sort_dirs=sort_dirs)
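
# Illustrative sketch (not part of cinder): how the version-capped fallback in
# _compat_ver() and manage_existing() behaves during rolling upgrades.
# _FakeClient is a hypothetical stand-in for the oslo.messaging client; real
# deployments get the cap from the RPC layer.
class _FakeClient(object):
    def __init__(self, cap):
        self._cap = tuple(int(p) for p in cap.split('.'))

    def can_send_version(self, version):
        return tuple(int(p) for p in version.split('.')) <= self._cap

def _demo_version_fallback():
    for client in (_FakeClient('2.0'), _FakeClient('2.3')):
        version = '2.2' if client.can_send_version('2.2') else '2.0'
        print(version)  # capped peer -> '2.0' (volume kwarg dropped), else '2.2'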
| bswartz/cinder | cinder/volume/rpcapi.py | Python | apache-2.0 | 15,156 | 0 |
# -*- coding: utf-8 -*-
import os
os_env = os.environ
class Config(object):
SECRET_KEY = os_env.get('DYNAMICFORMS_SECRET', 'secret-key') # TODO: Change me
APP_DIR = os.path.abspath(os.path.dirname(__file__)) # This directory
PROJECT_ROOT = os.path.abspath(os.path.join(APP_DIR, os.pardir))
BCRYPT_LOG_ROUNDS = 13
ASSETS_DEBUG = False
DEBUG_TB_ENABLED = False # Disable Debug toolbar
DEBUG_TB_INTERCEPT_REDIRECTS = False
CACHE_TYPE = 'simple' # Can be "memcached", "redis", etc.
class ProdConfig(Config):
"""Production configuration."""
ENV = 'prod'
DEBUG = False
SQLALCHEMY_DATABASE_URI = 'postgresql://localhost/example' # TODO: Change me
DEBUG_TB_ENABLED = False # Disable Debug toolbar
class DevConfig(Config):
"""Development configuration."""
ENV = 'dev'
DEBUG = True
DB_NAME = 'dev.db'
# Put the db file in project root
DB_PATH = os.path.join(Config.PROJECT_ROOT, DB_NAME)
SQLALCHEMY_DATABASE_URI = 'sqlite:///{0}'.format(DB_PATH)
DEBUG_TB_ENABLED = True
ASSETS_DEBUG = True # Don't bundle/minify static assets
CACHE_TYPE = 'simple' # Can be "memcached", "redis", etc.
class TestConfig(Config):
TESTING = True
DEBUG = True
SQLALCHEMY_DATABASE_URI = 'sqlite://'
BCRYPT_LOG_ROUNDS = 1 # For faster tests
WTF_CSRF_ENABLED = False # Allows form testing
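
# Usage sketch (assumption, not part of this module): an app factory would
# pick one of these classes at startup; DYNAMICFORMS_ENV is a hypothetical
# variable name.
def get_config():
    return {
        'prod': ProdConfig,
        'dev': DevConfig,
        'test': TestConfig,
    }.get(os_env.get('DYNAMICFORMS_ENV', 'dev'), DevConfig)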
| ossifrage/dynamicforms | dynamicforms/settings.py | Python | bsd-3-clause | 1,384 | 0.002168 |
#!/usr/bin/env python2.2
#-----------------------------------------------------------------------------
# Name: wxPyPlot.py
# Purpose:
#
# Author: Gordon Williams
#
# Created: 2003/11/03
# RCS-ID: $Id$
# Copyright: (c) 2002
# Licence: Use as you wish.
#-----------------------------------------------------------------------------
"""
This is a simple light weight plotting module that can be used with Boa or
easily integrated into your own wxPython application. The emphasis is on small
size and fast plotting for large data sets. It has a reasonable number of
features to do line and scatter graphs easily. It is not as sophisticated or as
powerful as SciPy Plt or Chaco. Both of these are great packages but consume
huge amounts of computer resources for simple plots. They can be found at
http://scipy.com
This file contains two parts; first the re-usable library stuff, then, after
a "if __name__=='__main__'" test, a simple frame and a few default plots
for examples and testing.
Based on wxPlotCanvas
Written by K.Hinsen, R. Srinivasan;
Ported to wxPython Harm van der Heijden, feb 1999
Major Additions Gordon Williams Feb. 2003 (g_will@cyberus.ca)
-More style options
-Zooming using mouse "rubber band"
-Scroll left, right
-Grid(graticule)
-Printing, preview, and page set up (margins)
-Axis and title labels
-Cursor xy axis values
-Doc strings and lots of comments
-Optimizations for large number of points
-Legends
Did a lot of work here to speed markers up. Only a factor of 4 improvement
though. Lines are much faster than markers, especially filled markers. Stay
away from circles and triangles unless you only have a few thousand points.
Times for 25,000 points
Line - 0.078 sec
Markers
Square - 0.22 sec
dot - 0.10
circle - 0.87
cross,plus - 0.28
triangle, triangle_down - 0.90
Thanks to Chris Barker for getting this version working on Linux.
Zooming controls with mouse (when enabled):
Left mouse drag - Zoom box.
Left mouse double click - reset zoom.
Right mouse click - zoom out centred on click location.
"""
import wx
import time, string
# Needs Numeric
try:
import Numeric
except:
try:
import numarray as Numeric #if numarray is used it is renamed Numeric
except:
msg= """
This module requires the Numeric or numarray module,
which could not be imported. It probably is not installed
(it's not part of the standard Python distribution). See the
Python site (http://www.python.org) for information on
downloading source or binaries."""
raise ImportError, "Numeric or numarray not found. \n" + msg
try:
True
except NameError:
True = 1==1
False = 1==0
#
# Plotting classes...
#
class PolyPoints:
"""Base Class for lines and markers
- All methods are private.
"""
def __init__(self, points, attr):
self.points = Numeric.array(points)
self.currentScale= (1,1)
self.currentShift= (0,0)
self.scaled = self.points
self.attributes = {}
self.attributes.update(self._attributes)
for name, value in attr.items():
if name not in self._attributes.keys():
raise KeyError, "Style attribute incorrect. Should be one of %s" %self._attributes.keys()
self.attributes[name] = value
def boundingBox(self):
if len(self.points) == 0:
#no curves to draw
#defaults to (-1,-1) and (1,1) but axis can be set in Draw
minXY= Numeric.array([-1,-1])
maxXY= Numeric.array([ 1, 1])
else:
minXY= Numeric.minimum.reduce(self.points)
maxXY= Numeric.maximum.reduce(self.points)
return minXY, maxXY
def scaleAndShift(self, scale=(1,1), shift=(0,0)):
if len(self.points) == 0:
#no curves to draw
return
if (scale is not self.currentScale) or (shift is not self.currentShift):
#update point scaling
self.scaled = scale*self.points+shift
self.currentScale= scale
self.currentShift= shift
#else unchanged use the current scaling
def getLegend(self):
return self.attributes['legend']
class PolyLine(PolyPoints):
"""Class to define line type and style
- All methods except __init__ are private.
"""
_attributes = {'colour': 'black',
'width': 1,
'style': wx.SOLID,
'legend': ''}
def __init__(self, points, **attr):
"""Creates PolyLine object
points - sequence (array, tuple or list) of (x,y) points making up line
**attr - key word attributes
Defaults:
'colour'= 'black', - wxPen Colour any wxNamedColour
'width'= 1, - Pen width
'style'= wxSOLID, - wxPen style
'legend'= '' - Line Legend to display
"""
PolyPoints.__init__(self, points, attr)
def draw(self, dc, printerScale, coord= None):
colour = self.attributes['colour']
width = self.attributes['width'] * printerScale
style= self.attributes['style']
dc.SetPen(wx.Pen(wx.NamedColour(colour), int(width), style))
if coord == None:
dc.DrawLines(self.scaled)
else:
dc.DrawLines(coord) #draw legend line
def getSymExtent(self, printerScale):
"""Width and Height of Marker"""
h= self.attributes['width'] * printerScale
w= 5 * h
return (w,h)
class PolyMarker(PolyPoints):
"""Class to define marker type and style
- All methods except __init__ are private.
"""
_attributes = {'colour': 'black',
'width': 1,
'size': 2,
'fillcolour': None,
'fillstyle': wx.SOLID,
'marker': 'circle',
'legend': ''}
def __init__(self, points, **attr):
"""Creates PolyMarker object
points - sequence (array, tuple or list) of (x,y) points
**attr - key word attributes
Defaults:
'colour'= 'black', - wxPen Colour any wxNamedColour
'width'= 1, - Pen width
'size'= 2, - Marker size
'fillcolour'= same as colour, - wxBrush Colour any wxNamedColour
'fillstyle'= wx.SOLID, - wxBrush fill style (use wxTRANSPARENT for no fill)
'marker'= 'circle' - Marker shape
'legend'= '' - Marker Legend to display
Marker Shapes:
- 'circle'
- 'dot'
- 'square'
- 'triangle'
- 'triangle_down'
- 'cross'
- 'plus'
"""
PolyPoints.__init__(self, points, attr)
def draw(self, dc, printerScale, coord= None):
colour = self.attributes['colour']
width = self.attributes['width'] * printerScale
size = self.attributes['size'] * printerScale
fillcolour = self.attributes['fillcolour']
fillstyle = self.attributes['fillstyle']
marker = self.attributes['marker']
dc.SetPen(wx.Pen(wx.NamedColour(colour),int(width)))
if fillcolour:
dc.SetBrush(wx.Brush(wx.NamedColour(fillcolour),fillstyle))
else:
dc.SetBrush(wx.Brush(wx.NamedColour(colour), fillstyle))
if coord == None:
self._drawmarkers(dc, self.scaled, marker, size)
else:
self._drawmarkers(dc, coord, marker, size) #draw legend marker
def getSymExtent(self, printerScale):
"""Width and Height of Marker"""
s= 5*self.attributes['size'] * printerScale
return (s,s)
def _drawmarkers(self, dc, coords, marker,size=1):
        f = getattr(self, '_' + marker)
f(dc, coords, size)
def _circle(self, dc, coords, size=1):
fact= 2.5*size
wh= 5.0*size
rect= Numeric.zeros((len(coords),4),Numeric.Float)+[0.0,0.0,wh,wh]
rect[:,0:2]= coords-[fact,fact]
dc.DrawEllipseList(rect.astype(Numeric.Int32))
def _dot(self, dc, coords, size=1):
dc.DrawPointList(coords)
def _square(self, dc, coords, size=1):
fact= 2.5*size
wh= 5.0*size
rect= Numeric.zeros((len(coords),4),Numeric.Float)+[0.0,0.0,wh,wh]
rect[:,0:2]= coords-[fact,fact]
dc.DrawRectangleList(rect.astype(Numeric.Int32))
def _triangle(self, dc, coords, size=1):
shape= [(-2.5*size,1.44*size), (2.5*size,1.44*size), (0.0,-2.88*size)]
poly= Numeric.repeat(coords,3)
poly.shape= (len(coords),3,2)
poly += shape
dc.DrawPolygonList(poly.astype(Numeric.Int32))
def _triangle_down(self, dc, coords, size=1):
shape= [(-2.5*size,-1.44*size), (2.5*size,-1.44*size), (0.0,2.88*size)]
poly= Numeric.repeat(coords,3)
poly.shape= (len(coords),3,2)
poly += shape
dc.DrawPolygonList(poly.astype(Numeric.Int32))
def _cross(self, dc, coords, size=1):
fact= 2.5*size
for f in [[-fact,-fact,fact,fact],[-fact,fact,fact,-fact]]:
lines= Numeric.concatenate((coords,coords),axis=1)+f
dc.DrawLineList(lines.astype(Numeric.Int32))
def _plus(self, dc, coords, size=1):
fact= 2.5*size
for f in [[-fact,0,fact,0],[0,-fact,0,fact]]:
lines= Numeric.concatenate((coords,coords),axis=1)+f
dc.DrawLineList(lines.astype(Numeric.Int32))
class PlotGraphics:
"""Container to hold PolyXXX objects and graph labels
- All methods except __init__ are private.
"""
def __init__(self, objects, title='', xLabel='', yLabel= ''):
"""Creates PlotGraphics object
objects - list of PolyXXX objects to make graph
title - title shown at top of graph
xLabel - label shown on x-axis
yLabel - label shown on y-axis
"""
if type(objects) not in [list,tuple]:
raise TypeError, "objects argument should be list or tuple"
self.objects = objects
self.title= title
self.xLabel= xLabel
self.yLabel= yLabel
def boundingBox(self):
p1, p2 = self.objects[0].boundingBox()
for o in self.objects[1:]:
p1o, p2o = o.boundingBox()
p1 = Numeric.minimum(p1, p1o)
p2 = Numeric.maximum(p2, p2o)
return p1, p2
def scaleAndShift(self, scale=(1,1), shift=(0,0)):
for o in self.objects:
o.scaleAndShift(scale, shift)
def setPrinterScale(self, scale):
"""Thickens up lines and markers only for printing"""
self.printerScale= scale
def setXLabel(self, xLabel= ''):
"""Set the X axis label on the graph"""
self.xLabel= xLabel
def setYLabel(self, yLabel= ''):
"""Set the Y axis label on the graph"""
self.yLabel= yLabel
def setTitle(self, title= ''):
"""Set the title at the top of graph"""
self.title= title
def getXLabel(self):
"""Get x axis label string"""
return self.xLabel
def getYLabel(self):
"""Get y axis label string"""
return self.yLabel
def getTitle(self, title= ''):
"""Get the title at the top of graph"""
return self.title
def draw(self, dc):
for o in self.objects:
#t=time.clock() #profile info
o.draw(dc, self.printerScale)
#dt= time.clock()-t
#print o, "time=", dt
def getSymExtent(self, printerScale):
"""Get max width and height of lines and markers symbols for legend"""
symExt = self.objects[0].getSymExtent(printerScale)
for o in self.objects[1:]:
oSymExt = o.getSymExtent(printerScale)
symExt = Numeric.maximum(symExt, oSymExt)
return symExt
def getLegendNames(self):
"""Returns list of legend names"""
lst = [None]*len(self)
for i in range(len(self)):
lst[i]= self.objects[i].getLegend()
return lst
def __len__(self):
return len(self.objects)
def __getitem__(self, item):
return self.objects[item]
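
# Illustrative sketch (not part of the original module): assembling a graph
# from the classes above. A PlotCanvas (defined below) would typically render
# it via its Draw method; assumes the era-appropriate Numeric and wxPython.
def _sample_graphics():
    data = [(x * 0.1, Numeric.sin(x * 0.1)) for x in range(100)]
    line = PolyLine(data, colour='blue', width=1, legend='sin(x)')
    dots = PolyMarker([data[i] for i in range(0, 100, 10)],
                      colour='red', marker='square', legend='samples')
    return PlotGraphics([line, dots], 'Sine', 'x', 'sin(x)')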
#-------------------------------------------------------------------------------
#Main window that you will want to import into your application.
class PlotCanvas(wx.Window):
"""Subclass of a wxWindow to allow simple general plotting
of data with zoom, labels, and automatic axis scaling."""
def __init__(self, parent, id = -1, pos=wx.DefaultPosition,
size=wx.DefaultSize, style= wx.DEFAULT_FRAME_STYLE, name= ""):
"""Constucts a window, which can be a child of a frame, dialog or
any other non-control window"""
wx.Window.__init__(self, parent, id, pos, size, style, name)
self.border = (1,1)
self.SetBackgroundColour(wx.NamedColour("white"))
wx.EVT_PAINT(self, self.OnPaint)
wx.EVT_SIZE(self,self.OnSize)
#Create some mouse events for zooming
wx.EVT_LEFT_DOWN(self, self.OnMouseLeftDown)
wx.EVT_LEFT_UP(self, self.OnMouseLeftUp)
wx.EVT_MOTION(self, self.OnMotion)
wx.EVT_LEFT_DCLICK(self, self.OnMouseDoubleClick)
wx.EVT_RIGHT_DOWN(self, self.OnMouseRightDown)
        # set cursor as cross-hairs
self.SetCursor(wx.CROSS_CURSOR)
#Things for printing
self.print_data = wx.PrintData()
self.print_data.SetPaperId(wx.PAPER_LETTER)
self.print_data.SetOrientation(wx.LANDSCAPE)
self.pageSetupData= wx.PageSetupDialogData()
self.pageSetupData.SetMarginBottomRight((25,25))
self.pageSetupData.SetMarginTopLeft((25,25))
self.pageSetupData.SetPrintData(self.print_data)
self.printerScale = 1
self.parent= parent
#Zooming variables
self._zoomInFactor = 0.5
self._zoomOutFactor = 2
self._zoomCorner1= Numeric.array([0.0, 0.0]) #left mouse down corner
self._zoomCorner2= Numeric.array([0.0, 0.0]) #left mouse up corner
self._zoomEnabled= False
self._hasDragged= False
#Drawing Variables
self.last_draw = None
self._pointScale= 1
self._pointShift= 0
self._xSpec= 'auto'
self._ySpec= 'auto'
self._gridEnabled= False
self._legendEnabled= False
#Fonts
self._fontCache = {}
self._fontSizeAxis= 10
self._fontSizeTitle= 15
self._fontSizeLegend= 7
# OnSize called to make sure the buffer is initialized.
# This might result in OnSize getting called twice on some
# platforms at initialization, but little harm done.
self.OnSize(None) #sets the initial size based on client size
#SaveFile
wx.InitAllImageHandlers()
def SaveFile(self, fileName= ''):
"""Saves the file to the type specified in the extension. If no file
        name is specified a dialog box is provided. Returns True if successful,
otherwise False.
.bmp Save a Windows bitmap file.
.xbm Save an X bitmap file.
.xpm Save an XPM bitmap file.
.png Save a Portable Network Graphics file.
.jpg Save a Joint Photographic Experts Group file.
"""
if string.lower(fileName[-3:]) not in ['bmp','xbm','xpm','png','jpg']:
            dlg1 = wx.FileDialog(self, "Choose a file with extension bmp, xbm, xpm, png, or jpg", ".", "",
"BMP files (*.bmp)|*.bmp|XBM files (*.xbm)|*.xbm|XPM file (*.xpm)|*.xpm|PNG files (*.png)|*.png|JPG files (*.jpg)|*.jpg",
wx.SAVE|wx.OVERWRITE_PROMPT)
try:
while 1:
if dlg1.ShowModal() == wx.ID_OK:
fileName = dlg1.GetPath()
                    #Check for proper extension
if string.lower(fileName[-3:]) not in ['bmp','xbm','xpm','png','jpg']:
dlg2 = wx.MessageDialog(self, 'File name extension\n'
'must be one of\n'
'bmp, xbm, xpm, png, or jpg',
'File Name Error', wx.OK | wx.ICON_ERROR)
try:
dlg2.ShowModal()
finally:
dlg2.Destroy()
else:
break #now save file
else: #exit without saving
return False
finally:
dlg1.Destroy()
#File name has required extension
fType = string.lower(fileName[-3:])
if fType == "bmp":
tp= wx.BITMAP_TYPE_BMP #Save a Windows bitmap file.
elif fType == "xbm":
tp= wx.BITMAP_TYPE_XBM #Save an X bitmap file.
elif fType == "xpm":
tp= wx.BITMAP_TYPE_XPM #Save an XPM bitmap file.
elif fType == "jpg":
tp= wx.BITMAP_TYPE_JPEG #Save a JPG file.
else:
tp= wx.BITMAP_TYPE_PNG #Save a PNG file.
#Save Bitmap
res= self._Buffer.SaveFile(fileName, tp)
return res
def PageSetup(self):
"""Brings up the page setup dialog"""
data = self.pageSetupData
data.SetPrintData(self.print_data)
dlg = wx.PageSetupDialog(self.parent, data)
try:
if dlg.ShowModal() == wx.ID_OK:
data = dlg.GetPageSetupData() #returns wxPageSetupDialogData
#updates page parameters from dialog
self.pageSetupData.SetMarginBottomRight(data.GetMarginBottomRight())
self.pageSetupData.SetMarginTopLeft(data.GetMarginTopLeft())
self.pageSetupData.SetPrintData(data.GetPrintData())
self.print_data=data.GetPrintData() #updates print_data
finally:
dlg.Destroy()
def Printout(self, paper=None):
"""Print current plot."""
if paper != None:
self.print_data.SetPaperId(paper)
pdd = wx.PrintDialogData()
pdd.SetPrintData(self.print_data)
printer = wx.Printer(pdd)
out = plot_printout(self)
print_ok = printer.Print(self.parent, out)
if print_ok:
self.print_data = printer.GetPrintDialogData().GetPrintData()
out.Destroy()
def PrintPreview(self):
"""Print-preview current plot."""
printout = plot_printout(self)
printout2 = plot_printout(self)
self.preview = wx.PrintPreview(printout, printout2, self.print_data)
        if not self.preview.Ok():
            wx.MessageDialog(self, "Print Preview failed.\n" \
                             "Check that default printer is configured\n", \
                             "Print error", wx.OK|wx.CENTRE).ShowModal()
            return
self.preview.SetZoom(30)
#search up tree to find frame instance
frameInst= self
while not isinstance(frameInst, wx.Frame):
frameInst= frameInst.GetParent()
frame = wx.PreviewFrame(self.preview, frameInst, "Preview")
frame.Initialize()
frame.SetPosition(self.GetPosition())
frame.SetSize((500,400))
frame.Centre(wx.BOTH)
frame.Show(True)
def SetFontSizeAxis(self, point= 10):
"""Set the tick and axis label font size (default is 10 point)"""
self._fontSizeAxis= point
def GetFontSizeAxis(self):
"""Get current tick and axis label font size in points"""
return self._fontSizeAxis
def SetFontSizeTitle(self, point= 15):
"""Set Title font size (default is 15 point)"""
self._fontSizeTitle= point
def GetFontSizeTitle(self):
"""Get current Title font size in points"""
return self._fontSizeTitle
def SetFontSizeLegend(self, point= 7):
"""Set Legend font size (default is 7 point)"""
self._fontSizeLegend= point
def GetFontSizeLegend(self):
"""Get current Legend font size in points"""
return self._fontSizeLegend
def SetEnableZoom(self, value):
"""Set True to enable zooming."""
if value not in [True,False]:
raise TypeError, "Value should be True or False"
self._zoomEnabled= value
def GetEnableZoom(self):
"""True if zooming enabled."""
return self._zoomEnabled
def SetEnableGrid(self, value):
"""Set True to enable grid."""
if value not in [True,False]:
raise TypeError, "Value should be True or False"
self._gridEnabled= value
self.Redraw()
def GetEnableGrid(self):
"""True if grid enabled."""
return self._gridEnabled
def SetEnableLegend(self, value):
"""Set True to enable legend."""
if value not in [True,False]:
raise TypeError, "Value should be True or False"
self._legendEnabled= value
self.Redraw()
def GetEnableLegend(self):
"""True if Legend enabled."""
return self._legendEnabled
def Reset(self):
"""Unzoom the plot."""
if self.last_draw is not None:
self.Draw(self.last_draw[0])
def ScrollRight(self, units):
"""Move view right number of axis units."""
if self.last_draw is not None:
graphics, xAxis, yAxis= self.last_draw
xAxis= (xAxis[0]+units, xAxis[1]+units)
self.Draw(graphics,xAxis,yAxis)
def ScrollUp(self, units):
"""Move view up number of axis units."""
if self.last_draw is not None:
graphics, xAxis, yAxis= self.last_draw
yAxis= (yAxis[0]+units, yAxis[1]+units)
self.Draw(graphics,xAxis,yAxis)
def GetXY(self,event):
"""Takes a mouse event and returns the XY user axis values."""
screenPos= Numeric.array( event.GetPosition())
x,y= (screenPos-self._pointShift)/self._pointScale
return x,y
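    # The forward mapping in Draw plots user points as screen = user*scale + shift,
    # so GetXY simply applies the inverse: user = (screen - shift)/scale.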
def SetXSpec(self, type= 'auto'):
"""xSpec- defines x axis type. Can be 'none', 'min' or 'auto'
where:
'none' - shows no axis or tick mark values
'min' - shows min bounding box values
'auto' - rounds axis range to sensible values
"""
self._xSpec= type
def SetYSpec(self, type= 'auto'):
"""ySpec- defines x axis type. Can be 'none', 'min' or 'auto'
where:
'none' - shows no axis or tick mark values
'min' - shows min bounding box values
'auto' - rounds axis range to sensible values
"""
self._ySpec= type
def GetXSpec(self):
"""Returns current XSpec for axis"""
return self._xSpec
def GetYSpec(self):
"""Returns current YSpec for axis"""
return self._ySpec
def GetXMaxRange(self):
"""Returns (minX, maxX) x-axis range for displayed graph"""
graphics= self.last_draw[0]
p1, p2 = graphics.boundingBox() #min, max points of graphics
xAxis = self._axisInterval(self._xSpec, p1[0], p2[0]) #in user units
return xAxis
def GetYMaxRange(self):
"""Returns (minY, maxY) y-axis range for displayed graph"""
graphics= self.last_draw[0]
p1, p2 = graphics.boundingBox() #min, max points of graphics
yAxis = self._axisInterval(self._ySpec, p1[1], p2[1])
return yAxis
def GetXCurrentRange(self):
"""Returns (minX, maxX) x-axis for currently displayed portion of graph"""
return self.last_draw[1]
def GetYCurrentRange(self):
"""Returns (minY, maxY) y-axis for currently displayed portion of graph"""
return self.last_draw[2]
def Draw(self, graphics, xAxis = None, yAxis = None, dc = None):
"""Draw objects in graphics with specified x and y axis.
graphics- instance of PlotGraphics with list of PolyXXX objects
xAxis - tuple with (min, max) axis range to view
yAxis - same as xAxis
dc - drawing context - doesn't have to be specified.
If it's not, the offscreen buffer is used
"""
#check Axis is either tuple or none
if type(xAxis) not in [type(None),tuple]:
raise TypeError, "xAxis should be None or (minX,maxX)"
if type(yAxis) not in [type(None),tuple]:
raise TypeError, "yAxis should be None or (minY,maxY)"
#check case for axis = (a,b) where a==b caused by improper zooms
if xAxis != None:
if xAxis[0] == xAxis[1]:
return
if yAxis != None:
if yAxis[0] == yAxis[1]:
return
if dc == None:
# allows using floats for certain functions
dc = FloatDCWrapper(wx.BufferedDC(wx.ClientDC(self), self._Buffer))
dc.Clear()
dc.BeginDrawing()
#dc.Clear()
        #set font size for everything but title and legend
dc.SetFont(self._getFont(self._fontSizeAxis))
#sizes axis to axis type, create lower left and upper right corners of plot
if xAxis == None or yAxis == None:
#One or both axis not specified in Draw
p1, p2 = graphics.boundingBox() #min, max points of graphics
if xAxis == None:
xAxis = self._axisInterval(self._xSpec, p1[0], p2[0]) #in user units
if yAxis == None:
yAxis = self._axisInterval(self._ySpec, p1[1], p2[1])
#Adjust bounding box for axis spec
p1[0],p1[1] = xAxis[0], yAxis[0] #lower left corner user scale (xmin,ymin)
p2[0],p2[1] = xAxis[1], yAxis[1] #upper right corner user scale (xmax,ymax)
else:
#Both axis specified in Draw
p1= Numeric.array([xAxis[0], yAxis[0]]) #lower left corner user scale (xmin,ymin)
p2= Numeric.array([xAxis[1], yAxis[1]]) #upper right corner user scale (xmax,ymax)
        self.last_draw = (graphics, xAxis, yAxis)       #saves most recent values
#Get ticks and textExtents for axis if required
        if self._xSpec != 'none':
xticks = self._ticks(xAxis[0], xAxis[1])
xTextExtent = dc.GetTextExtent(xticks[-1][1])#w h of x axis text last number on axis
else:
xticks = None
xTextExtent= (0,0) #No text for ticks
        if self._ySpec != 'none':
yticks = self._ticks(yAxis[0], yAxis[1])
yTextExtentBottom= dc.GetTextExtent(yticks[0][1])
yTextExtentTop = dc.GetTextExtent(yticks[-1][1])
yTextExtent= (max(yTextExtentBottom[0],yTextExtentTop[0]),
max(yTextExtentBottom[1],yTextExtentTop[1]))
else:
yticks = None
yTextExtent= (0,0) #No text for ticks
#TextExtents for Title and Axis Labels
        titleWH, xLabelWH, yLabelWH= self._titleLabelsWH(dc, graphics)
#TextExtents for Legend
legendBoxWH, legendSymExt, legendTextExt = self._legendWH(dc, graphics)
#room around graph area
rhsW= max(xTextExtent[0], legendBoxWH[0]) #use larger of number width or legend width
lhsW= yTextExtent[0]+ yLabelWH[1]
bottomH= max(xTextExtent[1], yTextExtent[1]/2.)+ xLabelWH[1]
topH= yTextExtent[1]/2. + titleWH[1]
textSize_scale= Numeric.array([rhsW+lhsW,bottomH+topH]) #make plot area smaller by text size
textSize_shift= Numeric.array([lhsW, bottomH]) #shift plot area by this amount
#drawing title and labels text
dc.SetFont(self._getFont(self._fontSizeTitle))
titlePos= (self.plotbox_origin[0]+ lhsW + (self.plotbox_size[0]-lhsW-rhsW)/2.- titleWH[0]/2.,
self.plotbox_origin[1]- self.plotbox_size[1])
dc.DrawText(graphics.getTitle(),titlePos[0],titlePos[1])
dc.SetFont(self._getFont(self._fontSizeAxis))
xLabelPos= (self.plotbox_origin[0]+ lhsW + (self.plotbox_size[0]-lhsW-rhsW)/2.- xLabelWH[0]/2.,
self.plotbox_origin[1]- xLabelWH[1])
dc.DrawText(graphics.getXLabel(),xLabelPos[0],xLabelPos[1])
yLabelPos= (self.plotbox_origin[0],
self.plotbox_origin[1]- bottomH- (self.plotbox_size[1]-bottomH-topH)/2.+ yLabelWH[0]/2.)
if graphics.getYLabel(): #bug fix for Linux
dc.DrawRotatedText(graphics.getYLabel(),yLabelPos[0],yLabelPos[1],90)
        #drawing legend markers and text
if self._legendEnabled:
self._drawLegend(dc,graphics,rhsW,topH,legendBoxWH, legendSymExt, legendTextExt)
#allow for scaling and shifting plotted points
scale = (self.plotbox_size-textSize_scale) / (p2-p1)* Numeric.array((1,-1))
shift = -p1*scale + self.plotbox_origin + textSize_shift * Numeric.array((1,-1))
self._pointScale= scale #make available for mouse events
self._pointShift= shift
self._drawAxes(dc, p1, p2, scale, shift, xticks, yticks)
graphics.scaleAndShift(scale, shift)
graphics.setPrinterScale(self.printerScale) #thicken up lines and markers if printing
#set clipping area so drawing does not occur outside axis box
ptx,pty,rectWidth,rectHeight= self._point2ClientCoord(p1, p2)
dc.SetClippingRegion(ptx,pty,rectWidth,rectHeight)
#Draw the lines and markers
#start = time.clock()
graphics.draw(dc)
#print "entire graphics drawing took: %f second"%(time.clock() - start)
#remove the clipping region
dc.DestroyClippingRegion()
dc.EndDrawing()
def Redraw(self, dc= None):
"""Redraw the existing plot."""
if self.last_draw is not None:
graphics, xAxis, yAxis= self.last_draw
self.Draw(graphics,xAxis,yAxis,dc)
def Clear(self):
"""Erase the window."""
dc = wx.BufferedDC(wx.ClientDC(self), self._Buffer)
dc.Clear()
self.last_draw = None
def Zoom(self, Center, Ratio):
""" Zoom on the plot
Centers on the X,Y coords given in Center
Zooms by the Ratio = (Xratio, Yratio) given
"""
x,y = Center
if self.last_draw != None:
(graphics, xAxis, yAxis) = self.last_draw
w = (xAxis[1] - xAxis[0]) * Ratio[0]
h = (yAxis[1] - yAxis[0]) * Ratio[1]
xAxis = ( x - w/2, x + w/2 )
yAxis = ( y - h/2, y + h/2 )
self.Draw(graphics, xAxis, yAxis)
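    # Example: Zoom((0., 0.), (0.5, 0.5)) halves both axis spans around the
    # origin, i.e. zooms in by a factor of two on the current view.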
# event handlers **********************************
def OnMotion(self, event):
if self._zoomEnabled and event.LeftIsDown():
if self._hasDragged:
self._drawRubberBand(self._zoomCorner1, self._zoomCorner2) #remove old
else:
self._hasDragged= True
self._zoomCorner2[0], self._zoomCorner2[1] = self.GetXY(event)
self._drawRubberBand(self._zoomCorner1, self._zoomCorner2) #add new
def OnMouseLeftDown(self,event):
self._zoomCorner1[0], self._zoomCorner1[1]= self.GetXY(event)
def OnMouseLeftUp(self, event):
if self._zoomEnabled:
if self._hasDragged == True:
self._drawRubberBand(self._zoomCorner1, self._zoomCorner2) #remove old
self._zoomCorner2[0], self._zoomCorner2[1]= self.GetXY(event)
self._hasDragged = False #reset flag
minX, minY= Numeric.minimum( self._zoomCorner1, self._zoomCorner2)
maxX, maxY= Numeric.maximum( self._zoomCorner1, self._zoomCorner2)
if self.last_draw != None:
self.Draw(self.last_draw[0], xAxis = (minX,maxX), yAxis = (minY,maxY), dc = None)
#else: # A box has not been drawn, zoom in on a point
        ## this interfered with the double click, so I've disabled it.
# X,Y = self.GetXY(event)
# self.Zoom( (X,Y), (self._zoomInFactor,self._zoomInFactor) )
def OnMouseDoubleClick(self,event):
if self._zoomEnabled:
self.Reset()
def OnMouseRightDown(self,event):
if self._zoomEnabled:
X,Y = self.GetXY(event)
self.Zoom( (X,Y), (self._zoomOutFactor, self._zoomOutFactor) )
def OnPaint(self, event):
# All that is needed here is to draw the buffer to screen
dc = wx.BufferedPaintDC(self, self._Buffer)
def OnSize(self,event):
# The Buffer init is done here, to make sure the buffer is always
# the same size as the Window
Size = self.GetClientSizeTuple()
# Make new offscreen bitmap: this bitmap will always have the
# current drawing in it, so it can be used to save the image to
# a file, or whatever.
self._Buffer = wx.EmptyBitmap(Size[0],Size[1])
self._setSize()
if self.last_draw is None:
self.Clear()
else:
graphics, xSpec, ySpec = self.last_draw
self.Draw(graphics,xSpec,ySpec)
#Private Methods **************************************************
def _setSize(self, width=None, height=None):
"""DC width and height."""
if width == None:
(self.width,self.height) = self.GetClientSizeTuple()
else:
self.width, self.height= width,height
self.plotbox_size = 0.97*Numeric.array([self.width, self.height])
xo = 0.5*(self.width-self.plotbox_size[0])
yo = self.height-0.5*(self.height-self.plotbox_size[1])
self.plotbox_origin = Numeric.array([xo, yo])
def _setPrinterScale(self, scale):
"""Used to thicken lines and increase marker size for print out."""
#line thickness on printer is very thin at 600 dot/in. Markers small
self.printerScale= scale
def _printDraw(self, printDC):
"""Used for printing."""
if self.last_draw != None:
graphics, xSpec, ySpec= self.last_draw
self.Draw(graphics,xSpec,ySpec,printDC)
def _drawLegend(self,dc,graphics,rhsW,topH,legendBoxWH, legendSymExt, legendTextExt):
"""Draws legend symbols and text"""
#top right hand corner of graph box is ref corner
trhc= self.plotbox_origin+ (self.plotbox_size-[rhsW,topH])*[1,-1]
legendLHS= .091* legendBoxWH[0] #border space between legend sym and graph box
lineHeight= max(legendSymExt[1], legendTextExt[1]) * 1.1 #1.1 used as space between lines
dc.SetFont(self._getFont(self._fontSizeLegend))
for i in range(len(graphics)):
o = graphics[i]
s= i*lineHeight
if isinstance(o,PolyMarker):
#draw marker with legend
pnt= (trhc[0]+legendLHS+legendSymExt[0]/2., trhc[1]+s+lineHeight/2.)
o.draw(dc, self.printerScale, coord= Numeric.array([pnt]))
elif isinstance(o,PolyLine):
#draw line with legend
pnt1= (trhc[0]+legendLHS, trhc[1]+s+lineHeight/2.)
pnt2= (trhc[0]+legendLHS+legendSymExt[0], trhc[1]+s+lineHeight/2.)
o.draw(dc, self.printerScale, coord= Numeric.array([pnt1,pnt2]))
else:
raise TypeError, "object is neither PolyMarker or PolyLine instance"
#draw legend txt
pnt= (trhc[0]+legendLHS+legendSymExt[0], trhc[1]+s+lineHeight/2.-legendTextExt[1]/2)
dc.DrawText(o.getLegend(),pnt[0],pnt[1])
dc.SetFont(self._getFont(self._fontSizeAxis)) #reset
    def _titleLabelsWH(self, dc, graphics):
        """Measures the title and axis labels; returns the (width, height) of each"""
#TextExtents for Title and Axis Labels
dc.SetFont(self._getFont(self._fontSizeTitle))
title= graphics.getTitle()
titleWH= dc.GetTextExtent(title)
dc.SetFont(self._getFont(self._fontSizeAxis))
xLabel, yLabel= graphics.getXLabel(),graphics.getYLabel()
xLabelWH= dc.GetTextExtent(xLabel)
yLabelWH= dc.GetTextExtent(yLabel)
return titleWH, xLabelWH, yLabelWH
def _legendWH(self, dc, graphics):
"""Returns the size in screen units for legend box"""
if self._legendEnabled != True:
legendBoxWH= symExt= txtExt= (0,0)
else:
#find max symbol size
symExt= graphics.getSymExtent(self.printerScale)
#find max legend text extent
dc.SetFont(self._getFont(self._fontSizeLegend))
txtList= graphics.getLegendNames()
txtExt= dc.GetTextExtent(txtList[0])
for txt in graphics.getLegendNames()[1:]:
txtExt= Numeric.maximum(txtExt,dc.GetTextExtent(txt))
maxW= symExt[0]+txtExt[0]
maxH= max(symExt[1],txtExt[1])
#padding .1 for lhs of legend box and space between lines
maxW= maxW* 1.1
maxH= maxH* 1.1 * len(txtList)
dc.SetFont(self._getFont(self._fontSizeAxis))
legendBoxWH= (maxW,maxH)
return (legendBoxWH, symExt, txtExt)
def _drawRubberBand(self, corner1, corner2):
"""Draws/erases rect box from corner1 to corner2"""
ptx,pty,rectWidth,rectHeight= self._point2ClientCoord(corner1, corner2)
#draw rectangle
dc = wx.ClientDC( self )
dc.BeginDrawing()
dc.SetPen(wx.Pen(wx.BLACK))
dc.SetBrush(wx.Brush( wx.WHITE, wx.TRANSPARENT ) )
dc.SetLogicalFunction(wx.INVERT)
dc.DrawRectangle( ptx,pty,rectWidth,rectHeight)
dc.SetLogicalFunction(wx.COPY)
dc.EndDrawing()
def _getFont(self,size):
"""Take font size, adjusts if printing and returns wxFont"""
s = size*self.printerScale
of = self.GetFont()
#Linux speed up to get font from cache rather than X font server
key = (int(s), of.GetFamily (), of.GetStyle (), of.GetWeight ())
font = self._fontCache.get (key, None)
if font:
return font # yeah! cache hit
else:
font = wx.Font(int(s), of.GetFamily(), of.GetStyle(), of.GetWeight())
self._fontCache[key] = font
return font
def _point2ClientCoord(self, corner1, corner2):
"""Converts user point coords to client screen int coords x,y,width,height"""
c1= Numeric.array(corner1)
c2= Numeric.array(corner2)
#convert to screen coords
pt1= c1*self._pointScale+self._pointShift
pt2= c2*self._pointScale+self._pointShift
#make height and width positive
pul= Numeric.minimum(pt1,pt2) #Upper left corner
plr= Numeric.maximum(pt1,pt2) #Lower right corner
rectWidth, rectHeight= plr-pul
ptx,pty= pul
return int(ptx),int(pty),int(rectWidth),int(rectHeight) #return ints
def _axisInterval(self, spec, lower, upper):
"""Returns sensible axis range for given spec"""
if spec == 'none' or spec == 'min':
if lower == upper:
return lower-0.5, upper+0.5
else:
return lower, upper
elif spec == 'auto':
range = upper-lower
if range == 0.:
return lower-0.5, upper+0.5
log = Numeric.log10(range)
power = Numeric.floor(log)
fraction = log-power
if fraction <= 0.05:
power = power-1
grid = 10.**power
lower = lower - lower % grid
mod = upper % grid
if mod != 0:
upper = upper - mod + grid
return lower, upper
elif type(spec) == type(()):
lower, upper = spec
if lower <= upper:
return lower, upper
else:
return upper, lower
else:
raise ValueError, str(spec) + ': illegal axis specification'
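    # Worked example for spec='auto': lower=0.3, upper=9.7 gives range=9.4,
    # power=floor(log10(9.4))=0 and fraction~0.97>0.05, so grid=10.**0=1.0;
    # lower snaps down to 0.0 and upper rounds up to 10.0 -> (0.0, 10.0).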
def _drawAxes(self, dc, p1, p2, scale, shift, xticks, yticks):
penWidth= self.printerScale #increases thickness for printing only
dc.SetPen(wx.Pen(wx.NamedColour('BLACK'),int(penWidth)))
#set length of tick marks--long ones make grid
if self._gridEnabled:
x,y,width,height= self._point2ClientCoord(p1,p2)
yTickLength= width/2.0 +1
xTickLength= height/2.0 +1
else:
yTickLength= 3 * self.printerScale #lengthens lines for printing
xTickLength= 3 * self.printerScale
        if self._xSpec != 'none':
lower, upper = p1[0],p2[0]
text = 1
for y, d in [(p1[1], -xTickLength), (p2[1], xTickLength)]: #miny, maxy and tick lengths
a1 = scale*Numeric.array([lower, y])+shift
a2 = scale*Numeric.array([upper, y])+shift
dc.DrawLine(a1[0],a1[1],a2[0],a2[1]) #draws upper and lower axis line
for x, label in xticks:
pt = scale*Numeric.array([x, y])+shift
dc.DrawLine(pt[0],pt[1],pt[0],pt[1] + d) #draws tick mark d units
if text:
dc.DrawText(label,pt[0],pt[1])
text = 0 #axis values not drawn on top side
        if self._ySpec != 'none':
lower, upper = p1[1],p2[1]
text = 1
h = dc.GetCharHeight()
for x, d in [(p1[0], -yTickLength), (p2[0], yTickLength)]:
a1 = scale*Numeric.array([x, lower])+shift
a2 = scale*Numeric.array([x, upper])+shift
dc.DrawLine(a1[0],a1[1],a2[0],a2[1])
for y, label in yticks:
pt = scale*Numeric.array([x, y])+shift
dc.DrawLine(pt[0],pt[1],pt[0]-d,pt[1])
if text:
dc.DrawText(label,pt[0]-dc.GetTextExtent(label)[0],
pt[1]-0.5*h)
text = 0 #axis values not drawn on right side
def _ticks(self, lower, upper):
ideal = (upper-lower)/7.
log = Numeric.log10(ideal)
power = Numeric.floor(log)
fraction = log-power
factor = 1.
error = fraction
for f, lf in self._multiples:
e = Numeric.fabs(fraction-lf)
if e < error:
error = e
factor = f
grid = factor * 10.**power
if power > 4 or power < -4:
format = '%+7.1e'
elif power >= 0:
digits = max(1, int(power))
format = '%' + `digits`+'.0f'
else:
digits = -int(power)
format = '%'+`digits+2`+'.'+`digits`+'f'
ticks = []
t = -grid*Numeric.floor(-lower/grid)
while t <= upper:
ticks.append( (t, format % (t,)) )
t = t + grid
return ticks
_multiples = [(2., Numeric.log10(2.)), (5., Numeric.log10(5.))]
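    # Worked example for _ticks(0., 3.): ideal=3/7.~0.429, power=-1,
    # fraction~0.632 is closest to log10(5), so grid=0.5 and format='%3.1f',
    # producing ticks (0.0,'0.0'), (0.5,'0.5'), ... , (3.0,'3.0').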
#-------------------------------------------------------------------------------
#Used to lay out the printer page
class plot_printout(wx.Printout):
"""Controls how the plot is made in printing and previewing"""
# Do not change method names in this class,
# we have to override wxPrintout methods here!
def __init__(self, graph):
"""graph is instance of plotCanvas to be printed or previewed"""
wx.Printout.__init__(self)
self.graph = graph
def HasPage(self, page):
if page == 1:
return True
else:
return False
def GetPageInfo(self):
return (0, 1, 1, 1) #disable page numbers
def OnPrintPage(self, page):
dc = FloatDCWrapper(self.GetDC()) # allows using floats for certain functions
## print "PPI Printer",self.GetPPIPrinter()
## print "PPI Screen", self.GetPPIScreen()
## print "DC GetSize", dc.GetSize()
## print "GetPageSizePixels", self.GetPageSizePixels()
#Note PPIScreen does not give the correct number
        #Calculate everything for printer and then scale for preview
PPIPrinter= self.GetPPIPrinter() #printer dots/inch (w,h)
#PPIScreen= self.GetPPIScreen() #screen dots/inch (w,h)
dcSize= dc.GetSizeTuple() #DC size
        pageSize= self.GetPageSizePixels() #page size in terms of pixels
clientDcSize= self.graph.GetClientSizeTuple()
#find what the margins are (mm)
margLeftSize,margTopSize= self.graph.pageSetupData.GetMarginTopLeft()
margRightSize, margBottomSize= self.graph.pageSetupData.GetMarginBottomRight()
#calculate offset and scale for dc
pixLeft= margLeftSize*PPIPrinter[0]/25.4 #mm*(dots/in)/(mm/in)
pixRight= margRightSize*PPIPrinter[0]/25.4
pixTop= margTopSize*PPIPrinter[1]/25.4
pixBottom= margBottomSize*PPIPrinter[1]/25.4
plotAreaW= pageSize[0]-(pixLeft+pixRight)
plotAreaH= pageSize[1]-(pixTop+pixBottom)
#ratio offset and scale to screen size if preview
if self.IsPreview():
ratioW= float(dcSize[0])/pageSize[0]
ratioH= float(dcSize[1])/pageSize[1]
pixLeft *= ratioW
pixTop *= ratioH
plotAreaW *= ratioW
plotAreaH *= ratioH
#rescale plot to page or preview plot area
self.graph._setSize(plotAreaW,plotAreaH)
#Set offset and scale
dc.SetDeviceOrigin(pixLeft,pixTop)
#Thicken up pens and increase marker size for printing
ratioW= float(plotAreaW)/clientDcSize[0]
ratioH= float(plotAreaH)/clientDcSize[1]
aveScale= (ratioW+ratioH)/2
        self.graph._setPrinterScale(aveScale) #thickens up pens for printing
self.graph._printDraw(dc)
#rescale back to original
self.graph._setSize()
self.graph._setPrinterScale(1)
return True
# Hack to allow plotting real numbers for the methods listed.
# All others passed directly to DC.
# For Drawing it is used as
# dc = FloatDCWrapper(wx.BufferedDC(wx.ClientDC(self), self._Buffer))
# For printing it is used as
# dc = FloatDCWrapper(self.GetDC())
class FloatDCWrapper:
def __init__(self, aDC):
self.theDC = aDC
def DrawLine(self, x1,y1,x2,y2):
self.theDC.DrawLine(int(x1),int(y1),int(x2),int(y2))
def DrawText(self, txt, x, y):
self.theDC.DrawText(txt, int(x), int(y))
def DrawRotatedText(self, txt, x, y, angle):
self.theDC.DrawRotatedText(txt, int(x), int(y), angle)
def SetClippingRegion(self, x, y, width, height):
self.theDC.SetClippingRegion(int(x), int(y), int(width), int(height))
def SetDeviceOrigin(self, x, y):
self.theDC.SetDeviceOrigin(int(x), int(y))
def __getattr__(self, name):
return getattr(self.theDC, name)
#---------------------------------------------------------------------------
# if running standalone...
#
# ...a sample implementation using the above
#
def __test():
from wxPython.lib.dialogs import wxScrolledMessageDialog
def _draw1Objects():
# 100 points sin function, plotted as green circles
data1 = 2.*Numeric.pi*Numeric.arange(200)/200.
data1.shape = (100, 2)
data1[:,1] = Numeric.sin(data1[:,0])
markers1 = PolyMarker(data1, legend='Green Markers', colour='green', marker='circle',size=1)
# 50 points cos function, plotted as red line
data1 = 2.*Numeric.pi*Numeric.arange(100)/100.
data1.shape = (50,2)
data1[:,1] = Numeric.cos(data1[:,0])
lines = PolyLine(data1, legend= 'Red Line', colour='red')
# A few more points...
pi = Numeric.pi
markers2 = PolyMarker([(0., 0.), (pi/4., 1.), (pi/2, 0.),
(3.*pi/4., -1)], legend='Cross Legend', colour='blue',
marker='cross')
return PlotGraphics([markers1, lines, markers2],"Graph Title", "X Axis", "Y Axis")
def _draw2Objects():
# 100 points sin function, plotted as green dots
data1 = 2.*Numeric.pi*Numeric.arange(200)/200.
data1.shape = (100, 2)
data1[:,1] = Numeric.sin(data1[:,0])
line1 = PolyLine(data1, legend='Green Line', colour='green', width=6, style=wx.DOT)
# 50 points cos function, plotted as red dot-dash
data1 = 2.*Numeric.pi*Numeric.arange(100)/100.
data1.shape = (50,2)
data1[:,1] = Numeric.cos(data1[:,0])
line2 = PolyLine(data1, legend='Red Line', colour='red', width=3, style= wx.DOT_DASH)
# A few more points...
pi = Numeric.pi
markers1 = PolyMarker([(0., 0.), (pi/4., 1.), (pi/2, 0.),
(3.*pi/4., -1)], legend='Cross Hatch Square', colour='blue', width= 3, size= 6,
fillcolour= 'red', fillstyle= wx.CROSSDIAG_HATCH,
marker='square')
return PlotGraphics([markers1, line1, line2], "Big Markers with Different Line Styles")
def _draw3Objects():
markerList= ['circle', 'dot', 'square', 'triangle', 'triangle_down',
'cross', 'plus', 'circle']
m=[]
for i in range(len(markerList)):
m.append(PolyMarker([(2*i+.5,i+.5)], legend=markerList[i], colour='blue',
marker=markerList[i]))
return PlotGraphics(m, "Selection of Markers", "Minimal Axis", "No Axis")
def _draw4Objects():
# 25,000 point line
data1 = Numeric.arange(5e5,1e6,10)
data1.shape = (25000, 2)
line1 = PolyLine(data1, legend='Wide Line', colour='green', width=5)
# A few more points...
markers2 = PolyMarker(data1, legend='Square', colour='blue',
marker='square')
return PlotGraphics([line1, markers2], "25,000 Points", "Value X", "")
def _draw5Objects():
# Empty graph with axis defined but no points/lines
points=[]
line1 = PolyLine(points, legend='Wide Line', colour='green', width=5)
return PlotGraphics([line1], "Empty Plot With Just Axes", "Value X", "Value Y")
class AppFrame(wx.Frame):
def __init__(self, parent, id, title):
wx.Frame.__init__(self, parent, id, title,
wx.DefaultPosition, wx.Size(600, 400))
# Now Create the menu bar and items
self.mainmenu = wx.MenuBar()
menu = wx.Menu()
menu.Append(200, 'Page Setup...', 'Setup the printer page')
wx.EVT_MENU(self, 200, self.OnFilePageSetup)
menu.Append(201, 'Print Preview...', 'Show the current plot on page')
wx.EVT_MENU(self, 201, self.OnFilePrintPreview)
menu.Append(202, 'Print...', 'Print the current plot')
wx.EVT_MENU(self, 202, self.OnFilePrint)
menu.Append(203, 'Save Plot...', 'Save current plot')
wx.EVT_MENU(self, 203, self.OnSaveFile)
menu.Append(205, 'E&xit', 'Enough of this already!')
wx.EVT_MENU(self, 205, self.OnFileExit)
self.mainmenu.Append(menu, '&File')
menu = wx.Menu()
menu.Append(206, 'Draw1', 'Draw plots1')
wx.EVT_MENU(self,206,self.OnPlotDraw1)
menu.Append(207, 'Draw2', 'Draw plots2')
wx.EVT_MENU(self,207,self.OnPlotDraw2)
menu.Append(208, 'Draw3', 'Draw plots3')
wx.EVT_MENU(self,208,self.OnPlotDraw3)
menu.Append(209, 'Draw4', 'Draw plots4')
wx.EVT_MENU(self,209,self.OnPlotDraw4)
menu.Append(210, 'Draw5', 'Draw plots5')
wx.EVT_MENU(self,210,self.OnPlotDraw5)
menu.Append(211, '&Redraw', 'Redraw plots')
wx.EVT_MENU(self,211,self.OnPlotRedraw)
menu.Append(212, '&Clear', 'Clear canvas')
wx.EVT_MENU(self,212,self.OnPlotClear)
menu.Append(213, '&Scale', 'Scale canvas')
wx.EVT_MENU(self,213,self.OnPlotScale)
menu.Append(214, 'Enable &Zoom', 'Enable Mouse Zoom', kind=wx.ITEM_CHECK)
wx.EVT_MENU(self,214,self.OnEnableZoom)
menu.Append(215, 'Enable &Grid', 'Turn on Grid', kind=wx.ITEM_CHECK)
wx.EVT_MENU(self,215,self.OnEnableGrid)
menu.Append(220, 'Enable &Legend', 'Turn on Legend', kind=wx.ITEM_CHECK)
wx.EVT_MENU(self,220,self.OnEnableLegend)
menu.Append(225, 'Scroll Up 1', 'Move View Up 1 Unit')
wx.EVT_MENU(self,225,self.OnScrUp)
menu.Append(230, 'Scroll Rt 2', 'Move View Right 2 Units')
wx.EVT_MENU(self,230,self.OnScrRt)
menu.Append(235, '&Plot Reset', 'Reset to original plot')
wx.EVT_MENU(self,235,self.OnReset)
self.mainmenu.Append(menu, '&Plot')
menu = wx.Menu()
menu.Append(300, '&About', 'About this thing...')
wx.EVT_MENU(self, 300, self.OnHelpAbout)
self.mainmenu.Append(menu, '&Help')
self.SetMenuBar(self.mainmenu)
# A status bar to tell people what's happening
self.CreateStatusBar(1)
self.client = PlotCanvas(self)
#Create mouse event for showing cursor coords in status bar
wx.EVT_LEFT_DOWN(self.client, self.OnMouseLeftDown)
def OnMouseLeftDown(self,event):
s= "Left Mouse Down at Point: (%.4f, %.4f)" % self.client.GetXY(event)
self.SetStatusText(s)
event.Skip()
def OnFilePageSetup(self, event):
self.client.PageSetup()
def OnFilePrintPreview(self, event):
self.client.PrintPreview()
def OnFilePrint(self, event):
self.client.Printout()
def OnSaveFile(self, event):
self.client.SaveFile()
def OnFileExit(self, event):
self.Close()
def OnPlotDraw1(self, event):
self.resetDefaults()
self.client.Draw(_draw1Objects())
def OnPlotDraw2(self, event):
self.resetDefaults()
self.client.Draw(_draw2Objects())
def OnPlotDraw3(self, event):
self.resetDefaults()
self.client.SetFont(wx.Font(10,wx.SCRIPT,wx.NORMAL,wx.NORMAL))
self.client.SetFontSizeAxis(20)
self.client.SetFontSizeLegend(12)
self.client.SetXSpec('min')
self.client.SetYSpec('none')
self.client.Draw(_draw3Objects())
def OnPlotDraw4(self, event):
self.resetDefaults()
drawObj= _draw4Objects()
self.client.Draw(drawObj)
## #profile
## start = time.clock()
## for x in range(10):
## self.client.Draw(drawObj)
## print "10 plots of Draw4 took: %f sec."%(time.clock() - start)
## #profile end
def OnPlotDraw5(self, event):
#Empty plot with just axes
self.resetDefaults()
drawObj= _draw5Objects()
#make the axis X= (0,5), Y=(0,10)
#(default with None is X= (-1,1), Y= (-1,1))
self.client.Draw(drawObj, xAxis= (0,5), yAxis= (0,10))
def OnPlotRedraw(self,event):
self.client.Redraw()
def OnPlotClear(self,event):
self.client.Clear()
def OnPlotScale(self, event):
if self.client.last_draw != None:
graphics, xAxis, yAxis= self.client.last_draw
self.client.Draw(graphics,(1,3.05),(0,1))
def OnEnableZoom(self, event):
self.client.SetEnableZoom(event.IsChecked())
def OnEnableGrid(self, event):
self.client.SetEnableGrid(event.IsChecked())
def OnEnableLegend(self, event):
self.client.SetEnableLegend(event.IsChecked())
def OnScrUp(self, event):
self.client.ScrollUp(1)
def OnScrRt(self,event):
self.client.ScrollRight(2)
def OnReset(self,event):
self.client.Reset()
def OnHelpAbout(self, event):
about = wxScrolledMessageDialog(self, __doc__, "About...")
about.ShowModal()
def resetDefaults(self):
"""Just to reset the fonts back to the PlotCanvas defaults"""
self.client.SetFont(wx.Font(10,wx.SWISS,wx.NORMAL,wx.NORMAL))
self.client.SetFontSizeAxis(10)
self.client.SetFontSizeLegend(7)
self.client.SetXSpec('auto')
self.client.SetYSpec('auto')
class MyApp(wx.App):
def OnInit(self):
frame = AppFrame(None, -1, "wxPlotCanvas")
frame.Show(True)
self.SetTopWindow(frame)
return True
app = MyApp(0)
app.MainLoop()
if __name__ == '__main__':
__test()
| nagyistoce/devide | external/wxPyPlot.py | Python | bsd-3-clause | 57,752 | 0.015844 |
#!/usr/bin/env python
# Copyright 2014 the V8 project authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import unittest
from pool import Pool
def Run(x):
if x == 10:
raise Exception("Expected exception triggered by test.")
return x
class PoolTest(unittest.TestCase):
def testNormal(self):
results = set()
pool = Pool(3)
for result in pool.imap_unordered(Run, [[x] for x in range(0, 10)]):
results.add(result)
self.assertEquals(set(range(0, 10)), results)
def testException(self):
results = set()
pool = Pool(3)
for result in pool.imap_unordered(Run, [[x] for x in range(0, 12)]):
# Item 10 will not appear in results due to an internal exception.
results.add(result)
expect = set(range(0, 12))
expect.remove(10)
self.assertEquals(expect, results)
def testAdd(self):
results = set()
pool = Pool(3)
for result in pool.imap_unordered(Run, [[x] for x in range(0, 10)]):
results.add(result)
if result < 30:
pool.add([result + 20])
self.assertEquals(set(range(0, 10) + range(20, 30) + range(40, 50)),
results)
| CTSRD-SOAAP/chromium-42.0.2311.135 | v8/tools/testrunner/local/pool_unittest.py | Python | bsd-3-clause | 1,222 | 0.00982 |
import logging
import urllib
from typing import Any, Dict, List, Mapping, Tuple, Union
import orjson
import requests
from django.conf import settings
from django.forms.models import model_to_dict
from django.utils.translation import gettext as _
from analytics.models import InstallationCount, RealmCount
from version import ZULIP_VERSION
from zerver.lib.exceptions import JsonableError
from zerver.lib.export import floatify_datetime_fields
from zerver.lib.outgoing_http import OutgoingSession
from zerver.models import RealmAuditLog
class PushBouncerSession(OutgoingSession):
def __init__(self) -> None:
super().__init__(role="push_bouncer", timeout=30)
class PushNotificationBouncerException(Exception):
pass
class PushNotificationBouncerRetryLaterError(JsonableError):
http_status_code = 502
def send_to_push_bouncer(
method: str,
endpoint: str,
post_data: Union[bytes, Mapping[str, Union[str, int, None, bytes]]],
extra_headers: Mapping[str, str] = {},
) -> Dict[str, object]:
"""While it does actually send the notice, this function has a lot of
code and comments around error handling for the push notifications
bouncer. There are several classes of failures, each with its own
potential solution:
* Network errors with requests.request. We raise an exception to signal
it to the callers.
* 500 errors from the push bouncer or other unexpected responses;
we don't try to parse the response, but do make clear the cause.
* 400 errors from the push bouncer. Here there are 2 categories:
Our server failed to connect to the push bouncer (should throw)
vs. client-side errors like an invalid token.
"""
assert settings.PUSH_NOTIFICATION_BOUNCER_URL is not None
url = urllib.parse.urljoin(
settings.PUSH_NOTIFICATION_BOUNCER_URL, "/api/v1/remotes/" + endpoint
)
api_auth = requests.auth.HTTPBasicAuth(settings.ZULIP_ORG_ID, settings.ZULIP_ORG_KEY)
headers = {"User-agent": f"ZulipServer/{ZULIP_VERSION}"}
headers.update(extra_headers)
try:
res = PushBouncerSession().request(
method, url, data=post_data, auth=api_auth, verify=True, headers=headers
)
except (
requests.exceptions.Timeout,
requests.exceptions.SSLError,
requests.exceptions.ConnectionError,
) as e:
raise PushNotificationBouncerRetryLaterError(
f"{e.__class__.__name__} while trying to connect to push notification bouncer"
)
if res.status_code >= 500:
# 500s should be resolved by the people who run the push
# notification bouncer service, and they'll get an appropriate
# error notification from the server. We raise an exception to signal
# to the callers that the attempt failed and they can retry.
error_msg = "Received 500 from push notification bouncer"
logging.warning(error_msg)
raise PushNotificationBouncerRetryLaterError(error_msg)
elif res.status_code >= 400:
# If JSON parsing errors, just let that exception happen
result_dict = orjson.loads(res.content)
msg = result_dict["msg"]
if "code" in result_dict and result_dict["code"] == "INVALID_ZULIP_SERVER":
# Invalid Zulip server credentials should email this server's admins
raise PushNotificationBouncerException(
_("Push notifications bouncer error: {}").format(msg)
)
else:
# But most other errors coming from the push bouncer
# server are client errors (e.g. never-registered token)
# and should be handled as such.
raise JsonableError(msg)
elif res.status_code != 200:
# Anything else is unexpected and likely suggests a bug in
# this version of Zulip, so we throw an exception that will
# email the server admins.
raise PushNotificationBouncerException(
f"Push notification bouncer returned unexpected status code {res.status_code}"
)
# If we don't throw an exception, it's a successful bounce!
return orjson.loads(res.content)
def send_json_to_push_bouncer(
method: str, endpoint: str, post_data: Mapping[str, object]
) -> Dict[str, object]:
return send_to_push_bouncer(
method,
endpoint,
orjson.dumps(post_data),
extra_headers={"Content-type": "application/json"},
)
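# Usage sketch (hypothetical endpoint and payload, shown only to illustrate
# the calling convention; real endpoints are joined under /api/v1/remotes/):
#
#     send_json_to_push_bouncer("POST", "server/example", {"version": ZULIP_VERSION})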
REALMAUDITLOG_PUSHED_FIELDS = [
"id",
"realm",
"event_time",
"backfilled",
"extra_data",
"event_type",
]
def build_analytics_data(
realm_count_query: Any, installation_count_query: Any, realmauditlog_query: Any
) -> Tuple[List[Dict[str, Any]], List[Dict[str, Any]], List[Dict[str, Any]]]:
    # We limit the batch size on the client side to avoid OOM kills, timeouts, etc.
MAX_CLIENT_BATCH_SIZE = 10000
data = {}
data["analytics_realmcount"] = [
model_to_dict(row) for row in realm_count_query.order_by("id")[0:MAX_CLIENT_BATCH_SIZE]
]
data["analytics_installationcount"] = [
model_to_dict(row)
for row in installation_count_query.order_by("id")[0:MAX_CLIENT_BATCH_SIZE]
]
data["zerver_realmauditlog"] = [
model_to_dict(row, fields=REALMAUDITLOG_PUSHED_FIELDS)
for row in realmauditlog_query.order_by("id")[0:MAX_CLIENT_BATCH_SIZE]
]
floatify_datetime_fields(data, "analytics_realmcount")
floatify_datetime_fields(data, "analytics_installationcount")
floatify_datetime_fields(data, "zerver_realmauditlog")
return (
data["analytics_realmcount"],
data["analytics_installationcount"],
data["zerver_realmauditlog"],
)
def send_analytics_to_remote_server() -> None:
# first, check what's latest
try:
result = send_to_push_bouncer("GET", "server/analytics/status", {})
except PushNotificationBouncerRetryLaterError as e:
logging.warning(e.msg)
return
last_acked_realm_count_id = result["last_realm_count_id"]
last_acked_installation_count_id = result["last_installation_count_id"]
last_acked_realmauditlog_id = result["last_realmauditlog_id"]
(realm_count_data, installation_count_data, realmauditlog_data) = build_analytics_data(
realm_count_query=RealmCount.objects.filter(id__gt=last_acked_realm_count_id),
installation_count_query=InstallationCount.objects.filter(
id__gt=last_acked_installation_count_id
),
realmauditlog_query=RealmAuditLog.objects.filter(
event_type__in=RealmAuditLog.SYNCED_BILLING_EVENTS, id__gt=last_acked_realmauditlog_id
),
)
if len(realm_count_data) + len(installation_count_data) + len(realmauditlog_data) == 0:
return
request = {
"realm_counts": orjson.dumps(realm_count_data).decode(),
"installation_counts": orjson.dumps(installation_count_data).decode(),
"realmauditlog_rows": orjson.dumps(realmauditlog_data).decode(),
"version": orjson.dumps(ZULIP_VERSION).decode(),
}
    # Send the batched data gathered above to the push bouncer.
try:
send_to_push_bouncer("POST", "server/analytics", request)
except JsonableError as e:
logging.warning(e.msg)
| andersk/zulip | zerver/lib/remote_server.py | Python | apache-2.0 | 7,292 | 0.00192 |
import subprocess
import os
def start_service():
    # Launch the IronPython service script in a separate process (Windows).
    subprocess.Popen("ipy start_srv.py", stdout=subprocess.PIPE)
    return 0
def close_service():
    # Force-kill any running IronPython interpreter via Windows taskkill.
    os.system("taskkill /im ipy.exe /f")
| QuentinJi/pyuiautomation | initial_work.py | Python | mit | 193 | 0 |
#
# Copyright (c) 2013 Juniper Networks, Inc. All rights reserved.
#
import re
from cfgm_common import jsonutils as json
import time
import gevent
import disc_consts
import disc_exceptions
from datetime import datetime
from gevent.coros import BoundedSemaphore
import pycassa
import pycassa.util
from pycassa.system_manager import *
from pycassa.util import *
from pycassa.types import *
from sandesh_common.vns.constants import DISCOVERY_SERVER_KEYSPACE_NAME, \
CASSANDRA_DEFAULT_GC_GRACE_SECONDS
class DiscoveryCassandraClient(object):
_DISCOVERY_KEYSPACE_NAME = DISCOVERY_SERVER_KEYSPACE_NAME
_DISCOVERY_CF_NAME = 'discovery'
@classmethod
def get_db_info(cls):
db_info = [(cls._DISCOVERY_KEYSPACE_NAME, [cls._DISCOVERY_CF_NAME])]
return db_info
# end get_db_info
def __init__(self, module, cass_srv_list, reset_config=False,
max_retries=5, timeout=5, cass_credential=None):
self._disco_cf_name = 'discovery'
self._keyspace_name = 'DISCOVERY_SERVER'
self._reset_config = reset_config
self._credential = cass_credential
self._cassandra_init(cass_srv_list, max_retries, timeout)
self._debug = {
}
#end __init__
# Helper routines for cassandra
def _cassandra_init(self, server_list, max_retries, timeout):
# column name <table-name>, <id1>, <id2>
disco_cf_info = (self._disco_cf_name,
CompositeType(AsciiType(), UTF8Type(), UTF8Type()), AsciiType())
# 1. Ensure keyspace and schema/CFs exist
self._cassandra_ensure_keyspace(server_list, self._keyspace_name,
[disco_cf_info])
pool = pycassa.ConnectionPool(self._keyspace_name,
server_list, max_overflow=-1,
use_threadlocal=True, prefill=True,
pool_size=100, pool_timeout=120,
max_retries=max_retries, timeout=timeout,
credentials=self._credential)
rd_consistency = pycassa.cassandra.ttypes.ConsistencyLevel.ONE
wr_consistency = pycassa.cassandra.ttypes.ConsistencyLevel.ONE
self._disco_cf = pycassa.ColumnFamily(pool, self._disco_cf_name,
read_consistency_level = rd_consistency,
write_consistency_level = wr_consistency)
#end _cassandra_init
def _cassandra_ensure_keyspace(self, server_list,
keyspace_name, cf_info_list):
# Retry till cassandra is up
server_idx = 0
num_dbnodes = len(server_list)
connected = False
while not connected:
try:
cass_server = server_list[server_idx]
sys_mgr = SystemManager(cass_server,credentials=self._credential)
connected = True
except Exception as e:
# TODO do only for thrift.transport.TTransport.TTransportException
server_idx = (server_idx + 1) % num_dbnodes
time.sleep(3)
if self._reset_config:
try:
sys_mgr.drop_keyspace(keyspace_name)
except pycassa.cassandra.ttypes.InvalidRequestException as e:
# TODO verify only EEXISTS
print "Warning! " + str(e)
try:
# TODO replication_factor adjust?
sys_mgr.create_keyspace(keyspace_name, SIMPLE_STRATEGY,
{'replication_factor': str(num_dbnodes)})
except pycassa.cassandra.ttypes.InvalidRequestException as e:
# TODO verify only EEXISTS
print "Warning! " + str(e)
for cf_info in cf_info_list:
try:
(cf_name, comparator_type, validator_type) = cf_info
sys_mgr.create_column_family(keyspace_name, cf_name,
comparator_type = comparator_type, default_validation_class = validator_type)
sys_mgr.alter_column_family(keyspace_name, cf_name,
gc_grace_seconds=CASSANDRA_DEFAULT_GC_GRACE_SECONDS)
except pycassa.cassandra.ttypes.InvalidRequestException as e:
# TODO verify only EEXISTS
print "Warning! " + str(e)
#end _cassandra_ensure_keyspace
def get_debug_stats(self):
return self._debug
# end
"""
various column names
('client', client_id, 'client-entry')
('subscriber', service_id, client_id)
('subscription', client_id, service_id)
('service', service_id, 'service-entry')
"""
# decorator to catch connectivity error
def cass_error_handler(func):
def error_handler(*args, **kwargs):
try:
return func(*args,**kwargs)
except (pycassa.pool.AllServersUnavailable,
pycassa.pool.MaximumRetryException):
raise disc_exceptions.ServiceUnavailable()
except Exception as e:
raise
return error_handler
# return all publisher entries
@cass_error_handler
def service_entries(self, service_type = None):
col_name = ('service',)
try:
data = self._disco_cf.get_range(column_start = col_name, column_finish = col_name)
for service_type, services in data:
for col_name in services:
col_value = services[col_name]
entry = json.loads(col_value)
col_name = ('subscriber', entry['service_id'],)
entry['in_use'] = self._disco_cf.get_count(service_type,
column_start = col_name, column_finish = col_name)
yield(entry)
except pycassa.pool.AllServersUnavailable:
raise disc_exceptions.ServiceUnavailable()
#raise StopIteration
# return all clients
def subscriber_entries(self):
col_name = ('client',)
data = self._disco_cf.get_range(column_start = col_name, column_finish = col_name)
for service_type, clients in data:
for col_name in clients:
(foo, client_id, service_id) = col_name
# skip pure client entry
if service_id == disc_consts.CLIENT_TAG:
continue
yield((client_id, service_type))
# end
# return all subscriptions
@cass_error_handler
def get_all_clients(self, service_type=None, service_id=None):
r = []
entry_format_subscriber = False
if service_type and service_id:
# ('subscriber', service_id, client_id)
col_name = ('subscriber', service_id,)
try:
clients = self._disco_cf.get(service_type, column_start = col_name,
column_finish = col_name, column_count = disc_consts.MAX_COL)
except pycassa.NotFoundException:
return None
data = [(service_type, dict(clients))]
entry_format_subscriber = True
elif service_type:
col_name = ('client', )
try:
clients = self._disco_cf.get(service_type, column_start = col_name,
column_finish = col_name, column_count = disc_consts.MAX_COL)
except pycassa.NotFoundException:
return None
data = [(service_type, dict(clients))]
else:
col_name = ('client', )
try:
data = self._disco_cf.get_range(column_start=col_name, column_finish=col_name)
except pycassa.NotFoundException:
return None
for service_type, clients in data:
rr = []
for col_name in clients:
if entry_format_subscriber:
(foo, service_id, client_id) = col_name
else:
(foo, client_id, service_id) = col_name
# skip pure client entry
if service_id == disc_consts.CLIENT_TAG:
continue
entry_str = clients[col_name]
entry = json.loads(entry_str)
rr.append((service_type, client_id, service_id,
entry['mtime'], entry['ttl']))
# sort by modification time
# rr = sorted(rr, key=lambda entry: entry[3])
r.extend(rr)
return r
# end get_all_clients
# update publisher entry
@cass_error_handler
def update_service(self, service_type, service_id, entry):
self.insert_service(service_type, service_id, entry)
# end
@cass_error_handler
def insert_service(self, service_type, service_id, entry):
col_name = ('service', service_id, 'service-entry')
self._disco_cf.insert(service_type, {col_name : json.dumps(entry)})
# end insert_service
# forget service and subscribers
@cass_error_handler
def delete_service(self, entry):
self._disco_cf.remove(entry['service_type'],
columns = [('service', entry['service_id'], 'service-entry')])
#end delete_service
# return service entry
@cass_error_handler
def lookup_service(self, service_type, service_id=None):
try:
if service_id:
services = self._disco_cf.get(service_type, columns = [('service', service_id, 'service-entry')])
data = [json.loads(val) for col,val in services.items()]
entry = data[0]
col_name = ('subscriber', service_id,)
entry['in_use'] = self._disco_cf.get_count(service_type,
column_start = col_name, column_finish = col_name)
return entry
else:
col_name = ('service',)
services = self._disco_cf.get(service_type,
column_start = col_name, column_finish = col_name)
data = [json.loads(val) for col,val in services.items()]
for entry in data:
col_name = ('subscriber', entry['service_id'],)
entry['in_use'] = self._disco_cf.get_count(service_type,
column_start = col_name, column_finish = col_name)
return data
except pycassa.NotFoundException:
return None
# end lookup_service
@cass_error_handler
def query_service(self, service_type):
return self.lookup_service(service_type, service_id = None)
# end
# this is actually client create :-(
@cass_error_handler
def insert_client_data(self, service_type, client_id, blob):
col_name = ('client', client_id, disc_consts.CLIENT_TAG)
self._disco_cf.insert(service_type, {col_name : json.dumps(blob)})
# end insert_client_data
# insert a subscription (blob/ttl per service_type)
@cass_error_handler
def insert_client(self, service_type, service_id, client_id, blob, ttl):
col_val = json.dumps({'ttl': ttl, 'blob': blob, 'mtime': int(time.time())})
col_name = ('subscriber', service_id, client_id)
self._disco_cf.insert(service_type, {col_name : col_val},
ttl = ttl + disc_consts.TTL_EXPIRY_DELTA)
col_name = ('client', client_id, service_id)
self._disco_cf.insert(service_type, {col_name : col_val},
ttl = ttl + disc_consts.TTL_EXPIRY_DELTA)
# end insert_client
# return client (subscriber) entry
@cass_error_handler
def lookup_client(self, service_type, client_id, subs = False):
r = []
col_name = ('client', client_id, )
try:
subs = self._disco_cf.get(service_type, column_start = col_name,
column_finish = col_name, include_timestamp = True)
# sort columns by timestamp (subs is array of (col_name, (value, timestamp)))
subs = sorted(subs.items(), key=lambda entry: entry[1][1])
# col_name = (client, cliend_id, service_id)
# col_val = (real-value, timestamp)
data = None
for col_name, col_val in subs:
foo, client_id, service_id = col_name
if service_id == disc_consts.CLIENT_TAG:
data = json.loads(col_val[0])
continue
entry = json.loads(col_val[0])
r.append((col_name[2], entry.get('expired', False)))
return (data, r)
except pycassa.NotFoundException:
return (None, [])
# end lookup_client
# return all subscriptions for a given client
@cass_error_handler
def lookup_subscription(self, service_type, client_id):
r = []
col_name = ('client', client_id, )
try:
subs = self._disco_cf.get(service_type, column_start = col_name,
column_finish = col_name)
# col_name = subscription, cliend_id, service_id
for col_name, col_val in subs.items():
foo, client_id, bar = col_name
if bar == disc_consts.CLIENT_TAG:
continue
entry = json.loads(col_val)
r.append((col_name[2], entry['blob']))
return r
except pycassa.NotFoundException:
return None
# end lookup_subscription
# delete client subscription.
@cass_error_handler
def delete_subscription(self, service_type, client_id, service_id):
self._disco_cf.remove(service_type,
columns = [('client', client_id, service_id)])
self._disco_cf.remove(service_type,
columns = [('subscriber', service_id, client_id)])
# end
# mark client subscription for deletion in the future. If client never came
# back, entry would still get deleted due to TTL
@cass_error_handler
def mark_delete_subscription(self, service_type, client_id, service_id):
col_name = ('client', client_id, service_id)
x = self._disco_cf.get(service_type, columns = [col_name])
data = [json.loads(val) for col,val in x.items()]
entry = data[0]
entry['expired'] = True
self._disco_cf.insert(service_type, {col_name : json.dumps(entry)})
col_name = ('subscriber', service_id, client_id)
x = self._disco_cf.get(service_type, columns = [col_name])
data = [json.loads(val) for col,val in x.items()]
entry = data[0]
entry['expired'] = True
self._disco_cf.insert(service_type, {col_name : json.dumps(entry)})
# end
# return tuple (service_type, client_id, service_id)
| facetothefate/contrail-controller | src/discovery/disc_cassdb.py | Python | apache-2.0 | 14,755 | 0.009353 |
"""List of supported formats
"""
from collections import namedtuple
_FORMAT = namedtuple('FormatDefinition', 'mime_type,'
'extension, schema')
_FORMATS = namedtuple('FORMATS', 'GEOJSON, JSON, SHP, GML, GEOTIFF, WCS,'
'WCS100, WCS110, WCS20, WFS, WFS100,'
'WFS110, WFS20, WMS, WMS130, WMS110,'
'WMS100')
FORMATS = _FORMATS(
_FORMAT('application/vnd.geo+json', '.geojson', None),
_FORMAT('application/json', '.json', None),
_FORMAT('application/x-zipped-shp', '.zip', None),
_FORMAT('application/gml+xml', '.gml', None),
_FORMAT('image/tiff; subtype=geotiff', '.tiff', None),
    _FORMAT('application/x-ogc-wcs', '.xml', None),
_FORMAT('application/x-ogc-wcs; version=1.0.0', '.xml', None),
_FORMAT('application/x-ogc-wcs; version=1.1.0', '.xml', None),
_FORMAT('application/x-ogc-wcs; version=2.0', '.xml', None),
_FORMAT('application/x-ogc-wfs', '.xml', None),
_FORMAT('application/x-ogc-wfs; version=1.0.0', '.xml', None),
_FORMAT('application/x-ogc-wfs; version=1.1.0', '.xml', None),
_FORMAT('application/x-ogc-wfs; version=2.0', '.xml', None),
_FORMAT('application/x-ogc-wms', '.xml', None),
_FORMAT('application/x-ogc-wms; version=1.3.0', '.xml', None),
_FORMAT('application/x-ogc-wms; version=1.1.0', '.xml', None),
_FORMAT('application/x-ogc-wms; version=1.0.0', '.xml', None)
)
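# Usage sketch: the named fields make each format addressable by name, e.g.
# FORMATS.GEOJSON.mime_type == 'application/vnd.geo+json' and
# FORMATS.GEOJSON.extension == '.geojson'.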
| ricardogsilva/PyWPS | pywps/inout/formats/lists.py | Python | mit | 1,471 | 0.00068 |
from flask import Flask, jsonify
app = Flask(__name__)
@app.route("/")
def rutaStatus():
return jsonify(status='OK')
@app.route("/status")
def rutaStatusDocker():
return jsonify(status='OK')
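# Both routes return the JSON body {"status": "OK"} with an HTTP 200 response.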
if __name__ == "__main__":
app.run(host='0.0.0.0', port=80)
| mariofg92/ivmario | web2.py | Python | gpl-3.0 | 286 | 0.013986 |
# Copyright 2014 Rackspace, Inc.
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from ironic.dhcp import base
class NoneDHCPApi(base.BaseDHCP):
"""No-op DHCP API."""
def update_port_dhcp_opts(self, port_id, dhcp_options, token=None):
pass
def update_dhcp_opts(self, task, options, vifs=None):
pass
def update_port_address(self, port_id, address, token=None):
pass
def get_ip_addresses(self, task):
return []
| supermari0/ironic | ironic/dhcp/none.py | Python | apache-2.0 | 1,015 | 0 |
import sys
import time
import numpy as np
from copy import deepcopy
import tensorflow as tf
import babi_input
class Config(object):
"""Holds model hyperparams and data information."""
batch_size = 100
embed_size = 80
hidden_size = 80
max_epochs = 256
early_stopping = 20
dropout = 0.9
lr = 0.001
l2 = 0.001
cap_grads = False
max_grad_val = 10
noisy_grads = False
word2vec_init = False
embedding_init = 1.7320508 # root 3
# set to zero with strong supervision to only train gates
strong_supervision = False
beta = 1
drop_grus = False
anneal_threshold = 1000
anneal_by = 1.5
num_hops = 3
num_attention_features = 4
max_allowed_inputs = 130
num_train = 9000
floatX = np.float32
babi_id = "1"
babi_test_id = ""
train_mode = True
def _add_gradient_noise(t, stddev=1e-3, name=None):
"""Adds gradient noise as described in http://arxiv.org/abs/1511.06807
The input Tensor `t` should be a gradient.
The output will be `t` + gaussian noise.
0.001 was said to be a good fixed value for memory networks."""
with tf.op_scope([t, stddev], name, "add_gradient_noise") as name:
t = tf.convert_to_tensor(t, name="t")
gn = tf.random_normal(tf.shape(t), stddev=stddev)
return tf.add(t, gn, name=name)
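# Usage sketch (assumed training-loop wiring, not taken from this file):
#   grads_and_vars = optimizer.compute_gradients(loss)
#   noisy = [(_add_gradient_noise(g), v) for g, v in grads_and_vars if g is not None]
#   train_op = optimizer.apply_gradients(noisy)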
# from https://github.com/domluna/memn2n
def _position_encoding(sentence_size, embedding_size):
"""Position encoding described in section 4.1 in "End to End Memory Networks" (http://arxiv.org/pdf/1503.08895v5.pdf)"""
encoding = np.ones((embedding_size, sentence_size), dtype=np.float32)
ls = sentence_size+1
le = embedding_size+1
for i in range(1, le):
for j in range(1, ls):
encoding[i-1, j-1] = (i - (le-1)/2) * (j - (ls-1)/2)
encoding = 1 + 4 * encoding / embedding_size / sentence_size
return np.transpose(encoding)
# TODO fix positional encoding so that it varies according to sentence lengths
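# Worked example: with sentence_size=4 and embedding_size=4, ls=le=5, so
# encoding[0,0] starts as (1 - 4/2)*(1 - 4/2) = 1 and becomes
# 1 + 4*1/4/4 = 1.25; the transpose yields shape (sentence_size, embedding_size),
# one weight row per word position.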
def _xavier_weight_init():
"""Xavier initializer for all variables except embeddings as desribed in [1]"""
def _xavier_initializer(shape, **kwargs):
eps = np.sqrt(6) / np.sqrt(np.sum(shape))
out = tf.random_uniform(shape, minval=-eps, maxval=eps)
return out
return _xavier_initializer
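# E.g. for shape (80, 80), eps = sqrt(6)/sqrt(160) ~= 0.194, so weights are
# drawn uniformly from [-0.194, 0.194].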
# from https://danijar.com/variable-sequence-lengths-in-tensorflow/
# used only for custom attention GRU as TF handles this with the sequence length param for normal RNNs
def _last_relevant(output, length):
"""Finds the output at the end of each input"""
batch_size = int(output.get_shape()[0])
max_length = int(output.get_shape()[1])
out_size = int(output.get_shape()[2])
index = tf.range(0, batch_size) * max_length + (length - 1)
flat = tf.reshape(output, [-1, out_size])
relevant = tf.gather(flat, index)
return relevant
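def _last_relevant_np(output, length):
    """NumPy sketch of the same gather, added for illustration only: with
    batch_size=2, max_length=3 and length=[2, 3], the flat indices are
    0*3 + 1 = 1 and 1*3 + 2 = 5, i.e. the last valid step of each sequence."""
    batch_size, max_length, out_size = output.shape
    index = np.arange(batch_size) * max_length + (np.asarray(length) - 1)
    return output.reshape(-1, out_size)[index]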
class DMN_PLUS(object):
def load_data(self, debug=False):
"""Loads train/valid/test data and sentence encoding"""
if self.config.train_mode:
self.train, self.valid, self.word_embedding, self.max_q_len, self.max_input_len, self.max_sen_len, self.num_supporting_facts, self.vocab_size = babi_input.load_babi(self.config, split_sentences=True)
else:
self.test, self.word_embedding, self.max_q_len, self.max_input_len, self.max_sen_len, self.num_supporting_facts, self.vocab_size = babi_input.load_babi(self.config, split_sentences=True)
self.encoding = _position_encoding(self.max_sen_len, self.config.embed_size)
def add_placeholders(self):
"""add data placeholder to graph"""
self.question_placeholder = tf.placeholder(tf.int32, shape=(self.config.batch_size, self.max_q_len))
self.input_placeholder = tf.placeholder(tf.int32, shape=(self.config.batch_size, self.max_input_len, self.max_sen_len))
self.question_len_placeholder = tf.placeholder(tf.int32, shape=(self.config.batch_size,))
self.input_len_placeholder = tf.placeholder(tf.int32, shape=(self.config.batch_size,))
self.answer_placeholder = tf.placeholder(tf.int64, shape=(self.config.batch_size,))
self.rel_label_placeholder = tf.placeholder(tf.int32, shape=(self.config.batch_size, self.num_supporting_facts))
self.dropout_placeholder = tf.placeholder(tf.float32)
def add_reused_variables(self):
"""Adds trainable variables which are later reused"""
gru_cell = tf.nn.rnn_cell.GRUCell(self.config.hidden_size)
        # apply dropout to GRUs if the flag is set
if self.config.drop_grus:
self.gru_cell = tf.nn.rnn_cell.DropoutWrapper(gru_cell, input_keep_prob=self.dropout_placeholder, output_keep_prob=self.dropout_placeholder)
else:
self.gru_cell = gru_cell
with tf.variable_scope("memory/attention", initializer=_xavier_weight_init()):
b_1 = tf.get_variable("bias_1", (self.config.embed_size,))
W_1 = tf.get_variable("W_1", (self.config.embed_size*self.config.num_attention_features, self.config.embed_size))
W_2 = tf.get_variable("W_2", (self.config.embed_size, 1))
b_2 = tf.get_variable("bias_2", 1)
with tf.variable_scope("memory/attention_gru", initializer=_xavier_weight_init()):
Wr = tf.get_variable("Wr", (self.config.embed_size, self.config.hidden_size))
Ur = tf.get_variable("Ur", (self.config.hidden_size, self.config.hidden_size))
br = tf.get_variable("bias_r", (1, self.config.hidden_size))
W = tf.get_variable("W", (self.config.embed_size, self.config.hidden_size))
U = tf.get_variable("U", (self.config.hidden_size, self.config.hidden_size))
bh = tf.get_variable("bias_h", (1, self.config.hidden_size))
def get_predictions(self, output):
"""Get answer predictions from output"""
preds = tf.nn.softmax(output)
pred = tf.argmax(preds, 1)
return pred
def add_loss_op(self, output):
"""Calculate loss"""
# optional strong supervision of attention with supporting facts
gate_loss = 0
if self.config.strong_supervision:
for i, att in enumerate(self.attentions):
labels = tf.gather(tf.transpose(self.rel_label_placeholder), 0)
gate_loss += tf.reduce_sum(tf.nn.sparse_softmax_cross_entropy_with_logits(att, labels))
loss = self.config.beta*tf.reduce_sum(tf.nn.sparse_softmax_cross_entropy_with_logits(output, self.answer_placeholder)) + gate_loss
# add l2 regularization for all variables except biases
for v in tf.trainable_variables():
if not 'bias' in v.name.lower():
loss += self.config.l2*tf.nn.l2_loss(v)
tf.summary.scalar('loss', loss)
return loss
def add_training_op(self, loss):
"""Calculate and apply gradients"""
opt = tf.train.AdamOptimizer(learning_rate=self.config.lr)
gvs = opt.compute_gradients(loss)
# optionally cap and noise gradients to regularize
if self.config.cap_grads:
gvs = [(tf.clip_by_norm(grad, self.config.max_grad_val), var) for grad, var in gvs]
if self.config.noisy_grads:
gvs = [(_add_gradient_noise(grad), var) for grad, var in gvs]
train_op = opt.apply_gradients(gvs)
return train_op
def get_question_representation(self, embeddings):
"""Get question vectors via embedding and GRU"""
questions = tf.nn.embedding_lookup(embeddings, self.question_placeholder)
questions = tf.split(1, self.max_q_len, questions)
questions = [tf.squeeze(q, squeeze_dims=[1]) for q in questions]
_, q_vec = tf.nn.rnn(self.gru_cell, questions, dtype=np.float32, sequence_length=self.question_len_placeholder)
return q_vec
def get_input_representation(self, embeddings):
"""Get fact (sentence) vectors via embedding, positional encoding and bi-directional GRU"""
# get word vectors from embedding
inputs = tf.nn.embedding_lookup(embeddings, self.input_placeholder)
# use encoding to get sentence representation
inputs = tf.reduce_sum(inputs * self.encoding, 2)
inputs = tf.split(1, self.max_input_len, inputs)
inputs = [tf.squeeze(i, squeeze_dims=[1]) for i in inputs]
outputs, _, _ = tf.nn.bidirectional_rnn(self.gru_cell, self.gru_cell, inputs, dtype=np.float32, sequence_length=self.input_len_placeholder)
# f<-> = f-> + f<-
fact_vecs = [tf.reduce_sum(tf.pack(tf.split(1, 2, out)), 0) for out in outputs]
# apply dropout
fact_vecs = [tf.nn.dropout(fv, self.dropout_placeholder) for fv in fact_vecs]
return fact_vecs
def get_attention(self, q_vec, prev_memory, fact_vec):
"""Use question vector and previous memory to create scalar attention for current fact"""
with tf.variable_scope("attention", reuse=True, initializer=_xavier_weight_init()):
W_1 = tf.get_variable("W_1")
b_1 = tf.get_variable("bias_1")
W_2 = tf.get_variable("W_2")
b_2 = tf.get_variable("bias_2")
features = [fact_vec*q_vec, fact_vec*prev_memory, tf.abs(fact_vec - q_vec), tf.abs(fact_vec - prev_memory)]
feature_vec = tf.concat(1, features)
attention = tf.matmul(tf.tanh(tf.matmul(feature_vec, W_1) + b_1), W_2) + b_2
return attention
def _attention_GRU_step(self, rnn_input, h, g):
"""Implement attention GRU as described by https://arxiv.org/abs/1603.01417"""
with tf.variable_scope("attention_gru", reuse=True, initializer=_xavier_weight_init()):
Wr = tf.get_variable("Wr")
Ur = tf.get_variable("Ur")
br = tf.get_variable("bias_r")
W = tf.get_variable("W")
U = tf.get_variable("U")
bh = tf.get_variable("bias_h")
r = tf.sigmoid(tf.matmul(rnn_input, Wr) + tf.matmul(h, Ur) + br)
h_hat = tf.tanh(tf.matmul(rnn_input, W) + r*tf.matmul(h, U) + bh)
rnn_output = g*h_hat + (1-g)*h
return rnn_output
def generate_episode(self, memory, q_vec, fact_vecs):
"""Generate episode by applying attention to current fact vectors through a modified GRU"""
attentions = [tf.squeeze(self.get_attention(q_vec, memory, fv), squeeze_dims=[1]) for fv in fact_vecs]
attentions = tf.transpose(tf.pack(attentions))
self.attentions.append(attentions)
softs = tf.nn.softmax(attentions)
softs = tf.split(1, self.max_input_len, softs)
gru_outputs = []
# set initial state to zero
h = tf.zeros((self.config.batch_size, self.config.hidden_size))
# use attention gru
for i, fv in enumerate(fact_vecs):
h = self._attention_GRU_step(fv, h, softs[i])
gru_outputs.append(h)
# extract gru outputs at proper index according to input_lens
gru_outputs = tf.pack(gru_outputs)
gru_outputs = tf.transpose(gru_outputs, perm=[1,0,2])
episode = _last_relevant(gru_outputs, self.input_len_placeholder)
return episode
def add_answer_module(self, rnn_output, q_vec):
"""Linear softmax answer module"""
with tf.variable_scope("answer"):
rnn_output = tf.nn.dropout(rnn_output, self.dropout_placeholder)
U = tf.get_variable("U", (2*self.config.embed_size, self.vocab_size))
b_p = tf.get_variable("bias_p", (self.vocab_size,))
output = tf.matmul(tf.concat(1, [rnn_output, q_vec]), U) + b_p
return output
def inference(self):
"""Performs inference on the DMN model"""
# set up embedding
embeddings = tf.Variable(self.word_embedding.astype(np.float32), name="Embedding")
# input fusion module
with tf.variable_scope("question", initializer=_xavier_weight_init()):
print('==> get question representation')
q_vec = self.get_question_representation(embeddings)
with tf.variable_scope("input", initializer=_xavier_weight_init()):
print('==> get input representation')
fact_vecs = self.get_input_representation(embeddings)
# keep track of attentions for possible strong supervision
self.attentions = []
# memory module
with tf.variable_scope("memory", initializer=_xavier_weight_init()):
print('==> build episodic memory')
# generate n_hops episodes
prev_memory = q_vec
for i in range(self.config.num_hops):
# get a new episode
print('==> generating episode %i' % i)
episode = self.generate_episode(prev_memory, q_vec, fact_vecs)
# untied weights for memory update
Wt = tf.get_variable("W_t"+ str(i), (2*self.config.hidden_size+self.config.embed_size, self.config.hidden_size))
bt = tf.get_variable("bias_t"+ str(i), (self.config.hidden_size,))
# update memory with Relu
prev_memory = tf.nn.relu(tf.matmul(tf.concat(1, [prev_memory, episode, q_vec]), Wt) + bt)
output = prev_memory
# pass memory module output through linear answer module
output = self.add_answer_module(output, q_vec)
return output
def run_epoch(self, session, data, num_epoch=0, train_writer=None, train_op=None, verbose=2, train=False):
config = self.config
dp = config.dropout
if train_op is None:
train_op = tf.no_op()
dp = 1
        total_steps = len(data[0]) // config.batch_size  # integer division so range() gets an int
total_loss = []
accuracy = 0
# shuffle data
p = np.random.permutation(len(data[0]))
qp, ip, ql, il, im, a, r = data
qp, ip, ql, il, im, a, r = qp[p], ip[p], ql[p], il[p], im[p], a[p], r[p]
for step in range(total_steps):
index = range(step * config.batch_size, (step + 1) * config.batch_size)
feed = {self.question_placeholder: qp[index],
self.input_placeholder: ip[index],
self.question_len_placeholder: ql[index],
self.input_len_placeholder: il[index],
self.answer_placeholder: a[index],
self.rel_label_placeholder: r[index],
self.dropout_placeholder: dp}
loss, pred, summary, _ = session.run(
[self.calculate_loss, self.pred, self.merged, train_op], feed_dict=feed)
if train_writer is not None:
train_writer.add_summary(summary, num_epoch*total_steps + step)
answers = a[step*config.batch_size:(step+1)*config.batch_size]
accuracy += np.sum(pred == answers)/float(len(answers))
total_loss.append(loss)
if verbose and step % verbose == 0:
sys.stdout.write('\r{} / {} : loss = {}'.format(
step, total_steps, np.mean(total_loss)))
sys.stdout.flush()
if verbose:
sys.stdout.write('\r')
return np.mean(total_loss), accuracy/float(total_steps)
def __init__(self, config):
self.config = config
self.variables_to_save = {}
self.load_data(debug=False)
self.add_placeholders()
self.add_reused_variables()
self.output = self.inference()
self.pred = self.get_predictions(self.output)
self.calculate_loss = self.add_loss_op(self.output)
self.train_step = self.add_training_op(self.calculate_loss)
self.merged = tf.summary.merge_all()
| kevinadda/dmn-chatbot | dmn_plus.py | Python | mit | 15,738 | 0.004384 |
from datetime import timedelta
from math import copysign
def is_workingday(input_date):
return input_date.isoweekday() < 6
def add(datestart, days):
sign = lambda x: int(copysign(1, x))
dateend = datestart
while days:
dateend = dateend + timedelta(days=sign(days))
if is_workingday(dateend):
days -= sign(days)
return dateend
def diff(date1, date2):
if date1 == date2:
return 0
if date1 > date2:
min_date = date2
max_date = date1
else:
min_date = date1
max_date = date2
diff = 0
current_date = min_date
while current_date != max_date:
current_date = current_date + timedelta(days=1)
if is_workingday(current_date):
diff += 1
return diff
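# Worked example, added for illustration (the concrete dates are assumptions):
# June 2 2023 is a Friday, so adding one working day skips the weekend.
def _demo():
    from datetime import date
    friday = date(2023, 6, 2)
    assert add(friday, 1) == date(2023, 6, 5)   # the following Monday
    assert diff(friday, date(2023, 6, 5)) == 1  # one working day apart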
def next(datestart):
while True:
datestart = datestart + timedelta(days=1)
if is_workingday(datestart):
break
    return datestart
| baxeico/pyworkingdays | workingdays/__init__.py | Python | mit | 947 | 0.006336 |
# -*- coding: utf-8 -*-
from __future__ import division, print_function, absolute_import, unicode_literals
import sys
import os
import six
from mog_commons import unittest
class TestUnitTest(unittest.TestCase):
def test_assert_output(self):
def f():
print('abc')
print('123')
sys.stderr.writelines(['def\n', '456\n'])
self.assertOutput('abc\n123\n', 'def\n456\n', f)
def test_assert_output_fail(self):
def f():
print('abc')
print('123')
sys.stderr.writelines(['def\n', '456\n'])
self.assertRaisesRegexp(AssertionError, 'abc.+ != ', self.assertOutput, '', 'def\n456\n', f)
self.assertRaisesRegexp(AssertionError, 'def.+ != ', self.assertOutput, 'abc\n123\n', '', f)
self.assertRaisesRegexp(AssertionError, 'def.+ != .+def', self.assertOutput, 'abc\n123\n', 'def\n456\n\n', f)
def test_assert_system_exit(self):
self.assertSystemExit(123, lambda: sys.exit(123))
self.assertSystemExit(234, lambda x: sys.exit(x), 234)
def test_with_bytes_output(self):
with self.withBytesOutput() as (out, err):
out.write(b'\xff\xfe')
out.write('あいうえお'.encode('utf-8'))
err.write(b'\xfd\xfc')
self.assertEqual(out.getvalue(), b'\xff\xfe' + 'あいうえお'.encode('utf-8'))
self.assertEqual(err.getvalue(), b'\xfd\xfc')
def test_with_bytes_output_types(self):
# accepts unicode
def f(data, expected):
with self.withBytesOutput() as (out, err):
for d in data:
out.write(d)
self.assertEqual(out.getvalue(), expected)
f(['あいうえお'], 'あいうえお'.encode('utf-8'))
f([b'\xff', 'あいうえお'], b'\xff' + 'あいうえお'.encode('utf-8'))
# accepts only string-like types
self.assertRaises(TypeError, f, [[]])
self.assertRaises(TypeError, f, [{'a': 20}])
self.assertRaises(TypeError, f, [1.23])
def test_with_assert_output_file(self):
def f(text):
with self.withAssertOutputFile(os.path.join('tests', 'resources', 'utf8_ja.txt')) as out:
out.write(text.encode('utf-8'))
def g(text):
with self.withAssertOutputFile(
os.path.join('tests', 'resources', 'utf8_ja_template.txt'), {'text': 'かきくけこ'}
) as out:
out.write(text.encode('utf-8'))
f('あいうえお\n')
self.assertRaisesRegexp(AssertionError, 'あいうえお', f, 'あいうえお')
g('かきくけこ\n')
self.assertRaisesRegexp(AssertionError, 'かきくけこ', g, 'あいうえお\n')
def test_assert_raises_message(self):
class MyException(Exception):
pass
def f(msg):
raise MyException(msg)
self.assertRaisesMessage(MyException, 'あいうえお', f, 'あいうえお')
self.assertRaisesMessage(AssertionError, 'MyException not raised',
self.assertRaisesMessage, MyException, 'あいうえお', lambda: None)
if six.PY2:
expected = ("u'\\u3042\\u3044\\u3046\\u3048' != u'\\u3042\\u3044\\u3046\\u3048\\u304a'\n" +
"- \u3042\u3044\u3046\u3048\n+ \u3042\u3044\u3046\u3048\u304a\n? +\n")
else:
expected = "'あいうえ' != 'あいうえお'\n- あいうえ\n+ あいうえお\n? +\n"
self.assertRaisesMessage(AssertionError, expected,
self.assertRaisesMessage, MyException, 'あいうえお', f, 'あいうえ')
def test_assert_system_exit_fail(self):
self.assertRaisesRegexp(AssertionError, 'SystemExit not raised', self.assertSystemExit, 0, lambda: 0)
self.assertRaisesRegexp(AssertionError, '1 != 0', self.assertSystemExit, 0, lambda: sys.exit(1))
| mogproject/mog-commons-python | tests/mog_commons/test_unittest.py | Python | apache-2.0 | 3,948 | 0.003214 |
'''
Carousel
========
.. versionadded:: 1.4.0
The :class:`Carousel` widget provides the classic mobile-friendly carousel view
where you can swipe between slides.
You can add any content to the carousel and use it horizontally or verticaly.
The carousel can display pages in loop or not.
Example::
class Example1(App):
def build(self):
carousel = Carousel(direction='right')
for i in range(10):
src = "http://placehold.it/480x270.png&text=slide-%d&.png" % i
image = Factory.AsyncImage(source=src, allow_stretch=True)
carousel.add_widget(image)
return carousel
Example1().run()
.. versionchanged:: 1.5.0
The carousel now supports active children, like the
:class:`~kivy.uix.scrollview.ScrollView`. It will detect a swipe gesture
according to :attr:`Carousel.scroll_timeout` and
:attr:`Carousel.scroll_distance`.
In addition, the container used for adding a slide is now hidden in
the API. We made a mistake by exposing it to the user. The impacted
properties are:
:attr:`Carousel.slides`, :attr:`Carousel.current_slide`,
:attr:`Carousel.previous_slide` and :attr:`Carousel.next_slide`.
'''
__all__ = ('Carousel', )
from functools import partial
from kivy.clock import Clock
from kivy.factory import Factory
from kivy.animation import Animation
from kivy.uix.stencilview import StencilView
from kivy.uix.relativelayout import RelativeLayout
from kivy.properties import BooleanProperty, OptionProperty, AliasProperty, \
NumericProperty, ListProperty, ObjectProperty, StringProperty
class Carousel(StencilView):
'''Carousel class. See module documentation for more information.
'''
slides = ListProperty([])
'''List of slides inside the Carousel. The slides are added when a
widget is added to Carousel using add_widget().
:attr:`slides` is a :class:`~kivy.properties.ListProperty` and is
read-only.
'''
def _get_slides_container(self):
return [x.parent for x in self.slides]
slides_container = AliasProperty(_get_slides_container, None,
bind=('slides', ))
direction = OptionProperty('right',
options=('right', 'left', 'top', 'bottom'))
'''Specifies the direction in which the slides are ordered i.e. the
direction from which the user swipes to go from one slide to the next.
    Can be `right`, `left`, `top`, or `bottom`. For example, with
the default value of `right`, the second slide is to the right
of the first and the user would swipe from the right towards the
left to get to the second slide.
:attr:`direction` is a :class:`~kivy.properties.OptionProperty` and
defaults to 'right'.
'''
min_move = NumericProperty(0.2)
'''Defines the minimal distance from the edge where the movement is
considered a swipe gesture and the Carousel will change its content.
This is a percentage of the Carousel width.
If the movement doesn't reach this minimal value, then the movement is
cancelled and the content is restored to its original position.
:attr:`min_move` is a :class:`~kivy.properties.NumericProperty` and
defaults to 0.2.
'''
anim_move_duration = NumericProperty(0.5)
'''Defines the duration of the Carousel animation between pages.
:attr:`anim_move_duration` is a :class:`~kivy.properties.NumericProperty`
and defaults to 0.5.
'''
anim_cancel_duration = NumericProperty(0.3)
'''Defines the duration of the animation when a swipe movement is not
    accepted. This is generally when the user doesn't swipe enough.
See :attr:`min_move`.
:attr:`anim_cancel_duration` is a :class:`~kivy.properties.NumericProperty`
and defaults to 0.3.
'''
loop = BooleanProperty(False)
'''Allow the Carousel to swipe infinitely. When the user reaches the last
page, they will return to first page when trying to swipe to the next.
:attr:`loop` is a :class:`~kivy.properties.BooleanProperty` and
defaults to False.
'''
def _get_index(self):
if self.slides:
return self._index % len(self.slides)
return float('nan')
def _set_index(self, value):
if self.slides:
self._index = value % len(self.slides)
else:
self._index = float('nan')
index = AliasProperty(_get_index, _set_index, bind=('_index', 'slides'))
'''Get/Set the current visible slide based on the index.
:attr:`index` is a :class:`~kivy.properties.AliasProperty` and defaults
to 0 (the first item).
'''
def _prev_slide(self):
slides = self.slides
len_slides = len(slides)
index = self.index
        if len_slides < 2:  # no slides, or only one slide
return None
if len_slides == 2:
if index == 0:
return None
if index == 1:
return slides[0]
if self.loop and index == 0:
return slides[-1]
if index > 0:
return slides[index - 1]
previous_slide = AliasProperty(_prev_slide, None, bind=('slides', 'index'))
'''The previous slide in the Carousel. It is None if the current slide is
    the first slide in the Carousel. If the :attr:`direction` is horizontal,
    the previous slide is to the left. If the :attr:`direction` is vertical,
    the previous slide is towards the bottom.
:attr:`previous_slide` is a :class:`~kivy.properties.AliasProperty`.
.. versionchanged:: 1.5.0
This property doesn't expose the container used for storing the slide.
It returns the widget you have added.
'''
def _curr_slide(self):
if len(self.slides):
return self.slides[self.index]
current_slide = AliasProperty(_curr_slide, None, bind=('slides', 'index'))
'''The currently shown slide.
:attr:`current_slide` is an :class:`~kivy.properties.AliasProperty`.
.. versionchanged:: 1.5.0
The property doesn't expose the container used for storing the slide.
It returns widget you have added.
'''
def _next_slide(self):
        if len(self.slides) < 2:  # no slides, or only one slide
return None
if len(self.slides) == 2:
if self.index == 0:
return self.slides[1]
if self.index == 1:
return None
if self.loop and self.index == len(self.slides) - 1:
return self.slides[0]
if self.index < len(self.slides) - 1:
return self.slides[self.index + 1]
next_slide = AliasProperty(_next_slide, None, bind=('slides', 'index'))
'''The next slide in the Carousel. It is None if the current slide is
    the last slide in the Carousel. If the :attr:`direction` is horizontal,
    the next slide is to the right. If the :attr:`direction` is vertical,
    the next slide is towards the bottom.
:attr:`next_slide` is a :class:`~kivy.properties.AliasProperty`.
.. versionchanged:: 1.5.0
The property doesn't expose the container used for storing the slide.
It returns the widget you have added.
'''
scroll_timeout = NumericProperty(200)
'''Timeout allowed to trigger the :attr:`scroll_distance`, in milliseconds.
If the user has not moved :attr:`scroll_distance` within the timeout,
the scrolling will be disabled and the touch event will go to the children.
:attr:`scroll_timeout` is a :class:`~kivy.properties.NumericProperty` and
defaults to 200 (milliseconds)
.. versionadded:: 1.5.0
'''
scroll_distance = NumericProperty('20dp')
'''Distance to move before scrolling the :class:`Carousel` in pixels. As
soon as the distance has been traveled, the :class:`Carousel` will start
to scroll, and no touch event will go to children.
It is advisable that you base this value on the dpi of your target device's
screen.
:attr:`scroll_distance` is a :class:`~kivy.properties.NumericProperty` and
defaults to 20dp.
.. versionadded:: 1.5.0
'''
anim_type = StringProperty('out_quad')
    '''Type of animation to use while animating to the next/previous slide.
.. versionadded:: 1.8.0
'''
#### private properties, for internal use only ###
_index = NumericProperty(0)
_prev = ObjectProperty(None, allownone=True)
_current = ObjectProperty(None, allownone=True)
_next = ObjectProperty(None, allownone=True)
_offset = NumericProperty(0)
_touch = ObjectProperty(None, allownone=True)
def __init__(self, **kwargs):
self._trigger_position_visible_slides = Clock.create_trigger(
self._position_visible_slides, -1)
super(Carousel, self).__init__(**kwargs)
self._skip_slide = None
def load_slide(self, slide):
'''Animate to the slide that is passed as the argument.
.. versionchanged:: 1.8.0
'''
slides = self.slides
start, stop = slides.index(self.current_slide), slides.index(slide)
if start == stop:
return
self._skip_slide = stop
if stop > start:
self._insert_visible_slides(_next_slide=slide)
self.load_next()
else:
self._insert_visible_slides(_prev_slide=slide)
self.load_previous()
def load_previous(self):
'''Animate to the previous slide.
.. versionadded:: 1.7.0
'''
self.load_next(mode='prev')
def load_next(self, mode='next'):
'''Animate to next slide.
.. versionadded:: 1.7.0
'''
        w, h = self.size  # Widget.size is (width, height)
_direction = {
'top': -h / 2,
'bottom': h / 2,
'left': w / 2,
'right': -w / 2}
_offset = _direction[self.direction]
if mode == 'prev':
_offset = -_offset
self._start_animation(min_move=0, offset=_offset)
def get_slide_container(self, slide):
return slide.parent
def _insert_visible_slides(self, _next_slide=None, _prev_slide=None):
get_slide_container = self.get_slide_container
previous_slide = _prev_slide if _prev_slide else self.previous_slide
if previous_slide:
self._prev = get_slide_container(previous_slide)
else:
self._prev = None
current_slide = self.current_slide
if current_slide:
self._current = get_slide_container(current_slide)
else:
self._current = None
next_slide = _next_slide if _next_slide else self.next_slide
if next_slide:
self._next = get_slide_container(next_slide)
else:
self._next = None
super_remove = super(Carousel, self).remove_widget
for container in self.slides_container:
super_remove(container)
if self._prev:
super(Carousel, self).add_widget(self._prev)
if self._next:
super(Carousel, self).add_widget(self._next)
if self._current:
super(Carousel, self).add_widget(self._current)
def _position_visible_slides(self, *args):
slides, index = self.slides, self.index
no_of_slides = len(slides) - 1
if not slides:
return
x, y, width, height = self.x, self.y, self.width, self.height
_offset, direction = self._offset, self.direction
_prev, _next, _current = self._prev, self._next, self._current
get_slide_container = self.get_slide_container
last_slide = get_slide_container(slides[-1])
first_slide = get_slide_container(slides[0])
skip_next = False
_loop = self.loop
if direction[0] in ['r', 'l']:
xoff = x + _offset
x_prev = {'l': xoff + width, 'r': xoff - width}
x_next = {'l': xoff - width, 'r': xoff + width}
if _prev:
_prev.pos = (x_prev[direction[0]], y)
elif _loop and _next and index == 0:
# if first slide is moving to right with direction set to right
# or toward left with direction set to left
if ((_offset > 0 and direction[0] == 'r') or
(_offset < 0 and direction[0] == 'l')):
# put last_slide before first slide
last_slide.pos = (x_prev[direction[0]], y)
skip_next = True
if _current:
_current.pos = (xoff, y)
if skip_next:
return
if _next:
_next.pos = (x_next[direction[0]], y)
elif _loop and _prev and index == no_of_slides:
if ((_offset < 0 and direction[0] == 'r') or
(_offset > 0 and direction[0] == 'l')):
first_slide.pos = (x_next[direction[0]], y)
if direction[0] in ['t', 'b']:
yoff = y + _offset
y_prev = {'t': yoff - height, 'b': yoff + height}
y_next = {'t': yoff + height, 'b': yoff - height}
if _prev:
_prev.pos = (x, y_prev[direction[0]])
elif _loop and _next and index == 0:
if ((_offset > 0 and direction[0] == 't') or
(_offset < 0 and direction[0] == 'b')):
last_slide.pos = (x, y_prev[direction[0]])
skip_next = True
if _current:
_current.pos = (x, yoff)
if skip_next:
return
if _next:
_next.pos = (x, y_next[direction[0]])
elif _loop and _prev and index == no_of_slides:
if ((_offset < 0 and direction[0] == 't') or
(_offset > 0 and direction[0] == 'b')):
first_slide.pos = (x, y_next[direction[0]])
def on_size(self, *args):
size = self.size
for slide in self.slides_container:
slide.size = size
self._trigger_position_visible_slides()
def on_pos(self, *args):
self._trigger_position_visible_slides()
def on_index(self, *args):
self._insert_visible_slides()
self._trigger_position_visible_slides()
self._offset = 0
def on_slides(self, *args):
if self.slides:
self.index = self.index % len(self.slides)
self._insert_visible_slides()
self._trigger_position_visible_slides()
def on__offset(self, *args):
self._trigger_position_visible_slides()
# if reached full offset, switch index to next or prev
direction = self.direction
_offset = self._offset
width = self.width
height = self.height
index = self.index
if self._skip_slide is not None:
return
if direction[0] == 'r':
if _offset <= -width:
index += 1
if _offset >= width:
index -= 1
if direction[0] == 'l':
if _offset <= -width:
index -= 1
if _offset >= width:
index += 1
if direction[0] == 't':
if _offset <= - height:
index += 1
if _offset >= height:
index -= 1
if direction[0] == 'b':
if _offset <= -height:
index -= 1
if _offset >= height:
index += 1
self.index = index
def _start_animation(self, *args, **kwargs):
# compute target offset for ease back, next or prev
new_offset = 0
direction = kwargs.get('direction', self.direction)
is_horizontal = direction[0] in ['r', 'l']
extent = self.width if is_horizontal else self.height
min_move = kwargs.get('min_move', self.min_move)
_offset = kwargs.get('offset', self._offset)
if _offset < min_move * -extent:
new_offset = -extent
elif _offset > min_move * extent:
new_offset = extent
        # if new_offset is 0, it wasn't enough to go next/prev
dur = self.anim_move_duration
if new_offset == 0:
dur = self.anim_cancel_duration
# detect edge cases if not looping
len_slides = len(self.slides)
index = self.index
if not self.loop or len_slides == 1:
is_first = (index == 0)
is_last = (index == len_slides - 1)
if direction[0] in ['r', 't']:
towards_prev = (new_offset > 0)
towards_next = (new_offset < 0)
else:
towards_prev = (new_offset < 0)
towards_next = (new_offset > 0)
if (is_first and towards_prev) or (is_last and towards_next):
new_offset = 0
anim = Animation(_offset=new_offset, d=dur, t=self.anim_type)
anim.cancel_all(self)
def _cmp(*l):
if self._skip_slide is not None:
self.index = self._skip_slide
self._skip_slide = None
anim.bind(on_complete=_cmp)
anim.start(self)
def _get_uid(self, prefix='sv'):
return '{0}.{1}'.format(prefix, self.uid)
def on_touch_down(self, touch):
if not self.collide_point(*touch.pos):
touch.ud[self._get_uid('cavoid')] = True
return
if self.disabled:
return True
if self._touch:
return super(Carousel, self).on_touch_down(touch)
Animation.cancel_all(self)
self._touch = touch
uid = self._get_uid()
touch.grab(self)
touch.ud[uid] = {
'mode': 'unknown',
'time': touch.time_start}
Clock.schedule_once(self._change_touch_mode,
self.scroll_timeout / 1000.)
return True
def on_touch_move(self, touch):
if self._get_uid('cavoid') in touch.ud:
return
if self._touch is not touch:
super(Carousel, self).on_touch_move(touch)
return self._get_uid() in touch.ud
if touch.grab_current is not self:
return True
ud = touch.ud[self._get_uid()]
direction = self.direction
if ud['mode'] == 'unknown':
if direction[0] in ('r', 'l'):
distance = abs(touch.ox - touch.x)
else:
distance = abs(touch.oy - touch.y)
if distance > self.scroll_distance:
Clock.unschedule(self._change_touch_mode)
ud['mode'] = 'scroll'
else:
if direction[0] in ('r', 'l'):
self._offset += touch.dx
if direction[0] in ('t', 'b'):
self._offset += touch.dy
return True
def on_touch_up(self, touch):
if self._get_uid('cavoid') in touch.ud:
return
if self in [x() for x in touch.grab_list]:
touch.ungrab(self)
self._touch = None
ud = touch.ud[self._get_uid()]
if ud['mode'] == 'unknown':
Clock.unschedule(self._change_touch_mode)
super(Carousel, self).on_touch_down(touch)
Clock.schedule_once(partial(self._do_touch_up, touch), .1)
else:
self._start_animation()
else:
if self._touch is not touch and self.uid not in touch.ud:
super(Carousel, self).on_touch_up(touch)
return self._get_uid() in touch.ud
def _do_touch_up(self, touch, *largs):
super(Carousel, self).on_touch_up(touch)
# don't forget about grab event!
for x in touch.grab_list[:]:
touch.grab_list.remove(x)
x = x()
if not x:
continue
touch.grab_current = x
super(Carousel, self).on_touch_up(touch)
touch.grab_current = None
def _change_touch_mode(self, *largs):
if not self._touch:
return
self._start_animation()
uid = self._get_uid()
touch = self._touch
ud = touch.ud[uid]
if ud['mode'] == 'unknown':
touch.ungrab(self)
self._touch = None
super(Carousel, self).on_touch_down(touch)
return
def add_widget(self, widget, index=0):
slide = RelativeLayout(size=self.size, x=self.x - self.width, y=self.y)
slide.add_widget(widget)
super(Carousel, self).add_widget(slide, index)
if index != 0:
self.slides.insert(index, widget)
else:
self.slides.append(widget)
def remove_widget(self, widget, *args, **kwargs):
        # XXX be careful: widget.parent refers to the RelativeLayout
        # added in add_widget(). This will break if the RelativeLayout
        # implementation changes.
# if we passed the real widget
if widget in self.slides:
slide = widget.parent
self.slides.remove(widget)
return slide.remove_widget(widget, *args, **kwargs)
return super(Carousel, self).remove_widget(widget, *args, **kwargs)
def clear_widgets(self):
for slide in self.slides[:]:
self.remove_widget(slide)
super(Carousel, self).clear_widgets()
if __name__ == '__main__':
from kivy.app import App
class Example1(App):
def build(self):
carousel = Carousel(direction='left',
loop=True)
for i in range(4):
src = "http://placehold.it/480x270.png&text=slide-%d&.png" % i
image = Factory.AsyncImage(source=src, allow_stretch=True)
carousel.add_widget(image)
return carousel
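    class Example2(App):
        # Illustrative variation, not in the original file: slides can also
        # be switched programmatically via load_next(), here every 2 seconds.
        def build(self):
            carousel = Carousel(direction='right', loop=True)
            for i in range(4):
                src = "http://placehold.it/480x270.png&text=slide-%d&.png" % i
                image = Factory.AsyncImage(source=src, allow_stretch=True)
                carousel.add_widget(image)
            Clock.schedule_interval(lambda dt: carousel.load_next(), 2)
            return carousel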
Example1().run()
| niavlys/kivy | kivy/uix/carousel.py | Python | mit | 21,776 | 0.000092 |
from django.dispatch import Signal
user_email_bounced = Signal() # args: ['bounce', 'should_deactivate']
email_bounced = Signal() # args: ['bounce', 'should_deactivate']
email_unsubscribed = Signal() # args: ['email', 'reference']
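# Illustrative sketch, not part of the original module: a receiver for these
# signals accepts the keyword arguments documented above.
def example_bounce_receiver(sender, bounce, should_deactivate, **kwargs):
    """Hypothetical receiver; wire it up with email_bounced.connect(...)."""
    if should_deactivate:
        pass  # e.g. deactivate the bounced email address here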
| fin/froide | froide/bounce/signals.py | Python | mit | 236 | 0 |
#!/usr/bin/env python
#
# Copyright 2006 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from google.appengine.ext.webapp import template
from models.employee import Employee
from models.constrain import Constrain
from models.preparingSchdule import PreparingSchedule
from models.submittedShifts import SubmittedShifts
import json
import time
from datetime import date
from datetime import timedelta
from Dates import Dates
import webapp2
class MainHandler(webapp2.RequestHandler):
def get(self):
userName = None
        if self.request.cookies.get('our_token'):  # the cookie that should contain the access token
userName = Employee.checkToken(self.request.cookies.get('our_token'))
template_variables = {}
if userName:
template_variables['userName'] = userName.userName
dates = Dates(template_variables)
template_variables = dates.nextTwoWeeks()
#### First week ####
sunday0date = dates.createDateObject(0,1)
monday0date = dates.createDateObject(0,2)
tuesday0date = dates.createDateObject(0,3)
wednesday0date = dates.createDateObject(0,4)
thursday0date = dates.createDateObject(0,5)
friday0date = dates.createDateObject(0,6)
saturday0date = dates.createDateObject(0,7)
sunday1date = dates.createDateObject(1,1)
monday1date = dates.createDateObject(1,2)
tuesday1date = dates.createDateObject(1,3)
wednesday1date = dates.createDateObject(1,4)
thursday1date = dates.createDateObject(1,5)
friday1date = dates.createDateObject(1,6)
saturday1date = dates.createDateObject(1,7)
# Add default "white" constrains to employees who hasn't added constrains on their side.
employees = Employee.query().fetch()
if employees:
for e in employees:
constrains = Constrain.query(Constrain.employeeUN == e.userName).fetch()
if not constrains:
Constrain.addConstrains(e.userName,sunday0date)
# Sunday0 night info:
head_nurse_want = Constrain.getShiftHeads(sunday0date, 0, 1)
head_nurse_dont_care = Constrain.getShiftHeads(sunday0date, 0, 0)
head_nurse_prefer_not = Constrain.getShiftHeads(sunday0date, 0, 2)
head_nurse_cant = Constrain.getShiftHeads(sunday0date, 0, 3)
want = Constrain.getCrew(sunday0date, 0, 1)
dont_care = Constrain.getCrew(sunday0date, 0, 0)
prefer_not = Constrain.getCrew(sunday0date, 0, 2)
cant = Constrain.getCrew(sunday0date, 0, 3)
assignBeforeHead = PreparingSchedule.checkIfAssignAlready(sunday0date, 0, 0)
assignBeforeSecond = PreparingSchedule.checkIfAssignAlready(sunday0date, 0, 1)
assignBeforeStandBy = PreparingSchedule.checkIfAssignAlready(sunday0date, 0, 3)
if assignBeforeHead:
template_variables['Sunday0NightAssignBeforeHead'] = assignBeforeHead
if assignBeforeSecond:
template_variables['Sunday0NightAssignBeforeSecond'] = assignBeforeSecond
if assignBeforeStandBy:
template_variables['Sunday0NightAssignBeforeStandBy'] = assignBeforeStandBy
if head_nurse_want:
template_variables['HeadNurseWhoWantSunday0Night'] = head_nurse_want
if head_nurse_dont_care:
template_variables['HeadNurseWhoDontCareSunday0Night'] = head_nurse_dont_care
if head_nurse_prefer_not:
template_variables['HeadNurseWhoPreferNotSunday0Night'] = head_nurse_prefer_not
if head_nurse_cant:
template_variables['HeadNurseWhoCantSunday0Night'] = head_nurse_cant
if want:
template_variables['NurseWhoWantSunday0Night'] = want
if dont_care:
template_variables['NurseWhoDontCareSunday0Night'] = dont_care
if prefer_not:
template_variables['NurseWhoPreferNotSunday0Night'] = prefer_not
if cant:
template_variables['NurseWhoCantSunday0Night'] = cant
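        # The shift blocks in this handler all repeat one copy-pasted pattern;
        # a helper along these lines (hypothetical, not in the original file)
        # could build the same template keys generically, e.g.
        # _collect_shift(monday0date, 'Monday0', 0, 'Night'):
        def _collect_shift(day_date, day_label, shift_idx, shift_label):
            for level, name in ((1, 'Want'), (0, 'DontCare'),
                                (2, 'PreferNot'), (3, 'Cant')):
                heads = Constrain.getShiftHeads(day_date, shift_idx, level)
                crew = Constrain.getCrew(day_date, shift_idx, level)
                if heads:
                    key = 'HeadNurseWho%s%s%s' % (name, day_label, shift_label)
                    template_variables[key] = heads
                if crew:
                    key = 'NurseWho%s%s%s' % (name, day_label, shift_label)
                    template_variables[key] = crew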
# Sunday0 morning info:
head_nurse_want = Constrain.getShiftHeads(sunday0date, 1, 1)
head_nurse_dont_care = Constrain.getShiftHeads(sunday0date, 1, 0)
head_nurse_prefer_not = Constrain.getShiftHeads(sunday0date, 1, 2)
head_nurse_cant = Constrain.getShiftHeads(sunday0date, 1, 3)
want = Constrain.getCrew(sunday0date, 1, 1)
dont_care = Constrain.getCrew(sunday0date, 1, 0)
prefer_not = Constrain.getCrew(sunday0date, 1, 2)
cant = Constrain.getCrew(sunday0date, 1, 3)
assignBeforeHead = PreparingSchedule.checkIfAssignAlready(sunday0date, 1, 0)
assignBeforeSecond = PreparingSchedule.checkIfAssignAlready(sunday0date, 1, 1)
assignBeforeStandBy = PreparingSchedule.checkIfAssignAlready(sunday0date, 1, 3)
if assignBeforeHead:
template_variables['Sunday0MorningAssignBeforeHead'] = assignBeforeHead
if assignBeforeSecond:
template_variables['Sunday0MorningAssignBeforeSecond'] = assignBeforeSecond
if assignBeforeStandBy:
template_variables['Sunday0MorningAssignBeforeStandBy'] = assignBeforeStandBy
if head_nurse_want:
template_variables['HeadNurseWhoWantSunday0Morning'] = head_nurse_want
if head_nurse_dont_care:
template_variables['HeadNurseWhoDontCareSunday0Morning'] = head_nurse_dont_care
if head_nurse_prefer_not:
template_variables['HeadNurseWhoPreferNotSunday0Morning'] = head_nurse_prefer_not
if head_nurse_cant:
template_variables['HeadNurseWhoCantSunday0Morning'] = head_nurse_cant
if want:
template_variables['NurseWhoWantSunday0Morning'] = want
if dont_care:
template_variables['NurseWhoDontCareSunday0Morning'] = dont_care
if prefer_not:
template_variables['NurseWhoPreferNotSunday0Morning'] = prefer_not
if cant:
template_variables['NurseWhoCantSunday0Morning'] = cant
# Sunday0 noon info:
head_nurse_want = Constrain.getShiftHeads(sunday0date, 2, 1)
head_nurse_dont_care = Constrain.getShiftHeads(sunday0date, 2, 0)
head_nurse_prefer_not = Constrain.getShiftHeads(sunday0date, 2, 2)
head_nurse_cant = Constrain.getShiftHeads(sunday0date, 2, 3)
want = Constrain.getCrew(sunday0date, 2, 1)
dont_care = Constrain.getCrew(sunday0date, 2, 0)
prefer_not = Constrain.getCrew(sunday0date, 2, 2)
cant = Constrain.getCrew(sunday0date, 2, 3)
assignBeforeHead = PreparingSchedule.checkIfAssignAlready(sunday0date, 2, 0)
assignBeforeSecond = PreparingSchedule.checkIfAssignAlready(sunday0date, 2, 1)
assignBeforeStandBy = PreparingSchedule.checkIfAssignAlready(sunday0date, 2, 3)
if assignBeforeHead:
template_variables['Sunday0NoonAssignBeforeHead'] = assignBeforeHead
if assignBeforeSecond:
template_variables['Sunday0NoonAssignBeforeSecond'] = assignBeforeSecond
if assignBeforeStandBy:
template_variables['Sunday0NoonAssignBeforeStandBy'] = assignBeforeStandBy
if head_nurse_want:
template_variables['HeadNurseWhoWantSunday0Noon'] = head_nurse_want
if head_nurse_dont_care:
template_variables['HeadNurseWhoDontCareSunday0Noon'] = head_nurse_dont_care
if head_nurse_prefer_not:
template_variables['HeadNurseWhoPreferNotSunday0Noon'] = head_nurse_prefer_not
if head_nurse_cant:
template_variables['HeadNurseWhoCantSunday0Noon'] = head_nurse_cant
if want:
template_variables['NurseWhoWantSunday0Noon'] = want
if dont_care:
template_variables['NurseWhoDontCareSunday0Noon'] = dont_care
if prefer_not:
template_variables['NurseWhoPreferNotSunday0Noon'] = prefer_not
if cant:
template_variables['NurseWhoCantSunday0Noon'] = cant
# Monday0 night info:
head_nurse_want = Constrain.getShiftHeads(monday0date, 0, 1)
head_nurse_dont_care = Constrain.getShiftHeads(monday0date, 0, 0)
head_nurse_prefer_not = Constrain.getShiftHeads(monday0date, 0, 2)
head_nurse_cant = Constrain.getShiftHeads(monday0date, 0, 3)
want = Constrain.getCrew(monday0date, 0, 1)
dont_care = Constrain.getCrew(monday0date, 0, 0)
prefer_not = Constrain.getCrew(monday0date, 0, 2)
cant = Constrain.getCrew(monday0date, 0, 3)
assignBeforeHead = PreparingSchedule.checkIfAssignAlready(monday0date, 0, 0)
assignBeforeSecond = PreparingSchedule.checkIfAssignAlready(monday0date, 0, 1)
assignBeforeStandBy = PreparingSchedule.checkIfAssignAlready(monday0date, 0, 3)
if assignBeforeHead:
template_variables['Monday0NightAssignBeforeHead'] = assignBeforeHead
if assignBeforeSecond:
template_variables['Monday0NightAssignBeforeSecond'] = assignBeforeSecond
if assignBeforeStandBy:
template_variables['Monday0NightAssignBeforeStandBy'] = assignBeforeStandBy
if head_nurse_want:
template_variables['HeadNurseWhoWantMonday0Night'] = head_nurse_want
if head_nurse_dont_care:
template_variables['HeadNurseWhoDontCareMonday0Night'] = head_nurse_dont_care
if head_nurse_prefer_not:
template_variables['HeadNurseWhoPreferNotMonday0Night'] = head_nurse_prefer_not
if head_nurse_cant:
template_variables['HeadNurseWhoCantMonday0Night'] = head_nurse_cant
if want:
template_variables['NurseWhoWantMonday0Night'] = want
if dont_care:
template_variables['NurseWhoDontCareMonday0Night'] = dont_care
if prefer_not:
template_variables['NurseWhoPreferNotMonday0Night'] = prefer_not
if cant:
template_variables['NurseWhoCantMonday0Night'] = cant
# Monday0 morning info:
head_nurse_want = Constrain.getShiftHeads(monday0date, 1, 1)
head_nurse_dont_care = Constrain.getShiftHeads(monday0date, 1, 0)
head_nurse_prefer_not = Constrain.getShiftHeads(monday0date, 1, 2)
head_nurse_cant = Constrain.getShiftHeads(monday0date, 1, 3)
want = Constrain.getCrew(monday0date, 1, 1)
dont_care = Constrain.getCrew(monday0date, 1, 0)
prefer_not = Constrain.getCrew(monday0date, 1, 2)
cant = Constrain.getCrew(monday0date, 1, 3)
assignBeforeHead = PreparingSchedule.checkIfAssignAlready(monday0date, 1, 0)
assignBeforeSecond = PreparingSchedule.checkIfAssignAlready(monday0date, 1, 1)
assignBeforeStandBy = PreparingSchedule.checkIfAssignAlready(monday0date, 1, 3)
if assignBeforeHead:
template_variables['Monday0MorningAssignBeforeHead'] = assignBeforeHead
if assignBeforeSecond:
template_variables['Monday0MorningAssignBeforeSecond'] = assignBeforeSecond
if assignBeforeStandBy:
template_variables['Monday0MorningAssignBeforeStandBy'] = assignBeforeStandBy
if head_nurse_want:
template_variables['HeadNurseWhoWantMonday0Morning'] = head_nurse_want
if head_nurse_dont_care:
template_variables['HeadNurseWhoDontCareMonday0Morning'] = head_nurse_dont_care
if head_nurse_prefer_not:
template_variables['HeadNurseWhoPreferNotMonday0Morning'] = head_nurse_prefer_not
if head_nurse_cant:
template_variables['HeadNurseWhoCantMonday0Morning'] = head_nurse_cant
if want:
template_variables['NurseWhoWantMonday0Morning'] = want
if dont_care:
template_variables['NurseWhoDontCareMonday0Morning'] = dont_care
if prefer_not:
template_variables['NurseWhoPreferNotMonday0Morning'] = prefer_not
if cant:
template_variables['NurseWhoCantMonday0Morning'] = cant
# Monday0 noon info:
head_nurse_want = Constrain.getShiftHeads(monday0date, 2, 1)
head_nurse_dont_care = Constrain.getShiftHeads(monday0date, 2, 0)
head_nurse_prefer_not = Constrain.getShiftHeads(monday0date, 2, 2)
head_nurse_cant = Constrain.getShiftHeads(monday0date, 2, 3)
want = Constrain.getCrew(monday0date, 2, 1)
dont_care = Constrain.getCrew(monday0date, 2, 0)
prefer_not = Constrain.getCrew(monday0date, 2, 2)
cant = Constrain.getCrew(monday0date, 2, 3)
assignBeforeHead = PreparingSchedule.checkIfAssignAlready(monday0date, 2, 0)
assignBeforeSecond = PreparingSchedule.checkIfAssignAlready(monday0date, 2, 1)
assignBeforeStandBy = PreparingSchedule.checkIfAssignAlready(monday0date, 2, 3)
if assignBeforeHead:
template_variables['Monday0NoonAssignBeforeHead'] = assignBeforeHead
if assignBeforeSecond:
template_variables['Monday0NoonAssignBeforeSecond'] = assignBeforeSecond
if assignBeforeStandBy:
template_variables['Monday0NoonAssignBeforeStandBy'] = assignBeforeStandBy
if head_nurse_want:
template_variables['HeadNurseWhoWantMonday0Noon'] = head_nurse_want
if head_nurse_dont_care:
template_variables['HeadNurseWhoDontCareMonday0Noon'] = head_nurse_dont_care
if head_nurse_prefer_not:
template_variables['HeadNurseWhoPreferNotMonday0Noon'] = head_nurse_prefer_not
if head_nurse_cant:
template_variables['HeadNurseWhoCantMonday0Noon'] = head_nurse_cant
if want:
template_variables['NurseWhoWantMonday0Noon'] = want
if dont_care:
template_variables['NurseWhoDontCareMonday0Noon'] = dont_care
if prefer_not:
template_variables['NurseWhoPreferNotMonday0Noon'] = prefer_not
if cant:
template_variables['NurseWhoCantMonday0Noon'] = cant
# Tuesday0 night info:
head_nurse_want = Constrain.getShiftHeads(tuesday0date, 0, 1)
head_nurse_dont_care = Constrain.getShiftHeads(tuesday0date, 0, 0)
head_nurse_prefer_not = Constrain.getShiftHeads(tuesday0date, 0, 2)
head_nurse_cant = Constrain.getShiftHeads(tuesday0date, 0, 3)
want = Constrain.getCrew(tuesday0date, 0, 1)
dont_care = Constrain.getCrew(tuesday0date, 0, 0)
prefer_not = Constrain.getCrew(tuesday0date, 0, 2)
cant = Constrain.getCrew(tuesday0date, 0, 3)
assignBeforeHead = PreparingSchedule.checkIfAssignAlready(tuesday0date, 0, 0)
assignBeforeSecond = PreparingSchedule.checkIfAssignAlready(tuesday0date, 0, 1)
assignBeforeStandBy = PreparingSchedule.checkIfAssignAlready(tuesday0date, 0, 3)
if assignBeforeHead:
template_variables['Tuesday0NightAssignBeforeHead'] = assignBeforeHead
if assignBeforeSecond:
template_variables['Tuesday0NightAssignBeforeSecond'] = assignBeforeSecond
if assignBeforeStandBy:
template_variables['Tuesday0NightAssignBeforeStandBy'] = assignBeforeStandBy
if head_nurse_want:
template_variables['HeadNurseWhoWantTuesday0Night'] = head_nurse_want
if head_nurse_dont_care:
template_variables['HeadNurseWhoDontCareTuesday0Night'] = head_nurse_dont_care
if head_nurse_prefer_not:
template_variables['HeadNurseWhoPreferNotTuesday0Night'] = head_nurse_prefer_not
if head_nurse_cant:
template_variables['HeadNurseWhoCantTuesday0Night'] = head_nurse_cant
if want:
template_variables['NurseWhoWantTuesday0Night'] = want
if dont_care:
template_variables['NurseWhoDontCareTuesday0Night'] = dont_care
if prefer_not:
template_variables['NurseWhoPreferNotTuesday0Night'] = prefer_not
if cant:
template_variables['NurseWhoCantTuesday0Night'] = cant
# Tuesday0 morning info:
head_nurse_want = Constrain.getShiftHeads(tuesday0date, 1, 1)
head_nurse_dont_care = Constrain.getShiftHeads(tuesday0date, 1, 0)
head_nurse_prefer_not = Constrain.getShiftHeads(tuesday0date, 1, 2)
head_nurse_cant = Constrain.getShiftHeads(tuesday0date, 1, 3)
want = Constrain.getCrew(tuesday0date, 1, 1)
dont_care = Constrain.getCrew(tuesday0date, 1, 0)
prefer_not = Constrain.getCrew(tuesday0date, 1, 2)
cant = Constrain.getCrew(tuesday0date, 1, 3)
assignBeforeHead = PreparingSchedule.checkIfAssignAlready(tuesday0date, 1, 0)
assignBeforeSecond = PreparingSchedule.checkIfAssignAlready(tuesday0date, 1, 1)
assignBeforeThird = PreparingSchedule.checkIfAssignAlready(tuesday0date, 1, 2)
assignBeforeStandBy = PreparingSchedule.checkIfAssignAlready(tuesday0date, 1, 3)
if assignBeforeHead:
template_variables['Tuesday0MorningAssignBeforeHead'] = assignBeforeHead
if assignBeforeSecond:
template_variables['Tuesday0MorningAssignBeforeSecond'] = assignBeforeSecond
if assignBeforeThird:
template_variables['Tuesday0MorningAssignBeforeThird'] = assignBeforeThird
if assignBeforeStandBy:
template_variables['Tuesday0MorningAssignBeforeStandBy'] = assignBeforeStandBy
if head_nurse_want:
template_variables['HeadNurseWhoWantTuesday0Morning'] = head_nurse_want
if head_nurse_dont_care:
template_variables['HeadNurseWhoDontCareTuesday0Morning'] = head_nurse_dont_care
if head_nurse_prefer_not:
template_variables['HeadNurseWhoPreferNotTuesday0Morning'] = head_nurse_prefer_not
if head_nurse_cant:
template_variables['HeadNurseWhoCantTuesday0Morning'] = head_nurse_cant
if want:
template_variables['NurseWhoWantTuesday0Morning'] = want
if dont_care:
template_variables['NurseWhoDontCareTuesday0Morning'] = dont_care
if prefer_not:
template_variables['NurseWhoPreferNotTuesday0Morning'] = prefer_not
if cant:
template_variables['NurseWhoCantTuesday0Morning'] = cant
# Tuesday0 noon info:
head_nurse_want = Constrain.getShiftHeads(tuesday0date, 2, 1)
head_nurse_dont_care = Constrain.getShiftHeads(tuesday0date, 2, 0)
head_nurse_prefer_not = Constrain.getShiftHeads(tuesday0date, 2, 2)
head_nurse_cant = Constrain.getShiftHeads(tuesday0date, 2, 3)
want = Constrain.getCrew(tuesday0date, 2, 1)
dont_care = Constrain.getCrew(tuesday0date, 2, 0)
prefer_not = Constrain.getCrew(tuesday0date, 2, 2)
cant = Constrain.getCrew(tuesday0date, 2, 3)
assignBeforeHead = PreparingSchedule.checkIfAssignAlready(tuesday0date, 2, 0)
assignBeforeSecond = PreparingSchedule.checkIfAssignAlready(tuesday0date, 2, 1)
assignBeforeStandBy = PreparingSchedule.checkIfAssignAlready(tuesday0date, 2, 3)
if assignBeforeHead:
template_variables['Tuesday0NoonAssignBeforeHead'] = assignBeforeHead
if assignBeforeSecond:
template_variables['Tuesday0NoonAssignBeforeSecond'] = assignBeforeSecond
if assignBeforeStandBy:
template_variables['Tuesday0NoonAssignBeforeStandBy'] = assignBeforeStandBy
if head_nurse_want:
template_variables['HeadNurseWhoWantTuesday0Noon'] = head_nurse_want
if head_nurse_dont_care:
template_variables['HeadNurseWhoDontCareTuesday0Noon'] = head_nurse_dont_care
if head_nurse_prefer_not:
template_variables['HeadNurseWhoPreferNotTuesday0Noon'] = head_nurse_prefer_not
if head_nurse_cant:
template_variables['HeadNurseWhoCantTuesday0Noon'] = head_nurse_cant
if want:
template_variables['NurseWhoWantTuesday0Noon'] = want
if dont_care:
template_variables['NurseWhoDontCareTuesday0Noon'] = dont_care
if prefer_not:
template_variables['NurseWhoPreferNotTuesday0Noon'] = prefer_not
if cant:
template_variables['NurseWhoCantTuesday0Noon'] = cant
# Wednesday0 night info:
head_nurse_want = Constrain.getShiftHeads(wednesday0date, 0, 1)
head_nurse_dont_care = Constrain.getShiftHeads(wednesday0date, 0, 0)
head_nurse_prefer_not = Constrain.getShiftHeads(wednesday0date, 0, 2)
head_nurse_cant = Constrain.getShiftHeads(wednesday0date, 0, 3)
want = Constrain.getCrew(wednesday0date, 0, 1)
dont_care = Constrain.getCrew(wednesday0date, 0, 0)
prefer_not = Constrain.getCrew(wednesday0date, 0, 2)
cant = Constrain.getCrew(wednesday0date, 0, 3)
assignBeforeHead = PreparingSchedule.checkIfAssignAlready(wednesday0date, 0, 0)
assignBeforeSecond = PreparingSchedule.checkIfAssignAlready(wednesday0date, 0, 1)
assignBeforeStandBy = PreparingSchedule.checkIfAssignAlready(wednesday0date, 0, 3)
if assignBeforeHead:
template_variables['Wednesday0NightAssignBeforeHead'] = assignBeforeHead
if assignBeforeSecond:
template_variables['Wednesday0NightAssignBeforeSecond'] = assignBeforeSecond
if assignBeforeStandBy:
template_variables['Wednesday0NightAssignBeforeStandBy'] = assignBeforeStandBy
if head_nurse_want:
template_variables['HeadNurseWhoWantWednesday0Night'] = head_nurse_want
if head_nurse_dont_care:
template_variables['HeadNurseWhoDontCareWednesday0Night'] = head_nurse_dont_care
if head_nurse_prefer_not:
template_variables['HeadNurseWhoPreferNotWednesday0Night'] = head_nurse_prefer_not
if head_nurse_cant:
template_variables['HeadNurseWhoCantWednesday0Night'] = head_nurse_cant
if want:
template_variables['NurseWhoWantWednesday0Night'] = want
if dont_care:
template_variables['NurseWhoDontCareWednesday0Night'] = dont_care
if prefer_not:
template_variables['NurseWhoPreferNotWednesday0Night'] = prefer_not
if cant:
template_variables['NurseWhoCantWednesday0Night'] = cant
# Wednesday0 morning info:
head_nurse_want = Constrain.getShiftHeads(wednesday0date, 1, 1)
head_nurse_dont_care = Constrain.getShiftHeads(wednesday0date, 1, 0)
head_nurse_prefer_not = Constrain.getShiftHeads(wednesday0date, 1, 2)
head_nurse_cant = Constrain.getShiftHeads(wednesday0date, 1, 3)
want = Constrain.getCrew(wednesday0date, 1, 1)
dont_care = Constrain.getCrew(wednesday0date, 1, 0)
prefer_not = Constrain.getCrew(wednesday0date, 1, 2)
cant = Constrain.getCrew(wednesday0date, 1, 3)
assignBeforeHead = PreparingSchedule.checkIfAssignAlready(wednesday0date, 1, 0)
assignBeforeSecond = PreparingSchedule.checkIfAssignAlready(wednesday0date, 1, 1)
assignBeforeStandBy = PreparingSchedule.checkIfAssignAlready(wednesday0date, 1, 3)
if assignBeforeHead:
template_variables['Wednesday0MorningAssignBeforeHead'] = assignBeforeHead
if assignBeforeSecond:
template_variables['Wednesday0MorningAssignBeforeSecond'] = assignBeforeSecond
if assignBeforeStandBy:
template_variables['Wednesday0MorningAssignBeforeStandBy'] = assignBeforeStandBy
if head_nurse_want:
template_variables['HeadNurseWhoWantWednesday0Morning'] = head_nurse_want
if head_nurse_dont_care:
template_variables['HeadNurseWhoDontCareWednesday0Morning'] = head_nurse_dont_care
if head_nurse_prefer_not:
template_variables['HeadNurseWhoPreferNotWednesday0Morning'] = head_nurse_prefer_not
if head_nurse_cant:
template_variables['HeadNurseWhoCantWednesday0Morning'] = head_nurse_cant
if want:
template_variables['NurseWhoWantWednesday0Morning'] = want
if dont_care:
template_variables['NurseWhoDontCareWednesday0Morning'] = dont_care
if prefer_not:
template_variables['NurseWhoPreferNotWednesday0Morning'] = prefer_not
if cant:
template_variables['NurseWhoCantWednesday0Morning'] = cant
# Wednesday0 noon info:
head_nurse_want = Constrain.getShiftHeads(wednesday0date, 2, 1)
head_nurse_dont_care = Constrain.getShiftHeads(wednesday0date, 2, 0)
head_nurse_prefer_not = Constrain.getShiftHeads(wednesday0date, 2, 2)
head_nurse_cant = Constrain.getShiftHeads(wednesday0date, 2, 3)
want = Constrain.getCrew(wednesday0date, 2, 1)
dont_care = Constrain.getCrew(wednesday0date, 2, 0)
prefer_not = Constrain.getCrew(wednesday0date, 2, 2)
cant = Constrain.getCrew(wednesday0date, 2, 3)
assignBeforeHead = PreparingSchedule.checkIfAssignAlready(wednesday0date, 2, 0)
assignBeforeSecond = PreparingSchedule.checkIfAssignAlready(wednesday0date, 2, 1)
assignBeforeStandBy = PreparingSchedule.checkIfAssignAlready(wednesday0date, 2, 3)
if assignBeforeHead:
template_variables['Wednesday0NoonAssignBeforeHead'] = assignBeforeHead
if assignBeforeSecond:
template_variables['Wednesday0NoonAssignBeforeSecond'] = assignBeforeSecond
if assignBeforeStandBy:
template_variables['Wednesday0NoonAssignBeforeStandBy'] = assignBeforeStandBy
if head_nurse_want:
template_variables['HeadNurseWhoWantWednesday0Noon'] = head_nurse_want
if head_nurse_dont_care:
template_variables['HeadNurseWhoDontCareWednesday0Noon'] = head_nurse_dont_care
if head_nurse_prefer_not:
template_variables['HeadNurseWhoPreferNotWednesday0Noon'] = head_nurse_prefer_not
if head_nurse_cant:
template_variables['HeadNurseWhoCantWednesday0Noon'] = head_nurse_cant
if want:
template_variables['NurseWhoWantWednesday0Noon'] = want
if dont_care:
template_variables['NurseWhoDontCareWednesday0Noon'] = dont_care
if prefer_not:
template_variables['NurseWhoPreferNotWednesday0Noon'] = prefer_not
if cant:
template_variables['NurseWhoCantWednesday0Noon'] = cant
# Thursday0 night info:
head_nurse_want = Constrain.getShiftHeads(thursday0date, 0, 1)
head_nurse_dont_care = Constrain.getShiftHeads(thursday0date, 0, 0)
head_nurse_prefer_not = Constrain.getShiftHeads(thursday0date, 0, 2)
head_nurse_cant = Constrain.getShiftHeads(thursday0date, 0, 3)
want = Constrain.getCrew(thursday0date, 0, 1)
dont_care = Constrain.getCrew(thursday0date, 0, 0)
prefer_not = Constrain.getCrew(thursday0date, 0, 2)
cant = Constrain.getCrew(thursday0date, 0, 3)
assignBeforeHead = PreparingSchedule.checkIfAssignAlready(thursday0date, 0, 0)
assignBeforeSecond = PreparingSchedule.checkIfAssignAlready(thursday0date, 0, 1)
assignBeforeStandBy = PreparingSchedule.checkIfAssignAlready(thursday0date, 0, 3)
if assignBeforeHead:
template_variables['Thursday0NightAssignBeforeHead'] = assignBeforeHead
if assignBeforeSecond:
template_variables['Thursday0NightAssignBeforeSecond'] = assignBeforeSecond
if assignBeforeStandBy:
template_variables['Thursday0NightAssignBeforeStandBy'] = assignBeforeStandBy
if head_nurse_want:
template_variables['HeadNurseWhoWantThursday0Night'] = head_nurse_want
if head_nurse_dont_care:
template_variables['HeadNurseWhoDontCareThursday0Night'] = head_nurse_dont_care
if head_nurse_prefer_not:
template_variables['HeadNurseWhoPreferNotThursday0Night'] = head_nurse_prefer_not
if head_nurse_cant:
template_variables['HeadNurseWhoCantThursday0Night'] = head_nurse_cant
if want:
template_variables['NurseWhoWantThursday0Night'] = want
if dont_care:
template_variables['NurseWhoDontCareThursday0Night'] = dont_care
if prefer_not:
template_variables['NurseWhoPreferNotThursday0Night'] = prefer_not
if cant:
template_variables['NurseWhoCantThursday0Night'] = cant
# Thursday0 morning info:
head_nurse_want = Constrain.getShiftHeads(thursday0date, 1, 1)
head_nurse_dont_care = Constrain.getShiftHeads(thursday0date, 1, 0)
head_nurse_prefer_not = Constrain.getShiftHeads(thursday0date, 1, 2)
head_nurse_cant = Constrain.getShiftHeads(thursday0date, 1, 3)
want = Constrain.getCrew(thursday0date, 1, 1)
dont_care = Constrain.getCrew(thursday0date, 1, 0)
prefer_not = Constrain.getCrew(thursday0date, 1, 2)
cant = Constrain.getCrew(thursday0date, 1, 3)
assignBeforeHead = PreparingSchedule.checkIfAssignAlready(thursday0date, 1, 0)
assignBeforeSecond = PreparingSchedule.checkIfAssignAlready(thursday0date, 1, 1)
assignBeforeThird = PreparingSchedule.checkIfAssignAlready(thursday0date, 1, 2)
assignBeforeStandBy = PreparingSchedule.checkIfAssignAlready(thursday0date, 1, 3)
if assignBeforeHead:
template_variables['Thursday0MorningAssignBeforeHead'] = assignBeforeHead
if assignBeforeSecond:
template_variables['Thursday0MorningAssignBeforeSecond'] = assignBeforeSecond
if assignBeforeThird:
template_variables['Thursday0MorningAssignBeforeThird'] = assignBeforeThird
if assignBeforeStandBy:
template_variables['Thursday0MorningAssignBeforeStandBy'] = assignBeforeStandBy
if head_nurse_want:
template_variables['HeadNurseWhoWantThursday0Morning'] = head_nurse_want
if head_nurse_dont_care:
template_variables['HeadNurseWhoDontCareThursday0Morning'] = head_nurse_dont_care
if head_nurse_prefer_not:
template_variables['HeadNurseWhoPreferNotThursday0Morning'] = head_nurse_prefer_not
if head_nurse_cant:
template_variables['HeadNurseWhoCantThursday0Morning'] = head_nurse_cant
if want:
template_variables['NurseWhoWantThursday0Morning'] = want
if dont_care:
template_variables['NurseWhoDontCareThursday0Morning'] = dont_care
if prefer_not:
template_variables['NurseWhoPreferNotThursday0Morning'] = prefer_not
if cant:
template_variables['NurseWhoCantThursday0Morning'] = cant
# Thursday0 noon info:
head_nurse_want = Constrain.getShiftHeads(thursday0date, 2, 1)
head_nurse_dont_care = Constrain.getShiftHeads(thursday0date, 2, 0)
head_nurse_prefer_not = Constrain.getShiftHeads(thursday0date, 2, 2)
head_nurse_cant = Constrain.getShiftHeads(thursday0date, 2, 3)
want = Constrain.getCrew(thursday0date, 2, 1)
dont_care = Constrain.getCrew(thursday0date, 2, 0)
prefer_not = Constrain.getCrew(thursday0date, 2, 2)
cant = Constrain.getCrew(thursday0date, 2, 3)
assignBeforeHead = PreparingSchedule.checkIfAssignAlready(thursday0date, 2, 0)
assignBeforeSecond = PreparingSchedule.checkIfAssignAlready(thursday0date, 2, 1)
assignBeforeStandBy = PreparingSchedule.checkIfAssignAlready(thursday0date, 2, 3)
if assignBeforeHead:
template_variables['Thursday0NoonAssignBeforeHead'] = assignBeforeHead
if assignBeforeSecond:
template_variables['Thursday0NoonAssignBeforeSecond'] = assignBeforeSecond
if assignBeforeStandBy:
template_variables['Thursday0NoonAssignBeforeStandBy'] = assignBeforeStandBy
if head_nurse_want:
template_variables['HeadNurseWhoWantThursday0Noon'] = head_nurse_want
if head_nurse_dont_care:
template_variables['HeadNurseWhoDontCareThursday0Noon'] = head_nurse_dont_care
if head_nurse_prefer_not:
template_variables['HeadNurseWhoPreferNotThursday0Noon'] = head_nurse_prefer_not
if head_nurse_cant:
template_variables['HeadNurseWhoCantThursday0Noon'] = head_nurse_cant
if want:
template_variables['NurseWhoWantThursday0Noon'] = want
if dont_care:
template_variables['NurseWhoDontCareThursday0Noon'] = dont_care
if prefer_not:
template_variables['NurseWhoPreferNotThursday0Noon'] = prefer_not
if cant:
template_variables['NurseWhoCantThursday0Noon'] = cant
# Friday0 night info:
head_nurse_want = Constrain.getShiftHeads(friday0date, 0, 1)
head_nurse_dont_care = Constrain.getShiftHeads(friday0date, 0, 0)
head_nurse_prefer_not = Constrain.getShiftHeads(friday0date, 0, 2)
head_nurse_cant = Constrain.getShiftHeads(friday0date, 0, 3)
want = Constrain.getCrew(friday0date, 0, 1)
dont_care = Constrain.getCrew(friday0date, 0, 0)
prefer_not = Constrain.getCrew(friday0date, 0, 2)
cant = Constrain.getCrew(friday0date, 0, 3)
assignBeforeHead = PreparingSchedule.checkIfAssignAlready(friday0date, 0, 0)
assignBeforeSecond = PreparingSchedule.checkIfAssignAlready(friday0date, 0, 1)
assignBeforeStandBy = PreparingSchedule.checkIfAssignAlready(friday0date, 0, 3)
if assignBeforeHead:
template_variables['Friday0NightAssignBeforeHead'] = assignBeforeHead
if assignBeforeSecond:
template_variables['Friday0NightAssignBeforeSecond'] = assignBeforeSecond
if assignBeforeStandBy:
template_variables['Friday0NightAssignBeforeStandBy'] = assignBeforeStandBy
if head_nurse_want:
template_variables['HeadNurseWhoWantFriday0Night'] = head_nurse_want
if head_nurse_dont_care:
template_variables['HeadNurseWhoDontCareFriday0Night'] = head_nurse_dont_care
if head_nurse_prefer_not:
template_variables['HeadNurseWhoPreferNotFriday0Night'] = head_nurse_prefer_not
if head_nurse_cant:
template_variables['HeadNurseWhoCantFriday0Night'] = head_nurse_cant
if want:
template_variables['NurseWhoWantFriday0Night'] = want
if dont_care:
template_variables['NurseWhoDontCareFriday0Night'] = dont_care
if prefer_not:
template_variables['NurseWhoPreferNotFriday0Night'] = prefer_not
if cant:
template_variables['NurseWhoCantFriday0Night'] = cant
# Friday0 morning info:
head_nurse_want = Constrain.getShiftHeads(friday0date, 1, 1)
head_nurse_dont_care = Constrain.getShiftHeads(friday0date, 1, 0)
head_nurse_prefer_not = Constrain.getShiftHeads(friday0date, 1, 2)
head_nurse_cant = Constrain.getShiftHeads(friday0date, 1, 3)
want = Constrain.getCrew(friday0date, 1, 1)
dont_care = Constrain.getCrew(friday0date, 1, 0)
prefer_not = Constrain.getCrew(friday0date, 1, 2)
cant = Constrain.getCrew(friday0date, 1, 3)
assignBeforeHead = PreparingSchedule.checkIfAssignAlready(friday0date, 1, 0)
assignBeforeSecond = PreparingSchedule.checkIfAssignAlready(friday0date, 1, 1)
assignBeforeStandBy = PreparingSchedule.checkIfAssignAlready(friday0date, 1, 3)
if assignBeforeHead:
template_variables['Friday0MorningAssignBeforeHead'] = assignBeforeHead
if assignBeforeSecond:
template_variables['Friday0MorningAssignBeforeSecond'] = assignBeforeSecond
if assignBeforeStandBy:
template_variables['Friday0MorningAssignBeforeStandBy'] = assignBeforeStandBy
if head_nurse_want:
template_variables['HeadNurseWhoWantFriday0Morning'] = head_nurse_want
if head_nurse_dont_care:
template_variables['HeadNurseWhoDontCareFriday0Morning'] = head_nurse_dont_care
if head_nurse_prefer_not:
template_variables['HeadNurseWhoPreferNotFriday0Morning'] = head_nurse_prefer_not
if head_nurse_cant:
template_variables['HeadNurseWhoCantFriday0Morning'] = head_nurse_cant
if want:
template_variables['NurseWhoWantFriday0Morning'] = want
if dont_care:
template_variables['NurseWhoDontCareFriday0Morning'] = dont_care
if prefer_not:
template_variables['NurseWhoPreferNotFriday0Morning'] = prefer_not
if cant:
template_variables['NurseWhoCantFriday0Morning'] = cant
# Friday0 noon info:
head_nurse_want = Constrain.getShiftHeads(friday0date, 2, 1)
head_nurse_dont_care = Constrain.getShiftHeads(friday0date, 2, 0)
head_nurse_prefer_not = Constrain.getShiftHeads(friday0date, 2, 2)
head_nurse_cant = Constrain.getShiftHeads(friday0date, 2, 3)
want = Constrain.getCrew(friday0date, 2, 1)
dont_care = Constrain.getCrew(friday0date, 2, 0)
prefer_not = Constrain.getCrew(friday0date, 2, 2)
cant = Constrain.getCrew(friday0date, 2, 3)
assignBeforeHead = PreparingSchedule.checkIfAssignAlready(friday0date, 2, 0)
assignBeforeSecond = PreparingSchedule.checkIfAssignAlready(friday0date, 2, 1)
assignBeforeStandBy = PreparingSchedule.checkIfAssignAlready(friday0date, 2, 3)
if assignBeforeHead:
template_variables['Friday0NoonAssignBeforeHead'] = assignBeforeHead
if assignBeforeSecond:
template_variables['Friday0NoonAssignBeforeSecond'] = assignBeforeSecond
if assignBeforeStandBy:
template_variables['Friday0NoonAssignBeforeStandBy'] = assignBeforeStandBy
if head_nurse_want:
template_variables['HeadNurseWhoWantFriday0Noon'] = head_nurse_want
if head_nurse_dont_care:
template_variables['HeadNurseWhoDontCareFriday0Noon'] = head_nurse_dont_care
if head_nurse_prefer_not:
template_variables['HeadNurseWhoPreferNotFriday0Noon'] = head_nurse_prefer_not
if head_nurse_cant:
template_variables['HeadNurseWhoCantFriday0Noon'] = head_nurse_cant
if want:
template_variables['NurseWhoWantFriday0Noon'] = want
if dont_care:
template_variables['NurseWhoDontCareFriday0Noon'] = dont_care
if prefer_not:
template_variables['NurseWhoPreferNotFriday0Noon'] = prefer_not
if cant:
template_variables['NurseWhoCantFriday0Noon'] = cant
# Saturday0 night info:
head_nurse_want = Constrain.getShiftHeads(saturday0date, 0, 1)
head_nurse_dont_care = Constrain.getShiftHeads(saturday0date, 0, 0)
head_nurse_prefer_not = Constrain.getShiftHeads(saturday0date, 0, 2)
head_nurse_cant = Constrain.getShiftHeads(saturday0date, 0, 3)
want = Constrain.getCrew(saturday0date, 0, 1)
dont_care = Constrain.getCrew(saturday0date, 0, 0)
prefer_not = Constrain.getCrew(saturday0date, 0, 2)
cant = Constrain.getCrew(saturday0date, 0, 3)
assignBeforeHead = PreparingSchedule.checkIfAssignAlready(saturday0date, 0, 0)
assignBeforeSecond = PreparingSchedule.checkIfAssignAlready(saturday0date, 0, 1)
assignBeforeStandBy = PreparingSchedule.checkIfAssignAlready(saturday0date, 0, 3)
if assignBeforeHead:
template_variables['Saturday0NightAssignBeforeHead'] = assignBeforeHead
if assignBeforeSecond:
template_variables['Saturday0NightAssignBeforeSecond'] = assignBeforeSecond
if assignBeforeStandBy:
template_variables['Saturday0NightAssignBeforeStandBy'] = assignBeforeStandBy
if head_nurse_want:
template_variables['HeadNurseWhoWantSaturday0Night'] = head_nurse_want
if head_nurse_dont_care:
template_variables['HeadNurseWhoDontCareSaturday0Night'] = head_nurse_dont_care
if head_nurse_prefer_not:
template_variables['HeadNurseWhoPreferNotSaturday0Night'] = head_nurse_prefer_not
if head_nurse_cant:
template_variables['HeadNurseWhoCantSaturday0Night'] = head_nurse_cant
if want:
template_variables['NurseWhoWantSaturday0Night'] = want
if dont_care:
template_variables['NurseWhoDontCareSaturday0Night'] = dont_care
if prefer_not:
template_variables['NurseWhoPreferNotSaturday0Night'] = prefer_not
if cant:
template_variables['NurseWhoCantSaturday0Night'] = cant
# Saturday0 morning info:
head_nurse_want = Constrain.getShiftHeads(saturday0date, 1, 1)
head_nurse_dont_care = Constrain.getShiftHeads(saturday0date, 1, 0)
head_nurse_prefer_not = Constrain.getShiftHeads(saturday0date, 1, 2)
head_nurse_cant = Constrain.getShiftHeads(saturday0date, 1, 3)
want = Constrain.getCrew(saturday0date, 1, 1)
dont_care = Constrain.getCrew(saturday0date, 1, 0)
prefer_not = Constrain.getCrew(saturday0date, 1, 2)
cant = Constrain.getCrew(saturday0date, 1, 3)
assignBeforeHead = PreparingSchedule.checkIfAssignAlready(saturday0date, 1, 0)
assignBeforeSecond = PreparingSchedule.checkIfAssignAlready(saturday0date, 1, 1)
assignBeforeStandBy = PreparingSchedule.checkIfAssignAlready(saturday0date, 1, 3)
if assignBeforeHead:
template_variables['Saturday0MorningAssignBeforeHead'] = assignBeforeHead
if assignBeforeSecond:
template_variables['Saturday0MorningAssignBeforeSecond'] = assignBeforeSecond
if assignBeforeStandBy:
template_variables['Saturday0MorningAssignBeforeStandBy'] = assignBeforeStandBy
if head_nurse_want:
template_variables['HeadNurseWhoWantSaturday0Morning'] = head_nurse_want
if head_nurse_dont_care:
template_variables['HeadNurseWhoDontCareSaturday0Morning'] = head_nurse_dont_care
if head_nurse_prefer_not:
template_variables['HeadNurseWhoPreferNotSaturday0Morning'] = head_nurse_prefer_not
if head_nurse_cant:
template_variables['HeadNurseWhoCantSaturday0Morning'] = head_nurse_cant
if want:
template_variables['NurseWhoWantSaturday0Morning'] = want
if dont_care:
template_variables['NurseWhoDontCareSaturday0Morning'] = dont_care
if prefer_not:
template_variables['NurseWhoPreferNotSaturday0Morning'] = prefer_not
if cant:
template_variables['NurseWhoCantSaturday0Morning'] = cant
# Saturday0 noon info:
head_nurse_want = Constrain.getShiftHeads(saturday0date, 2, 1)
head_nurse_dont_care = Constrain.getShiftHeads(saturday0date, 2, 0)
head_nurse_prefer_not = Constrain.getShiftHeads(saturday0date, 2, 2)
head_nurse_cant = Constrain.getShiftHeads(saturday0date, 2, 3)
want = Constrain.getCrew(saturday0date, 2, 1)
dont_care = Constrain.getCrew(saturday0date, 2, 0)
prefer_not = Constrain.getCrew(saturday0date, 2, 2)
cant = Constrain.getCrew(saturday0date, 2, 3)
assignBeforeHead = PreparingSchedule.checkIfAssignAlready(saturday0date, 2, 0)
assignBeforeSecond = PreparingSchedule.checkIfAssignAlready(saturday0date, 2, 1)
assignBeforeStandBy = PreparingSchedule.checkIfAssignAlready(saturday0date, 2, 3)
if assignBeforeHead:
template_variables['Saturday0NoonAssignBeforeHead'] = assignBeforeHead
if assignBeforeSecond:
template_variables['Saturday0NoonAssignBeforeSecond'] = assignBeforeSecond
if assignBeforeStandBy:
template_variables['Saturday0NoonAssignBeforeStandBy'] = assignBeforeStandBy
if head_nurse_want:
template_variables['HeadNurseWhoWantSaturday0Noon'] = head_nurse_want
if head_nurse_dont_care:
template_variables['HeadNurseWhoDontCareSaturday0Noon'] = head_nurse_dont_care
if head_nurse_prefer_not:
template_variables['HeadNurseWhoPreferNotSaturday0Noon'] = head_nurse_prefer_not
if head_nurse_cant:
template_variables['HeadNurseWhoCantSaturday0Noon'] = head_nurse_cant
if want:
template_variables['NurseWhoWantSaturday0Noon'] = want
if dont_care:
template_variables['NurseWhoDontCareSaturday0Noon'] = dont_care
if prefer_not:
template_variables['NurseWhoPreferNotSaturday0Noon'] = prefer_not
if cant:
template_variables['NurseWhoCantSaturday0Noon'] = cant
#### Second week ####
# Sunday1 night info:
head_nurse_want = Constrain.getShiftHeads(sunday1date, 0, 1)
head_nurse_dont_care = Constrain.getShiftHeads(sunday1date, 0, 0)
head_nurse_prefer_not = Constrain.getShiftHeads(sunday1date, 0, 2)
head_nurse_cant = Constrain.getShiftHeads(sunday1date, 0, 3)
want = Constrain.getCrew(sunday1date, 0, 1)
dont_care = Constrain.getCrew(sunday1date, 0, 0)
prefer_not = Constrain.getCrew(sunday1date, 0, 2)
cant = Constrain.getCrew(sunday1date, 0, 3)
assignBeforeHead = PreparingSchedule.checkIfAssignAlready(sunday1date, 0, 0)
assignBeforeSecond = PreparingSchedule.checkIfAssignAlready(sunday1date, 0, 1)
assignBeforeStandBy = PreparingSchedule.checkIfAssignAlready(sunday1date, 0, 3)
if assignBeforeHead:
template_variables['Sunday1NightAssignBeforeHead'] = assignBeforeHead
if assignBeforeSecond:
template_variables['Sunday1NightAssignBeforeSecond'] = assignBeforeSecond
if assignBeforeStandBy:
template_variables['Sunday1NightAssignBeforeStandBy'] = assignBeforeStandBy
if head_nurse_want:
template_variables['HeadNurseWhoWantSunday1Night'] = head_nurse_want
if head_nurse_dont_care:
template_variables['HeadNurseWhoDontCareSunday1Night'] = head_nurse_dont_care
if head_nurse_prefer_not:
template_variables['HeadNurseWhoPreferNotSunday1Night'] = head_nurse_prefer_not
if head_nurse_cant:
template_variables['HeadNurseWhoCantSunday1Night'] = head_nurse_cant
if want:
template_variables['NurseWhoWantSunday1Night'] = want
if dont_care:
template_variables['NurseWhoDontCareSunday1Night'] = dont_care
if prefer_not:
template_variables['NurseWhoPreferNotSunday1Night'] = prefer_not
if cant:
template_variables['NurseWhoCantSunday1Night'] = cant
# Sunday1 morning info:
head_nurse_want = Constrain.getShiftHeads(sunday1date, 1, 1)
head_nurse_dont_care = Constrain.getShiftHeads(sunday1date, 1, 0)
head_nurse_prefer_not = Constrain.getShiftHeads(sunday1date, 1, 2)
head_nurse_cant = Constrain.getShiftHeads(sunday1date, 1, 3)
want = Constrain.getCrew(sunday1date, 1, 1)
dont_care = Constrain.getCrew(sunday1date, 1, 0)
prefer_not = Constrain.getCrew(sunday1date, 1, 2)
cant = Constrain.getCrew(sunday1date, 1, 3)
assignBeforeHead = PreparingSchedule.checkIfAssignAlready(sunday1date, 1, 0)
assignBeforeSecond = PreparingSchedule.checkIfAssignAlready(sunday1date, 1, 1)
assignBeforeStandBy = PreparingSchedule.checkIfAssignAlready(sunday1date, 1, 3)
if assignBeforeHead:
template_variables['Sunday1MorningAssignBeforeHead'] = assignBeforeHead
if assignBeforeSecond:
template_variables['Sunday1MorningAssignBeforeSecond'] = assignBeforeSecond
if assignBeforeStandBy:
template_variables['Sunday1MorningAssignBeforeStandBy'] = assignBeforeStandBy
if head_nurse_want:
template_variables['HeadNurseWhoWantSunday1Morning'] = head_nurse_want
if head_nurse_dont_care:
template_variables['HeadNurseWhoDontCareSunday1Morning'] = head_nurse_dont_care
if head_nurse_prefer_not:
template_variables['HeadNurseWhoPreferNotSunday1Morning'] = head_nurse_prefer_not
if head_nurse_cant:
template_variables['HeadNurseWhoCantSunday1Morning'] = head_nurse_cant
if want:
template_variables['NurseWhoWantSunday1Morning'] = want
if dont_care:
template_variables['NurseWhoDontCareSunday1Morning'] = dont_care
if prefer_not:
template_variables['NurseWhoPreferNotSunday1Morning'] = prefer_not
if cant:
template_variables['NurseWhoCantSunday1Morning'] = cant
# Sunday1 noon info:
head_nurse_want = Constrain.getShiftHeads(sunday1date, 2, 1)
head_nurse_dont_care = Constrain.getShiftHeads(sunday1date, 2, 0)
head_nurse_prefer_not = Constrain.getShiftHeads(sunday1date, 2, 2)
head_nurse_cant = Constrain.getShiftHeads(sunday1date, 2, 3)
want = Constrain.getCrew(sunday1date, 2, 1)
dont_care = Constrain.getCrew(sunday1date, 2, 0)
prefer_not = Constrain.getCrew(sunday1date, 2, 2)
cant = Constrain.getCrew(sunday1date, 2, 3)
assignBeforeHead = PreparingSchedule.checkIfAssignAlready(sunday1date, 2, 0)
assignBeforeSecond = PreparingSchedule.checkIfAssignAlready(sunday1date, 2, 1)
assignBeforeStandBy = PreparingSchedule.checkIfAssignAlready(sunday1date, 2, 3)
if assignBeforeHead:
template_variables['Sunday1NoonAssignBeforeHead'] = assignBeforeHead
if assignBeforeSecond:
template_variables['Sunday1NoonAssignBeforeSecond'] = assignBeforeSecond
if assignBeforeStandBy:
template_variables['Sunday1NoonAssignBeforeStandBy'] = assignBeforeStandBy
if head_nurse_want:
template_variables['HeadNurseWhoWantSunday1Noon'] = head_nurse_want
if head_nurse_dont_care:
template_variables['HeadNurseWhoDontCareSunday1Noon'] = head_nurse_dont_care
if head_nurse_prefer_not:
template_variables['HeadNurseWhoPreferNotSunday1Noon'] = head_nurse_prefer_not
if head_nurse_cant:
template_variables['HeadNurseWhoCantSunday1Noon'] = head_nurse_cant
if want:
template_variables['NurseWhoWantSunday1Noon'] = want
if dont_care:
template_variables['NurseWhoDontCareSunday1Noon'] = dont_care
if prefer_not:
template_variables['NurseWhoPreferNotSunday1Noon'] = prefer_not
if cant:
template_variables['NurseWhoCantSunday1Noon'] = cant
# Monday1 night info:
head_nurse_want = Constrain.getShiftHeads(monday1date, 0, 1)
head_nurse_dont_care = Constrain.getShiftHeads(monday1date, 0, 0)
head_nurse_prefer_not = Constrain.getShiftHeads(monday1date, 0, 2)
head_nurse_cant = Constrain.getShiftHeads(monday1date, 0, 3)
want = Constrain.getCrew(monday1date, 0, 1)
dont_care = Constrain.getCrew(monday1date, 0, 0)
prefer_not = Constrain.getCrew(monday1date, 0, 2)
cant = Constrain.getCrew(monday1date, 0, 3)
assignBeforeHead = PreparingSchedule.checkIfAssignAlready(monday1date, 0, 0)
assignBeforeSecond = PreparingSchedule.checkIfAssignAlready(monday1date, 0, 1)
assignBeforeStandBy = PreparingSchedule.checkIfAssignAlready(monday1date, 0, 3)
if assignBeforeHead:
template_variables['Monday1NightAssignBeforeHead'] = assignBeforeHead
if assignBeforeSecond:
template_variables['Monday1NightAssignBeforeSecond'] = assignBeforeSecond
if assignBeforeStandBy:
template_variables['Monday1NightAssignBeforeStandBy'] = assignBeforeStandBy
if head_nurse_want:
template_variables['HeadNurseWhoWantMonday1Night'] = head_nurse_want
if head_nurse_dont_care:
template_variables['HeadNurseWhoDontCareMonday1Night'] = head_nurse_dont_care
if head_nurse_prefer_not:
template_variables['HeadNurseWhoPreferNotMonday1Night'] = head_nurse_prefer_not
if head_nurse_cant:
template_variables['HeadNurseWhoCantMonday1Night'] = head_nurse_cant
if want:
template_variables['NurseWhoWantMonday1Night'] = want
if dont_care:
template_variables['NurseWhoDontCareMonday1Night'] = dont_care
if prefer_not:
template_variables['NurseWhoPreferNotMonday1Night'] = prefer_not
if cant:
template_variables['NurseWhoCantMonday1Night'] = cant
# Monday1 morning info:
head_nurse_want = Constrain.getShiftHeads(monday1date, 1, 1)
head_nurse_dont_care = Constrain.getShiftHeads(monday1date, 1, 0)
head_nurse_prefer_not = Constrain.getShiftHeads(monday1date, 1, 2)
head_nurse_cant = Constrain.getShiftHeads(monday1date, 1, 3)
want = Constrain.getCrew(monday1date, 1, 1)
dont_care = Constrain.getCrew(monday1date, 1, 0)
prefer_not = Constrain.getCrew(monday1date, 1, 2)
cant = Constrain.getCrew(monday1date, 1, 3)
assignBeforeHead = PreparingSchedule.checkIfAssignAlready(monday1date, 1, 0)
assignBeforeSecond = PreparingSchedule.checkIfAssignAlready(monday1date, 1, 1)
assignBeforeStandBy = PreparingSchedule.checkIfAssignAlready(monday1date, 1, 3)
if assignBeforeHead:
template_variables['Monday1MorningAssignBeforeHead'] = assignBeforeHead
if assignBeforeSecond:
template_variables['Monday1MorningAssignBeforeSecond'] = assignBeforeSecond
if assignBeforeStandBy:
template_variables['Monday1MorningAssignBeforeStandBy'] = assignBeforeStandBy
if head_nurse_want:
template_variables['HeadNurseWhoWantMonday1Morning'] = head_nurse_want
if head_nurse_dont_care:
template_variables['HeadNurseWhoDontCareMonday1Morning'] = head_nurse_dont_care
if head_nurse_prefer_not:
template_variables['HeadNurseWhoPreferNotMonday1Morning'] = head_nurse_prefer_not
if head_nurse_cant:
template_variables['HeadNurseWhoCantMonday1Morning'] = head_nurse_cant
if want:
template_variables['NurseWhoWantMonday1Morning'] = want
if dont_care:
template_variables['NurseWhoDontCareMonday1Morning'] = dont_care
if prefer_not:
template_variables['NurseWhoPreferNotMonday1Morning'] = prefer_not
if cant:
template_variables['NurseWhoCantMonday1Morning'] = cant
# Monday1 noon info:
head_nurse_want = Constrain.getShiftHeads(monday1date, 2, 1)
head_nurse_dont_care = Constrain.getShiftHeads(monday1date, 2, 0)
head_nurse_prefer_not = Constrain.getShiftHeads(monday1date, 2, 2)
head_nurse_cant = Constrain.getShiftHeads(monday1date, 2, 3)
want = Constrain.getCrew(monday1date, 2, 1)
dont_care = Constrain.getCrew(monday1date, 2, 0)
prefer_not = Constrain.getCrew(monday1date, 2, 2)
cant = Constrain.getCrew(monday1date, 2, 3)
assignBeforeHead = PreparingSchedule.checkIfAssignAlready(monday1date, 2, 0)
assignBeforeSecond = PreparingSchedule.checkIfAssignAlready(monday1date, 2, 1)
assignBeforeStandBy = PreparingSchedule.checkIfAssignAlready(monday1date, 2, 3)
if assignBeforeHead:
template_variables['Monday1NoonAssignBeforeHead'] = assignBeforeHead
if assignBeforeSecond:
template_variables['Monday1NoonAssignBeforeSecond'] = assignBeforeSecond
if assignBeforeStandBy:
template_variables['Monday1NoonAssignBeforeStandBy'] = assignBeforeStandBy
if head_nurse_want:
template_variables['HeadNurseWhoWantMonday1Noon'] = head_nurse_want
if head_nurse_dont_care:
template_variables['HeadNurseWhoDontCareMonday1Noon'] = head_nurse_dont_care
if head_nurse_prefer_not:
template_variables['HeadNurseWhoPreferNotMonday1Noon'] = head_nurse_prefer_not
if head_nurse_cant:
template_variables['HeadNurseWhoCantMonday1Noon'] = head_nurse_cant
if want:
template_variables['NurseWhoWantMonday1Noon'] = want
if dont_care:
template_variables['NurseWhoDontCareMonday1Noon'] = dont_care
if prefer_not:
template_variables['NurseWhoPreferNotMonday1Noon'] = prefer_not
if cant:
template_variables['NurseWhoCantMonday1Noon'] = cant
# Tuesday1 night info:
head_nurse_want = Constrain.getShiftHeads(tuesday1date, 0, 1)
head_nurse_dont_care = Constrain.getShiftHeads(tuesday1date, 0, 0)
head_nurse_prefer_not = Constrain.getShiftHeads(tuesday1date, 0, 2)
head_nurse_cant = Constrain.getShiftHeads(tuesday1date, 0, 3)
want = Constrain.getCrew(tuesday1date, 0, 1)
dont_care = Constrain.getCrew(tuesday1date, 0, 0)
prefer_not = Constrain.getCrew(tuesday1date, 0, 2)
cant = Constrain.getCrew(tuesday1date, 0, 3)
assignBeforeHead = PreparingSchedule.checkIfAssignAlready(tuesday1date, 0, 0)
assignBeforeSecond = PreparingSchedule.checkIfAssignAlready(tuesday1date, 0, 1)
assignBeforeStandBy = PreparingSchedule.checkIfAssignAlready(tuesday1date, 0, 3)
if assignBeforeHead:
template_variables['Tuesday1NightAssignBeforeHead'] = assignBeforeHead
if assignBeforeSecond:
template_variables['Tuesday1NightAssignBeforeSecond'] = assignBeforeSecond
if assignBeforeStandBy:
template_variables['Tuesday1NightAssignBeforeStandBy'] = assignBeforeStandBy
if head_nurse_want:
template_variables['HeadNurseWhoWantTuesday1Night'] = head_nurse_want
if head_nurse_dont_care:
template_variables['HeadNurseWhoDontCareTuesday1Night'] = head_nurse_dont_care
if head_nurse_prefer_not:
template_variables['HeadNurseWhoPreferNotTuesday1Night'] = head_nurse_prefer_not
if head_nurse_cant:
template_variables['HeadNurseWhoCantTuesday1Night'] = head_nurse_cant
if want:
template_variables['NurseWhoWantTuesday1Night'] = want
if dont_care:
template_variables['NurseWhoDontCareTuesday1Night'] = dont_care
if prefer_not:
template_variables['NurseWhoPreferNotTuesday1Night'] = prefer_not
if cant:
template_variables['NurseWhoCantTuesday1Night'] = cant
# Tuesday1 morning info:
head_nurse_want = Constrain.getShiftHeads(tuesday1date, 1, 1)
head_nurse_dont_care = Constrain.getShiftHeads(tuesday1date, 1, 0)
head_nurse_prefer_not = Constrain.getShiftHeads(tuesday1date, 1, 2)
head_nurse_cant = Constrain.getShiftHeads(tuesday1date, 1, 3)
want = Constrain.getCrew(tuesday1date, 1, 1)
dont_care = Constrain.getCrew(tuesday1date, 1, 0)
prefer_not = Constrain.getCrew(tuesday1date, 1, 2)
cant = Constrain.getCrew(tuesday1date, 1, 3)
assignBeforeHead = PreparingSchedule.checkIfAssignAlready(tuesday1date, 1, 0)
assignBeforeSecond = PreparingSchedule.checkIfAssignAlready(tuesday1date, 1, 1)
assignBeforeThird = PreparingSchedule.checkIfAssignAlready(tuesday1date, 1, 2)
assignBeforeStandBy = PreparingSchedule.checkIfAssignAlready(tuesday1date, 1, 3)
if assignBeforeHead:
template_variables['Tuesday1MorningAssignBeforeHead'] = assignBeforeHead
if assignBeforeSecond:
template_variables['Tuesday1MorningAssignBeforeSecond'] = assignBeforeSecond
if assignBeforeThird:
template_variables['Tuesday1MorningAssignBeforeThird'] = assignBeforeThird
if assignBeforeStandBy:
template_variables['Tuesday1MorningAssignBeforeStandBy'] = assignBeforeStandBy
if head_nurse_want:
template_variables['HeadNurseWhoWantTuesday1Morning'] = head_nurse_want
if head_nurse_dont_care:
template_variables['HeadNurseWhoDontCareTuesday1Morning'] = head_nurse_dont_care
if head_nurse_prefer_not:
template_variables['HeadNurseWhoPreferNotTuesday1Morning'] = head_nurse_prefer_not
if head_nurse_cant:
template_variables['HeadNurseWhoCantTuesday1Morning'] = head_nurse_cant
if want:
template_variables['NurseWhoWantTuesday1Morning'] = want
if dont_care:
template_variables['NurseWhoDontCareTuesday1Morning'] = dont_care
if prefer_not:
template_variables['NurseWhoPreferNotTuesday1Morning'] = prefer_not
if cant:
template_variables['NurseWhoCantTuesday1Morning'] = cant
# Tuesday1 noon info:
head_nurse_want = Constrain.getShiftHeads(tuesday1date, 2, 1)
head_nurse_dont_care = Constrain.getShiftHeads(tuesday1date, 2, 0)
head_nurse_prefer_not = Constrain.getShiftHeads(tuesday1date, 2, 2)
head_nurse_cant = Constrain.getShiftHeads(tuesday1date, 2, 3)
want = Constrain.getCrew(tuesday1date, 2, 1)
dont_care = Constrain.getCrew(tuesday1date, 2, 0)
prefer_not = Constrain.getCrew(tuesday1date, 2, 2)
cant = Constrain.getCrew(tuesday1date, 2, 3)
assignBeforeHead = PreparingSchedule.checkIfAssignAlready(tuesday1date, 2, 0)
assignBeforeSecond = PreparingSchedule.checkIfAssignAlready(tuesday1date, 2, 1)
assignBeforeStandBy = PreparingSchedule.checkIfAssignAlready(tuesday1date, 2, 3)
if assignBeforeHead:
template_variables['Tuesday1NoonAssignBeforeHead'] = assignBeforeHead
if assignBeforeSecond:
template_variables['Tuesday1NoonAssignBeforeSecond'] = assignBeforeSecond
if assignBeforeStandBy:
template_variables['Tuesday1NoonAssignBeforeStandBy'] = assignBeforeStandBy
if head_nurse_want:
template_variables['HeadNurseWhoWantTuesday1Noon'] = head_nurse_want
if head_nurse_dont_care:
template_variables['HeadNurseWhoDontCareTuesday1Noon'] = head_nurse_dont_care
if head_nurse_prefer_not:
template_variables['HeadNurseWhoPreferNotTuesday1Noon'] = head_nurse_prefer_not
if head_nurse_cant:
template_variables['HeadNurseWhoCantTuesday1Noon'] = head_nurse_cant
if want:
template_variables['NurseWhoWantTuesday1Noon'] = want
if dont_care:
template_variables['NurseWhoDontCareTuesday1Noon'] = dont_care
if prefer_not:
template_variables['NurseWhoPreferNotTuesday1Noon'] = prefer_not
if cant:
template_variables['NurseWhoCantTuesday1Noon'] = cant
# Wednesday1 night info:
head_nurse_want = Constrain.getShiftHeads(wednesday1date, 0, 1)
head_nurse_dont_care = Constrain.getShiftHeads(wednesday1date, 0, 0)
head_nurse_prefer_not = Constrain.getShiftHeads(wednesday1date, 0, 2)
head_nurse_cant = Constrain.getShiftHeads(wednesday1date, 0, 3)
want = Constrain.getCrew(wednesday1date, 0, 1)
dont_care = Constrain.getCrew(wednesday1date, 0, 0)
prefer_not = Constrain.getCrew(wednesday1date, 0, 2)
cant = Constrain.getCrew(wednesday1date, 0, 3)
assignBeforeHead = PreparingSchedule.checkIfAssignAlready(wednesday1date, 0, 0)
assignBeforeSecond = PreparingSchedule.checkIfAssignAlready(wednesday1date, 0, 1)
assignBeforeStandBy = PreparingSchedule.checkIfAssignAlready(wednesday1date, 0, 3)
if assignBeforeHead:
template_variables['Wednesday1NightAssignBeforeHead'] = assignBeforeHead
if assignBeforeSecond:
template_variables['Wednesday1NightAssignBeforeSecond'] = assignBeforeSecond
if assignBeforeStandBy:
template_variables['Wednesday1NightAssignBeforeStandBy'] = assignBeforeStandBy
if head_nurse_want:
template_variables['HeadNurseWhoWantWednesday1Night'] = head_nurse_want
if head_nurse_dont_care:
template_variables['HeadNurseWhoDontCareWednesday1Night'] = head_nurse_dont_care
if head_nurse_prefer_not:
template_variables['HeadNurseWhoPreferNotWednesday1Night'] = head_nurse_prefer_not
if head_nurse_cant:
template_variables['HeadNurseWhoCantWednesday1Night'] = head_nurse_cant
if want:
template_variables['NurseWhoWantWednesday1Night'] = want
if dont_care:
template_variables['NurseWhoDontCareWednesday1Night'] = dont_care
if prefer_not:
template_variables['NurseWhoPreferNotWednesday1Night'] = prefer_not
if cant:
template_variables['NurseWhoCantWednesday1Night'] = cant
# Wednesday1 morning info:
head_nurse_want = Constrain.getShiftHeads(wednesday1date, 1, 1)
head_nurse_dont_care = Constrain.getShiftHeads(wednesday1date, 1, 0)
head_nurse_prefer_not = Constrain.getShiftHeads(wednesday1date, 1, 2)
head_nurse_cant = Constrain.getShiftHeads(wednesday1date, 1, 3)
want = Constrain.getCrew(wednesday1date, 1, 1)
dont_care = Constrain.getCrew(wednesday1date, 1, 0)
prefer_not = Constrain.getCrew(wednesday1date, 1, 2)
cant = Constrain.getCrew(wednesday1date, 1, 3)
assignBeforeHead = PreparingSchedule.checkIfAssignAlready(wednesday1date, 1, 0)
assignBeforeSecond = PreparingSchedule.checkIfAssignAlready(wednesday1date, 1, 1)
assignBeforeStandBy = PreparingSchedule.checkIfAssignAlready(wednesday1date, 1, 3)
if assignBeforeHead:
template_variables['Wednesday1MorningAssignBeforeHead'] = assignBeforeHead
if assignBeforeSecond:
template_variables['Wednesday1MorningAssignBeforeSecond'] = assignBeforeSecond
if assignBeforeStandBy:
template_variables['Wednesday1MorningAssignBeforeStandBy'] = assignBeforeStandBy
if head_nurse_want:
template_variables['HeadNurseWhoWantWednesday1Morning'] = head_nurse_want
if head_nurse_dont_care:
template_variables['HeadNurseWhoDontCareWednesday1Morning'] = head_nurse_dont_care
if head_nurse_prefer_not:
template_variables['HeadNurseWhoPreferNotWednesday1Morning'] = head_nurse_prefer_not
if head_nurse_cant:
template_variables['HeadNurseWhoCantWednesday1Morning'] = head_nurse_cant
if want:
template_variables['NurseWhoWantWednesday1Morning'] = want
if dont_care:
template_variables['NurseWhoDontCareWednesday1Morning'] = dont_care
if prefer_not:
template_variables['NurseWhoPreferNotWednesday1Morning'] = prefer_not
if cant:
template_variables['NurseWhoCantWednesday1Morning'] = cant
# Wednesday1 noon info:
head_nurse_want = Constrain.getShiftHeads(wednesday1date, 2, 1)
head_nurse_dont_care = Constrain.getShiftHeads(wednesday1date, 2, 0)
head_nurse_prefer_not = Constrain.getShiftHeads(wednesday1date, 2, 2)
head_nurse_cant = Constrain.getShiftHeads(wednesday1date, 2, 3)
want = Constrain.getCrew(wednesday1date, 2, 1)
dont_care = Constrain.getCrew(wednesday1date, 2, 0)
prefer_not = Constrain.getCrew(wednesday1date, 2, 2)
cant = Constrain.getCrew(wednesday1date, 2, 3)
assignBeforeHead = PreparingSchedule.checkIfAssignAlready(wednesday1date, 2, 0)
assignBeforeSecond = PreparingSchedule.checkIfAssignAlready(wednesday1date, 2, 1)
assignBeforeStandBy = PreparingSchedule.checkIfAssignAlready(wednesday1date, 2, 3)
if assignBeforeHead:
template_variables['Wednesday1NoonAssignBeforeHead'] = assignBeforeHead
if assignBeforeSecond:
template_variables['Wednesday1NoonAssignBeforeSecond'] = assignBeforeSecond
if assignBeforeStandBy:
template_variables['Wednesday1NoonAssignBeforeStandBy'] = assignBeforeStandBy
if head_nurse_want:
template_variables['HeadNurseWhoWantWednesday1Noon'] = head_nurse_want
if head_nurse_dont_care:
template_variables['HeadNurseWhoDontCareWednesday1Noon'] = head_nurse_dont_care
if head_nurse_prefer_not:
template_variables['HeadNurseWhoPreferNotWednesday1Noon'] = head_nurse_prefer_not
if head_nurse_cant:
template_variables['HeadNurseWhoCantWednesday1Noon'] = head_nurse_cant
if want:
template_variables['NurseWhoWantWednesday1Noon'] = want
if dont_care:
template_variables['NurseWhoDontCareWednesday1Noon'] = dont_care
if prefer_not:
template_variables['NurseWhoPreferNotWednesday1Noon'] = prefer_not
if cant:
template_variables['NurseWhoCantWednesday1Noon'] = cant
# Thursday1 night info:
head_nurse_want = Constrain.getShiftHeads(thursday1date, 0, 1)
head_nurse_dont_care = Constrain.getShiftHeads(thursday1date, 0, 0)
head_nurse_prefer_not = Constrain.getShiftHeads(thursday1date, 0, 2)
head_nurse_cant = Constrain.getShiftHeads(thursday1date, 0, 3)
want = Constrain.getCrew(thursday1date, 0, 1)
dont_care = Constrain.getCrew(thursday1date, 0, 0)
prefer_not = Constrain.getCrew(thursday1date, 0, 2)
cant = Constrain.getCrew(thursday1date, 0, 3)
assignBeforeHead = PreparingSchedule.checkIfAssignAlready(thursday1date, 0, 0)
assignBeforeSecond = PreparingSchedule.checkIfAssignAlready(thursday1date, 0, 1)
assignBeforeStandBy = PreparingSchedule.checkIfAssignAlready(thursday1date, 0, 3)
if assignBeforeHead:
template_variables['Thursday1NightAssignBeforeHead'] = assignBeforeHead
if assignBeforeSecond:
template_variables['Thursday1NightAssignBeforeSecond'] = assignBeforeSecond
if assignBeforeStandBy:
template_variables['Thursday1NightAssignBeforeStandBy'] = assignBeforeStandBy
if head_nurse_want:
template_variables['HeadNurseWhoWantThursday1Night'] = head_nurse_want
if head_nurse_dont_care:
template_variables['HeadNurseWhoDontCareThursday1Night'] = head_nurse_dont_care
if head_nurse_prefer_not:
template_variables['HeadNurseWhoPreferNotThursday1Night'] = head_nurse_prefer_not
if head_nurse_cant:
template_variables['HeadNurseWhoCantThursday1Night'] = head_nurse_cant
if want:
template_variables['NurseWhoWantThursday1Night'] = want
if dont_care:
template_variables['NurseWhoDontCareThursday1Night'] = dont_care
if prefer_not:
template_variables['NurseWhoPreferNotThursday1Night'] = prefer_not
if cant:
template_variables['NurseWhoCantThursday1Night'] = cant
# Thursday1 morning info:
head_nurse_want = Constrain.getShiftHeads(thursday1date, 1, 1)
head_nurse_dont_care = Constrain.getShiftHeads(thursday1date, 1, 0)
head_nurse_prefer_not = Constrain.getShiftHeads(thursday1date, 1, 2)
head_nurse_cant = Constrain.getShiftHeads(thursday1date, 1, 3)
want = Constrain.getCrew(thursday1date, 1, 1)
dont_care = Constrain.getCrew(thursday1date, 1, 0)
prefer_not = Constrain.getCrew(thursday1date, 1, 2)
cant = Constrain.getCrew(thursday1date, 1, 3)
assignBeforeHead = PreparingSchedule.checkIfAssignAlready(thursday1date, 1, 0)
assignBeforeSecond = PreparingSchedule.checkIfAssignAlready(thursday1date, 1, 1)
assignBeforeThird = PreparingSchedule.checkIfAssignAlready(thursday1date, 1, 2)
assignBeforeStandBy = PreparingSchedule.checkIfAssignAlready(thursday1date, 1, 3)
if assignBeforeHead:
template_variables['Thursday1MorningAssignBeforeHead'] = assignBeforeHead
if assignBeforeSecond:
template_variables['Thursday1MorningAssignBeforeSecond'] = assignBeforeSecond
if assignBeforeThird:
template_variables['Thursday1MorningAssignBeforeThird'] = assignBeforeThird
if assignBeforeStandBy:
template_variables['Thursday1MorningAssignBeforeStandBy'] = assignBeforeStandBy
if head_nurse_want:
template_variables['HeadNurseWhoWantThursday1Morning'] = head_nurse_want
if head_nurse_dont_care:
template_variables['HeadNurseWhoDontCareThursday1Morning'] = head_nurse_dont_care
if head_nurse_prefer_not:
template_variables['HeadNurseWhoPreferNotThursday1Morning'] = head_nurse_prefer_not
if head_nurse_cant:
template_variables['HeadNurseWhoCantThursday1Morning'] = head_nurse_cant
if want:
template_variables['NurseWhoWantThursday1Morning'] = want
if dont_care:
template_variables['NurseWhoDontCareThursday1Morning'] = dont_care
if prefer_not:
template_variables['NurseWhoPreferNotThursday1Morning'] = prefer_not
if cant:
template_variables['NurseWhoCantThursday1Morning'] = cant
# Thursday1 noon info:
head_nurse_want = Constrain.getShiftHeads(thursday1date, 2, 1)
head_nurse_dont_care = Constrain.getShiftHeads(thursday1date, 2, 0)
head_nurse_prefer_not = Constrain.getShiftHeads(thursday1date, 2, 2)
head_nurse_cant = Constrain.getShiftHeads(thursday1date, 2, 3)
want = Constrain.getCrew(thursday1date, 2, 1)
dont_care = Constrain.getCrew(thursday1date, 2, 0)
prefer_not = Constrain.getCrew(thursday1date, 2, 2)
cant = Constrain.getCrew(thursday1date, 2, 3)
assignBeforeHead = PreparingSchedule.checkIfAssignAlready(thursday1date, 2, 0)
assignBeforeSecond = PreparingSchedule.checkIfAssignAlready(thursday1date, 2, 1)
assignBeforeStandBy = PreparingSchedule.checkIfAssignAlready(thursday1date, 2, 3)
if assignBeforeHead:
template_variables['Thursday1NoonAssignBeforeHead'] = assignBeforeHead
if assignBeforeSecond:
template_variables['Thursday1NoonAssignBeforeSecond'] = assignBeforeSecond
if assignBeforeStandBy:
template_variables['Thursday1NoonAssignBeforeStandBy'] = assignBeforeStandBy
if head_nurse_want:
template_variables['HeadNurseWhoWantThursday1Noon'] = head_nurse_want
if head_nurse_dont_care:
template_variables['HeadNurseWhoDontCareThursday1Noon'] = head_nurse_dont_care
if head_nurse_prefer_not:
template_variables['HeadNurseWhoPreferNotThursday1Noon'] = head_nurse_prefer_not
if head_nurse_cant:
template_variables['HeadNurseWhoCantThursday1Noon'] = head_nurse_cant
if want:
template_variables['NurseWhoWantThursday1Noon'] = want
if dont_care:
template_variables['NurseWhoDontCareThursday1Noon'] = dont_care
if prefer_not:
template_variables['NurseWhoPreferNotThursday1Noon'] = prefer_not
if cant:
template_variables['NurseWhoCantThursday1Noon'] = cant
# Friday1 night info:
head_nurse_want = Constrain.getShiftHeads(friday1date, 0, 1)
head_nurse_dont_care = Constrain.getShiftHeads(friday1date, 0, 0)
head_nurse_prefer_not = Constrain.getShiftHeads(friday1date, 0, 2)
head_nurse_cant = Constrain.getShiftHeads(friday1date, 0, 3)
want = Constrain.getCrew(friday1date, 0, 1)
dont_care = Constrain.getCrew(friday1date, 0, 0)
prefer_not = Constrain.getCrew(friday1date, 0, 2)
cant = Constrain.getCrew(friday1date, 0, 3)
assignBeforeHead = PreparingSchedule.checkIfAssignAlready(friday1date, 0, 0)
assignBeforeSecond = PreparingSchedule.checkIfAssignAlready(friday1date, 0, 1)
assignBeforeStandBy = PreparingSchedule.checkIfAssignAlready(friday1date, 0, 3)
if assignBeforeHead:
template_variables['Friday1NightAssignBeforeHead'] = assignBeforeHead
if assignBeforeSecond:
template_variables['Friday1NightAssignBeforeSecond'] = assignBeforeSecond
if assignBeforeStandBy:
template_variables['Friday1NightAssignBeforeStandBy'] = assignBeforeStandBy
if head_nurse_want:
template_variables['HeadNurseWhoWantFriday1Night'] = head_nurse_want
if head_nurse_dont_care:
template_variables['HeadNurseWhoDontCareFriday1Night'] = head_nurse_dont_care
if head_nurse_prefer_not:
template_variables['HeadNurseWhoPreferNotFriday1Night'] = head_nurse_prefer_not
if head_nurse_cant:
template_variables['HeadNurseWhoCantFriday1Night'] = head_nurse_cant
if want:
template_variables['NurseWhoWantFriday1Night'] = want
if dont_care:
template_variables['NurseWhoDontCareFriday1Night'] = dont_care
if prefer_not:
template_variables['NurseWhoPreferNotFriday1Night'] = prefer_not
if cant:
template_variables['NurseWhoCantFriday1Night'] = cant
# Friday1 morning info:
head_nurse_want = Constrain.getShiftHeads(friday1date, 1, 1)
head_nurse_dont_care = Constrain.getShiftHeads(friday1date, 1, 0)
head_nurse_prefer_not = Constrain.getShiftHeads(friday1date, 1, 2)
head_nurse_cant = Constrain.getShiftHeads(friday1date, 1, 3)
want = Constrain.getCrew(friday1date, 1, 1)
dont_care = Constrain.getCrew(friday1date, 1, 0)
prefer_not = Constrain.getCrew(friday1date, 1, 2)
cant = Constrain.getCrew(friday1date, 1, 3)
assignBeforeHead = PreparingSchedule.checkIfAssignAlready(friday1date, 1, 0)
assignBeforeSecond = PreparingSchedule.checkIfAssignAlready(friday1date, 1, 1)
assignBeforeStandBy = PreparingSchedule.checkIfAssignAlready(friday1date, 1, 3)
if assignBeforeHead:
template_variables['Friday1MorningAssignBeforeHead'] = assignBeforeHead
if assignBeforeSecond:
template_variables['Friday1MorningAssignBeforeSecond'] = assignBeforeSecond
if assignBeforeStandBy:
template_variables['Friday1MorningAssignBeforeStandBy'] = assignBeforeStandBy
if head_nurse_want:
template_variables['HeadNurseWhoWantFriday1Morning'] = head_nurse_want
if head_nurse_dont_care:
template_variables['HeadNurseWhoDontCareFriday1Morning'] = head_nurse_dont_care
if head_nurse_prefer_not:
template_variables['HeadNurseWhoPreferNotFriday1Morning'] = head_nurse_prefer_not
if head_nurse_cant:
template_variables['HeadNurseWhoCantFriday1Morning'] = head_nurse_cant
if want:
template_variables['NurseWhoWantFriday1Morning'] = want
if dont_care:
template_variables['NurseWhoDontCareFriday1Morning'] = dont_care
if prefer_not:
template_variables['NurseWhoPreferNotFriday1Morning'] = prefer_not
if cant:
template_variables['NurseWhoCantFriday1Morning'] = cant
# Friday1 noon info:
head_nurse_want = Constrain.getShiftHeads(friday1date, 2, 1)
head_nurse_dont_care = Constrain.getShiftHeads(friday1date, 2, 0)
head_nurse_prefer_not = Constrain.getShiftHeads(friday1date, 2, 2)
head_nurse_cant = Constrain.getShiftHeads(friday1date, 2, 3)
want = Constrain.getCrew(friday1date, 2, 1)
dont_care = Constrain.getCrew(friday1date, 2, 0)
prefer_not = Constrain.getCrew(friday1date, 2, 2)
cant = Constrain.getCrew(friday1date, 2, 3)
assignBeforeHead = PreparingSchedule.checkIfAssignAlready(friday1date, 2, 0)
assignBeforeSecond = PreparingSchedule.checkIfAssignAlready(friday1date, 2, 1)
assignBeforeStandBy = PreparingSchedule.checkIfAssignAlready(friday1date, 2, 3)
if assignBeforeHead:
template_variables['Friday1NoonAssignBeforeHead'] = assignBeforeHead
if assignBeforeSecond:
template_variables['Friday1NoonAssignBeforeSecond'] = assignBeforeSecond
if assignBeforeStandBy:
template_variables['Friday1NoonAssignBeforeStandBy'] = assignBeforeStandBy
if head_nurse_want:
template_variables['HeadNurseWhoWantFriday1Noon'] = head_nurse_want
if head_nurse_dont_care:
template_variables['HeadNurseWhoDontCareFriday1Noon'] = head_nurse_dont_care
if head_nurse_prefer_not:
template_variables['HeadNurseWhoPreferNotFriday1Noon'] = head_nurse_prefer_not
if head_nurse_cant:
template_variables['HeadNurseWhoCantFriday1Noon'] = head_nurse_cant
if want:
template_variables['NurseWhoWantFriday1Noon'] = want
if dont_care:
template_variables['NurseWhoDontCareFriday1Noon'] = dont_care
if prefer_not:
template_variables['NurseWhoPreferNotFriday1Noon'] = prefer_not
if cant:
template_variables['NurseWhoCantFriday1Noon'] = cant
# Saturday1 night info:
head_nurse_want = Constrain.getShiftHeads(saturday1date, 0, 1)
head_nurse_dont_care = Constrain.getShiftHeads(saturday1date, 0, 0)
head_nurse_prefer_not = Constrain.getShiftHeads(saturday1date, 0, 2)
head_nurse_cant = Constrain.getShiftHeads(saturday1date, 0, 3)
want = Constrain.getCrew(saturday1date, 0, 1)
dont_care = Constrain.getCrew(saturday1date, 0, 0)
prefer_not = Constrain.getCrew(saturday1date, 0, 2)
cant = Constrain.getCrew(saturday1date, 0, 3)
assignBeforeHead = PreparingSchedule.checkIfAssignAlready(saturday1date, 0, 0)
assignBeforeSecond = PreparingSchedule.checkIfAssignAlready(saturday1date, 0, 1)
assignBeforeStandBy = PreparingSchedule.checkIfAssignAlready(saturday1date, 0, 3)
if assignBeforeHead:
template_variables['Saturday1NightAssignBeforeHead'] = assignBeforeHead
if assignBeforeSecond:
template_variables['Saturday1NightAssignBeforeSecond'] = assignBeforeSecond
if assignBeforeStandBy:
template_variables['Saturday1NightAssignBeforeStandBy'] = assignBeforeStandBy
if head_nurse_want:
template_variables['HeadNurseWhoWantSaturday1Night'] = head_nurse_want
if head_nurse_dont_care:
template_variables['HeadNurseWhoDontCareSaturday1Night'] = head_nurse_dont_care
if head_nurse_prefer_not:
template_variables['HeadNurseWhoPreferNotSaturday1Night'] = head_nurse_prefer_not
if head_nurse_cant:
template_variables['HeadNurseWhoCantSaturday1Night'] = head_nurse_cant
if want:
template_variables['NurseWhoWantSaturday1Night'] = want
if dont_care:
template_variables['NurseWhoDontCareSaturday1Night'] = dont_care
if prefer_not:
template_variables['NurseWhoPreferNotSaturday1Night'] = prefer_not
if cant:
template_variables['NurseWhoCantSaturday1Night'] = cant
# Saturday1 morning info:
head_nurse_want = Constrain.getShiftHeads(saturday1date, 1, 1)
head_nurse_dont_care = Constrain.getShiftHeads(saturday1date, 1, 0)
head_nurse_prefer_not = Constrain.getShiftHeads(saturday1date, 1, 2)
head_nurse_cant = Constrain.getShiftHeads(saturday1date, 1, 3)
want = Constrain.getCrew(saturday1date, 1, 1)
dont_care = Constrain.getCrew(saturday1date, 1, 0)
prefer_not = Constrain.getCrew(saturday1date, 1, 2)
cant = Constrain.getCrew(saturday1date, 1, 3)
assignBeforeHead = PreparingSchedule.checkIfAssignAlready(saturday1date, 1, 0)
assignBeforeSecond = PreparingSchedule.checkIfAssignAlready(saturday1date, 1, 1)
assignBeforeStandBy = PreparingSchedule.checkIfAssignAlready(saturday1date, 1, 3)
if assignBeforeHead:
template_variables['Saturday1MorningAssignBeforeHead'] = assignBeforeHead
if assignBeforeSecond:
template_variables['Saturday1MorningAssignBeforeSecond'] = assignBeforeSecond
if assignBeforeStandBy:
template_variables['Saturday1MorningAssignBeforeStandBy'] = assignBeforeStandBy
if head_nurse_want:
template_variables['HeadNurseWhoWantSaturday1Morning'] = head_nurse_want
if head_nurse_dont_care:
template_variables['HeadNurseWhoDontCareSaturday1Morning'] = head_nurse_dont_care
if head_nurse_prefer_not:
template_variables['HeadNurseWhoPreferNotSaturday1Morning'] = head_nurse_prefer_not
if head_nurse_cant:
template_variables['HeadNurseWhoCantSaturday1Morning'] = head_nurse_cant
if want:
template_variables['NurseWhoWantSaturday1Morning'] = want
if dont_care:
template_variables['NurseWhoDontCareSaturday1Morning'] = dont_care
if prefer_not:
template_variables['NurseWhoPreferNotSaturday1Morning'] = prefer_not
if cant:
template_variables['NurseWhoCantSaturday1Morning'] = cant
# Saturday1 noon info:
head_nurse_want = Constrain.getShiftHeads(saturday1date, 2, 1)
head_nurse_dont_care = Constrain.getShiftHeads(saturday1date, 2, 0)
head_nurse_prefer_not = Constrain.getShiftHeads(saturday1date, 2, 2)
head_nurse_cant = Constrain.getShiftHeads(saturday1date, 2, 3)
want = Constrain.getCrew(saturday1date, 2, 1)
dont_care = Constrain.getCrew(saturday1date, 2, 0)
prefer_not = Constrain.getCrew(saturday1date, 2, 2)
cant = Constrain.getCrew(saturday1date, 2, 3)
assignBeforeHead = PreparingSchedule.checkIfAssignAlready(saturday1date, 2, 0)
assignBeforeSecond = PreparingSchedule.checkIfAssignAlready(saturday1date, 2, 1)
assignBeforeStandBy = PreparingSchedule.checkIfAssignAlready(saturday1date, 2, 3)
if assignBeforeHead:
template_variables['Saturday1NoonAssignBeforeHead'] = assignBeforeHead
if assignBeforeSecond:
template_variables['Saturday1NoonAssignBeforeSecond'] = assignBeforeSecond
if assignBeforeStandBy:
template_variables['Saturday1NoonAssignBeforeStandBy'] = assignBeforeStandBy
if head_nurse_want:
template_variables['HeadNurseWhoWantSaturday1Noon'] = head_nurse_want
if head_nurse_dont_care:
template_variables['HeadNurseWhoDontCareSaturday1Noon'] = head_nurse_dont_care
if head_nurse_prefer_not:
template_variables['HeadNurseWhoPreferNotSaturday1Noon'] = head_nurse_prefer_not
if head_nurse_cant:
template_variables['HeadNurseWhoCantSaturday1Noon'] = head_nurse_cant
if want:
template_variables['NurseWhoWantSaturday1Noon'] = want
if dont_care:
template_variables['NurseWhoDontCareSaturday1Noon'] = dont_care
if prefer_not:
template_variables['NurseWhoPreferNotSaturday1Noon'] = prefer_not
if cant:
template_variables['NurseWhoCantSaturday1Noon'] = cant
html = template.render("web/templates/MainManager.html", template_variables)
self.response.write(html)
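# With no user signed in, render the login page instead.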
if not userName:
html = template.render("web/templates/LoginPage.html", template_variables)
self.response.write(html)
class SaveScheduleHandler(webapp2.RequestHandler):
def get(self):
selectedNurse_userName = self.request.get('selectedNurse_userName')
day = self.request.get('day')
shift = self.request.get('shift')
week = self.request.get('week')
rule = self.request.get('rule')
preparingSchedule = PreparingSchedule()
preparingSchedule.rule = int(rule)
preparingSchedule.nurseUserName = selectedNurse_userName
preparingSchedule.ShiftType = int(shift)
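# Derive the concrete date from the request's week/day values: start 14 days out,
# step back a week when the week number is even so every request lands in the same
# scheduling fortnight, snap to that week's Monday, then apply the day offset
# (day == 6 selects the preceding Sunday).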
selectedDate = date.today()
selectedDate = selectedDate + timedelta(days = 14)
if int(selectedDate.strftime("%U")) % 2 == 0:
selectedDate = selectedDate - timedelta(days = 7)
if int(week) == 1:
selectedDate = selectedDate + timedelta(days = 7)
if selectedDate.weekday() != 6:
selectedDate = selectedDate - timedelta(days=(selectedDate.weekday()))
if selectedDate.weekday() == 6:
selectedDate = selectedDate + timedelta(days=1)
if int(day) == 6:
selectedDate = selectedDate - timedelta(days=1)
if int(day) != 6:
selectedDate = selectedDate + timedelta(days=int(day))
preparingSchedule.date = selectedDate
# Replace any previously stored assignment for this date/shift/role.
alreadyAssign = preparingSchedule.checkIfAssignAlready1(preparingSchedule.date, int(shift), int(rule))
if alreadyAssign:
alreadyAssign.key.delete()
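# Shift-sequence validations: each failure reports a reason to the client; note
# that the record is still persisted when a nurse name was provided, even on failure.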
if not preparingSchedule.checkLegalAssign_Same_Shift():
self.response.write("Illegal! Already assigned today")
if preparingSchedule.nurseUserName != "":
preparingSchedule.put()
return
if not preparingSchedule.checkLegalAssign_Night_After_Night():
self.response.write("Illegal! Night After Night")
if preparingSchedule.nurseUserName != "":
preparingSchedule.put()
return
if not preparingSchedule.checkLegalAssign_Noon_Morning_Night():
self.response.write("Illegal! Noon-Morning-Night")
if preparingSchedule.nurseUserName != "":
preparingSchedule.put()
return
if not preparingSchedule.checkLegalAssign_Following_Shifts():
self.response.write("Illegal! Following shifts ")
if preparingSchedule.nurseUserName != "":
preparingSchedule.put()
return
employee = Employee.getEmployeeByUserName(selectedNurse_userName)
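# Locate the Sunday-to-Saturday window containing the selected date; the
# percent-job checks below count this nurse's shifts within that week.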
sunday = date.today()
saturday = date.today()
if selectedDate.weekday() == 0:
sunday = selectedDate - timedelta(days=1)
saturday = selectedDate + timedelta(days=5)
if selectedDate.weekday() == 1:
sunday = selectedDate - timedelta(days=2)
saturday = selectedDate + timedelta(days=4)
if selectedDate.weekday() == 2:
sunday = selectedDate - timedelta(days=3)
saturday = selectedDate + timedelta(days=3)
if selectedDate.weekday() == 3:
sunday = selectedDate - timedelta(days=4)
saturday = selectedDate + timedelta(days=2)
if selectedDate.weekday() == 4:
sunday = selectedDate - timedelta(days=5)
saturday = selectedDate + timedelta(days=1)
if selectedDate.weekday() == 5:
sunday = selectedDate - timedelta(days=6)
saturday = selectedDate
if selectedDate.weekday() == 6:
sunday = selectedDate
saturday = selectedDate + timedelta(days=6)
if employee:
if employee.percentJob == 66:
if not PreparingSchedule.checkLegalAssign_66_precent(employee.userName, sunday, saturday):
self.response.write("Illegal! 66% - 3 shifts already")
if preparingSchedule.nurseUserName != "":
preparingSchedule.put()
return
if employee.percentJob == 88:
if not PreparingSchedule.checkLegalAssign_88_precent(employee.userName, sunday, saturday):
self.response.write("Illegal! 88% - 4 shifts already")
if preparingSchedule.nurseUserName != "":
preparingSchedule.put()
return
if employee.percentJob == 100:
if not PreparingSchedule.checkLegalAssign_100_precent(employee.userName, sunday, saturday):
self.response.write("Illegal! 100% - 5 shifts already")
if preparingSchedule.nurseUserName != "":
preparingSchedule.put()
return
preparingSchedule.put()
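# Echo back any note the nurse attached to a matching constraint (None when absent).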
constrain = Constrain.query(Constrain.employeeUN == preparingSchedule.nurseUserName, Constrain.constrainDate == preparingSchedule.date, Constrain.ShiftType == preparingSchedule.ShiftType).get()
if constrain:
self.response.write(json.dumps({'status':'OK','note': constrain.notes}))
else:
self.response.write(json.dumps({'status':'OK','note':constrain}))
class SubmitScheduleHandler(webapp2.RequestHandler):
def get(self):
if not PreparingSchedule.checkLegalAssign_Assign_Head_Nurses():
self.response.write("Must assign all head nurses")
return
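# Publish: wipe the previously submitted schedule and copy over every draft assignment.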
allNewAssignments = PreparingSchedule.Get_All_Assignments()
allOldAssignments = SubmittedShifts.Get_All_Assignments()
if allOldAssignments:
for a in allOldAssignments:
a.deleteItem()
if allNewAssignments:
for a in allNewAssignments:
submitted = SubmittedShifts()
submitted.date = a.date
submitted.ShiftType = a.ShiftType
submitted.nurseUserName = a.nurseUserName
submitted.rule = a.rule
submitted.put()
self.response.write(json.dumps({'status':'OK'}))
class checkSubmitSessionHandler(webapp2.RequestHandler):
def get(self):
isSubmitSession = Dates.submitSession()
self.response.write(json.dumps({'status':'OK','isSubmitSession':isSubmitSession}))
app = webapp2.WSGIApplication([
('/MainManager', MainHandler),
('/saveSchedule', SaveScheduleHandler),
('/submitSchedule', SubmitScheduleHandler),
('/check_submit_session', checkSubmitSessionHandler)
], debug=True)
| tzuria/Shift-It-Easy | webApp/shift-it-easy-2015/web/pages/MainManager.py | Python | mit | 87,797 | 0.038361 |
# Copyright (c) 2011, Peter Thatcher
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# 3. The name of the author may not be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED
# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
# EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from thirdparty import pyinotify
# Wow. watching ~ uses A LOT of CPU!
class Handler(pyinotify.ProcessEvent):
def process_default(self, event):
print ("default", event)
# also process_IN_CREATE and process_IN_DELETE
def process_IN_MODIFY(self, event):
print ("IN_MODIFY", event.path, event.name)
def process_IN_CREATE(self, event):
print ("IN_CREATE", event.path, event.name)
def process_IN_DELETE(self, event):
print ("IN_DELETE", event.path, event.name)
def on_loop(notifier):
print ("on_loop", notifier)
if __name__ == "__main__":
import sys
root = sys.argv[1]
# pyinotify.log.setLevel(10)
handler = Handler()
# Exclude patterns from list
excl_lst = [] # ['(^|*/).bibble']
excl = pyinotify.ExcludeFilter(excl_lst)
wm = pyinotify.WatchManager()
# first arg can be a list
# can use pyinotify.ALL_EVENTS
# rec=True means recursive. Must have!
wm.add_watch(root, pyinotify.IN_MODIFY | pyinotify.IN_CREATE | pyinotify.IN_DELETE,
rec=True, auto_add=True, exclude_filter=excl)
notifier = pyinotify.Notifier(wm, default_proc_fun=handler)
    notifier.loop(callback=on_loop)
# if daemonize = True, spawns another process
# notifier.loop(daemonize=True, callback=on_loop,
# pid_file='/tmp/pyinotify.pid', stdout='/tmp/stdout.txt')
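# Usage sketch (the path is illustrative): python watch.py /tmp/watched
# Creating, modifying or deleting files under that tree prints the events above.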
| pthatcher/psync | src/exp/watch.py | Python | bsd-3-clause | 2,850 | 0.005965 |
# Copyright (c) 2016, Xilinx, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION). HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from random import sample
from random import choice
from copy import deepcopy
import re
import pytest
from pynq import Overlay
from pynq.tests.util import user_answer_yes
from pynq.lib.logictools import LogicToolsController
from pynq.lib.logictools import BooleanGenerator
from pynq.lib.logictools.waveform import wave_to_bitstring
from pynq.lib.logictools import ARDUINO
from pynq.lib.logictools import PYNQZ1_LOGICTOOLS_SPECIFICATION
__author__ = "Yun Rock Qu"
__copyright__ = "Copyright 2016, Xilinx"
__email__ = "pynq_support@xilinx.com"
try:
ol = Overlay('logictools.bit', download=False)
flag0 = True
except IOError:
flag0 = False
flag1 = user_answer_yes("\nTest boolean generator?")
if flag1:
mb_info = ARDUINO
flag = flag0 and flag1
@pytest.mark.skipif(not flag, reason="need correct overlay to run")
def test_bool_state():
"""Test for the BooleanGenerator class.
This test will test configurations when all 5 pins of a LUT are
specified. Users need to manually check the output.
"""
ol.download()
input('\nDisconnect all the pins. Hit enter after done ...')
pin_dict = PYNQZ1_LOGICTOOLS_SPECIFICATION['traceable_outputs']
    first_6_pins = list(pin_dict.keys())[:6]
out_pin = first_6_pins[5]
in_pins = first_6_pins[0:5]
or_expr = out_pin + '=' + ('|'.join(in_pins))
bool_generator = BooleanGenerator(mb_info)
assert bool_generator.status == 'RESET'
bool_generator.trace()
bool_generator.setup({'test_bool_state': or_expr})
assert bool_generator.status == 'READY'
bool_generator.run()
assert bool_generator.status == 'RUNNING'
print('Connect all of {} to GND ...'.format(in_pins))
assert user_answer_yes("{} outputs logic low?".format(out_pin)), \
"Boolean configurator fails to show logic low."
print('Connect any of {} to VCC ...'.format(in_pins))
assert user_answer_yes("{} outputs logic high?".format(out_pin)), \
"Boolean configurator fails to show logic high."
bool_generator.stop()
assert bool_generator.status == 'READY'
bool_generator.step()
assert bool_generator.status == 'RUNNING'
bool_generator.stop()
assert bool_generator.status == 'READY'
bool_generator.reset()
assert bool_generator.status == 'RESET'
del bool_generator
@pytest.mark.skipif(not flag, reason="need correct overlay to run")
def test_bool_no_trace():
"""Test for the BooleanGenerator class.
This test will test whether users can show waveform when no trace analyzer
is used. An exception should be raised.
"""
ol.download()
bool_generator = BooleanGenerator(mb_info)
bool_generator.trace(use_analyzer=False)
exception_raised = False
try:
bool_generator.show_waveform()
except ValueError:
exception_raised = True
assert exception_raised, 'Should raise exception for show_waveform().'
bool_generator.reset()
del bool_generator
@pytest.mark.skipif(not flag, reason="need correct overlay to run")
def test_bool_multiple():
"""Test for the BooleanGenerator class.
This test will test the configurations when only part of the
LUT pins are used. Multiple instances will be tested.
This is an automatic test so no user interaction is needed.
"""
ol.download()
pin_dict = PYNQZ1_LOGICTOOLS_SPECIFICATION['traceable_outputs']
    first_10_pins = list(pin_dict.keys())[:10]
in_pins = first_10_pins[0:5]
out_pins = first_10_pins[5:10]
test_expressions = list()
operations = ['&', '|', '^']
for i in range(5):
operation = choice(operations)
test_expressions.append(out_pins[i] + '=' +
(operation.join(sample(in_pins, i+1))))
print('\nConnect randomly {} to VCC or GND.'.format(in_pins))
input('Hit enter after done ...')
bool_generator = BooleanGenerator(mb_info)
bool_generator.trace()
bool_generator.setup(expressions=test_expressions)
bool_generator.run()
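    # Verification strategy: substitute each input pin name in the Boolean
    # expression with the '0'/'1' level captured on its trace, turn the single
    # '=' into '==', and let eval() confirm the equality holds.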
for expr_label in bool_generator.expressions.keys():
waveform = bool_generator.waveforms[expr_label]
wavelanes_in = waveform.waveform_dict['signal'][0][1:]
wavelanes_out = waveform.waveform_dict['signal'][-1][1:]
expr = deepcopy(bool_generator.expressions[expr_label])
for wavelane in wavelanes_in:
if 'h' == wavelane['wave'][0]:
str_replace = '1'
elif 'l' == wavelane['wave'][0]:
str_replace = '0'
else:
raise ValueError("Unrecognizable pattern captured.")
expr = re.sub(r"\b{}\b".format(wavelane['name']),
str_replace, expr)
wavelane = wavelanes_out[0]
if 'h' == wavelane['wave'][0]:
str_replace = '1'
elif 'l' == wavelane['wave'][0]:
str_replace = '0'
else:
raise ValueError("Unrecognizable pattern captured.")
expr = re.sub(r"\b{}\b".format(wavelane['name']),
str_replace, expr)
expr = expr.replace('=', '==')
assert eval(expr), "Boolean expression {} not evaluating " \
"correctly.".format(
bool_generator.expressions[expr_label])
bool_generator.stop()
bool_generator.reset()
del bool_generator
@pytest.mark.skipif(not flag, reason="need correct overlay to run")
def test_bool_step():
"""Test for the BooleanGenerator class.
This test will test whether the `step()` method works correctly.
Users will be asked to change input values during the test. The test
scenario is also an extreme case where only 2 samples are captured.
"""
ol.download()
pin_dict = PYNQZ1_LOGICTOOLS_SPECIFICATION['traceable_outputs']
    first_10_pins = list(pin_dict.keys())[:10]
in_pins = first_10_pins[0:5]
out_pins = first_10_pins[5:10]
test_expressions = list()
operations = ['&', '|', '^']
for i in range(5):
operation = choice(operations)
test_expressions.append(out_pins[i] + '=' +
(operation.join(sample(in_pins, i+1))))
print('\nConnect randomly {} to VCC or GND.'.format(in_pins))
input('Hit enter after done ...')
bool_generator = BooleanGenerator(mb_info)
bool_generator.trace(num_analyzer_samples=2)
bool_generator.setup(expressions=test_expressions)
for i in range(2):
print('Change some of the connections from {}.'.format(in_pins))
input('Hit enter after done ...')
bool_generator.step()
for expr_label in bool_generator.expressions.keys():
waveform = bool_generator.waveforms[expr_label]
wavelanes_in = waveform.waveform_dict['signal'][0][1:]
wavelanes_out = waveform.waveform_dict['signal'][-1][1:]
expr = deepcopy(bool_generator.expressions[expr_label])
for wavelane in wavelanes_in:
wavelane_bitstring = wave_to_bitstring(wavelane['wave'])
str_replace = wavelane_bitstring[i]
expr = re.sub(r"\b{}\b".format(wavelane['name']),
str_replace, expr)
wavelane = wavelanes_out[0]
wavelane_bitstring = wave_to_bitstring(wavelane['wave'])
str_replace = wavelane_bitstring[i]
expr = re.sub(r"\b{}\b".format(wavelane['name']),
str_replace, expr)
expr = expr.replace('=', '==')
assert eval(expr), "Boolean expression {} not evaluating " \
"correctly in step {}.".format(
bool_generator.expressions[expr_label], i)
bool_generator.stop()
bool_generator.reset()
del bool_generator
@pytest.mark.skipif(not flag, reason="need correct overlay to run")
def test_bool_zero_inputs():
"""Test for the BooleanGenerator class.
This test will test whether 0-input expressions are accepted.
"""
ol.download()
pin_dict = PYNQZ1_LOGICTOOLS_SPECIFICATION['traceable_outputs']
first_1_pin = list(pin_dict.keys())[0]
expr_no_input = first_1_pin + '='
bool_generator = BooleanGenerator(mb_info)
exception_raised = False
try:
bool_generator.trace()
bool_generator.setup(expressions=[expr_no_input])
except ValueError:
exception_raised = True
assert exception_raised, 'Should raise exception if function has 0 input.'
bool_generator.reset()
del bool_generator
@pytest.mark.skipif(not flag, reason="need correct overlay to run")
def test_bool_six_inputs():
"""Test for the BooleanGenerator class.
This test will test whether 6-input expressions are accepted.
"""
ol.download()
pin_dict = PYNQZ1_LOGICTOOLS_SPECIFICATION['traceable_outputs']
first_1_pin = list(pin_dict.keys())[0]
    next_6_pins = list(pin_dict.keys())[1:7]
expr_6_inputs = first_1_pin + '=' + ('&'.join(next_6_pins))
bool_generator = BooleanGenerator(mb_info)
exception_raised = False
try:
bool_generator.trace()
bool_generator.setup(expressions=[expr_6_inputs])
except ValueError:
exception_raised = True
assert exception_raised, 'Should raise exception if function has 6 inputs.'
bool_generator.reset()
del bool_generator
@pytest.mark.skipif(not flag, reason="need correct overlay to run")
def test_bool_zero_outputs():
"""Test for the BooleanGenerator class.
This test will test whether 0-output expressions are accepted.
"""
ol.download()
pin_dict = PYNQZ1_LOGICTOOLS_SPECIFICATION['traceable_outputs']
first_1_pin = list(pin_dict.keys())[0]
expr_no_rhs = first_1_pin
bool_generator = BooleanGenerator(mb_info)
exception_raised = False
try:
bool_generator.trace()
bool_generator.setup(expr_no_rhs)
except ValueError:
exception_raised = True
assert exception_raised, 'Should raise exception if function has no RHS.'
bool_generator.reset()
del bool_generator
@pytest.mark.skipif(not flag, reason="need correct overlay to run")
def test_bool_max_num_expr():
"""Test for the BooleanGenerator class.
This test will implement a maximum number of boolean generators,
each having 1 input. For example, PYNQ-Z1 has 20 pins for Arduino header,
so 19 boolean generators will be implemented, each having 1 output
assigned to 1 pin. All the generators share the same input pin.
"""
ol.download()
pin_dict = PYNQZ1_LOGICTOOLS_SPECIFICATION['traceable_outputs']
interface_width = PYNQZ1_LOGICTOOLS_SPECIFICATION['interface_width']
    all_pins = list(pin_dict.keys())[:interface_width]
num_expressions = interface_width - 1
in_pin = all_pins[0]
out_pins = all_pins[1:]
test_expressions = list()
for i in range(num_expressions):
test_expressions.append(out_pins[i] + '=' + in_pin)
print("")
bool_generator = BooleanGenerator(mb_info)
for voltage in ['VCC', 'GND']:
print('Disconnect all the pins. Connect only {} to {}.'.format(
in_pin, voltage))
input('Press enter when done ...')
bool_generator.trace()
bool_generator.setup(expressions=test_expressions)
bool_generator.run()
for expr_label in bool_generator.expressions.keys():
waveform = bool_generator.waveforms[expr_label]
wavelanes_in = waveform.waveform_dict['signal'][0][1:]
wavelanes_out = waveform.waveform_dict['signal'][-1][1:]
expr = deepcopy(bool_generator.expressions[expr_label])
wavelane = wavelanes_in[0]
wavelane_bitstring = wave_to_bitstring(wavelane['wave'])
str_replace = wavelane_bitstring[0]
expr = re.sub(r"\b{}\b".format(wavelane['name']),
str_replace, expr)
wavelane = wavelanes_out[0]
wavelane_bitstring = wave_to_bitstring(wavelane['wave'])
str_replace = wavelane_bitstring[0]
expr = re.sub(r"\b{}\b".format(wavelane['name']),
str_replace, expr)
expr = expr.replace('=', '==')
assert eval(expr), "Boolean expression {} not evaluating " \
"correctly.".format(
bool_generator.expressions[expr_label])
bool_generator.stop()
bool_generator.reset()
del bool_generator
| schelleg/PYNQ | pynq/lib/logictools/tests/test_boolean_generator.py | Python | bsd-3-clause | 14,127 | 0.000354 |
# -*- coding: utf-8 -*-
"""
@brief test log(time=92s)
"""
import unittest
from pyquickhelper.loghelper import fLOG
from pyquickhelper.pycode import get_temp_folder, add_missing_development_version
import ensae_teaching_cs
class TestNotebookRunner1a_soft_sql(unittest.TestCase):
def setUp(self):
add_missing_development_version(["pymyinstall", "pyensae", "pymmails", "jyquickhelper", "mlstatpy"],
__file__, hide=True)
def test_notebook_runner_soft_sql(self):
fLOG(
__file__,
self._testMethodName,
OutputPrint=__name__ == "__main__")
temp = get_temp_folder(__file__, "temp_notebook1a_soft_sql")
from ensae_teaching_cs.automation.notebook_test_helper import ls_notebooks, execute_notebooks, clean_function_1a
keepnote = ls_notebooks("td1a_soft")
for n in keepnote:
fLOG(n)
execute_notebooks(temp, keepnote,
lambda i, n: "csharp" not in n and "cython" not in n,
fLOG=fLOG,
clean_function=clean_function_1a,
dump=ensae_teaching_cs)
if __name__ == "__main__":
unittest.main()
| sdpython/ensae_teaching_cs | _unittests/ut_dnotebooks/test_1A_notebook_soft_sql.py | Python | mit | 1,241 | 0.002417 |
import os
IGNORE = (
"/test/",
"/tests/gtests/",
"/BSP_GhostTest/",
"/release/",
"/xembed/",
"/TerraplayNetwork/",
"/ik_glut_test/",
# specific source files
"extern/Eigen2/Eigen/src/Cholesky/CholeskyInstantiations.cpp",
"extern/Eigen2/Eigen/src/Core/CoreInstantiations.cpp",
"extern/Eigen2/Eigen/src/QR/QrInstantiations.cpp",
"extern/bullet2/src/BulletCollision/CollisionDispatch/btBox2dBox2dCollisionAlgorithm.cpp",
"extern/bullet2/src/BulletCollision/CollisionDispatch/btConvex2dConvex2dAlgorithm.cpp",
"extern/bullet2/src/BulletCollision/CollisionDispatch/btInternalEdgeUtility.cpp",
"extern/bullet2/src/BulletCollision/CollisionShapes/btBox2dShape.cpp",
"extern/bullet2/src/BulletCollision/CollisionShapes/btConvex2dShape.cpp",
"extern/bullet2/src/BulletDynamics/Character/btKinematicCharacterController.cpp",
"extern/bullet2/src/BulletDynamics/ConstraintSolver/btHinge2Constraint.cpp",
"extern/bullet2/src/BulletDynamics/ConstraintSolver/btUniversalConstraint.cpp",
"extern/eltopo/common/meshes/ObjLoader.cpp",
"extern/eltopo/common/meshes/meshloader.cpp",
"extern/eltopo/common/openglutils.cpp",
"extern/eltopo/eltopo3d/broadphase_blenderbvh.cpp",
"source/blender/imbuf/intern/imbuf_cocoa.m",
"extern/recastnavigation/Recast/Source/RecastLog.cpp",
"extern/recastnavigation/Recast/Source/RecastTimer.cpp",
"intern/audaspace/SRC/AUD_SRCResampleFactory.cpp",
"intern/audaspace/SRC/AUD_SRCResampleReader.cpp",
"intern/cycles/render/film_response.cpp",
"extern/libmv/third_party/ceres/internal/ceres/generated/schur_eliminator_2_2_2.cc",
"extern/libmv/third_party/ceres/internal/ceres/generated/schur_eliminator_2_2_3.cc",
"extern/libmv/third_party/ceres/internal/ceres/generated/schur_eliminator_2_2_4.cc",
"extern/libmv/third_party/ceres/internal/ceres/generated/schur_eliminator_2_2_d.cc",
"extern/libmv/third_party/ceres/internal/ceres/generated/schur_eliminator_2_3_3.cc",
"extern/libmv/third_party/ceres/internal/ceres/generated/schur_eliminator_2_3_4.cc",
"extern/libmv/third_party/ceres/internal/ceres/generated/schur_eliminator_2_3_9.cc",
"extern/libmv/third_party/ceres/internal/ceres/generated/schur_eliminator_2_3_d.cc",
"extern/libmv/third_party/ceres/internal/ceres/generated/schur_eliminator_2_4_3.cc",
"extern/libmv/third_party/ceres/internal/ceres/generated/schur_eliminator_2_4_4.cc",
"extern/libmv/third_party/ceres/internal/ceres/generated/schur_eliminator_2_4_d.cc",
"extern/libmv/third_party/ceres/internal/ceres/generated/schur_eliminator_4_4_2.cc",
"extern/libmv/third_party/ceres/internal/ceres/generated/schur_eliminator_4_4_3.cc",
"extern/libmv/third_party/ceres/internal/ceres/generated/schur_eliminator_4_4_4.cc",
"extern/libmv/third_party/ceres/internal/ceres/generated/schur_eliminator_4_4_d.cc",
"extern/bullet2/src/BulletCollision/CollisionDispatch/btBox2dBox2dCollisionAlgorithm.h",
"extern/bullet2/src/BulletCollision/CollisionDispatch/btConvex2dConvex2dAlgorithm.h",
"extern/bullet2/src/BulletCollision/CollisionDispatch/btInternalEdgeUtility.h",
"extern/bullet2/src/BulletCollision/CollisionShapes/btBox2dShape.h",
"extern/bullet2/src/BulletCollision/CollisionShapes/btConvex2dShape.h",
"extern/bullet2/src/BulletDynamics/Character/btKinematicCharacterController.h",
"extern/bullet2/src/BulletDynamics/ConstraintSolver/btHinge2Constraint.h",
"extern/bullet2/src/BulletDynamics/ConstraintSolver/btUniversalConstraint.h",
"extern/eltopo/common/meshes/Edge.hpp",
"extern/eltopo/common/meshes/ObjLoader.hpp",
"extern/eltopo/common/meshes/TriangleIndex.hpp",
"extern/eltopo/common/meshes/meshloader.h",
"extern/eltopo/eltopo3d/broadphase_blenderbvh.h",
"extern/recastnavigation/Recast/Include/RecastLog.h",
"extern/recastnavigation/Recast/Include/RecastTimer.h",
"intern/audaspace/SRC/AUD_SRCResampleFactory.h",
"intern/audaspace/SRC/AUD_SRCResampleReader.h",
"intern/cycles/render/film_response.h",
"extern/carve/include/carve/config.h",
"extern/carve/include/carve/external/boost/random.hpp",
"extern/carve/patches/files/config.h",
"extern/carve/patches/files/random.hpp",
)
UTF8_CHECK = True
SOURCE_DIR = os.path.normpath(os.path.abspath(os.path.normpath(os.path.join(os.path.dirname(__file__), "..", ".."))))
# doesn't have to exist, just use as reference
BUILD_DIR = os.path.normpath(os.path.abspath(os.path.normpath(os.path.join(SOURCE_DIR, "..", "build"))))
| pawkoz/dyplom | blender/build_files/cmake/cmake_consistency_check_config.py | Python | gpl-2.0 | 4,572 | 0.006124 |
"""linter_test_project URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib import admin
urlpatterns = [
url(r'^admin/', admin.site.urls),
]
| David-Wobrock/django-fake-database-backends | tests/test_project/test_project/urls.py | Python | mit | 776 | 0 |
##
## This file is part of the libsigrok project.
##
## Copyright (C) 2014 Martin Ling <martin-sigrok@earth.li>
##
## This program is free software: you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation, either version 3 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program. If not, see <http://www.gnu.org/licenses/>.
##
from __future__ import print_function
from xml.etree import ElementTree
import sys, os
language, input_file = sys.argv[1:3]
# default to None so later `mode` checks do not raise NameError when the
# optional third argument is omitted
mode = sys.argv[3] if len(sys.argv) == 4 else None
input_dir = os.path.dirname(input_file)
index = ElementTree.parse(input_file)
def get_text(node):
paras = node.findall('para')
return str.join('\n\n', [p.text.rstrip() for p in paras if p.text])
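# For example, a description node holding two <para> children yields
# "first paragraph\n\nsecond paragraph".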
for compound in index.findall('compound'):
if compound.attrib['kind'] != 'class':
continue
class_name = compound.find('name').text
if not class_name.startswith('sigrok::'):
continue
trimmed_name = class_name.split('::')[1]
doc = ElementTree.parse("%s/%s.xml" % (input_dir, compound.attrib['refid']))
cls = doc.find('compounddef')
brief = get_text(cls.find('briefdescription'))
if brief:
if language == 'python':
print('%%feature("docstring") %s "%s";' % (class_name, brief))
elif language == 'java':
print('%%typemap(javaclassmodifiers) %s "/** %s */\npublic class"' % (
class_name, brief))
constants = []
for section in cls.findall('sectiondef'):
kind = section.attrib['kind']
if kind not in ('public-func', 'public-static-attrib'):
continue
for member in section.findall('memberdef'):
member_name = member.find('name').text
brief = get_text(member.find('briefdescription')).replace('"', '\\"')
parameters = {}
for para in member.find('detaileddescription').findall('para'):
paramlist = para.find('parameterlist')
if paramlist is not None:
for param in paramlist.findall('parameteritem'):
namelist = param.find('parameternamelist')
name = namelist.find('parametername').text
description = get_text(param.find('parameterdescription'))
if description:
parameters[name] = description
if brief:
if language == 'python' and kind == 'public-func':
print(str.join('\n', [
'%%feature("docstring") %s::%s "%s' % (
class_name, member_name, brief)] + [
'@param %s %s' % (name, desc)
for name, desc in parameters.items()]) + '";')
elif language == 'java' and kind == 'public-func':
print(str.join('\n', [
'%%javamethodmodifiers %s::%s "/** %s' % (
class_name, member_name, brief)] + [
' * @param %s %s' % (name, desc)
for name, desc in parameters.items()])
+ ' */\npublic"')
elif kind == 'public-static-attrib':
constants.append((member_name, brief))
if language == 'java' and constants:
print('%%typemap(javacode) %s %%{' % class_name)
for member_name, brief in constants:
print(' /** %s */\n public static final %s %s = new %s(classesJNI.%s_%s_get(), false);\n' % (
brief, trimmed_name, member_name, trimmed_name,
trimmed_name, member_name))
print('%}')
elif language == 'python' and constants:
if mode == 'start':
print('%%extend %s {\n%%pythoncode %%{' % class_name)
for member_name, brief in constants:
print(' ## @brief %s\n %s = None' % (brief, member_name))
print('%}\n}')
elif mode == 'end':
print('%pythoncode %{')
for member_name, brief in constants:
print('%s.%s.__doc__ = """%s"""' % (
trimmed_name, member_name, brief))
print('%}')
| mtitinger/libsigrok | bindings/swig/doc.py | Python | gpl-3.0 | 4,655 | 0.005371 |
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A module for storing and getting objects from datastore.
This module provides Get, Set and Delete functions for storing pickleable
objects in datastore, with support for large objects greater than 1 MB.
Although this module contains ndb.Model classes, these are not intended
to be used directly by other modules.
App Engine datastore limits entity size to less than 1 MB; this module
supports storing larger objects by splitting the data and using multiple
datastore entities and multiple memcache keys. Using ndb.get and pickle, a
complex data structure can be retrieved more quickly than datastore fetch.
Example:
john = Account()
john.username = 'John'
john.userid = 123
stored_object.Set(john.userid, john)
"""
import cPickle as pickle
import logging
from google.appengine.api import memcache
from google.appengine.ext import ndb
_MULTIPART_ENTITY_MEMCACHE_KEY = 'multipart_entity_'
# Maximum number of entities and memcache to save a value.
# The limit for data stored in one datastore entity is 1 MB,
# and the limit for memcache batch operations is 32 MB. See:
# https://cloud.google.com/appengine/docs/python/memcache/#Python_Limits
_MAX_NUM_PARTS = 16
# Max bytes per entity or value cached with memcache.
_CHUNK_SIZE = 1000 * 1000
def Get(key):
"""Gets the value.
Args:
key: String key value.
Returns:
A value for key.
"""
results = MultipartCache.Get(key)
if not results:
results = _GetValueFromDatastore(key)
MultipartCache.Set(key, results)
return results
def Set(key, value):
"""Sets the value in datastore and memcache with limit of '_MAX_NUM_PARTS' MB.
Args:
key: String key value.
value: A pickleable value to be stored limited at '_MAX_NUM_PARTS' MB.
"""
entity = ndb.Key(MultipartEntity, key).get()
if not entity:
entity = MultipartEntity(id=key)
entity.SetData(value)
entity.Save()
MultipartCache.Set(key, value)
def Delete(key):
"""Deletes the value in datastore and memcache."""
ndb.Key(MultipartEntity, key).delete()
MultipartCache.Delete(key)
class MultipartEntity(ndb.Model):
"""Container for PartEntity."""
# Number of entities use to store serialized.
size = ndb.IntegerProperty(default=0, indexed=False)
@classmethod
def _post_get_hook(cls, key, future): # pylint: disable=unused-argument
"""Deserializes data from multiple PartEntity."""
entity = future.get_result()
if entity is None or not entity.size:
return
string_id = entity.key.string_id()
part_keys = [ndb.Key(MultipartEntity, string_id, PartEntity, i + 1)
for i in xrange(entity.size)]
part_entities = ndb.get_multi(part_keys)
serialized = ''.join(p.value for p in part_entities if p is not None)
entity.SetData(pickle.loads(serialized))
@classmethod
def _pre_delete_hook(cls, key):
"""Deletes PartEntity entities."""
part_keys = PartEntity.query(ancestor=key).fetch(keys_only=True)
ndb.delete_multi(part_keys)
def Save(self):
"""Stores serialized data over multiple PartEntity."""
serialized_parts = _Serialize(self.GetData())
if len(serialized_parts) > _MAX_NUM_PARTS:
logging.error('Max number of parts reached.')
return
part_list = []
num_parts = len(serialized_parts)
for i in xrange(num_parts):
if serialized_parts[i] is not None:
part = PartEntity(id=i + 1, parent=self.key, value=serialized_parts[i])
part_list.append(part)
self.size = num_parts
ndb.put_multi(part_list + [self])
def GetData(self):
return getattr(self, '_data', None)
def SetData(self, data):
setattr(self, '_data', data)
class PartEntity(ndb.Model):
"""Holds a part of serialized data for MultipartEntity.
This entity key has the form:
ndb.Key('MultipartEntity', multipart_entity_id, 'PartEntity', part_index)
"""
value = ndb.BlobProperty()
class MultipartCache(object):
"""Contains operations for storing values over multiple memcache keys.
Values are serialized, split, and stored over multiple memcache keys. The
head cache stores the expected size.
"""
@classmethod
def Get(cls, key):
"""Gets value in memcache."""
keys = cls._GetCacheKeyList(key)
head_key = cls._GetCacheKey(key)
cache_values = memcache.get_multi(keys)
# Whether we have all the memcache values.
if len(keys) != len(cache_values) or head_key not in cache_values:
return None
serialized = ''
cache_size = cache_values[head_key]
keys.remove(head_key)
for key in keys[:cache_size]:
if key not in cache_values:
return None
if cache_values[key] is not None:
serialized += cache_values[key]
return pickle.loads(serialized)
@classmethod
def Set(cls, key, value):
"""Sets a value in memcache."""
serialized_parts = _Serialize(value)
if len(serialized_parts) > _MAX_NUM_PARTS:
logging.error('Max number of parts reached.')
return
cached_values = {}
cached_values[cls._GetCacheKey(key)] = len(serialized_parts)
for i in xrange(len(serialized_parts)):
cached_values[cls._GetCacheKey(key, i)] = serialized_parts[i]
memcache.set_multi(cached_values)
@classmethod
def Delete(cls, key):
"""Deletes all cached values for key."""
memcache.delete_multi(cls._GetCacheKeyList(key))
@classmethod
def _GetCacheKeyList(cls, key):
"""Gets a list of head cache key and cache key parts."""
keys = [cls._GetCacheKey(key, i) for i in xrange(_MAX_NUM_PARTS)]
keys.append(cls._GetCacheKey(key))
return keys
@classmethod
def _GetCacheKey(cls, key, index=None):
"""Returns either head cache key or cache key part."""
if index is not None:
return _MULTIPART_ENTITY_MEMCACHE_KEY + '%s.%s' % (key, index)
return _MULTIPART_ENTITY_MEMCACHE_KEY + key
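  # Key layout example: for key 'foo', the head key is 'multipart_entity_foo'
  # and the part keys are 'multipart_entity_foo.0' ... 'multipart_entity_foo.15'.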
def _GetValueFromDatastore(key):
entity = ndb.Key(MultipartEntity, key).get()
if not entity:
return None
return entity.GetData()
def _Serialize(value):
"""Serializes value and returns a list of its parts.
Args:
value: A pickleable value.
Returns:
A list of string representation of the value that has been pickled and split
into _CHUNK_SIZE.
"""
serialized = pickle.dumps(value, 2)
length = len(serialized)
values = []
for i in xrange(0, length, _CHUNK_SIZE):
values.append(serialized[i:i + _CHUNK_SIZE])
for i in xrange(len(values), _MAX_NUM_PARTS):
values.append(None)
return values
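# Sizing illustration using the constants above: a 2.5 MB pickled payload is
# split into three strings of at most _CHUNK_SIZE bytes each, and the returned
# list is padded with None entries up to _MAX_NUM_PARTS.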
| SummerLW/Perf-Insight-Report | dashboard/dashboard/stored_object.py | Python | bsd-3-clause | 6,638 | 0.008888 |
def extractLittlebambooHomeBlog(item):
'''
Parser for 'littlebamboo.home.blog'
'''
vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
if not (chp or vol) or "preview" in item['title'].lower():
return None
tagmap = [
('FW', 'Fortunate Wife', 'translated'),
('PRC', 'PRC', 'translated'),
('Loiterous', 'Loiterous', 'oel'),
]
for tagname, name, tl_type in tagmap:
if tagname in item['tags']:
return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
	return False
| fake-name/ReadableWebProxy | WebMirror/management/rss_parser_funcs/feed_parse_extractLittlebambooHomeBlog.py | Python | bsd-3-clause | 622 | 0.028939 |
#@+leo-ver=5-thin
#@+node:2014spring.20140628104046.1746: * @file openshiftlibs.py
#@@language python
#@@tabwidth -4
#@+others
#@+node:2014spring.20140628104046.1747: ** openshiftlibs declarations
#!/usr/bin/env python
import hashlib, inspect, os, random, sys
#@+node:2014spring.20140628104046.1748: ** get_openshift_secret_token
# Gets the secret token provided by OpenShift
# or generates one (this is slightly less secure, but good enough for now)
def get_openshift_secret_token():
token = os.getenv('OPENSHIFT_SECRET_TOKEN')
name = os.getenv('OPENSHIFT_APP_NAME')
uuid = os.getenv('OPENSHIFT_APP_UUID')
if token is not None:
return token
elif (name is not None and uuid is not None):
return hashlib.sha256(name.encode('utf-8') + '-'.encode('utf-8') + uuid.encode('utf-8')).hexdigest()
return None
#@+node:2014spring.20140628104046.1749: ** openshift_secure
# Loop through all provided variables and generate secure versions
# If not running on OpenShift, returns defaults and logs an error message
#
# This function calls secure_function and passes an array of:
# {
# 'hash': generated sha hash,
# 'variable': name of variable,
# 'original': original value
# }
def openshift_secure(default_keys, secure_function = 'make_secure_key'):
# Attempts to get secret token
my_token = get_openshift_secret_token()
# Only generate random values if on OpenShift
my_list = default_keys
if my_token is not None:
# Loop over each default_key and set the new value
for key, value in default_keys.items():
# Create hash out of token and this key's name
sha = hashlib.sha256(my_token.encode('utf-8') + '-'.encode('utf-8') + key.encode('utf-8')).hexdigest()
# Pass a dictionary so we can add stuff without breaking existing calls
vals = { 'hash': sha, 'variable': key, 'original': value }
# Call user specified function or just return hash
my_list[key] = sha
if secure_function is not None:
# Pick through the global and local scopes to find the function.
possibles = globals().copy()
possibles.update(locals())
supplied_function = possibles.get(secure_function)
if not supplied_function:
raise Exception("Cannot find supplied security function")
else:
my_list[key] = supplied_function(vals)
else:
calling_file = inspect.stack()[1][1]
if os.getenv('OPENSHIFT_REPO_DIR'):
base = os.getenv('OPENSHIFT_REPO_DIR')
calling_file.replace(base,'')
sys.stderr.write("OPENSHIFT WARNING: Using default values for secure variables, please manually modify in " + calling_file + "\n")
return my_list
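# Hypothetical usage (the key name is illustrative, not an OpenShift API):
# defaults = {'SECRET_KEY': 'changeme0123456789'}
# secure = openshift_secure(defaults) # values regenerated via make_secure_key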
#@+node:2014spring.20140628104046.1750: ** make_secure_key
# This function transforms default keys into per-deployment random keys;
def make_secure_key(key_info):
hashcode = key_info['hash']
key = key_info['variable']
original = key_info['original']
# These are the legal password characters
# as per the Django source code
# (django/contrib/auth/models.py)
chars = 'abcdefghjkmnpqrstuvwxyz'
chars += 'ABCDEFGHJKLMNPQRSTUVWXYZ'
chars += '23456789'
# Use the hash to seed the RNG
random.seed(int("0x" + hashcode[:8], 0))
# Create a random string the same length as the default
rand_key = ''
for _ in range(len(original)):
        # randint is inclusive at both ends, so len(chars) would select an
        # empty slice and silently shorten the key; cap it at the last index
        rand_pos = random.randint(0, len(chars) - 1)
        rand_key += chars[rand_pos]
# Reset the RNG
random.seed()
# Set the value
return rand_key
#@-others
#@-leo
| coursemdetw/2015wcms | wsgi/openshift/openshiftlibs.py | Python | gpl-2.0 | 3,730 | 0.008043 |
#! usr/bin/python3
# -*- coding: utf8 -*-
import datetime
import json
import os
from flask_script import Command
from scripts.users_export_to_json import json_user_file
from application import db
from application.flicket.models.flicket_user import FlicketUser
class JsonUser:
def __init__(self, username, name, email, password):
self.username = username
        self.name = name
        self.email = email
        self.password = password
class ImportUsersFromJson(Command):
"""
Command used by manage.py to import users from a json file formatted such:
[
{ username, name, email, password.
]
"""
@staticmethod
def run():
# check if file exists
if not os.path.isfile(json_user_file):
print('Could not find json file "{}". Exiting ....'.format(json_user_file))
exit()
# read json file
with open(json_user_file) as data_file:
json_users = json.load(data_file)
# check formatting of json file
valid_json_fields = ['username', 'name', 'email', 'password']
for user in json_users:
if not all(f in user for f in valid_json_fields):
print('json file not formatted correctly. Exiting.')
exit()
# add users to database.
for user in json_users:
# encode password to bytes
password = str.encode(user['password'])
# create json_user object
json_user = JsonUser(user['username'], user['name'], user['email'], password)
# check tht user doesn't already exist.
existing_user = FlicketUser.query.filter_by(email=json_user.email)
if existing_user.count() > 0:
print('User {} {} already exists in the database.'.format(json_user.name, json_user.email))
continue
# add the user
print('Adding the user {} {} to the database.'.format(json_user.name, json_user.email))
new_user = FlicketUser(username=json_user.username, name=json_user.name, email=json_user.email,
password=json_user.password, date_added=datetime.datetime.now())
db.session.add(new_user)
db.session.commit()
| evereux/flicket | scripts/users_import_from_json.py | Python | mit | 2,390 | 0.00251 |
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "RandomPasswordGenerator.settings")
try:
from django.core.management import execute_from_command_line
except ImportError:
# The above import may fail for some other reason. Ensure that the
# issue is really that Django is missing to avoid masking other
# exceptions on Python 2.
try:
import django
except ImportError:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
)
raise
execute_from_command_line(sys.argv)
| AgapiGit/RandomPasswordGenerator | RandomPasswordGenerator/manage.py | Python | mit | 843 | 0.001186 |
from datetime import datetime
import logging
import sys
from django.conf import settings
log = logging.getLogger('agro.sources')
tree_modules_to_try = [ "xml.etree.cElementTree", "elementtree.ElementTree", "cElementTree", ]
element_tree = None
for tree in tree_modules_to_try:
try:
try:
element_tree = __import__('%s' % tree, {}, {}, [''], -1)
except:
element_tree = __import__('%s' % tree, {}, {}, [''])
break
except ImportError, e:
continue
except Exception, e:
log.error("%s" % e)
raise
if element_tree is None:
raise ImportError("No ElementTree found.")
log.debug("Using specified etree module: %s" % element_tree)
def import_source_modules(source_list=settings.AGRO_SETTINGS['source_list'], class_name=''):
sources = []
for source in source_list:
try:
log.debug('trying to load %s' % source)
try:
s = __import__("agro.sources.%s" % source, {}, {}, ['%s%s' % (source, class_name)], -1)
except:
s = __import__("agro.sources.%s" % source, {}, {}, ['%s%s' % (source, class_name)])
if s:
sources.append(s)
except Exception, e:
log.error('unable to load %s: %s', source, e)
return sources
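# Usage sketch (the 'flickr' source name is hypothetical): with
# AGRO_SETTINGS['source_list'] = ['flickr'], calling
# import_source_modules(class_name='Importer') tries to import
# agro.sources.flickr with 'flickrImporter' in its fromlist.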
| camflan/agro | agro/sources/__init__.py | Python | bsd-3-clause | 1,319 | 0.006823 |
# -*- coding: utf-8 -*-
"""
@file
@brief Customer notebook exporters.
"""
import os
from textwrap import indent
from traitlets import default
from traitlets.config import Config
from jinja2 import DictLoader
from nbconvert.exporters import RSTExporter
from nbconvert.filters.pandoc import convert_pandoc
def convert_pandoc_rst(source, from_format, to_format, extra_args=None):
"""
Overwrites `convert_pandoc
<https://github.com/jupyter/nbconvert/blob/master/nbconvert/filters/pandoc.py>`_.
@param source string to convert
@param from_format from format
@param to_format to format
@param extra_args extra arguments
@return results
"""
    # forward the caller's extra_args instead of silently dropping them
    return convert_pandoc(source, from_format, to_format, extra_args=extra_args)
def process_raw_html(source, extra_args=None):
"""
Replaces the output of
`add_menu_notebook
<http://www.xavierdupre.fr/app/jyquickhelper/helpsphinx/jyquickhelper/
helper_in_notebook.html#jyquickhelper.helper_in_notebook.add_notebook_menu>`_
by:
::
.. contents::
:local:
"""
if source is None:
return source # pragma: no cover
if 'var update_menu = function() {' in source:
return "\n\n.. contents::\n :local:\n\n"
return "\n\n.. raw:: html\n\n" + indent(source, prefix=' ')
class UpgradedRSTExporter(RSTExporter):
"""
Exports :epkg:`rst` documents.
Overwrites `RSTExporter <https://github.com/jupyter/
nbconvert/blob/master/nbconvert/exporters/rst.py>`_.
* It replaces `convert_pandoc <https://github.com/jupyter/
nbconvert/blob/master/nbconvert/filters/pandoc.py>`_
by @see fn convert_pandoc_rst.
* It converts :epkg:`svg` into :epkg:`png` if possible,
see @see fn process_raw_html.
* It replaces some known :epkg:`javascript`. The output of function
`add_menu_notebook <http://www.xavierdupre.fr/app/jyquickhelper/helpsphinx/jyquickhelper/
helper_in_notebook.html#jyquickhelper.helper_in_notebook.add_notebook_menu>`_
is replaced by ``.. contents::``.
.. index:: notebook export, nbconvert
It extends the template
`rst.tpl <https://github.com/jupyter/nbconvert/blob/master/nbconvert/templates/rst.tpl>`_.
New template is `rst_modified.tpl <https://github.com/sdpython/pyquickhelper/blob/master/
src/pyquickhelper/helpgen/rst_modified.tpl>`_.
It follows the hints given at
`Programatically creating templates
<https://nbconvert.readthedocs.io/en/latest/
nbconvert_library.html#Programatically-creating-templates>`_.
:epkg:`jyquickhelper` should add a string highly recognizable when adding a menu.
"""
def __init__(self, *args, **kwargs):
"""
Overwrites the extra loaders to get the right template.
"""
filename = os.path.join(os.path.dirname(__file__), 'rst_modified.tpl')
with open(filename, 'r', encoding='utf-8') as f:
content = f.read()
filename = os.path.join(os.path.dirname(__file__), 'rst.tpl')
with open(filename, 'r', encoding='utf-8') as f:
content2 = f.read()
dl = DictLoader({'rst_modified.tpl': content, 'rst.tpl': content2})
kwargs['extra_loaders'] = [dl]
RSTExporter.__init__(self, *args, **kwargs)
def default_filters(self):
"""
Overrides in subclasses to provide extra filters.
This should return an iterable of 2-tuples: (name, class-or-function).
You should call the method on the parent class and include the filters
it provides.
If a name is repeated, the last filter provided wins. Filters from
user-supplied config win over filters provided by classes.
"""
for k, v in RSTExporter.default_filters(self):
yield (k, v)
yield ('convert_pandoc_rst', convert_pandoc_rst)
yield ('process_raw_html', process_raw_html)
output_mimetype = 'text/restructuredtext'
export_from_notebook = "reST"
@default('template_file')
def _template_file_default(self):
return "rst_modified.tpl"
@default('file_extension')
def _file_extension_default(self):
return '.rst'
@default('template_name')
def _template_name_default(self):
return 'rst'
@property
def default_config(self):
c = Config({
'ExtractOutputPreprocessor': {
'enabled': True,
'output_filename_template': '{unique_key}_{cell_index}_{index}{extension}'
},
'HighlightMagicsPreprocessor': {
'enabled': True
},
})
c.merge(super(UpgradedRSTExporter, self).default_config)
return c
| sdpython/pyquickhelper | src/pyquickhelper/helpgen/notebook_exporter.py | Python | mit | 4,770 | 0.001048 |
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
import math
import numpy as np
from scipy.interpolate import interp1d
def _avgdiff(x):
dx = np.diff(x)
dx2 = np.zeros_like(x)
dx2[0], dx2[-1] = dx[0], dx[-1]
dx2[1:-1] = 0.5 * (dx[1:] + dx[:-1])
return dx2
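# Worked example: for x = [0, 1, 3], np.diff(x) is [1, 2], so _avgdiff returns
# [1.0, 1.5, 2.0]: endpoints keep their single neighbouring spacing, interior
# points get the mean of the two adjacent spacings.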
def rebalanced_grid(
grid, err, base=0.25, num=None, resolution_factor=10, smooth_fact=1.0
):
if num is None:
num = grid.size
dx = np.diff(grid)
area_err = 0.5 * np.dot(err[1:] + err[:-1], dx) # trapezoidal rule
dx2 = _avgdiff(grid)
def smooth_err(x):
tot = 0
for i, (gx, e) in enumerate(zip(grid, err)):
fwhm = dx2[i] * smooth_fact
tot += e * np.exp(-((x - gx) ** 2) / (2 * (fwhm / 2.35482) ** 2))
return tot
finegrid = np.zeros((grid.size - 1) * resolution_factor + 1)
for i in range(grid.size - 1):
finegrid[i * resolution_factor : (i + 1) * resolution_factor] = np.linspace(
grid[i], grid[i + 1], resolution_factor + 1
)[:-1]
finegrid[-resolution_factor - 1 :] = np.linspace(
grid[-2], grid[-1], resolution_factor + 1
)
smoothed = smooth_err(finegrid) + base * area_err / (grid[-1] - grid[0])
assert np.all(smoothed > 0)
assert np.all(_avgdiff(finegrid) > 0)
interr = np.cumsum(smoothed * _avgdiff(finegrid))
cb = interp1d(interr, finegrid)
return cb(np.linspace(interr[0], interr[-1], num))
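# Minimal usage sketch (values are illustrative):
#     x = np.linspace(0, 1, 11)
#     err = np.abs(np.sin(6 * x)) # stand-in per-point error estimate
#     new_x = rebalanced_grid(x, err) # points cluster where err is large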
def pre_pruning_mask(grid, rtol=1e-12, atol=0.0):
"""Returns a mask for grid pruning.
Any grid spacing smaller than ``rtol*gridvalue + atol`` will
be pruned. In general the value on the right is removed unless it is
the last point in the grid.
Parameters
----------
grid : array
rtol : float
atol : float
Returns
-------
NumPy array of ``numpy.bool_`` (to be used as mask).
"""
if np.any(np.diff(grid) < 0):
raise ValueError("grid needs to be monotonic")
limit = grid[-1] - (atol + abs(rtol * grid[-1]))
mask = np.empty(grid.size, dtype=np.bool_)
mask[grid.size - 1] = True # rightmost point included
for ridx in range(grid.size - 2, -1, -1):
if grid[ridx] < limit:
mask[ridx] = True
break
else:
mask[ridx] = False
else:
raise ValueError("no grid-points left")
mask[0] = True # leftmost point included
limit = grid[0] + abs(rtol * grid[0]) + atol
for idx in range(1, ridx):
if grid[idx] < limit:
mask[idx] = False
else:
mask[idx] = True
limit = grid[idx] + abs(rtol * grid[idx]) + atol
return mask
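# Worked example with the default tolerances: the third point sits within
# rtol of its left neighbour and is masked out:
#     pre_pruning_mask(np.array([0.0, 1.0, 1.0 + 1e-13, 1.5, 2.0]))
#     # -> array([ True,  True, False,  True,  True])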
def combine_grids(grids, **kwargs):
"""Combines multiple grids and prunes them using pre_pruning mask
Parameters
----------
grids : iterable of array_like grids
\\*\\* : dict
Keyword arguments passed on to pre_pruning_mask
Returns
-------
Strictly increasing monotonic array
"""
supergrid = np.sort(np.concatenate(grids))
mask = pre_pruning_mask(supergrid, **kwargs)
return supergrid[mask]
def grid_pruning_mask(grid, err, ndrop=None, protect_sparse=None, pow_err=2, pow_dx=2):
"""Returns a mask for grid pruning.
Parameters
----------
grid : array
err : array
ndrop : int
If not provided taken as 25% of grid size (rounded upward).
protect_sparse : int
If not provided taken as 25% of grid size (rounded upward).
pow_err : number
Exponent of error in weighting.
pow_dx : number
Exponent of grid spacing in weighting.
"""
if ndrop is None:
ndrop = math.ceil(grid.size * 0.25)
if protect_sparse is None:
protect_sparse = math.ceil(grid.size * 0.25)
dx = _avgdiff(grid)
protected = np.argsort(dx)[-protect_sparse:]
score = err ** pow_err * dx ** pow_dx
importance = np.argsort(score)
drop = []
for considered in importance:
if considered in protected:
continue
if considered - 1 in drop or considered + 1 in drop:
continue
drop.append(considered)
if len(drop) == ndrop:
break
return ~np.in1d(np.arange(grid.size), drop)
| bjodah/finitediff | finitediff/grid/rebalance.py | Python | bsd-2-clause | 4,269 | 0.000937 |
# -*- coding: utf-8 -*-
#
# Copyright 2012-2015 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import subprocess
import tempfile
import luigi
from helpers import with_config, unittest
from luigi.contrib.pig import PigJobError, PigJobTask
from mock import patch
class SimpleTestJob(PigJobTask):
def output(self):
return luigi.LocalTarget('simple-output')
def pig_script_path(self):
return "my_simple_pig_script.pig"
class ComplexTestJob(PigJobTask):
def output(self):
return luigi.LocalTarget('complex-output')
def pig_script_path(self):
return "my_complex_pig_script.pig"
def pig_env_vars(self):
return {'PIG_CLASSPATH': '/your/path'}
def pig_properties(self):
return {'pig.additional.jars': '/path/to/your/jar'}
def pig_parameters(self):
return {'YOUR_PARAM_NAME': 'Your param value'}
def pig_options(self):
return ['-x', 'local']
class SimplePigTest(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
@patch('subprocess.Popen')
def test_run__success(self, mock):
arglist_result = []
p = subprocess.Popen
subprocess.Popen = _get_fake_Popen(arglist_result, 0)
try:
job = SimpleTestJob()
job.run()
self.assertEqual([['/usr/share/pig/bin/pig', '-f', 'my_simple_pig_script.pig']], arglist_result)
finally:
subprocess.Popen = p
@patch('subprocess.Popen')
def test_run__fail(self, mock):
arglist_result = []
p = subprocess.Popen
subprocess.Popen = _get_fake_Popen(arglist_result, 1)
try:
job = SimpleTestJob()
            job.run()
        except PigJobError as e:
            # job.run() is expected to raise, so the command-line check lives
            # here; `p` stays bound to the saved Popen for the finally block
            self.assertEqual('stderr', e.err)
            self.assertEqual([['/usr/share/pig/bin/pig', '-f', 'my_simple_pig_script.pig']], arglist_result)
else:
self.fail("Should have thrown PigJobError")
finally:
subprocess.Popen = p
class ComplexPigTest(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
@patch('subprocess.Popen')
def test_run__success(self, mock):
arglist_result = []
p = subprocess.Popen
subprocess.Popen = _get_fake_Popen(arglist_result, 0)
try:
job = ComplexTestJob()
job.run()
self.assertEqual([['/usr/share/pig/bin/pig', '-x', 'local', '-p', 'YOUR_PARAM_NAME=Your param value', '-propertyFile', 'pig_property_file', '-f', 'my_complex_pig_script.pig']], arglist_result)
# Check property file
with open('pig_property_file') as pprops_file:
pprops = pprops_file.readlines()
self.assertEqual(1, len(pprops))
self.assertEqual('pig.additional.jars=/path/to/your/jar\n', pprops[0])
finally:
subprocess.Popen = p
@patch('subprocess.Popen')
def test_run__fail(self, mock):
arglist_result = []
p = subprocess.Popen
subprocess.Popen = _get_fake_Popen(arglist_result, 1)
try:
job = ComplexTestJob()
job.run()
        except PigJobError as e:
            # `p` stays bound to the saved Popen so the finally block restores it
            self.assertEqual('stderr', e.err)
self.assertEqual([['/usr/share/pig/bin/pig', '-x', 'local', '-p', 'YOUR_PARAM_NAME=Your param value', '-propertyFile', 'pig_property_file', '-f', 'my_complex_pig_script.pig']], arglist_result)
# Check property file
with open('pig_property_file') as pprops_file:
pprops = pprops_file.readlines()
self.assertEqual(1, len(pprops))
self.assertEqual('pig.additional.jars=/path/to/your/jar\n', pprops[0])
else:
self.fail("Should have thrown PigJobError")
finally:
subprocess.Popen = p
def _get_fake_Popen(arglist_result, return_code, *args, **kwargs):
def Popen_fake(arglist, shell=None, stdout=None, stderr=None, env=None, close_fds=True):
arglist_result.append(arglist)
class P(object):
def wait(self):
pass
def poll(self):
return 0
def communicate(self):
return 'end'
def env(self):
return self.env
p = P()
p.returncode = return_code
p.stderr = tempfile.TemporaryFile()
p.stdout = tempfile.TemporaryFile()
p.stdout.write(b'stdout')
p.stderr.write(b'stderr')
# Reset temp files so the output can be read.
p.stdout.seek(0)
p.stderr.seek(0)
return p
return Popen_fake
| torypages/luigi | test/contrib/pig_test.py | Python | apache-2.0 | 5,241 | 0.001336 |
from django.core.management.base import BaseCommand, CommandError
from survey.models import Record
from fuzzywuzzy import fuzz
class Command(BaseCommand):
    help = 'Finds fuzzy name matches and allows altering their relation'
def add_arguments(self, parser):
parser.add_argument('start', nargs='?', type=int, default=0)
def handle(self, *args, **options):
rx = Record.objects.all()
        total = rx.count()  # renamed from `all` to avoid shadowing the builtin
        cnt = 0
        print "Iterating over " + str(total) + " database records, starting at " + str(options['start'])
for i,r1 in enumerate(rx):
# Obey start position argument
if i < options['start']: continue
for j,r2 in enumerate(rx):
if j <= i: continue
ratio = fuzz.ratio(r1.name,r2.name)
if ratio < 75:
continue
if r1.person_id == r2.person_id:
continue
if r1.country != r2.country:
continue
if r1.gender != r2.gender:
continue
# Print leftovers:
print ""
print u"Score: {0:3d} {1:30}{2}".format(ratio,r1.name,r2.name)
print u"Person-ID: {1:30}{2}".format(ratio,r1.person_id,r2.person_id)
print u"Follow-up: {0!r:<30}{1}".format(r1.follow_up_case,r2.follow_up_case)
print u"Date intervention: {0:30}{1}".format(str(r1.date_intervention),str(r2.date_intervention))
print u"Issue area: {0:30}{1}".format(r1.issue_area,r2.issue_area)
print u"Activities: {0:30}{1}".format(r1.relevant_activities,r2.relevant_activities)
if Record.objects.filter(pk=r1.pk, follow_ups__pk=r2.pk).exists():
print u"Relation exists? ************** YES ****************"
else:
print u"Relation exists? .............. NO ................"
while True:
data = str(raw_input("(a)dd, (r)emove relation, (s)kip or (p)ause: "))
if data.lower() not in ('a', 'r', 's', 'p'):
print("Not an appropriate choice.")
else:
break
if data == "a":
r1.follow_ups.add(r2)
r1.save()
elif data == "r":
r1.follow_ups.remove(r2)
r1.save()
elif data == "s":
continue;
elif data == "p":
print "Restart with argument: " + str(i)
self.stdout.write(self.style.SUCCESS('Paused at %i' % i))
return
cnt += 1
print "Status: {:2.1f}".format((100.0*i)/all)
self.stdout.write(self.style.SUCCESS('Successfully edited all fuzzy relations'))
| simonspa/django-datacollect | datacollect/survey/management/commands/edit_relations.py | Python | gpl-3.0 | 2,980 | 0.00906 |
# Importing Modules from PyQt5
from PyQt5.QtWidgets import QSizePolicy, QPushButton, QFrame, QWidget, QStackedWidget
from PyQt5.QtGui import QColor
# Importing Modules from the App
from Gui import Table, Plot, Funcs, Budget
from Settings import StyleSheets as St
def smallerNumber(number1, number2):
    return min(number1, number2)
def fill_a_list(List, filler, length):
    # append `length` copies of `filler` to the end of a new list
    return List + [filler] * length
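# For example, fill_a_list(['a'], '', 3) returns ['a', '', '', '']; it pads the
# sidebar button and callback lists up to St.number_of_buttons_on_sidebar below.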
class App(QWidget):
# The Main Window... This Widget will be the main window.
# Other widgets such as the TablePage and PlotPage will be called from here in a StackedWidget
def __init__(self):
super(App, self).__init__()
self.setWindowTitle('Finances App 2') # Set the title of the app
self.setGeometry(500, 500, 1600, 880) # Set the Geometry of the Window
### Setting the Colour of the app background
p = self.palette()
b_col = QColor(St.background_colour)
p.setColor(self.backgroundRole(), b_col)
self.setPalette(p)
self.initUI()
def initUI(self):
self.TableStackItem = Table.TablePage()
self.PlotStackItem = Plot.App_Bit()
self.BudgetStackItem = Budget.SettingsPage()
sidebar_frame = self.sideBar()
self.FullStack = QStackedWidget(self)
self.FullStack.addWidget(self.TableStackItem)
self.FullStack.addWidget(self.PlotStackItem)
self.FullStack.addWidget(self.BudgetStackItem)
self.onTabButton()
Funcs.AllInOneLayout(self,[sidebar_frame,self.FullStack],Stretches=[1,10],VH="H")
self.show()
def sideBar(self):
sidebar_frame = QFrame()
sidebar_frame.setMinimumWidth(110)
#sidebar_frame.setStyleSheet(St.StyleSheets['Sidebar'])
button_titles = ['Data\nTables','Plotting','Budget']
button_titles = fill_a_list(button_titles, '', St.number_of_buttons_on_sidebar-len(button_titles))
self.buttons = []
but_funcs = [self.onTabButton, self.onPlotButton, self.onBudgetButton ]
but_funcs = fill_a_list(but_funcs, self.emptyFunc, St.number_of_buttons_on_sidebar-len(but_funcs))
for i in range(St.number_of_buttons_on_sidebar):
button = QPushButton(button_titles[i])
button.setStyleSheet(St.StyleSheets['Button%i'%i])
button.clicked.connect(but_funcs[i])
button.setSizePolicy(QSizePolicy.Expanding, QSizePolicy.Expanding)
button.setCheckable(True)
self.buttons.append(button)
Funcs.AllInOneLayout(sidebar_frame, self.buttons, VH='V')# add button and button2 to the sidebar_frame vertically, aligning them at the top.
#frame_layout.setSizeLayout(QSizePolicy.Expanding, QSizePolicy.Expanding)
return sidebar_frame
# These buttons change which widget we can see in the stacked widget
def onTabButton(self):
self.TableStackItem.setFocus()
self.FullStack.setCurrentIndex(0)
def onPlotButton(self):
self.PlotStackItem.setFocus()
self.FullStack.setCurrentIndex(1)
def onBudgetButton(self):
self.BudgetStackItem.setFocus()
self.FullStack.setCurrentIndex(2)
def emptyFunc(self):
        return 0
| 95ellismle/FinancesApp2 | Gui/App.py | Python | gpl-3.0 | 3,438 | 0.013089 |
#!/usr/bin/env python
import os, sys
sys.path.insert( 0, os.path.dirname( __file__ ) )
from common import delete
try:
assert sys.argv[2]
except IndexError:
print 'usage: %s key url [purge (true/false)] ' % os.path.basename( sys.argv[0] )
sys.exit( 1 )
try:
data = {}
data[ 'purge' ] = sys.argv[3]
except IndexError:
pass
delete( sys.argv[1], sys.argv[2], data )
| mikel-egana-aranguren/SADI-Galaxy-Docker | galaxy-dist/scripts/api/history_delete_history.py | Python | gpl-3.0 | 389 | 0.03856 |
#!/usr/bin/env python
# -*- Mode: python; tab-width: 4; indent-tabs-mode:nil; coding: utf-8 -*-
# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4
#
# BornProfiler --- A package to calculate electrostatic free energies with APBS
# Written by Kaihsu Tai, Lennard van der Feltz, and Oliver Beckstein
# Released under the GNU Public Licence, version 3
#
import bornprofiler
import logging
logger = logging.getLogger('bornprofiler')
usage = """%prog [options]
Set up the BornProfiler configuration directories. This only has to be
done once (but it will not cause any damage to run this script again).
"""
if __name__ == "__main__":
from optparse import OptionParser
parser = OptionParser(usage=usage)
opts, args = parser.parse_args()
bornprofiler.start_logging()
bornprofiler.config.setup()
if bornprofiler.config.check_setup():
logger.info("Init successful: you now have the template directories under %r.",
bornprofiler.config.configdir)
logger.info("The package can also be customized by editing %r.",
bornprofiler.config.CONFIGNAME)
logger.info("Questions and feedback: Oliver Beckstein <obeckste@asu.edu>")
else:
logger.error("Something is wrong: Failed to setup the template directories.")
logger.warn("You can proceed but problems migh arise and you will not be able "
"to easily customize generation of runs and submission scripts.")
bornprofiler.stop_logging()
| Becksteinlab/BornProfiler | scripts/apbs-bornprofile-init.py | Python | gpl-3.0 | 1,462 | 0.008208 |
# -*- coding: utf-8 -*-
import re
import pandas as pd
from scrapy.spiders import Spider
from scrapy.selector import Selector
from ..parsing.zone import (
CityZoneParser,
EPCIZoneParser,
DepartmentZoneParser,
RegionZoneParser
)
from ..item import LocalFinance
from ..utils import DOM_DEP_MAPPING, uniformize_code, convert_dom_code, convert_city
class LocalFinanceSpider(Spider):
"""Basic spider which crawls all pages of finance of french towns, departments
regions and EPCI.
"""
name = "localfinance"
domain = "http://alize2.finances.gouv.fr"
allowed_domains = [domain]
def __init__(self, year=2014, zone_type='city'):
"""Load insee code of every commune in france and generate all the urls to
crawl."""
self.start_urls = []
if zone_type == 'city' or zone_type == 'all':
self.start_urls += self.get_commune_urls(year)
if zone_type == 'department' or zone_type == 'all':
self.start_urls += self.get_dep_urls(year)
if zone_type == 'region' or zone_type == 'all':
self.start_urls += self.get_reg_urls(year)
if zone_type == 'epci' or zone_type == 'all':
self.start_urls += self.get_epci_urls(year)
def get_dep_urls(self, year):
insee_code_file = "data/locality/depts2013.txt"
data = pd.io.parsers.read_csv(insee_code_file, '\t')
data['DEP'] = uniformize_code(data, 'DEP')
data['DEP'] = convert_dom_code(data)
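        # The doubled %% in "%%(DEP)s" survives this first %-formatting and is filled in per-row below.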
baseurl = "%s/departements/detail.php?dep=%%(DEP)s&exercice=%s" % (self.domain, year)
return [baseurl % row for __, row in data.iterrows()]
def get_reg_urls(self, year):
insee_code_file = "data/locality/reg2013.txt"
data = pd.io.parsers.read_csv(insee_code_file, '\t')
data['REGION'] = uniformize_code(data, 'REGION')
# Special case for DOM as usual
def set_dom_code(reg):
if reg == '001':
return '101'
elif reg == '002':
return '103'
elif reg == '003':
return '102'
elif reg == '004':
return '104'
else:
return reg
data['REGION'] = data['REGION'].apply(set_dom_code)
baseurl = "%s/regions/detail.php?reg=%%(REGION)s&exercice=%s" % (self.domain, year)
return [baseurl % row for __, row in data.iterrows()]
def get_epci_urls(self, year):
data = pd.read_csv('data/locality/epci.csv')
base_url = "%s/communes/eneuro/detail_gfp.php?siren=%%(siren)s&dep=%%(dep)s&type=BPS&exercice=%s" % (self.domain, str(year))
return [base_url % row for __, row in data.iterrows()]
def get_commune_urls(self, year):
"""
        The commune page URLs depend on 5 parameters:
        - COM: the INSEE code of the commune
        - DEP: the department code on 3 characters
        - type: the type of financial data; BPS is for the whole data.
        - param: always set to 0
        - exercice: the year of the financial data
"""
insee_code_file = "data/locality/france2013.txt"
data = pd.io.parsers.read_csv(insee_code_file, '\t')
# XXX: insee_communes file contains also "cantons", filter out these lines
mask = data['ACTUAL'].apply(lambda v: v in [1, 2, 3])
data = data[mask].reindex()
# XXX: as always paris is the exception. City code is 101 for years < 2010 and 056 for years >= 2010
# 056 is the right code, add 101 also to crawl pages for years < 2010
paris_row = data[(data.COM == 56) & (data.DEP == '75')].copy()
paris_row.COM = 101
data = data.append(paris_row)
data['DEP'] = uniformize_code(data, 'DEP')
data['COM'] = uniformize_code(data, 'COM')
data['DEP'] = convert_dom_code(data)
data['COM'] = data.apply(convert_city, axis=1)
base_url = "%s/communes/eneuro/detail.php?icom=%%(COM)s&dep=%%(DEP)s&type=BPS¶m=0&exercice=%s" % (self.domain, str(year))
return [base_url % row for __, row in data.iterrows()]
def parse(self, response):
if "/communes/eneuro/detail_gfp.php" in response.url:
return self.parse_epci(response)
elif "/communes/eneuro/detail.php" in response.url:
return self.parse_commune(response)
elif "/departements/detail.php" in response.url:
return self.parse_dep(response)
elif "/regions/detail.php" in response.url:
return self.parse_reg(response)
def parse_commune(self, response):
"""Parse the response and return an Account object"""
hxs = Selector(response)
h3_strings = hxs.xpath("//body/h3/text()").extract()
if h3_strings and h3_strings[0].startswith("Aucune commune"):
return []
        icom, dep, year = re.search('icom=(\d{3})&dep=(\w{3})&type=\w{3}&param=0&exercice=(\d{4})', response.url).groups()
# XXX: better to use the real insee code for later analysis, not icom and dep in url.
real_dep = dict([(val, key) for key, val in DOM_DEP_MAPPING.items()]).get(dep, dep[1:])
real_com = icom if dep not in DOM_DEP_MAPPING.values() else icom[1:]
real_insee_code = real_dep + real_com
# XXX: hack for paris ! \o/
if real_insee_code == '75101':
real_insee_code = '75056'
data = CityZoneParser(real_insee_code, year, response.url).parse(hxs)
return LocalFinance(id=real_insee_code, data=data)
def parse_epci(self, response):
hxs = Selector(response)
siren, year = re.search('siren=(\d+)&dep=\w{3}&type=BPS&exercice=(\d{4})', response.url).groups()
if 'Aucun GFP correspondant' in response.body:
self.logger.warning("No epci for siren=%s and year=%s (%s)" % (siren, year, response.url))
return
data = EPCIZoneParser("", year, response.url, siren).parse(hxs)
return LocalFinance(id=siren, data=data)
def parse_dep(self, response):
hxs = Selector(response)
h3_strings = hxs.xpath("//body/h3/text()").extract()
department_id, year = re.search('dep=(\w{3})&exercice=(\d{4})', response.url).groups()
if h3_strings and h3_strings[0].startswith(u'Aucune donn\xe9e'):
self.logger.warning("No data found for department=%s and year=%s (%s)" % (department_id, year, response.url))
return
data = DepartmentZoneParser(department_id, year, response.url).parse(hxs)
return LocalFinance(id=department_id, data=data)
def parse_reg(self, response):
hxs = Selector(response)
h3_strings = hxs.xpath("//body/h3/text()").extract()
region_id, year = re.search('reg=(\w{3})&exercice=(\d{4})', response.url).groups()
if h3_strings and h3_strings[0].startswith(u'Aucune donn\xe9e'):
self.logger.warning("No data found for region=%s and year=%s (%s)" % (region_id, year, response.url))
return
data = RegionZoneParser(region_id, year, response.url).parse(hxs)
return LocalFinance(id=region_id, data=data) | regardscitoyens/nosfinanceslocales_scraper | localfinance/spiders/localfinance_spider.py | Python | mit | 7,108 | 0.004361 |
""" :mod:`eleve.segment`
==========================
The segmenter is available by importing ``eleve.Segmenter``. It is used to
segment sentences (regroup tokens that go together).
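
A minimal usage sketch (the storage setup is an assumption; ``MemoryStorage``
and its ``add_sentence`` method are taken from the eleve package API)::

    from eleve import MemoryStorage, Segmenter

    storage = MemoryStorage()
    storage.add_sentence(["le", "petit", "chat", "boit", "du", "lait"])
    segmenter = Segmenter(storage)
    fragments = segmenter.segment(["le", "petit", "chat", "boit", "du", "lait"])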
"""
import logging
from math import isnan
logger = logging.getLogger(__name__)
class Segmenter:
def __init__(self, storage, max_ngram_length=None):
""" Create a segmenter.
:param storage: A storage object that has been trained on a corpus (should have a ``query_autonomy`` method).
:param max_ngram_length: The maximum length of n-gram that can be "merged".
            It should be strictly smaller than the storage's n-gram length.
"""
assert hasattr(
storage, "query_autonomy"
), "The storage object should have a query_autonomy method."
self.storage = storage
if max_ngram_length is None:
assert hasattr(
storage, "default_ngram_length"
), "The storage should have a default_ngram_length attribute."
self.max_ngram_length = storage.default_ngram_length - 1
else:
assert (
isinstance(max_ngram_length, int) and max_ngram_length > 1
), "max_ngram_length should be an integer bigger than one"
if max_ngram_length >= storage.default_ngram_length:
logger.warning(
"consider n-grams of size %d at max, BUT storage backend has a default ngram length of %s."
% (max_ngram_length, storage.default_ngram_length)
)
self.max_ngram_length = max_ngram_length
def segment(self, sentence):
""" Segment a sentence.
:param sentence: A list of tokens.
:returns: A list of sentence fragments. A sentence fragment is a list of tokens.
"""
if len(sentence) > 1000:
logger.warning(
"The sentence you want to segment is HUGE. This will take a lot of memory."
)
# sentence = (
# [self.storage.sentence_start] + sentence + [self.storage.sentence_end]
# )
# dynamic programming to segment the sentence
best_segmentation = [[]] * (len(sentence) + 1)
best_score = [0] + [float("-inf")] * len(sentence)
# best_score[1] -> autonomy of the first word
# best_score[2] -> sum of autonomy of the first two words, or autonomy of the first two
# ...
order = self.max_ngram_length
query_autonomy = self.storage.query_autonomy
for i in range(1, len(sentence) + 1):
for j in range(1, order + 1):
if i - j < 0:
break
a = query_autonomy(sentence[i - j : i])
if isnan(a):
a = -100.0
score = best_score[i - j] + a * j
if score > best_score[i]:
best_score[i] = score
best_segmentation[i] = best_segmentation[i - j] + [
sentence[i - j : i]
]
# keep the best segmentation and remove the None
best_segmentation = best_segmentation[len(sentence)]
best_segmentation = list(filter(None, best_segmentation))
# best_segmentation.pop(0)
# best_segmentation.pop()
return best_segmentation
def segment_nbest(self, sentence, nbest=3):
""" Segment a sentence.
:param sentence: A list of tokens.
        :returns: A list of the ``nbest`` best segmentations; each segmentation is a list of sentence fragments.
"""
from collections import namedtuple
SegResult = namedtuple("SegResult", "score words")
if len(sentence) > 1000:
logger.warning(
"The sentence you want to segment is HUGE. This will take a lot of memory."
)
sentence = (
[self.storage.sentence_start] + sentence + [self.storage.sentence_end]
)
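        # Unlike segment(), this variant pads the sentence with boundary markers; they are stripped again in the return below (words[1:-1]).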
# dynamic programming to segment the sentence
# list of lists of SegResult
best_segmentations = [[SegResult(0.0, [])]] * (len(sentence) + 1)
best_score = [0] + [float("-inf")] * len(sentence)
# best_score[1] -> autonomy of the first word
# best_score[2] -> sum of autonomy of the first two words, or autonomy of the first two
# ...
order = self.max_ngram_length
query_autonomy = self.storage.query_autonomy
for i in range(1, len(sentence) + 1):
segmentations_at_i = []
for j in range(1, order + 1):
if i - j < 0:
break
a = query_autonomy(sentence[i - j : i])
if isnan(a):
a = -100.0
else:
a = a*j
segmentations_at_i.extend([SegResult(previous_best.score + a, previous_best.words + [sentence[i-j: i]]) for previous_best in best_segmentations[i-j] ])
best_segmentations[i] = sorted(segmentations_at_i, key=lambda x:x.score)[-nbest:]
#return [seg.words for seg in best_segmentations[-1][-nbest:]]
return [seg.words[1:-1] for seg in best_segmentations[-1][-nbest:]]
@staticmethod
def tokenInWord(w):
for i,c in enumerate(w):
yield "{}-{}_{}".format(c, "".join(w[0:max(i,0)]),"".join(w[i+1:]))
@staticmethod
def formatSentenceTokenInWord(sent):
return " ".join([c for w in sent for c in Segmenter.tokenInWord(w)])
def segmentSentenceTIW(self, sent: str) -> str:
return Segmenter.formatSentenceTokenInWord(self.segment(tuple(sent.split(" "))))
def segmentSentenceTIWBIES(self, sent:str) -> str:
tokens = tuple(sent.split(" "))
words = self.segment(tokens)
bies = []
for w in words:
chartoks = list(self.tokenInWord(w))
if len(w) == 1:
bies.append(chartoks[0] + "-S")
else:
bies.append(chartoks[0] + "-B")
for i in chartoks[1:-1]:
bies.append(i + "-I")
bies.append(chartoks[-1] + "-E")
return " ".join(bies)
def segmentSentenceBIES(self, sent: str) -> str:
tokens = tuple(sent.split(" "))
words = self.segment(tokens)
bies = []
for w in words:
if len(w) == 1:
bies.append(w[0] + "-S")
else:
bies.append(w[0] + "-B")
for i in w[1:-1]:
bies.append(i + "-I")
bies.append(w[-1] + "-E")
return " ".join(bies)
| kodexlab/eleve | eleve/segment.py | Python | lgpl-3.0 | 6,623 | 0.004379 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Tests for the output modules CLI arguments helper."""
import argparse
import unittest
from plaso.cli import tools
from plaso.cli.helpers import output_modules
from plaso.lib import errors
from tests.cli import test_lib as cli_test_lib
class OutputModulesArgumentsHelperTest(cli_test_lib.CLIToolTestCase):
"""Tests for the output modules CLI arguments helper."""
# pylint: disable=no-member,protected-access
_EXPECTED_OUTPUT = """\
usage: cli_helper.py [-o FORMAT] [-w OUTPUT_FILE] [--fields FIELDS]
[--additional_fields ADDITIONAL_FIELDS]
Test argument parser.
optional arguments:
--additional_fields ADDITIONAL_FIELDS, --additional-fields ADDITIONAL_FIELDS
Defines extra fields to be included in the output, in
addition to the default fields, which are datetime,
timestamp_desc, source, source_long, message, parser,
display_name, tag.
--fields FIELDS Defines which fields should be included in the output.
-o FORMAT, --output_format FORMAT, --output-format FORMAT
The output format. Use "-o list" to see a list of
available output formats.
-w OUTPUT_FILE, --write OUTPUT_FILE
Output filename.
"""
def testAddArguments(self):
"""Tests the AddArguments function."""
argument_parser = argparse.ArgumentParser(
prog='cli_helper.py', description='Test argument parser.',
add_help=False,
formatter_class=cli_test_lib.SortedArgumentsHelpFormatter)
output_modules.OutputModulesArgumentsHelper.AddArguments(argument_parser)
output = self._RunArgparseFormatHelp(argument_parser)
self.assertEqual(output, self._EXPECTED_OUTPUT)
def testParseOptions(self):
"""Tests the ParseOptions function."""
options = cli_test_lib.TestOptions()
options.output_format = 'dynamic'
options.write = 'output.dynamic'
test_tool = tools.CLITool()
output_modules.OutputModulesArgumentsHelper.ParseOptions(
options, test_tool)
self.assertEqual(test_tool._output_format, options.output_format)
self.assertEqual(test_tool._output_filename, options.write)
# Test with a configuration object missing.
with self.assertRaises(errors.BadConfigObject):
output_modules.OutputModulesArgumentsHelper.ParseOptions(options, None)
if __name__ == '__main__':
unittest.main()
| kiddinn/plaso | tests/cli/helpers/output_modules.py | Python | apache-2.0 | 2,511 | 0.002788 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright (C) 2005-2008 Francisco José Rodríguez Bogado #
# (pacoqueen@users.sourceforge.net) #
# #
# This file is part of GeotexInn. #
# #
# GeotexInn is free software; you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation; either version 2 of the License, or #
# (at your option) any later version. #
# #
# GeotexInn is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with GeotexInn; if not, write to the Free Software #
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA #
###############################################################################
###################################################################
## consulta_ventas_ticket.py - sum((PVP - IVA) * porcentaje_tarifa)
###################################################################
## NOTES:
## - Pre-invoices ("prefacturas") are not counted.
## - The search criterion for invoices is the invoice date, not the
##   payment date. The reason is simple: the goal of the query is to
##   show the amount of each LDV (sale line) by family together with
##   its profit. Since an invoice may be collected over several due
##   dates, which part of the payment goes to each LDV while the
##   invoice has not yet been fully collected?
## - Only sale lines are counted, not services (which, besides, cannot
##   be sold through the POS, hence they have no ticket).
###################################################################
## Changelog:
##
###################################################################
##
###################################################################
from ventana import Ventana
import utils
import pygtk
pygtk.require('2.0')
import gtk, gtk.glade, time, sqlobject
import sys, os
try:
import pclases
except ImportError:
sys.path.append(os.path.join('..', 'framework'))
import pclases
import datetime
try:
import geninformes
except ImportError:
sys.path.append(os.path.join('..', 'informes'))
import geninformes
try:
from treeview2pdf import treeview2pdf
except ImportError:
sys.path.append(os.path.join("..", "informes"))
from treeview2pdf import treeview2pdf
try:
from treeview2csv import treeview2csv
except ImportError:
sys.path.append(os.path.join("..", "informes"))
    from treeview2csv import treeview2csv
from informes import abrir_pdf, abrir_csv
import ventana_progreso
class ConsultaBeneficioTicket(Ventana):
def __init__(self, objeto = None, usuario = None):
self.usuario = usuario
Ventana.__init__(self, 'consulta_ventas_ticket.glade', objeto)
connections = {'b_salir/clicked': self.salir,
'b_buscar/clicked': self.buscar,
'b_imprimir/clicked': self.imprimir,
'b_exportar/clicked': self.exportar,
'b_fecha_inicio/clicked': self.set_inicio,
'b_fecha_fin/clicked': self.set_fin}
self.add_connections(connections)
cols = (('Fecha', 'gobject.TYPE_STRING', False, True, False, None),
('T./Alb./Fra.','gobject.TYPE_STRING',False,True,True,None),
('Imp. total', 'gobject.TYPE_STRING',False,True,False,None),
('Imp. (s/IVA)','gobject.TYPE_STRING',False,True,False,None),
('Ben. sobre tarifa', 'gobject.TYPE_STRING',
False, True, False, None),
('ID','gobject.TYPE_STRING', False, False, False, None))
utils.preparar_treeview(self.wids['tv_datos'], cols)
for col in self.wids['tv_datos'].get_columns()[2:]:
for cell in col.get_cell_renderers():
cell.set_property("xalign", 1.0)
col.set_alignment(0.5)
self.wids['tv_datos'].connect("row-activated", self.abrir_producto)
self.fin = datetime.date.today()
self.inicio = self.fin
self.wids['e_fechafin'].set_text(utils.str_fecha(self.fin))
self.wids['e_fechainicio'].set_text(utils.str_fecha(self.inicio))
gtk.main()
def abrir_producto(self, tv, path, vc):
"""
        Open the product that was double-clicked in a new window.
"""
model = tv.get_model()
tipo_e_id = model[path][-1]
if "LDV" in tipo_e_id:
tipo, id = tipo_e_id.split(':')
ldv = pclases.LineaDeVenta.get(id)
producto = ldv.producto
if isinstance(producto, pclases.ProductoVenta):
if producto.es_rollo():
import productos_de_venta_rollos
ventana_producto = productos_de_venta_rollos.ProductosDeVentaRollos(producto, usuario = self.usuario)
elif producto.es_bala() or producto.es_bigbag():
import productos_de_venta_balas
ventana_producto = productos_de_venta_balas.ProductosDeVentaBalas(producto, usuario = self.usuario)
elif isinstance(producto, pclases.ProductoCompra):
import productos_compra
ventana_producto = productos_compra.ProductosCompra(producto, usuario = self.usuario)
def chequear_cambios(self):
pass
def rellenar_tabla(self, resultados):
"""
        Fill the model with the items returned by the query.
"""
model = self.wids['tv_datos'].get_model()
model.clear()
totfact = totsiniva = totbeneficio = totbeneficio_cobro = 0.0
self.wids['tv_datos'].freeze_child_notify()
self.wids['tv_datos'].set_model(None)
totcobrado = totpendiente = 0.0
total_costo = total_costo_cobrado = 0.0
for material in resultados:
if material != None:
nombre_mat = material.descripcion
else:
nombre_mat = ""
padre_mat = model.append(None, (nombre_mat,
"",
"0",
"0",
"0",
"M:%d" % (material
and material.id
or -1)))
for fecha in resultados[material]:
if fecha != None:
str_fecha = utils.str_fecha(fecha)
else:
str_fecha = ""
padre_fec = model.append(padre_mat, (str_fecha,
"",
"0",
"0",
"0",
""))
for ldv in resultados[material][fecha]:
subtotal = ldv.get_subtotal(iva = True)
subtotal_siva = ldv.get_subtotal(iva = False)
beneficio = ldv.calcular_beneficio()
costo = ldv.calcular_precio_costo() * ldv.cantidad
if ldv.facturaVenta:
fac_alb_tic = ldv.facturaVenta.numfactura
cobradofra = ldv.facturaVenta.calcular_cobrado()
pendientefra = ldv.facturaVenta.calcular_pendiente_cobro()
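                    # Prorate this line by the fraction of the invoice already collected.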
try:
fraccion = cobradofra / (cobradofra + pendientefra)
except ZeroDivisionError:
fraccion = 1.0
cobrado = subtotal * fraccion
pendiente = subtotal - cobrado
beneficio_cobro = beneficio * fraccion
costo_cobrado = costo * fraccion
elif ldv.albaranSalida:
fac_alb_tic = ldv.albaranSalida.numalbaran
cobrado = 0.0
pendiente = subtotal
beneficio_cobro = 0.0
costo_cobrado = 0.0
elif ldv.ticket:
fac_alb_tic = "Ticket %d" % ldv.ticket.numticket
cobrado = subtotal
pendiente = 0.0
beneficio_cobro = beneficio
costo_cobrado = costo
                    # Tickets are assumed to always be collected, so the
                    # cost of the products relative to the amount
                    # collected is 100%.
else:
fac_alb_tic = ""
cobrado = pendiente = beneficio_cobro = 0.0
costo_cobrado = 0.0
desc_producto = utils.wrap(ldv.producto.descripcion, 40)
try:
beneficio_costo = 100.0 * beneficio / costo
except ZeroDivisionError:
beneficio_costo = 0.0
model.append(padre_fec, (desc_producto,
fac_alb_tic,
utils.float2str(subtotal),
utils.float2str(subtotal_siva),
"%s (%s%%)" % (
utils.float2str(beneficio),
utils.float2str(
beneficio_costo)),
"LDV:%d" % ldv.id))
                # Update the in-memory totals and the TreeView parent nodes
totfact += subtotal
totsiniva += subtotal_siva
totbeneficio += beneficio
totbeneficio_cobro += beneficio_cobro
totcobrado += cobrado
totpendiente += pendiente
total_costo += costo
total_costo_cobrado += costo_cobrado
model[padre_fec][2] = utils.float2str(
utils._float(model[padre_fec][2])
+ subtotal)
model[padre_fec][3] = utils.float2str(
utils._float(model[padre_fec][3])
+ subtotal_siva)
model[padre_fec][4] = utils.float2str(
utils._float(model[padre_fec][4])
+ beneficio)
model[padre_mat][2] = utils.float2str(
utils._float(model[padre_mat][2])
+ subtotal)
model[padre_mat][3] = utils.float2str(
utils._float(model[padre_mat][3])
+ subtotal_siva)
model[padre_mat][4] = utils.float2str(
utils._float(model[padre_mat][4])
+ beneficio)
self.rellenar_totales(totfact, totsiniva, totbeneficio,
totcobrado, totpendiente, totbeneficio_cobro,
total_costo, total_costo_cobrado)
self.wids['tv_datos'].set_model(model)
self.wids['tv_datos'].thaw_child_notify()
def rellenar_totales(self,
total_facturado,
total_siniva,
total_beneficio,
total_cobrado,
total_pendiente_de_cobro,
total_beneficio_de_lo_cobrado,
total_costo,
total_costo_cobrado):
"""
        Write the totals into the "entries".
"""
self.wids['e_total'].set_text(utils.float2str(total_facturado))
self.wids['e_siniva'].set_text(utils.float2str(total_siniva))
try:
beneficio = total_beneficio * 100.0 / total_siniva
except ZeroDivisionError:
beneficio = 0
try:
beneficio_sobre_costo = total_beneficio * 100.0 / total_costo
except ZeroDivisionError:
beneficio_sobre_costo = 0
try:
beneficio_cobro = (total_beneficio_de_lo_cobrado * 100.0
/ total_cobrado)
except ZeroDivisionError:
beneficio_cobro = 0
try:
beneficio_cobro_sobre_costo = (100 *
total_beneficio_de_lo_cobrado / total_costo_cobrado)
except ZeroDivisionError:
beneficio_cobro_sobre_costo = 0
self.wids['e_beneficio'].set_text("%s (%s%% de las ventas; %s%% sobre precio defecto)" % (
utils.float2str(total_beneficio),
utils.float2str(beneficio, 2, autodec = True),
utils.float2str(beneficio_sobre_costo, 2, autodec = True)))
self.wids['e_beneficio_cobro'].set_text("%s (%s%% de lo cobrado; %s%% sobre precio defecto en cobros)" % (
utils.float2str(total_beneficio_de_lo_cobrado),
utils.float2str(beneficio_cobro, 2, autodec = True),
utils.float2str(beneficio_cobro_sobre_costo, 2, autodec = True)))
self.wids['e_cobrado'].set_text(utils.float2str(total_cobrado))
self.wids['e_pendiente'].set_text(
utils.float2str(total_pendiente_de_cobro))
def set_inicio(self, boton):
temp = utils.mostrar_calendario(fecha_defecto = self.inicio, padre = self.wids['ventana'])
self.inicio = datetime.date(day = temp[0], month = temp[1], year = temp[2])
self.wids['e_fechainicio'].set_text(utils.str_fecha(self.inicio))
self.fin = self.inicio
self.wids['e_fechafin'].set_text(utils.str_fecha(self.fin))
def set_fin(self, boton):
temp = utils.mostrar_calendario(fecha_defecto = self.fin, padre = self.wids['ventana'])
self.fin = temp
self.fin = datetime.date(day = temp[0], month = temp[1], year = temp[2])
self.wids['e_fechafin'].set_text(utils.str_fecha(self.fin))
def buscar(self,boton):
"""
        Given start and end dates, find every LDV from invoices, uninvoiced
        delivery notes and tickets without an invoice (nor a delivery note,
        obviously).
        NOTE: SERVICES ARE NOT COUNTED.
"""
vpro = ventana_progreso.VentanaProgreso(padre = self.wids['ventana'])
vpro.mostrar()
inicio = self.inicio
fin = self.fin
LDV = pclases.LineaDeVenta
FV = pclases.FacturaVenta
AS = pclases.AlbaranSalida
T = pclases.Ticket
ldvsf = LDV.select(pclases.AND(
LDV.q.facturaVentaID == FV.q.id,
FV.q.fecha >= inicio,
FV.q.fecha <= fin))
ldvsa = LDV.select(pclases.AND(
LDV.q.albaranSalidaID == AS.q.id,
LDV.q.facturaVentaID == None,
AS.q.fecha >= inicio,
AS.q.fecha <= fin))
ldvst = LDV.select(pclases.AND(LDV.q.ticketID == T.q.id,
LDV.q.albaranSalidaID == None,
LDV.q.facturaVentaID == None,
T.q.fechahora >= inicio,
T.q.fechahora < fin + datetime.timedelta(days = 1)))
self.resultados = {}
act = 0.0; tot = ldvsf.count() + ldvsa.count() + ldvst.count()
for ldv in ldvsf:
vpro.set_valor(act/tot, "Calculando beneficio facturas...")
add_ldv_a_diccionario_resultados(ldv, self.resultados)
act += 1
for ldv in ldvsa:
vpro.set_valor(act/tot, "Calculando beneficio albaranes...")
add_ldv_a_diccionario_resultados(ldv, self.resultados)
act += 1
for ldv in ldvst:
vpro.set_valor(act/tot, "Calculando beneficio tickets...")
add_ldv_a_diccionario_resultados(ldv, self.resultados)
act += 1
vpro.set_valor(1.0, "Calculando totales...")
self.rellenar_tabla(self.resultados)
vpro.ocultar()
def imprimir(self, boton):
"""
        Prepare the print preview for the report.
"""
        # TODO: totals are still missing
resp = utils.dialogo(titulo = "¿IMPRIMIR DESGLOSE?",
texto = "Puede imprimir un resumen o todo el contenido de la consulta\n¿Desea imprimir toda la información desglosada?",
padre = self.wids['ventana'])
if resp:
tv = self.wids['tv_datos']
tv.expand_all()
while gtk.events_pending(): gtk.main_iteration(False)
else:
tv = self.wids['tv_datos']
tv.collapse_all()
while gtk.events_pending(): gtk.main_iteration(False)
from consulta_ventas_por_producto import convertir_a_listview
tv = convertir_a_listview(tv)
strfecha = "De %s a %s" % (self.wids['e_fechainicio'].get_text(), self.wids['e_fechafin'].get_text())
abrir_pdf(treeview2pdf(tv, titulo = "Beneficio sobre tarifa", fecha = strfecha))
def exportar(self, boton):
"""
        Export the TreeView to CSV.
"""
abrir_csv(treeview2csv(self.wids['tv_datos']))
def add_ldv_a_diccionario_resultados(ldv, r):
if ldv.productoCompra:
material = ldv.productoCompra.tipoDeMaterial
else:
material = None
if material not in r:
r[material] = {}
if ldv.facturaVenta:
fecha = ldv.facturaVenta.fecha
elif ldv.albaranSalida:
fecha = ldv.albaranSalida.fecha
elif ldv.ticket:
fecha = utils.abs_fecha(ldv.ticket.fechahora)
else:
fecha = None
if fecha not in r[material]:
r[material][fecha] = [ldv]
else:
r[material][fecha].append(ldv)
if __name__ == '__main__':
t = ConsultaBeneficioTicket()
| pacoqueen/upy | formularios/consulta_ventas_ticket.py | Python | gpl-2.0 | 19,273 | 0.009814 |
__all__ = ['gtk_element_editor', 'main_window_handler', 'sortiment', 'window_creator', 'error_handler']
| peto2006/sortiment-frontent | sortimentGUI/__init__.py | Python | mit | 104 | 0.009615 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012, Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Unit Tests for nova.cert.rpcapi
"""
from nova.cert import rpcapi as cert_rpcapi
from nova import context
from nova import flags
from nova.openstack.common import rpc
from nova import test
FLAGS = flags.FLAGS
class CertRpcAPITestCase(test.TestCase):
def setUp(self):
super(CertRpcAPITestCase, self).setUp()
def tearDown(self):
super(CertRpcAPITestCase, self).tearDown()
def _test_cert_api(self, method, **kwargs):
ctxt = context.RequestContext('fake_user', 'fake_project')
rpcapi = cert_rpcapi.CertAPI()
expected_retval = 'foo'
expected_msg = rpcapi.make_msg(method, **kwargs)
expected_msg['version'] = rpcapi.BASE_RPC_API_VERSION
self.call_ctxt = None
self.call_topic = None
self.call_msg = None
self.call_timeout = None
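        # Stub out rpc.call and capture its arguments for the assertions below.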
def _fake_call(_ctxt, _topic, _msg, _timeout):
self.call_ctxt = _ctxt
self.call_topic = _topic
self.call_msg = _msg
self.call_timeout = _timeout
return expected_retval
self.stubs.Set(rpc, 'call', _fake_call)
retval = getattr(rpcapi, method)(ctxt, **kwargs)
self.assertEqual(retval, expected_retval)
self.assertEqual(self.call_ctxt, ctxt)
self.assertEqual(self.call_topic, FLAGS.cert_topic)
self.assertEqual(self.call_msg, expected_msg)
self.assertEqual(self.call_timeout, None)
def test_revoke_certs_by_user(self):
self._test_cert_api('revoke_certs_by_user', user_id='fake_user_id')
def test_revoke_certs_by_project(self):
self._test_cert_api('revoke_certs_by_project',
project_id='fake_project_id')
def test_revoke_certs_by_user_and_project(self):
self._test_cert_api('revoke_certs_by_user_and_project',
user_id='fake_user_id',
project_id='fake_project_id')
def test_generate_x509_cert(self):
self._test_cert_api('generate_x509_cert',
user_id='fake_user_id',
project_id='fake_project_id')
def test_fetch_ca(self):
self._test_cert_api('fetch_ca', project_id='fake_project_id')
def test_fetch_crl(self):
self._test_cert_api('fetch_crl', project_id='fake_project_id')
def test_decrypt_text(self):
self._test_cert_api('decrypt_text',
project_id='fake_project_id', text='blah')
| tylertian/Openstack | openstack F/nova/nova/tests/cert/test_rpcapi.py | Python | apache-2.0 | 3,147 | 0 |
"""Django app config for the analytics app."""
from django.apps import AppConfig
class AnalyticsAppConfig(AppConfig):
"""Analytics app init code."""
name = 'readthedocs.analytics'
verbose_name = 'Analytics'
| rtfd/readthedocs.org | readthedocs/analytics/apps.py | Python | mit | 224 | 0 |
#!/usr/bin/env python3
import logging
from . import SubprocessHook
logger = logging.getLogger("barython")
class PulseAudioHook(SubprocessHook):
"""
    Listen for PulseAudio events with pactl
"""
def __init__(self, cmd=["pactl", "subscribe", "-n", "barython"],
*args, **kwargs):
super().__init__(*args, **kwargs, cmd=cmd)
| Anthony25/barython | barython/hooks/audio.py | Python | bsd-3-clause | 363 | 0 |
import numpy as np
import pygame
from sklearn.mixture import GMM
from math import sqrt, atan, pi
def emFit(results, numComponents):
if len(results) == 0:
return None
m =np.matrix(results)
gmm = GMM(numComponents,covariance_type='full', n_iter= 100, n_init = 4)
gmm.fit(results)
components = []
for componentID in xrange(numComponents):
mu = gmm.means_[componentID]
cov = gmm.covars_[componentID]
proba = gmm.weights_[componentID]
components.append((mu,cov,proba))
components = sorted(components,key=lambda x: x[0][0])
return components
def drawComponents(surface, windowSize, scaleFactor, components):
if components is None:
return
colors = [(255, 150, 150),(150, 150, 255),(150, 255, 150)]
for color,(mu,cov, proba) in zip(colors[:len(components)],components):
eigenvalues, eigenvectors = np.linalg.eig(cov)
major = 2.0 * sqrt(5.991 * eigenvalues.max())
minor = 2.0 * sqrt(5.991 * eigenvalues.min())
angle1 = atan(eigenvectors[1][0]/eigenvectors[0][0])
angle2 = atan(eigenvectors[1][1]/eigenvectors[0][1])
if eigenvalues[0] > eigenvalues[1]:
angle = angle1
else:
angle = angle2
mu_x,mu_y = mu
if major < 1.0 or minor < 1.0:
continue
s = pygame.Surface((major*scaleFactor[0], minor*scaleFactor[1]),pygame.SRCALPHA, 32)
        ellipse = pygame.draw.ellipse(s, color, (0, 0, major*scaleFactor[0], minor*scaleFactor[1]))  # the ellipse height must use the y scale factor
s2 = pygame.transform.rotate(s, angle*360.0/(2.0*pi))
height, width = s2.get_rect().height,s2.get_rect().width
surface.blit(s2,(mu_x*scaleFactor[0]-width/2.0,mu_y*scaleFactor[1]-height/2.0))#(mu_x*scaleFactor[0]-height/2.0,mu_y*scaleFactor[1]-width/2.0))
#s = pygame.Surface((major*scaleFactor[0], minor*scaleFactor[1]))
#s.fill((255,255,255))
#s.set_alpha(128)
#ellipse = pygame.draw.ellipse(s, blue, (0, 0, major*scaleFactor[0], minor*scaleFactor[0]))
#s3 = pygame.transform.rotate(s, angle1*360.0/(2.0*pi))
#height, width = s3.get_rect().height,s3.get_rect().width
#surface.blit(s3,(mu_x*scaleFactor[0]-width/2.0,mu_y*scaleFactor[1]-height/2.0))#(mu_x*scaleFactor[0]-height/2.0,mu_y*scaleFactor[1]-width/2.0))
#surface.blit(s,(0,0))
#print angle*360.0/(2.0*pi)
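
# A minimal usage sketch (illustrative assumptions only: the synthetic data,
# window size and scale factors are not part of the original module). Fits a
# 2-component mixture to 2-D points and draws the covariance ellipses once;
# there is no event loop, so the window closes when the process exits.
if __name__ == '__main__':
    points = np.vstack([
        np.random.randn(200, 2) * 2.0 + np.array([15.0, 15.0]),
        np.random.randn(200, 2) * 3.0 + np.array([45.0, 30.0]),
    ]).tolist()
    components = emFit(points, 2)
    pygame.init()
    screen = pygame.display.set_mode((640, 480))
    screen.fill((0, 0, 0))
    drawComponents(screen, (640, 480), (10.0, 10.0), components)
    pygame.display.flip()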
| sondree/Master-thesis | Python Simulator/simulator/FMM.py | Python | gpl-3.0 | 2,479 | 0.020976 |
import unittest
from chainer import cuda
from chainer import initializers
from chainer import testing
from chainer.testing import attr
import numpy
@testing.parameterize(*testing.product({
'target': [
initializers.Uniform,
initializers.LeCunUniform,
initializers.HeUniform,
initializers.GlorotUniform,
],
'shape': [(2, 3), (2, 3, 4)],
'dtype': [numpy.float16, numpy.float32, numpy.float64],
}))
class TestUniform(unittest.TestCase):
scale = 0.1
def check_initializer(self, w):
initializer = self.target(scale=self.scale)
initializer(w)
self.assertTupleEqual(w.shape, self.shape)
self.assertEqual(w.dtype, self.dtype)
def test_initializer_cpu(self):
w = numpy.empty(self.shape, dtype=self.dtype)
self.check_initializer(w)
@attr.gpu
def test_initializer_gpu(self):
w = cuda.cupy.empty(self.shape, dtype=self.dtype)
self.check_initializer(w)
def check_shaped_initializer(self, xp):
initializer = self.target(scale=self.scale, dtype=self.dtype)
w = initializers.generate_array(initializer, self.shape, xp)
self.assertIs(cuda.get_array_module(w), xp)
self.assertTupleEqual(w.shape, self.shape)
self.assertEqual(w.dtype, self.dtype)
def test_shaped_initializer_cpu(self):
self.check_shaped_initializer(numpy)
@attr.gpu
def test_shaped_initializer_gpu(self):
self.check_shaped_initializer(cuda.cupy)
testing.run_module(__name__, __file__)
| kiyukuta/chainer | tests/chainer_tests/initializer_tests/test_uniform.py | Python | mit | 1,548 | 0 |
#!/usr/bin/env python
#
# (c) 2013 Joost Yervante Damad <joost@damad.be>
# License: GPL
VERSION='1.2.1'
import glob, sys, platform
from setuptools import setup
with open('README.md') as file:
long_description = file.read()
arch = platform.uname()[4]
extra_data_files = []
if sys.platform == 'darwin':
OPTIONS = {
'argv_emulation': True,
#'includes': ['sip', 'PyQt4', 'PyQt4.QtCore', 'PyQt4.QtGui', 'simplejson'],
#'excludes': ['PyQt4.QtDesigner', 'PyQt4.QtNetwork', 'PyQt4.QtOpenGL', 'PyQt4.QtScript', 'PyQt4.QtSql', 'PyQt4.QtTest', 'PyQt4.QtWebKit', 'PyQt4.QtXml', 'PyQt4.phonon'],
}
extra_options = dict(
setup_requires=['py2app'],
app=['madparts'],
# Cross-platform applications generally expect sys.argv to
# be used for opening files.
options=dict(py2app=OPTIONS),
)
elif sys.platform == 'win32':
import py2exe
OPTIONS = {
'includes': [
"OpenGL.arrays._buffers",
"OpenGL.arrays._numeric",
"OpenGL.arrays._strings",
"OpenGL.arrays.arraydatatype",
"OpenGL.arrays.arrayhelpers",
"OpenGL.arrays.buffers",
"OpenGL.arrays.ctypesarrays",
"OpenGL.arrays.ctypesparameters",
"OpenGL.arrays.ctypespointers",
"OpenGL.arrays.formathandler",
"OpenGL.arrays.lists",
"OpenGL.arrays.nones",
"OpenGL.arrays.numbers",
"OpenGL.arrays.numeric",
"OpenGL.arrays.numericnames",
"OpenGL.arrays.numpymodule",
"OpenGL.arrays.strings",
"OpenGL.arrays.vbo",
"OpenGL.platform.ctypesloader",
"OpenGL.platform.win32",
"OpenGL_accelerate.formathandler",
"OpenGL_accelerate.arraydatatype",
"OpenGL_accelerate.errorchecker",
"OpenGL_accelerate.latebind",
"OpenGL_accelerate.nones_formathandler",
"OpenGL_accelerate.numpy_formathandler",
"OpenGL_accelerate.vbo",
"OpenGL_accelerate.wrapper",
]
}
extra_data_files = ['msvcp90.dll',]
extra_options = dict(
setup_requires=['py2exe'],
console=['madparts'],
options=dict(py2exe=OPTIONS)
)
elif sys.platform.startswith('linux'):
extra_options = dict(
# Normally unix-like platforms will use "setup.py install"
# and install the main script as such
scripts=['madparts'],
)
if not arch in ['x86_64']:
raise Exception("unsupported arch %s" % (arch))
else:
raise Exception("unsupported platform %s" % (sys.platform))
setup(
name = 'madparts',
description = 'a functional footprint editor',
long_description = long_description,
author = 'Joost Yervante Damad',
author_email = 'joost@damad.be',
version = VERSION,
url = 'http://madparts.org/',
packages = [
'coffee',
'export',
'gui',
'inter',
'main',
'syntax',
'mutil',
],
package_data= {
'gui': [
'../COPYING', '../README.md', # dirty trick ;)
],
},
data_files = [
('share/madparts/examples', glob.glob('examples/*.coffee')),
('share/madparts/grind', glob.glob('grind/*.coffee')),
('share/madparts/coffeescript', ['coffeescript/LICENSE', 'coffeescript/README'] + glob.glob('coffeescript/*.js')),
('share/madparts/shaders', glob.glob('shaders/*.vert') + glob.glob('shaders/*.frag')),
('share/madparts/gui', ['gui/freefont.COPYING', 'gui/FreeMonoBold.ttf'] ),
] + extra_data_files,
platforms = ["Windows", "Linux", "Mac OS-X"],
**extra_options
)
| spanner888/madparts | setup.py | Python | gpl-3.0 | 3,599 | 0.012781 |
"""
IAudioEndpointVolumeCallback.OnNotify() example.
The OnNotify() callback method gets called on volume change.
"""
from __future__ import print_function
from ctypes import POINTER, cast
from comtypes import CLSCTX_ALL, COMObject
from pycaw.pycaw import (AudioUtilities, IAudioEndpointVolume,
IAudioEndpointVolumeCallback)
class AudioEndpointVolumeCallback(COMObject):
_com_interfaces_ = [IAudioEndpointVolumeCallback]
def OnNotify(self, pNotify):
print('OnNotify callback')
def main():
devices = AudioUtilities.GetSpeakers()
interface = devices.Activate(
IAudioEndpointVolume._iid_, CLSCTX_ALL, None)
volume = cast(interface, POINTER(IAudioEndpointVolume))
callback = AudioEndpointVolumeCallback()
volume.RegisterControlChangeNotify(callback)
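    # Toggle mute a few times; each state change triggers OnNotify on the callback.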
for i in range(3):
volume.SetMute(0, None)
volume.SetMute(1, None)
if __name__ == "__main__":
main()
| AndreMiras/pycaw | examples/volume_callback_example.py | Python | mit | 950 | 0 |
#!/bin/python3
import sys
n = int(input().strip())
s = input().strip()
k = int(input().strip())
d = {}
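# Build a shift table for uppercase (ord 65) and lowercase (ord 97) letters.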
for c in (65, 97):
for i in range(26):
d[chr(i+c)] = chr((i+k) % 26 + c)
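# Characters not in the table (spaces, digits, punctuation) pass through unchanged via d.get(c, c).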
print(''.join([d.get(c, c) for c in s]))
| avtomato/HackerRank | Algorithms/_03_Strings/_04_Caesar_Cipher/solution.py | Python | mit | 233 | 0 |
import sys
vi = sys.version_info
if vi < (3, 5):
raise RuntimeError('httptools require Python 3.5 or greater')
else:
import os.path
import pathlib
from setuptools import setup, Extension
from setuptools.command.build_ext import build_ext as build_ext
CFLAGS = ['-O2']
ROOT = pathlib.Path(__file__).parent
CYTHON_DEPENDENCY = 'Cython(>=0.29.24,<0.30.0)'
class httptools_build_ext(build_ext):
user_options = build_ext.user_options + [
('cython-always', None,
'run cythonize() even if .c files are present'),
('cython-annotate', None,
'Produce a colorized HTML version of the Cython source.'),
('cython-directives=', None,
'Cythion compiler directives'),
('use-system-llhttp', None,
'Use the system provided llhttp, instead of the bundled one'),
('use-system-http-parser', None,
'Use the system provided http-parser, instead of the bundled one'),
]
boolean_options = build_ext.boolean_options + [
'cython-always',
'cython-annotate',
'use-system-llhttp',
'use-system-http-parser',
]
def initialize_options(self):
# initialize_options() may be called multiple times on the
# same command object, so make sure not to override previously
# set options.
if getattr(self, '_initialized', False):
return
super().initialize_options()
self.use_system_llhttp = False
self.use_system_http_parser = False
self.cython_always = False
self.cython_annotate = None
self.cython_directives = None
def finalize_options(self):
# finalize_options() may be called multiple times on the
# same command object, so make sure not to override previously
# set options.
if getattr(self, '_initialized', False):
return
need_cythonize = self.cython_always
cfiles = {}
for extension in self.distribution.ext_modules:
for i, sfile in enumerate(extension.sources):
if sfile.endswith('.pyx'):
prefix, ext = os.path.splitext(sfile)
cfile = prefix + '.c'
if os.path.exists(cfile) and not self.cython_always:
extension.sources[i] = cfile
else:
if os.path.exists(cfile):
cfiles[cfile] = os.path.getmtime(cfile)
else:
cfiles[cfile] = 0
need_cythonize = True
if need_cythonize:
try:
import Cython
except ImportError:
raise RuntimeError(
'please install Cython to compile httptools from source')
if Cython.__version__ < '0.29':
raise RuntimeError(
'httptools requires Cython version 0.29 or greater')
from Cython.Build import cythonize
directives = {}
if self.cython_directives:
for directive in self.cython_directives.split(','):
k, _, v = directive.partition('=')
if v.lower() == 'false':
v = False
if v.lower() == 'true':
v = True
directives[k] = v
self.distribution.ext_modules[:] = cythonize(
self.distribution.ext_modules,
compiler_directives=directives,
annotate=self.cython_annotate)
super().finalize_options()
self._initialized = True
def build_extensions(self):
mod_parser, mod_url_parser = self.distribution.ext_modules
if self.use_system_llhttp:
mod_parser.libraries.append('llhttp')
if sys.platform == 'darwin' and \
os.path.exists('/opt/local/include'):
# Support macports on Mac OS X.
mod_parser.include_dirs.append('/opt/local/include')
else:
mod_parser.include_dirs.append(
str(ROOT / 'vendor' / 'llhttp' / 'include'))
mod_parser.include_dirs.append(
str(ROOT / 'vendor' / 'llhttp' / 'src'))
mod_parser.sources.append('vendor/llhttp/src/api.c')
mod_parser.sources.append('vendor/llhttp/src/http.c')
mod_parser.sources.append('vendor/llhttp/src/llhttp.c')
if self.use_system_http_parser:
mod_url_parser.libraries.append('http_parser')
if sys.platform == 'darwin' and \
os.path.exists('/opt/local/include'):
# Support macports on Mac OS X.
mod_url_parser.include_dirs.append('/opt/local/include')
else:
mod_url_parser.include_dirs.append(
str(ROOT / 'vendor' / 'http-parser'))
mod_url_parser.sources.append(
'vendor/http-parser/http_parser.c')
super().build_extensions()
with open(str(ROOT / 'README.md')) as f:
long_description = f.read()
with open(str(ROOT / 'httptools' / '_version.py')) as f:
for line in f:
if line.startswith('__version__ ='):
_, _, version = line.partition('=')
VERSION = version.strip(" \n'\"")
break
else:
raise RuntimeError(
'unable to read the version from httptools/_version.py')
setup_requires = []
if (not (ROOT / 'httptools' / 'parser' / 'parser.c').exists() or
'--cython-always' in sys.argv):
# No Cython output, require Cython to build.
setup_requires.append(CYTHON_DEPENDENCY)
setup(
name='httptools',
version=VERSION,
description='A collection of framework independent HTTP protocol utils.',
long_description=long_description,
long_description_content_type='text/markdown',
url='https://github.com/MagicStack/httptools',
classifiers=[
'License :: OSI Approved :: MIT License',
'Intended Audience :: Developers',
'Programming Language :: Python :: 3',
'Operating System :: POSIX',
'Operating System :: MacOS :: MacOS X',
'Environment :: Web Environment',
'Development Status :: 5 - Production/Stable',
],
platforms=['macOS', 'POSIX', 'Windows'],
python_requires='>=3.5.0',
zip_safe=False,
author='Yury Selivanov',
author_email='yury@magic.io',
license='MIT',
packages=['httptools', 'httptools.parser'],
cmdclass={
'build_ext': httptools_build_ext,
},
ext_modules=[
Extension(
"httptools.parser.parser",
sources=[
"httptools/parser/parser.pyx",
],
extra_compile_args=CFLAGS,
),
Extension(
"httptools.parser.url_parser",
sources=[
"httptools/parser/url_parser.pyx",
],
extra_compile_args=CFLAGS,
),
],
include_package_data=True,
test_suite='tests.suite',
setup_requires=setup_requires,
extras_require={
'test': [
CYTHON_DEPENDENCY
]
}
)
| MagicStack/httptools | setup.py | Python | mit | 7,252 | 0 |
from django import forms
from faculty.event_types.base import BaseEntryForm
from faculty.event_types.base import CareerEventHandlerBase
from faculty.event_types.choices import Choices
from faculty.event_types.base import TeachingAdjust
from faculty.event_types.fields import TeachingCreditField
from faculty.event_types.mixins import TeachingCareerEvent
from faculty.event_types.search import ChoiceSearchRule
from faculty.event_types.search import ComparableSearchRule
class AdminPositionEventHandler(CareerEventHandlerBase, TeachingCareerEvent):
"""
Given admin position
"""
EVENT_TYPE = 'ADMINPOS'
NAME = 'Admin Position'
TO_HTML_TEMPLATE = """
{% extends "faculty/event_base.html" %}{% load event_display %}{% block dl %}
<dt>Position</dt><dd>{{ handler|get_display:'position' }}</dd>
<dt>Teaching Credit</dt><dd>{{ handler|get_display:'teaching_credit' }}</dd>
{% endblock %}
"""
class EntryForm(BaseEntryForm):
POSITIONS = Choices(
('UGRAD_DIRECTOR', 'Undergrad Program Director'),
('GRAD_DIRECTOR', 'Graduate Program Director'),
('DDP_DIRECTOR', 'Dual-Degree Program Director'),
('ASSOC_DIRECTOR', 'Associate Director/Chair'),
('DIRECTOR', 'School Director/Chair'),
('ASSOC_DEAN', 'Associate Dean'),
('DEAN', 'Dean'),
('OTHER', 'Other Admin Position'),
)
position = forms.ChoiceField(required=True, choices=POSITIONS)
teaching_credit = TeachingCreditField(required=False, initial=None)
SEARCH_RULES = {
'position': ChoiceSearchRule,
'teaching_credit': ComparableSearchRule,
}
SEARCH_RESULT_FIELDS = [
'position',
'teaching_credit',
]
def get_position_display(self):
return self.EntryForm.POSITIONS.get(self.get_config('position'), 'N/A')
def get_teaching_credit_display(self):
return self.get_config('teaching_credit', default='N/A')
@classmethod
def default_title(cls):
return 'Admin Position'
def short_summary(self):
position = self.get_position_display()
return 'Admin Position: {0}'.format(position)
def teaching_adjust_per_semester(self):
credit = self.get_config('teaching_credit', 0)
return TeachingAdjust(credit, 0)
| sfu-fas/coursys | faculty/event_types/position.py | Python | gpl-3.0 | 2,366 | 0.000845 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2019, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: kubevirt_template
short_description: Manage KubeVirt templates
description:
- Use Openshift Python SDK to manage the state of KubeVirt templates.
version_added: "2.8"
author: KubeVirt Team (@kubevirt)
options:
name:
description:
- Name of the Template object.
required: true
type: str
namespace:
description:
- Namespace where the Template object exists.
required: true
type: str
objects:
description:
- List of any valid API objects, such as a I(DeploymentConfig), I(Service), etc. The object
will be created exactly as defined here, with any parameter values substituted in prior to creation.
The definition of these objects can reference parameters defined earlier.
            - As part of the list, the user can also pass the I(VirtualMachine) kind. When passing I(VirtualMachine),
              the user must use the Ansible structure of the parameters, not the Kubernetes API structure. For more
              information please take a look at the M(kubevirt_vm) module and at the EXAMPLES section, where you can see an example.
type: list
merge_type:
description:
- Whether to override the default patch merge approach with a specific type. By default, the strategic
merge will typically be used.
type: list
choices: [ json, merge, strategic-merge ]
display_name:
description:
- "A brief, user-friendly name, which can be employed by user interfaces."
type: str
description:
description:
- A description of the template.
- Include enough detail that the user will understand what is being deployed...
and any caveats they need to know before deploying. It should also provide links to additional information,
such as a README file."
type: str
long_description:
description:
- "Additional template description. This may be displayed by the service catalog, for example."
type: str
provider_display_name:
description:
- "The name of the person or organization providing the template."
type: str
documentation_url:
description:
- "A URL referencing further documentation for the template."
type: str
support_url:
description:
- "A URL where support can be obtained for the template."
type: str
editable:
description:
- "Extension for hinting at which elements should be considered editable.
List of jsonpath selectors. The jsonpath root is the objects: element of the template."
            - This parameter can be used only when the kubevirt addon is installed on your openshift cluster.
type: list
default_disk:
description:
- "The goal of default disk is to define what kind of disk is supported by the OS mainly in
terms of bus (ide, scsi, sata, virtio, ...)"
            - The C(default_disk) parameter defines a configuration overlay for disks that will be applied on top of disks
              during virtual machine creation to provide the global compatibility and/or performance defaults defined here.
            - This parameter can be used only when the kubevirt addon is installed on your openshift cluster.
type: dict
default_volume:
description:
- "The goal of default volume is to be able to configure mostly performance parameters like
caches if those are exposed by the underlying volume implementation."
            - The C(default_volume) parameter defines a configuration overlay for volumes that will be applied on top of volumes
              during virtual machine creation to provide the global compatibility and/or performance defaults defined here.
            - This parameter can be used only when the kubevirt addon is installed on your openshift cluster.
type: dict
default_nic:
description:
- "The goal of default network is similar to I(default_disk) and should be used as a template
to ensure OS compatibility and performance."
- The C(default_nic) parameter define configuration overlay for nic that will be applied on top of nics
during virtual machine creation to define global compatibility and/or performance defaults defined here.
- This is parameter can be used only when kubevirt addon is installed on your openshift cluster.
type: dict
default_network:
description:
- "The goal of default network is similar to I(default_volume) and should be used as a template
that specifies performance and connection parameters (L2 bridge for example)"
- The C(default_network) parameter define configuration overlay for networks that will be applied on top of networks
during virtual machine creation to define global compatibility and/or performance defaults defined here.
- This is parameter can be used only when kubevirt addon is installed on your openshift cluster.
type: dict
icon_class:
description:
- "An icon to be displayed with your template in the web console. Choose from our existing logo
icons when possible. You can also use icons from FontAwesome. Alternatively, provide icons through
CSS customizations that can be added to an OpenShift Container Platform cluster that uses your template.
You must specify an icon class that exists, or it will prevent falling back to the generic icon."
type: str
parameters:
description:
- "Parameters allow a value to be supplied by the user or generated when the template is instantiated.
Then, that value is substituted wherever the parameter is referenced. References can be defined in any
field in the objects list field. This is useful for generating random passwords or allowing the user to
supply a host name or other user-specific value that is required to customize the template."
- "More information can be found at: U(https://docs.openshift.com/container-platform/3.6/dev_guide/templates.html#writing-parameters)"
type: list
version:
description:
- Template structure version.
            - This parameter can be used only when the kubevirt addon is installed on your openshift cluster.
type: str
extends_documentation_fragment:
- k8s_auth_options
- k8s_state_options
requirements:
- python >= 2.7
- openshift >= 0.8.2
'''
EXAMPLES = '''
- name: Create template 'mytemplate'
kubevirt_template:
state: present
name: myvmtemplate
namespace: templates
display_name: Generic cirros template
description: Basic cirros template
long_description: Verbose description of cirros template
provider_display_name: Just Be Cool, Inc.
documentation_url: http://theverycoolcompany.com
support_url: http://support.theverycoolcompany.com
icon_class: icon-linux
default_disk:
disk:
bus: virtio
default_nic:
model: virtio
default_network:
resource:
resourceName: bridge.network.kubevirt.io/cnvmgmt
default_volume:
containerDisk:
image: kubevirt/cirros-container-disk-demo:latest
objects:
- name: ${NAME}
kind: VirtualMachine
memory: ${MEMORY_SIZE}
state: present
namespace: vms
parameters:
- name: NAME
description: VM name
generate: expression
from: 'vm-[A-Za-z0-9]{8}'
- name: MEMORY_SIZE
description: Memory size
value: 1Gi
- name: Remove template 'myvmtemplate'
kubevirt_template:
state: absent
name: myvmtemplate
namespace: templates
'''
RETURN = '''
kubevirt_template:
description:
- The template dictionary specification returned by the API.
returned: success
type: complex
contains: {}
'''
import copy
import traceback
from ansible.module_utils.k8s.common import AUTH_ARG_SPEC
from ansible.module_utils.kubevirt import (
virtdict,
KubeVirtRawModule,
API_GROUP,
MAX_SUPPORTED_API_VERSION
)
TEMPLATE_ARG_SPEC = {
'name': {'required': True},
'namespace': {'required': True},
'state': {
'default': 'present',
'choices': ['present', 'absent'],
},
'force': {
'type': 'bool',
'default': False,
},
'merge_type': {
'type': 'list',
'choices': ['json', 'merge', 'strategic-merge']
},
'objects': {
'type': 'list',
},
'display_name': {
'type': 'str',
},
'description': {
'type': 'str',
},
'long_description': {
'type': 'str',
},
'provider_display_name': {
'type': 'str',
},
'documentation_url': {
'type': 'str',
},
'support_url': {
'type': 'str',
},
'icon_class': {
'type': 'str',
},
'version': {
'type': 'str',
},
'editable': {
'type': 'list',
},
'default_disk': {
'type': 'dict',
},
'default_volume': {
'type': 'dict',
},
'default_network': {
'type': 'dict',
},
'default_nic': {
'type': 'dict',
},
'parameters': {
'type': 'list',
},
}
class KubeVirtVMTemplate(KubeVirtRawModule):
@property
def argspec(self):
""" argspec property builder """
argument_spec = copy.deepcopy(AUTH_ARG_SPEC)
argument_spec.update(TEMPLATE_ARG_SPEC)
return argument_spec
def execute_module(self):
# Parse parameters specific for this module:
definition = virtdict()
# Execute the CRUD of VM template:
kind = 'Template'
template_api_version = 'template.openshift.io/v1'
# Fill in template parameters:
definition['parameters'] = self.params.get('parameters')
# Fill in the default Label
labels = definition['metadata']['labels']
labels['template.cnv.io/type'] = 'vm'
# Fill in Openshift/Kubevirt template annotations:
annotations = definition['metadata']['annotations']
if self.params.get('display_name'):
annotations['openshift.io/display-name'] = self.params.get('display_name')
if self.params.get('description'):
annotations['description'] = self.params.get('description')
if self.params.get('long_description'):
annotations['openshift.io/long-description'] = self.params.get('long_description')
if self.params.get('provider_display_name'):
annotations['openshift.io/provider-display-name'] = self.params.get('provider_display_name')
if self.params.get('documentation_url'):
annotations['openshift.io/documentation-url'] = self.params.get('documentation_url')
if self.params.get('support_url'):
annotations['openshift.io/support-url'] = self.params.get('support_url')
if self.params.get('icon_class'):
annotations['iconClass'] = self.params.get('icon_class')
if self.params.get('version'):
annotations['template.cnv.io/version'] = self.params.get('version')
        # TODO: Make this more Ansiblish, so the user doesn't have to specify the API JSON path, but rather Ansible params:
if self.params.get('editable'):
annotations['template.cnv.io/editable'] = self.params.get('editable')
# Set defaults annotations:
if self.params.get('default_disk'):
annotations['defaults.template.cnv.io/disk'] = self.params.get('default_disk').get('name')
if self.params.get('default_volume'):
annotations['defaults.template.cnv.io/volume'] = self.params.get('default_volume').get('name')
if self.params.get('default_nic'):
annotations['defaults.template.cnv.io/nic'] = self.params.get('default_nic').get('name')
if self.params.get('default_network'):
annotations['defaults.template.cnv.io/network'] = self.params.get('default_network').get('name')
# Process objects:
self.client = self.get_api_client()
definition['objects'] = []
objects = self.params.get('objects') or []
for obj in objects:
if obj['kind'] != 'VirtualMachine':
definition['objects'].append(obj)
else:
vm_definition = virtdict()
# Set VM defaults:
if self.params.get('default_disk'):
vm_definition['spec']['template']['spec']['domain']['devices']['disks'] = [self.params.get('default_disk')]
if self.params.get('default_volume'):
vm_definition['spec']['template']['spec']['volumes'] = [self.params.get('default_volume')]
if self.params.get('default_nic'):
vm_definition['spec']['template']['spec']['domain']['devices']['interfaces'] = [self.params.get('default_nic')]
if self.params.get('default_network'):
vm_definition['spec']['template']['spec']['networks'] = [self.params.get('default_network')]
# Set kubevirt API version:
vm_definition['apiVersion'] = '%s/%s' % (API_GROUP, MAX_SUPPORTED_API_VERSION)
# Construct k8s vm API object:
vm_template = vm_definition['spec']['template']
dummy, vm_def = self.construct_vm_template_definition('VirtualMachine', vm_definition, vm_template, obj)
definition['objects'].append(vm_def)
# Create template:
resource = self.client.resources.get(api_version=template_api_version, kind=kind, name='templates')
definition = self.set_defaults(resource, definition)
result = self.perform_action(resource, definition)
# Return from the module:
self.exit_json(**{
'changed': result['changed'],
'kubevirt_template': result.pop('result'),
'result': result,
})
def main():
module = KubeVirtVMTemplate()
try:
module.execute_module()
except Exception as e:
module.fail_json(msg=str(e), exception=traceback.format_exc())
if __name__ == '__main__':
main()
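# Illustrative playbook usage of this module (hypothetical values; the
# annotation-related option names are the ones defined in TEMPLATE_ARG_SPEC
# above, the remaining keys are assumptions):
#
# - kubevirt_template:
#     name: example-vm-template
#     namespace: templates
#     display_name: Example VM template
#     description: Template used for smoke testing
#     version: v0.1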
| thaim/ansible | lib/ansible/modules/cloud/kubevirt/kubevirt_template.py | Python | mit | 14,884 | 0.004367 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.core.exceptions import ValidationError
from django.forms import models
from djanban.apps.hourly_rates.models import HourlyRate
from django import forms
# Hourly rate creation and edition form
class HourlyRateForm(models.ModelForm):
class Meta:
model = HourlyRate
fields = ["name", "start_date", "end_date", "amount", "is_active"]
widgets = {
'start_date': forms.SelectDateWidget(),
'end_date': forms.SelectDateWidget(empty_label=u"Until now"),
}
def __init__(self, *args, **kwargs):
super(HourlyRateForm, self).__init__(*args, **kwargs)
def clean(self):
cleaned_data = super(HourlyRateForm, self).clean()
if cleaned_data.get("end_date") and cleaned_data.get("start_date") > cleaned_data.get("end_date"):
            raise ValidationError(u"Start date can't be greater than end date")
return cleaned_data
class DeleteHourlyRateForm(forms.Form):
confirmed = forms.BooleanField(label=u"Please confirm you really want to do this action", required=True)
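# Minimal usage sketch of the clean() check above (illustrative data only;
# assumes a configured Django environment):
#
# form = HourlyRateForm(data={
#     "name": "Standard", "amount": 50, "is_active": True,
#     "start_date": "2017-02-01", "end_date": "2017-01-01",  # inverted on purpose
# })
# form.is_valid()  # -> False: start_date > end_date is rejected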
| diegojromerolopez/djanban | src/djanban/apps/hourly_rates/forms.py | Python | mit | 1,137 | 0.001759 |
import os
import sys
import pandas as pd
import numpy as np
from numpy.random import poisson, uniform
from numpy import mean
import time
import math
po = True
teamsheetpath = sys.path[0] + '/teamcsvs/'
compstat = {'TDF': 'TDA', 'TDA': 'TDF', #Dictionary to use to compare team stats with opponent stats
'FGF': 'FGA', 'FGA': 'FGF',
'SFF': 'SFA', 'SFA': 'SFF',
'PAT1%F': 'PAT1%A', 'PAT1%A': 'PAT1%F',
'PAT2%F': 'PAT2%A', 'PAT2%A': 'PAT2%F'}
def get_opponent_stats(opponent): #Gets summaries of statistics for opponent each week
opponent_stats = {}
global teamsheetpath
opp_stats = pd.DataFrame.from_csv(teamsheetpath + opponent + '.csv')
for stat in opp_stats.columns:
if stat in ['TDF', 'FGF', 'SFF', 'TDA', 'FGA', 'SFA']:
opponent_stats.update({stat: opp_stats[stat].mean()})
try:
opponent_stats.update({'PAT1%F': float(opp_stats['PAT1FS'].sum()) / opp_stats['PAT1FA'].sum()})
except ZeroDivisionError:
opponent_stats.update({'PAT1%F': .99})
try:
opponent_stats.update({'PAT2%F': float(opp_stats['PAT2FS'].sum()) / opp_stats['PAT2FA'].sum()})
except ZeroDivisionError:
opponent_stats.update({'PAT2%F': .5})
try:
opponent_stats.update({'PAT1%A': float(opp_stats['PAT1AS'].sum()) / opp_stats['PAT1AA'].sum()})
except ZeroDivisionError:
opponent_stats.update({'PAT1%A': .99})
try:
opponent_stats.update({'PAT2%A': float(opp_stats['PAT2AS'].sum()) / opp_stats['PAT2AA'].sum()})
except ZeroDivisionError:
opponent_stats.update({'PAT2%A': .5})
return opponent_stats
def get_residual_performance(team): #Get how each team has done compared to the average performance of their opponents
global teamsheetpath
score_df = pd.DataFrame.from_csv(teamsheetpath + team + '.csv')
residual_stats = {}
score_df['PAT1%F'] = np.nan
score_df['PAT2%F'] = np.nan
score_df['PAT1%A'] = np.nan
score_df['PAT2%A'] = np.nan
for week in score_df.index:
try:
score_df['PAT1%F'][week] = float(score_df['PAT1FS'][week]) / score_df['PAT1FA'][week]
except ZeroDivisionError:
score_df['PAT1%F'][week] = 0.99
#print ('For: ' + str(score_df['PAT1%F'][week]))
try:
score_df['PAT2%F'][week] = float(score_df['PAT2FS'][week]) / score_df['PAT2FA'][week]
except ZeroDivisionError:
score_df['PAT2%F'][week] = 0.5
try:
score_df['PAT1%A'][week] = float(score_df['PAT1AS'][week]) / score_df['PAT1AA'][week]
except ZeroDivisionError:
score_df['PAT1%A'][week] = 0.99
#print ('Against: ' + str(score_df['PAT1%F'][week]))
try:
score_df['PAT2%A'][week] = float(score_df['PAT2AS'][week]) / score_df['PAT2AA'][week]
except ZeroDivisionError:
score_df['PAT2%A'][week] = 0.5
opponent_stats = get_opponent_stats(score_df['OPP'][week])
for stat in opponent_stats:
if week == 1:
score_df['OPP_' + stat] = np.nan
score_df['OPP_' + stat][week] = opponent_stats[stat]
for stat in opponent_stats:
score_df['R_' + stat] = score_df[stat] - score_df['OPP_' + compstat[stat]]
if stat in ['TDF', 'FGF', 'SFF', 'TDA', 'FGA', 'SFA']:
residual_stats.update({stat: score_df['R_' + stat].mean()})
elif stat == 'PAT1%F':
residual_stats.update({stat: (score_df['R_PAT1%F'].multiply(score_df['PAT1FA'])).sum() / score_df['PAT1FA'].sum()})
elif stat == 'PAT2%F':
residual_stats.update({stat: (score_df['R_PAT2%F'].multiply(score_df['PAT2FA'])).sum() / score_df['PAT2FA'].sum()})
elif stat == 'PAT1%A':
residual_stats.update({stat: (score_df['R_PAT1%A'].multiply(score_df['PAT1AA'])).sum() / score_df['PAT1AA'].sum()})
elif stat == 'PAT2%A':
residual_stats.update({stat: (score_df['R_PAT2%A'].multiply(score_df['PAT2AA'])).sum() / score_df['PAT2AA'].sum()})
try:
residual_stats.update({'GOFOR2': float(score_df['PAT2FA'].sum()) / score_df['TDF'].sum()})
except ZeroDivisionError:
residual_stats.update({'GOFOR2': .1})
#print team
#print residual_stats
return residual_stats
def get_score(expected_scores): #Get the score for a team based on expected scores
score = 0
if expected_scores['TD'] > 0:
tds = poisson(expected_scores['TD'])
else:
tds = poisson(0.01)
score = score + 6 * tds
if expected_scores['FG'] > 0:
fgs = poisson(expected_scores['FG'])
else:
fgs = poisson(0.01)
score = score + 3 * fgs
if expected_scores['S'] > 0:
sfs = poisson(expected_scores['S'])
else:
sfs = poisson(0.01)
score = score + 2 * sfs
for td in range(tds):
go_for_2_determinant = uniform(0, 1)
if go_for_2_determinant <= expected_scores['GOFOR2']: #Going for 2
successful_pat_determinant = uniform(0, 1)
if successful_pat_determinant <= expected_scores['PAT2PROB']:
score = score + 2
else:
continue
else: #Going for 1
#print(expected_scores['PAT1PROB'])
successful_pat_determinant = uniform(0, 1)
if successful_pat_determinant <= expected_scores['PAT1PROB']:
score = score + 1
else:
continue
return score
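# Example: one simulated score from a made-up expectation profile (the keys
# match those assembled in get_expected_scores below):
# get_score({'TD': 2.5, 'FG': 1.8, 'S': 0.05,
#            'GOFOR2': 0.1, 'PAT1PROB': 0.94, 'PAT2PROB': 0.48})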
def game(team_1, team_2,
expected_scores_1, expected_scores_2,
playoff): #Get two scores and determine a winner
score_1 = get_score(expected_scores_1)
score_2 = get_score(expected_scores_2)
if score_1 > score_2:
win_1 = 1
win_2 = 0
draw_1 = 0
draw_2 = 0
elif score_2 > score_1:
win_1 = 0
win_2 = 1
draw_1 = 0
draw_2 = 0
else:
if playoff:
win_1 = 0.5
win_2 = 0.5
draw_1 = 0
draw_2 = 0
else:
win_1 = 0
win_2 = 0
draw_1 = 1
draw_2 = 1
summary = {team_1: [win_1, draw_1, score_1]}
summary.update({team_2: [win_2, draw_2, score_2]})
return summary
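# The returned summary maps each team to [win, draw, score]; a single
# simulated game could yield e.g. {'NE': [1, 0, 27], 'SEA': [0, 0, 24]}
# (team names here are illustrative).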
def get_expected_scores(team_1_stats, team_2_stats, team_1_df, team_2_df): #Get the expected scores for a matchup based on the teams' previous performances
    expected_scores = {}
    expected_scores.update({'TD': mean([team_1_stats['TDF'] + team_2_df['TDA'].mean(),
                                        team_2_stats['TDA'] + team_1_df['TDF'].mean()])})
    expected_scores.update({'FG': mean([team_1_stats['FGF'] + team_2_df['FGA'].mean(),
                                        team_2_stats['FGA'] + team_1_df['FGF'].mean()])})
    expected_scores.update({'S': mean([team_1_stats['SFF'] + team_2_df['SFA'].mean(),
                                       team_2_stats['SFA'] + team_1_df['SFF'].mean()])})
#print mean([team_1_stats['PAT1%F'] + team_2_df['PAT1AS'].astype('float').sum() / team_2_df['PAT1AA'].sum(),
# team_2_stats['PAT1%A'] + team_1_df['PAT1FS'].astype('float').sum() / team_1_df['PAT1FA'].sum()])
expected_scores.update({'GOFOR2': team_1_stats['GOFOR2']})
pat1prob = mean([team_1_stats['PAT1%F'] + team_2_df['PAT1AS'].astype('float').sum() / team_2_df['PAT1AA'].sum(),
team_2_stats['PAT1%A'] + team_1_df['PAT1FS'].astype('float').sum() / team_1_df['PAT1FA'].sum()])
if not math.isnan(pat1prob):
expected_scores.update({'PAT1PROB': pat1prob})
else:
expected_scores.update({'PAT1PROB': 0.99})
#print(expected_scores['PAT1PROB'])
pat2prob = mean([team_1_stats['PAT2%F'] + team_2_df['PAT2AS'].astype('float').sum() / team_2_df['PAT2AA'].sum(),
team_2_stats['PAT2%A'] + team_1_df['PAT2FS'].astype('float').sum() / team_1_df['PAT2FA'].sum()])
if not math.isnan(pat2prob):
expected_scores.update({'PAT2PROB': pat2prob})
else:
expected_scores.update({'PAT2PROB': 0.5})
#print(expected_scores)
return expected_scores
def matchup(team_1, team_2):
ts = time.time()
team_1_season = pd.DataFrame.from_csv(teamsheetpath + team_1 + '.csv')
team_2_season = pd.DataFrame.from_csv(teamsheetpath + team_2 + '.csv')
stats_1 = get_residual_performance(team_1)
stats_2 = get_residual_performance(team_2)
expected_scores_1 = get_expected_scores(stats_1, stats_2, team_1_season, team_2_season)
expected_scores_2 = get_expected_scores(stats_2, stats_1, team_2_season, team_1_season)
team_1_wins = 0
team_2_wins = 0
team_1_draws = 0
team_2_draws = 0
team_1_scores = []
team_2_scores = []
i = 0
error = 1
    while error > 0.000001 or i < 5000000: #Run for at least 5 million iterations and until the win probability converges
summary = game(team_1, team_2,
expected_scores_1, expected_scores_2,
po)
team_1_prev_wins = team_1_wins
team_1_wins += summary[team_1][0]
team_2_wins += summary[team_2][0]
team_1_draws += summary[team_1][1]
team_2_draws += summary[team_2][1]
team_1_scores.append(summary[team_1][2])
team_2_scores.append(summary[team_2][2])
team_1_prob = float(team_1_wins) / len(team_1_scores)
team_2_prob = float(team_2_wins) / len(team_2_scores)
if i > 0:
team_1_prev_prob = float(team_1_prev_wins) / i
            error = abs(team_1_prob - team_1_prev_prob)
i = i + 1
if i == 5000000:
print('Probability converged within 5 million iterations')
else:
print('Probability converged after ' + str(i) + ' iterations')
games = pd.DataFrame.from_items([(team_1, team_1_scores), (team_2, team_2_scores)])
summaries = games.describe(percentiles = [0.025, 0.1, 0.25, 0.5, 0.75, 0.9, 0.975])
output = {'ProbWin': {team_1: team_1_prob, team_2: team_2_prob}, 'Scores': summaries}
print(team_1 + '/' + team_2 + ' score distributions computed in ' + str(round(time.time() - ts, 1)) + ' seconds')
return output | JoeJimFlood/NFLPrediction2014 | matchup.py | Python | mit | 10,272 | 0.007496 |
#!/usr/bin/python
from __future__ import print_function
import random
import re
import datetime
import os
import sys
import time
from optparse import make_option
import urllib2
import tarfile
from multiprocessing import cpu_count
from django.conf import settings
from django.core.management.base import BaseCommand
from django.db import connection, reset_queries, OperationalError
from django.db.transaction import commit_on_success, commit
from django.utils import timezone
from joblib import Parallel, delayed
from six.moves import cPickle as pickle
from six.moves import range as xrange
from six.moves import input as raw_input
from six import u
from asklet import constants as c
from asklet.utils import MatrixUser
from asklet import models
class ConceptNetEdge(object):
def __init__(self, *args):
fields = [
'uri', 'rel', 'start', 'end', 'context',
'weight', 'sources', 'id', 'dataset', 'surfaceText']
# print('fields:',fields)
# print('args:',args)
assert len(args) == len(fields), '%i != %i' % (len(args), len(fields))
self.__dict__.update(zip(fields, args))
self.surfaceText = self.surfaceText.strip()
@classmethod
def from_string(cls, s):
return cls(*s.split('\t'))
@property
def surface_parts(self):
text = self.surfaceText
parts = [_ for _ in re.split('\[\[|\]\]', text) if _.strip()]
if len(parts) == 3:
return parts
@property
def target_text(self):
parts = self.surface_parts
if parts:
return parts[0].strip()
text = re.sub('[^a-zA-Z0-9]+', ' ', self.start.split('/')[-1])
text = re.sub('[ ]+', ' ', text)
return text
@property
def target_slug(self):
return self.start
@property
def question_text(self):
parts = self.surface_parts
if parts:
return '[%s] [%s]' % (parts[1].strip(), parts[2].strip())
#Not reliable. Makes broken segments.
# text = re.sub('[^a-zA-Z0-9]+', ' ', self.rel.split('/')[-1].lower() + ' ' + self.end.split('/')[-1])
# text = re.sub('[ ]+', ' ', text)
# return text
@property
def question_slug(self):
return '%s,%s' % (self.rel, self.end)
@property
def weight_int(self):
#typical=1, our typical is 2
weight = float(self.weight)*2
weight = min(max(weight, c.NO), c.YES)
return int(round(weight*1000))
def __str__(self):
return '%s->%s->%s' % (self.start, self.rel, self.end)
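# Illustrative parse of one tab-separated assertion line (made-up URIs that
# follow the field order listed in __init__):
# edge = ConceptNetEdge.from_string(
#     '/a/[...]\t/r/IsA\t/c/en/cat\t/c/en/animal\t/ctx/all\t1.0'
#     '\t[]\t42\t/d/conceptnet/4/en\t[[cat]] is a type of [[animal]]')
# edge.target_text    # -> 'cat'
# edge.question_slug  # -> '/r/IsA,/c/en/animal'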
def download_concept():
base = 'http://conceptnet5.media.mit.edu/downloads/current/'
html = urllib2.urlopen(base).read()
local_dir = '/tmp'
matches = re.findall('"(conceptnet5_csv_[^"]+)"', html)
if matches:
fn = matches[-1]
local_fqfn = os.path.join(local_dir, fn)
if os.path.isfile(local_fqfn):
print('File %s already downloaded' % local_fqfn)
return local_fqfn
url = base + fn
print('Downloading %s...' % url)
os.system('wget --directory-prefix=/tmp %s' % url)
return local_fqfn
else:
print(('No Conceptnet URL found! Perhaps the '
'page %s has changed?') % base, file=sys.stderr)
@commit_on_success
def process(fn, part_name, domain_slug, commit_freq=10):
print('%s: Processing...' % part_name)
connection.close()
domain = models.Domain.objects.get(slug=domain_slug)
models.SET_QUESTION_INDEX = False
models.SET_TARGET_INDEX = False
fi, _ = models.FileImport.objects.get_or_create(
domain=domain,
filename=fn.split('/')[-1],
part=part_name)
if fi.total_lines is None:
tar = tarfile.open(fn, 'r')
fin = tar.extractfile(part_name)
print('%s: Counting lines...' % part_name)
total = fin.read().decode('utf8').count('\n')
fi.current_line = 0
fi.total_lines = total
fi.save()
elif fi.done:
print('%s: Already complete.' % part_name)
return
else:
total = fi.total_lines
print('%s: %i lines found.' % (part_name, total))
tar = tarfile.open(fn, 'r')
fin = tar.extractfile(part_name)
skip_to_line = fi.current_line or 0
i = 0
for line in fin:
i += 1
if skip_to_line and i < skip_to_line:
continue
if i == 1 or not i % commit_freq or i == total:
print(
'%s: Processing line %i of %i %.02f%%.' \
% (part_name, i, total, i/float(total or i)*100))
sys.stdout.flush()
fi.current_line = i
fi.save()
commit()
reset_queries()
line = line.decode('utf8')
edge = ConceptNetEdge.from_string(line)
subject_lang = models.extract_language_code(edge.start)
object_lang = models.extract_language_code(edge.end)
# Ignore edges without sense.
# Note, this skips an estimated 85% of edges.
# start_sense = models.extract_sense(edge.start)
# if not start_sense:
# continue
# end_sense = models.extract_sense(edge.end)
# if not end_sense:
# continue
retry = 0
while 1:
try:
retry += 1
target = None
if not domain.language or subject_lang == domain.language:
target, _ = models.Target.objects.get_or_create(
domain=domain,
slug=edge.target_slug,
defaults=dict(
text=edge.target_text
)
)
target.conceptnet_subject = edge.start
target.enabled = True
target.save()
question = None
if not domain.language or object_lang == domain.language:
question, _ = models.Question.objects.get_or_create(
domain=domain,
slug=edge.question_slug,
defaults=dict(
text=edge.question_text
)
)
question.conceptnet_predicate = edge.rel
question.conceptnet_object = edge.end
question.enabled = True
question.save()
if target and question:
weight, _ = models.TargetQuestionWeight.objects.get_or_create(
target=target,
question=question,
defaults=dict(
weight=edge.weight_int,
count=1000,
))
weight.text = edge.surfaceText
weight.save()
break
except OperationalError as e:
if 'deadlock' in str(e):
print('%s: Retry %i after deadlock.' % (part_name, retry))
else:
raise
print('%s: Complete.' % part_name)
class Command(BaseCommand):
help = 'Loads targets, questions and weights from a ConceptNet5 CSV dump file.'
args = ''
option_list = BaseCommand.option_list + (
#make_option('--seed', default=None),
make_option('--domain', default=''),
make_option('--fn', default=''),
make_option('--parts', default=20),
make_option('--commit-freq', default=10),
make_option('--part-name-template',
default='assertions/part_%02i.csv'),
)
def handle(self, *args, **options):
tmp_settings = settings.DEBUG
settings.DEBUG = False
try:
commit_freq = int(options['commit_freq'])
parts = int(options['parts'])
part_name_template = options['part_name_template']
fn = options['fn'].strip()
if not fn or not os.path.isfile(fn):
fn = download_concept()
domain_slug = options['domain']
domain = models.Domain.objects.get(slug=domain_slug)
print('Launching processes...')
connection.close()
Parallel(n_jobs=cpu_count())(
delayed(process)(
fn=fn,
part_name=part_name_template % i,
domain_slug=domain_slug,
commit_freq=commit_freq,
)
for i in range(parts))
# models.SET_TARGET_INDEX = True
# q = domain.targets.filter(index__isnull=True).order_by('id')
# total = q.count()
# i = 0
# for r in q.iterator():
# i += 1
# if i == 1 or not i % 10 or i == total:
# print('Updating target index %i of %i.' % (i, total))
# r.save()
#
# models.SET_QUESTION_INDEX = True
# q = domain.questions.filter(index__isnull=True).order_by('id')
# total = q.count()
# i = 0
# for r in q.iterator():
# i += 1
# if i == 1 or not i % 10 or i == total:
# print('Updating question index %i of %i.' % (i, total))
# r.save()
finally:
settings.DEBUG = tmp_settings
| chrisspen/asklet | asklet/management/commands/asklet_load_conceptnet.py | Python | lgpl-3.0 | 9,742 | 0.005646 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2010 British Broadcasting Corporation and Kamaelia Contributors(1)
#
# (1) Kamaelia Contributors are listed in the AUTHORS file and at
# http://www.kamaelia.org/AUTHORS - please extend this file,
# not this notice.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -------------------------------------------------------------------------
# Licensed to the BBC under a Contributor Agreement: RJL
"""\
===========================================
Peer-to-Peer Streaming System (client part)
===========================================
This example demonstrates the use of BitTorrent and HTTP to download, share
reconstruct a data stream in real-time.
It expects a webserver hosting a folder that contains:
- meta.txt (a file containing the number of chunks/torrents in the stream
so far as a decimal, ASCII string)
- 1.torrent
- 2.torrent
- ...
- 123.torrent (if meta.txt contained "123")
Only this metainfo is downloaded using HTTP. The stream itself is downloaded
(and uploaded to other downloaders) using BitTorrent.
Other users must upload the stream's chunks using BitTorrent for this demo
to work.
To listen to/view the stream, just point your favourite media player
(say, XMMS) at the reconstructed file after it's been downloading for a minute
or so.
"""
import time
from Axon.Component import component
from Axon.Ipc import producerFinished, shutdown
from Kamaelia.Chassis.Pipeline import pipeline
from Kamaelia.File.Writing import SimpleFileWriter
from Kamaelia.Community.RJL.Kamaelia.File.TriggeredFileReader import TriggeredFileReader
from Kamaelia.Community.RJL.Kamaelia.Protocol.HTTP.HTTPClient import SimpleHTTPClient
from Kamaelia.Community.RJL.Kamaelia.Protocol.Torrent.TorrentPatron import TorrentPatron
from Kamaelia.Community.RJL.Kamaelia.Protocol.Torrent.TorrentIPC import TIPCNewTorrentCreated, TIPCTorrentStatusUpdate
from Kamaelia.Community.RJL.Kamaelia.Util.Clock import CheapAndCheerfulClock
from Kamaelia.Community.RJL.Kamaelia.Util.DataSource import TriggeredSource
class StreamReconstructor(component):
"""\
StreamReconstructor()
This component receives reports on the status/completion of BitTorrent
downloads from a TorrentPatron instance. It keeps a record of the
order in which torrents were started and waits until the first is
finished. It then outputs the filename of this torrent and removes
it from its list. Then it waits for the second torrent (now the first
on the list) to finish downloading, then outputs its filename and so on.
If later torrents finish before earlier ones, their filenames are not
output until their all their predecessors have finished.
    The purpose of this is to output the names of files whose contents should
be concatenated to a master file to reconstruct the stream.
"""
def main(self):
torrents = []
while 1:
yield 1
while self.dataReady("inbox"):
msg = self.recv("inbox")
if isinstance(msg, TIPCNewTorrentCreated):
torrents.append([msg.torrentid, msg.savefolder]) # add the new torrent to the list of known torrents
elif isinstance(msg, TIPCTorrentStatusUpdate):
# if the status update is about the oldest torrent that
# has not been completed prior to now, then...
if len(torrents) > 0 and msg.torrentid == torrents[0][0]:
# if this oldest torrent is now complete
if msg.statsdictionary.get("fractionDone",0) == 1:
# forward on the name of the file downloaded in this torrent
self.send(torrents[0][1], "outbox")
torrents.pop(0) # and remove it from our list of torrents that we care about
while self.dataReady("control"):
msg = self.recv("control")
if isinstance(msg, shutdown) or isinstance(msg, producerFinished):
# if we are being told to shutdown then do so
self.send(producerFinished(self), "signal")
return
self.pause()
class PartsFilenameGenerator(component):
"""\
PartsFilenameGenerator()
Arguments:
- prefix - string to prepend to the id of a torrent to make its URL
- [suffix] - string to append to the id of the torrent to make the URL
defaults to ".torrent"
Generate the URLs of the .torrents that make up the stream
from reports of the total number of chunks/torrents in the stream
that are received on "inbox".
e.g. Assuming it was created as
PartsFilenameGenerator("http://www.example.com/", ".torrent"),
Send it "3" and it will output (one message listed per line):
- "http://www.example.com/1.torrent"
- "http://www.example.com/2.torrent"
- "http://www.example.com/3.torrent"
Then send it "3" again and it will output nothing.
Now send it "5" and it will output:
- "http://www.example.com/4.torrent"
- "http://www.example.com/5.torrent"
"""
    def __init__(self, prefix, suffix = ".torrent"):
        self.prefix = prefix
        self.suffix = suffix
        super(PartsFilenameGenerator, self).__init__()
def main(self):
        highestsofar = 0 # we have not outputted any torrent URLs so far
while 1:
yield 1
while self.dataReady("inbox"):
msg = int(self.recv("inbox"))
# output the URLs of all the torrents whose numbers are > the
# number of last torrent output and <= the value of message received
while highestsofar < msg:
highestsofar += 1
self.send(self.prefix + str(highestsofar) + self.suffix, "outbox")
while self.dataReady("control"):
msg = self.recv("control"):
if isinstance(msg, shutdown) or isinstance(msg, producerFinished):
self.send(producerFinished(self), "signal")
return
self.pause()
def P2PStreamer(torrentsfolder):
"""\
Arguments:
- torrentsfolder, e.g. "http://my.server.example.org/radioFoo/"
"""
# Create a pipeline of components whose net result is to output the contents of a certain URL
# (torrentsfolder + metafilename) every 60 seconds (the contents at the time of output, i.e.
# it fetches the page every 60 seconds).
poller = pipeline(
# This generates a message every 60 seconds to wake TriggeredSource
# allowing us to poll the meta file without busy-waiting.
CheapAndCheerfulClock(60.0),
# This sends the string (torrentsfolder + "meta.txt") every time it receives a message
# This string will be the URL of the meta file on the torrent hosting website
# e.g. "http://my.server.example.org/radioFoo/meta.txt"
TriggeredSource(torrentsfolder + "meta.txt"),
# SimpleHTTPClient retrieves the resource specified by the message it receives,
# which will be URL string.
# i.e. It fetches the page whose URL is (torrentsfolder + "meta.txt) (the string
# produced by TriggeredSource) and forwards on the contents of that page.
# The contents of that particular page will always be a number
# (in the form of a decimal ASCII string) which represents the number of
# 'chunks' of the stream that exist
SimpleHTTPClient()
)
# As a whole, streamer acts like a normal streaming client, outputting the contents of
# a stream to its outbox, although in much larger chunks with longer in between chunks
# than for a typical stream.
streamer = pipeline(
# fetch the P2P-stream meta file every 60 seconds and send its contents on
poller,
# PartsFilenameGenerator uses the number retrived by poller
# i.e. the number of chunks/torrents in the stream
# to generate the URLs of all the .torrent files
# (torrent metadata files) that make up the stream.
# (They will have been named 1.torrent,
# 2.torrent, 3.torrent ... etc. on the server).
PartsFilenameGenerator(torrentsfolder, ".torrent"),
# Download these .torrent files (each message received by resourcefetcher
# will be the URL of one .torrent file it should download). The
# contents of the page downloaded it forwarded on to the next component.
# NOTE: this downloads the .torrent file (metadata about part of the
# stream) not the stream itself
SimpleHTTPClient(),
# now use BitTorrent to download the stream itself using the
# metadata retrieved from .torrent files (each has information about a
# section of the stream - a section itself is typically a few MB of data)
# (TorrentPatron is a BitTorrent client component)
TorrentPatron(),
# output the names of the chunks of the stream as soon as they and
# all previous chunks have been downloaded
StreamReconstructor(),
# read the contents of these chunks (files)
TriggeredFileReader(),
)
return streamer
if __name__ == '__main__':
# ask the user from which website we should get the stream's metadata
# e.g. "http://my.server.example.org/radioFoo/"
torrentsfolder = raw_input("P2P-stream meta folder (URL): ")
pipeline(
# fetch the stream using BitTorrent and HTTP - see above for details
        P2PStreamer(torrentsfolder),
# write the stream to a file on disk
SimpleFileWriter("myreconstructedstream.mp3")
).run()
| sparkslabs/kamaelia_ | Sketches/RJL/Packages/Examples/P2PStreamPeer/p2pstreampeer.py | Python | apache-2.0 | 10,507 | 0.007233 |
#!/usr/bin/env python
# encoding: utf-8
import pytest
from conftests import *
from rurouni.exceptions import *
from rurouni.types import *
from rurouni import Database, Column, Table
def test_column_appending(ldb):
'''
    Checks column appending. To simulate this behaviour we simply declare two
    different classes pointing to the same table.
'''
# First declaration
class Client(Table):
__db__ = ldb.db
pass
ldb.flush()
# Second declaration
class NewClient(Table):
__db__ = ldb.db
__tablename__ = 'client'
name = Column(String)
# Check logs
logs = ldb.getLog()
assert logs[0] == 'PRAGMA table_info("client")'
assert logs[1] == 'ALTER TABLE client ADD name VARCHAR'
assert logs[2] == 'COMMIT'
ldb.destroy()
def test_column_removal(ldb):
'''
    Checks column removal. To simulate this behaviour we simply declare two
    different classes pointing to the same table.
    This requires the db.autoremove_columns flag to be set to True.
'''
ldb.db.autoremove_columns = True
# First declaration
class Client(Table):
__db__ = ldb.db
firstname = Column(String)
lastname = Column(String)
ldb.flush()
# Second declaration
class NewClient(Table):
__db__ = ldb.db
__tablename__ = 'client'
firstname = Column(String)
# Check logs
logs = ldb.getLog()
assert logs[0] == 'PRAGMA table_info("client")'
assert logs[1] == 'ALTER TABLE client RENAME TO migration_tmp'
assert logs[2] == 'COMMIT'
assert logs[3] == 'CREATE TABLE client ('
assert logs[4] == 'id INTEGER NOT NULL,'
assert logs[5] == 'firstname VARCHAR,'
assert logs[6] == 'PRIMARY KEY (id)'
assert logs[7] == ')'
assert logs[8] == 'COMMIT'
assert logs[9] == 'INSERT INTO client SELECT id ,firstname from migration_tmp'
assert logs[10] == 'COMMIT'
assert logs[11] == 'DROP TABLE migration_tmp'
assert logs[12] == 'COMMIT'
ldb.destroy()
def test_table_removal(tmp_ldb):
'''
    Tests table removal. For this feature, db.autoClean() must be called after
    all table definitions.
    Also, the database needs to be persistent.
'''
# Define two tables
class Client(Table):
__db__ = tmp_ldb.db
pass
class Profession(Table):
__db__ = tmp_ldb.db
pass
# Reopen db and define only one table
tmp_ldb.reopen()
class Client(Table):
__db__ = tmp_ldb.db
pass
tmp_ldb.flush() # Flush output
tmp_ldb.db.autoClean() # Autoclean tables
# Table profession must be dropped
logs = tmp_ldb.getLog()
assert logs[0] == 'DROP TABLE profession'
assert logs[1] == 'COMMIT'
# Remove file
tmp_ldb.whipeout()
| magnunleno/Rurouni | tests/test_table_migration.py | Python | gpl-3.0 | 2,788 | 0.002869 |
# coding=UTF-8
'''
Created on 24.09.2017
@author: sysoev
'''
from google.appengine.ext import db
from google.appengine.api import users
import datetime
import time
import logging
from myusers import MyUser
def force_unicode(string):
if type(string) == unicode:
return string
return string.decode('utf-8')
class Project(db.Model):
name = db.StringProperty(multiline=False)
def getProjectsList(user):
return None
def updateProject(key, name):
p = Project.get(key)
if not p:
return
p.name = name
p.put()
def addProject(name):
p = Project()
p.name = name
p.put()
return p.key()
class UserProject(db.Model):
user_key = db.ReferenceProperty(MyUser)
project_key = db.ReferenceProperty(Project)
number = 0
def addUserProject(user_name, project_key_str):
user_query = MyUser.all()
user = user_query.filter('username = ', user_name).get()
if user is None:
return None
true_project_key = Project.get(project_key_str).key()
if check_user_have_project(user, true_project_key):
return False
up = UserProject()
up.user_key = user.key()
up.project_key = true_project_key
up.put()
return True
def check_user_have_project(user, true_project_key):
user_project_keys = [user_proj.project_key.key() for user_proj in
UserProject.all().filter('user_key = ', user.key()).fetch(None)]
return true_project_key in user_project_keys
def deleteUserProject(user_key, project_key):
query = UserProject.all()
query.filter('user_key = ', MyUser.get(user_key)).filter('project_key = ', Project.get(project_key))
user_project = query.get()
if user_project is None:
return None
# project.key().delete()
db.delete(user_project.key())
return True
def getUserProjects(user):
if user is None:
return []
query = UserProject.all().filter('user_key = ', user.key())
return [user_project.project_key for user_project in query]
# return [Project.get(user_project.project_key) for user_project in query]
class Request(db.Model):
    number = db.IntegerProperty()
    name = db.StringProperty()
    description = db.StringProperty(multiline=True)
    state = db.IntegerProperty()
    performer = db.ReferenceProperty()  # the assigned user (left untyped in the original)
def addRequests(project_key, name, description):
    req = Request(parent=project_key)
    req.name = name
    req.description = description
    req.performer = None
    req.state = 1
    # number requests sequentially within their project
    req.number = Request.all().ancestor(project_key).count() + 1
    req.put()
    return True
def getRequests(project_key):
if project_key is None:
return []
    query = Request.all().ancestor(project_key)
return query | sysoevss/WebApps17 | data.py | Python | mit | 2,774 | 0.004326 |
import csv
import gzip
def save_vector(vector, output_fname):
"""
    Save any type of vector for future use.
    This could be ratings, predictions or the content vector.
    Results need to be collected to the local history before being read out.
Args:
vector: either user ratings, predictions or the content vector
output_fname (str): Local file path to store the vector
"""
if output_fname.endswith('.gz'):
output_file = gzip.open(output_fname, 'w')
else:
output_file = open(output_fname, 'w')
csv_writer = csv.writer(output_file, delimiter=';')
for v in vector:
csv_writer.writerow(v)
output_file.close()
def load_ratings(input_fname):
"""
Loads the rating or predicted rating arrays into the format of int(user_id), int(item_id), float(rating)
The ratings array can then be put into spark by using sc.parallelize()
If you would then like a queriable ratings vector you would follow something similar to the following
ratings_sc = sc.parallelize(ratings)
fields = [StructField("user", LongType(),True),StructField("item", LongType(), True),\
StructField("rating", FloatType(), True) ]
schema = StructType(fields)
ratings_df = sqlCtx.createDataFrame(ratings_sc, schema)
ratings_df.registerTempTable("ratings")
Args:
input_fname (str): Local file path where the vector is stored
Returns:
ratings: array of user ratings or predicted rating
"""
ratings = []
if input_fname.endswith('.gz'):
input_file = gzip.open(input_fname, 'rb')
else:
input_file = open(input_fname, 'rb')
csv_reader = csv.reader(input_file, delimiter=';')
for line in csv_reader:
ratings.append((int(line[0]), int(line[1]), float(line[2])))
return ratings
def load_content_vector(input_fname):
"""
Loads the content vector array into the format of int(item_id), array[0, 1, 0, ....., 0.777773, 0]
The content vector array can then be put into spark by using sc.parallelize()
Args:
input_fname (str): Local file path where the vector is stored
Returns:
content_vector: array of the content vector
"""
content_vector = []
if input_fname.endswith('.gz'):
input_file = gzip.open(input_fname, 'rb')
else:
input_file = open(input_fname, 'rb')
csv_reader = csv.reader(input_file, delimiter=';')
for line in csv_reader:
item = int(line[0])
content1 = line[1].strip("[]")
content = [float(i) for i in str.split(content1, ' ')]
content_vector.append((item, content))
return content_vector
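# Example line in a content vector file, matching the parsing above
# (';' delimiter, item id, then a bracketed space-separated float array):
# 42;[0.0 1.0 0.0 0.777773]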
def save_uv_to_hadoop(vector, output_name):
vector.map(lambda x: ','.join(map(str,x))).saveAsTextFile(output_name)
def load_uv_from_hadoop(input_name, sc, num_partitions=20):
uv = sc.textFile(input_name).map(parseText)\
.repartition(num_partitions)
return uv
def parseText(row):
row = row.split(',')
return (int(row[0]), int(row[1]), float(row[2]))
def rm_hdfs_dir(hdfs_dir):
cmd = "hadoop fs -rm -R " + hdfs_dir
import subprocess
cmd_output = subprocess.check_output(cmd, shell=True)
return cmd_output
def save_to_hadoop(vector, output_name):
import subprocess
try:
# overwrite the past vector that was saved
rm_hdfs_dir(output_name)
except subprocess.CalledProcessError as e:
# hdfs directory "output_name" does not exist
# do nothing
pass
# save vector as output_name in hdfs
vector.saveAsPickleFile(output_name)
def load_from_hadoop(input_name,sc, num_partitions=20):
cv = sc.pickleFile(input_name).repartition(num_partitions)
return cv | tiffanyj41/hermes | src/utils/save_load.py | Python | apache-2.0 | 3,756 | 0.004526 |
"""
functions for evaluating spreadsheet functions
primary function is parse, which the rest revolves around
evaluate should be called with the full string by a parent program
A note on exec:
This uses the exec function repeatedly, and where possible, use of it
should be minimized; this module is only meant to be run on trusted
spreadsheets. Future development may focus on making it more secure, but
the primary goal is simply to evaluate the most common functions,
regardless of the ability for code to be injected.
Another note:
this whole thing could stand to be redone
"""
# import spreadsheet mirroring functions
import eval.functions as functions
import eval.translate as translate
import eval.storage as global_file # historical reasons for name
__author__ = 'user0'
def evaluate(s, reference_dictionary=None):
    # if included, reference_dictionary is a dictionary of relevant
    # cell references.
    # alternatively, if reference_dictionary is None, it is presumed
    # that references do not need to be replaced with values in the
    # formula.
if s[0] == '=':
# get rid of the equals sign at the beginning of the formula
s = s[1:]
# send reference dictionary to storage
global_file.formulas = reference_dictionary
# I feel like I'm forgetting something else here
return parse(s)
def parse(s, function=None):
# returns evaluation of formula via recursive function;
# before this function is run, dependencies should be
# identified and evaluated
replace = {}
it = 0
level = 0
# replace references with cell values
s = s.lower()
# for formula in global_file.formulas:
# if formula in s:
# s = s.replace(formula, str(
# global_file.formulas[formula].return_value()))
# replace values with python equivalents
# ('^' with '**' for example)
s = translate.spreadsheet_replace(s)
# evaluate formula
for char in s:
if char == '(':
level += 1
if level == 1:
parent_start = it
if char == ')':
level -= 1
if level == 0:
parent_close = it
prefix = get_prefix(s, parent_start)
body = s[parent_start + 1: parent_close]
formula = '{}({})'.format(prefix, body)
                replace[formula] = str(parse(body, prefix))
verbose('replacing {} with {}'.format(formula,
replace[formula]))
it += 1
# replace strings
for entry in replace:
s = s.replace(entry, replace[entry])
# depending on the presence of a function, either simply evaluate,
# or use a function from functions
if function:
# if function is in the replacement dictionary,
# replace it with that entry
if function in functions.function_replace:
function = functions.function_replace[function]
else:
            print('function %s was not in function dictionary' % function)
# function just stopped sounding like a word
# insert the formula in a python-readable format
body_strings = s.split(',') # this is used below
exec_string = '%s(body_strings)' % function
else:
# replace references with values and find result
s = s.lower()
for reference in global_file.formulas:
while reference.lower() in s:
replacement_cell = global_file.formulas[reference]
if replacement_cell.data_type == 'string' and \
not replacement_cell.script:
replacement = '\'%s\'' % replacement_cell.text
else:
replacement = replacement_cell.value
s = s.replace(reference.lower(), replacement)
exec_string = s
exec_string = eval_append(exec_string)
verbose(exec_string)
exec(exec_string)
return global_file.returned
def get_prefix(formula_string, start):
alpha = 'abcdefghijklmnopqrstuvwxyz'
number = '.0123456789'
prefix = ''
string_position = start - 1
    while string_position >= 0:
        character = formula_string[string_position]
        if character in alpha or character in number:
            prefix = character + prefix
        else:
            return prefix
        string_position -= 1
    return prefix
def eval_append(s):
prefix = 'global_file.returned = '
return prefix + s
def verbose(s):
# if verbose setting, print s
if global_file.verbose:
print(s)
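# Rough usage sketch (illustrative only; the available spreadsheet functions
# and the cell objects held in the reference dictionary are defined elsewhere
# in this package):
# result = evaluate('=sum(a1,b1)', reference_dictionary)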
| TryExceptElse/pysheetdata | eval/parser.py | Python | mit | 4,789 | 0 |
from eventlet import patcher
from eventlet.green import BaseHTTPServer
from eventlet.green import threading
from eventlet.green import socket
from eventlet.green import urllib2
patcher.inject('test.test_urllib2_localnet',
globals(),
('BaseHTTPServer', BaseHTTPServer),
('threading', threading),
('socket', socket),
('urllib2', urllib2))
if __name__ == "__main__":
test_main() | JeremyGrosser/python-eventlet | tests/stdlib/test_urllib2_localnet.py | Python | mit | 410 | 0.007317 |
from data_importers.management.commands import BaseXpressDemocracyClubCsvImporter
class Command(BaseXpressDemocracyClubCsvImporter):
council_id = "WYE"
addresses_name = "2021-03-29T13:16:10.236797/Democracy_Club__06May2021.tsv"
stations_name = "2021-03-29T13:16:10.236797/Democracy_Club__06May2021.tsv"
elections = ["2021-05-06"]
csv_delimiter = "\t"
def address_record_to_dict(self, record):
uprn = record.property_urn.strip().lstrip("0")
if uprn in [
"10003382058", # THE PATCH, LEIGHT LANE, RIBBESFORD, BEWDLEY
]:
return None
if record.addressline6 in [
"DY12 2TN",
"DY10 3HJ",
"DY10 2QD",
"DY10 3TF",
"DY11 5QT",
"DY10 3HH",
"DY10 1SB",
"DY10 1LS",
"DY10 3EL",
]:
return None
return super().address_record_to_dict(record)
| DemocracyClub/UK-Polling-Stations | polling_stations/apps/data_importers/management/commands/import_wyre_forest.py | Python | bsd-3-clause | 949 | 0.001054 |
# -*- coding: utf-8 -*-
#
# Author: Joël Grand-Guillaume
# Copyright 2013 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
from openerp.osv import orm
from openerp.tools.translate import _
class logistic_requisition_cost_estimate(orm.TransientModel):
_inherit = 'logistic.requisition.cost.estimate'
def _check_requisition(self, cr, uid, requisition, context=None):
""" Check the rules to create a cost estimate from the
requisition
:returns: list of tuples ('message, 'error_code')
"""
errors = []
if not requisition.budget_holder_id:
error = (_('The requisition must be validated '
'by the Budget Holder.'),
'NO_BUDGET_VALID')
errors.append(error)
return errors
| jgrandguillaume/vertical-ngo | logistic_budget/wizard/cost_estimate.py | Python | agpl-3.0 | 1,462 | 0 |
from SBaaS_base.postgresql_orm_base import *
import json  # used by __repr__json__ (may also be re-exported by the star import above)
class data_stage01_rnasequencing_analysis(Base):
__tablename__ = 'data_stage01_rnasequencing_analysis'
id = Column(Integer, Sequence('data_stage01_rnasequencing_analysis_id_seq'), primary_key=True)
analysis_id = Column(String(500))
experiment_id = Column(String(50))
sample_name_abbreviation = Column(String(500)) # equivalent to sample_name_abbreviation
    sample_name = Column(String(500))
time_point = Column(String(10)) # converted to intermediate in lineage analysis
analysis_type = Column(String(100)); # time-course (i.e., multiple time points), paired (i.e., control compared to multiple replicates), group (i.e., single grouping of samples).
used_ = Column(Boolean);
comment_ = Column(Text);
__table_args__ = (
UniqueConstraint('experiment_id','sample_name_abbreviation','sample_name','time_point','analysis_type','analysis_id'),
)
def __init__(self,
row_dict_I,
):
self.analysis_id=row_dict_I['analysis_id'];
self.experiment_id=row_dict_I['experiment_id'];
self.sample_name_abbreviation=row_dict_I['sample_name_abbreviation'];
self.sample_name=row_dict_I['sample_name'];
self.time_point=row_dict_I['time_point'];
self.analysis_type=row_dict_I['analysis_type'];
self.used_=row_dict_I['used_'];
self.comment_=row_dict_I['comment_'];
def __set__row__(self,analysis_id_I,
experiment_id_I,
sample_name_abbreviation_I,
sample_name_I,
time_point_I,
analysis_type_I,
used__I,
comment__I):
self.analysis_id=analysis_id_I
self.experiment_id=experiment_id_I
self.sample_name_abbreviation=sample_name_abbreviation_I
self.sample_name=sample_name_I
self.time_point=time_point_I
self.analysis_type=analysis_type_I
self.used_=used__I
self.comment_=comment__I
def __repr__dict__(self):
return {'id':self.id,
'analysis_id':self.analysis_id,
'experiment_id':self.experiment_id,
'sample_name_abbreviation':self.sample_name_abbreviation,
'sample_name':self.sample_name,
'time_point':self.time_point,
'analysis_type':self.analysis_type,
'used_':self.used_,
'comment_':self.comment_}
def __repr__json__(self):
return json.dumps(self.__repr__dict__()) | dmccloskey/SBaaS_rnasequencing | SBaaS_rnasequencing/stage01_rnasequencing_analysis_postgresql_models.py | Python | mit | 2,579 | 0.027918 |
# standard library
import logging
# Django
from django.contrib.auth.models import BaseUserManager
# logger instance
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
class UserManager(BaseUserManager):
def create_user(self, email, password, username, first_name, **kwargs):
logger.info("Creating user.")
user = self.model(email=self.normalize_email(email),
username=username,
first_name=first_name,
is_active=False,
**kwargs)
user.set_password(password)
user.save(using=self.db)
return user
def create_superuser(self, email, password,
first_name, **kwargs):
logger.info("Creating superuser.")
user = self.model(email=self.normalize_email(email),
first_name=first_name,
is_staff=True,
is_active=True,
is_superuser=True,
**kwargs)
user.set_password(password)
user.save(using=self.db)
return user
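# Typically attached to a custom user model, e.g. (illustrative):
#
# class User(AbstractBaseUser):
#     ...
#     objects = UserManager()
#
# User.objects.create_user('alice@example.com', 'secret', 'alice', 'Alice')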
| CodaMais/CodaMais | CodaMais/user/managers.py | Python | gpl-3.0 | 1,179 | 0 |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'configdialog.ui'
#
# by: PyQt4 UI code generator 4.5.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
class Ui_Dialog(object):
def setupUi(self, Dialog):
Dialog.setObjectName("Dialog")
Dialog.resize(993, 455)
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap(":/icons/kaddressbook.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
Dialog.setWindowIcon(icon)
self.verticalLayout_6 = QtGui.QVBoxLayout(Dialog)
self.verticalLayout_6.setObjectName("verticalLayout_6")
self.splitter = QtGui.QSplitter(Dialog)
self.splitter.setOrientation(QtCore.Qt.Horizontal)
self.splitter.setObjectName("splitter")
self.pagelist = QtGui.QListWidget(self.splitter)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pagelist.sizePolicy().hasHeightForWidth())
self.pagelist.setSizePolicy(sizePolicy)
self.pagelist.setMaximumSize(QtCore.QSize(180, 16777215))
self.pagelist.setObjectName("pagelist")
self.layoutWidget = QtGui.QWidget(self.splitter)
self.layoutWidget.setObjectName("layoutWidget")
self.verticalLayout = QtGui.QVBoxLayout(self.layoutWidget)
self.verticalLayout.setObjectName("verticalLayout")
self.container = QtGui.QScrollArea(self.layoutWidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(5)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.container.sizePolicy().hasHeightForWidth())
self.container.setSizePolicy(sizePolicy)
self.container.setFrameShape(QtGui.QFrame.NoFrame)
self.container.setWidgetResizable(True)
self.container.setObjectName("container")
self.scrollAreaWidgetContents = QtGui.QWidget(self.container)
self.scrollAreaWidgetContents.setGeometry(QtCore.QRect(0, 0, 241, 399))
self.scrollAreaWidgetContents.setObjectName("scrollAreaWidgetContents")
self.verticalLayout_3 = QtGui.QVBoxLayout(self.scrollAreaWidgetContents)
self.verticalLayout_3.setMargin(0)
self.verticalLayout_3.setObjectName("verticalLayout_3")
self.layout = QtGui.QVBoxLayout()
self.layout.setObjectName("layout")
self.verticalLayout_3.addLayout(self.layout)
self.container.setWidget(self.scrollAreaWidgetContents)
self.verticalLayout.addWidget(self.container)
self.horizontalLayout = QtGui.QHBoxLayout()
self.horizontalLayout.setObjectName("horizontalLayout")
spacerItem = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout.addItem(spacerItem)
self.pushButton_2 = QtGui.QPushButton(self.layoutWidget)
self.pushButton_2.setObjectName("pushButton_2")
self.horizontalLayout.addWidget(self.pushButton_2)
self.verticalLayout.addLayout(self.horizontalLayout)
self.tabWidget = QtGui.QTabWidget(self.splitter)
self.tabWidget.setObjectName("tabWidget")
self.tab = QtGui.QWidget()
self.tab.setObjectName("tab")
self.verticalLayout_2 = QtGui.QVBoxLayout(self.tab)
self.verticalLayout_2.setMargin(0)
self.verticalLayout_2.setObjectName("verticalLayout_2")
self.scrollArea = QtGui.QScrollArea(self.tab)
self.scrollArea.setWidgetResizable(True)
self.scrollArea.setObjectName("scrollArea")
self.scrollAreaWidgetContents_2 = QtGui.QWidget(self.scrollArea)
self.scrollAreaWidgetContents_2.setGeometry(QtCore.QRect(0, 0, 532, 405))
self.scrollAreaWidgetContents_2.setObjectName("scrollAreaWidgetContents_2")
self.verticalLayout_4 = QtGui.QVBoxLayout(self.scrollAreaWidgetContents_2)
self.verticalLayout_4.setSpacing(3)
self.verticalLayout_4.setContentsMargins(0, 3, 0, -1)
self.verticalLayout_4.setObjectName("verticalLayout_4")
self.horizontalLayout_4 = QtGui.QHBoxLayout()
self.horizontalLayout_4.setObjectName("horizontalLayout_4")
self.zoomin = QtGui.QToolButton(self.scrollAreaWidgetContents_2)
icon1 = QtGui.QIcon()
icon1.addPixmap(QtGui.QPixmap(":/icons/viewmag+.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.zoomin.setIcon(icon1)
self.zoomin.setObjectName("zoomin")
self.horizontalLayout_4.addWidget(self.zoomin)
self.zoomout = QtGui.QToolButton(self.scrollAreaWidgetContents_2)
icon2 = QtGui.QIcon()
icon2.addPixmap(QtGui.QPixmap(":/icons/viewmag-.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.zoomout.setIcon(icon2)
self.zoomout.setObjectName("zoomout")
self.horizontalLayout_4.addWidget(self.zoomout)
spacerItem1 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_4.addItem(spacerItem1)
self.verticalLayout_4.addLayout(self.horizontalLayout_4)
self.preview = QtGui.QLabel(self.scrollAreaWidgetContents_2)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.preview.sizePolicy().hasHeightForWidth())
self.preview.setSizePolicy(sizePolicy)
self.preview.setFrameShape(QtGui.QFrame.NoFrame)
self.preview.setObjectName("preview")
self.verticalLayout_4.addWidget(self.preview)
spacerItem2 = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
self.verticalLayout_4.addItem(spacerItem2)
self.scrollArea.setWidget(self.scrollAreaWidgetContents_2)
self.verticalLayout_2.addWidget(self.scrollArea)
self.tabWidget.addTab(self.tab, "")
self.tab_2 = QtGui.QWidget()
self.tab_2.setObjectName("tab_2")
self.verticalLayout_5 = QtGui.QVBoxLayout(self.tab_2)
self.verticalLayout_5.setMargin(0)
self.verticalLayout_5.setObjectName("verticalLayout_5")
self.snippet = QtGui.QTextBrowser(self.tab_2)
self.snippet.setObjectName("snippet")
self.verticalLayout_5.addWidget(self.snippet)
self.tabWidget.addTab(self.tab_2, "")
self.verticalLayout_6.addWidget(self.splitter)
self.retranslateUi(Dialog)
self.tabWidget.setCurrentIndex(0)
QtCore.QObject.connect(self.pushButton_2, QtCore.SIGNAL("clicked()"), Dialog.accept)
QtCore.QMetaObject.connectSlotsByName(Dialog)
def retranslateUi(self, Dialog):
Dialog.setWindowTitle(QtGui.QApplication.translate("Dialog", "Bookrest Settings", None, QtGui.QApplication.UnicodeUTF8))
self.pushButton_2.setText(QtGui.QApplication.translate("Dialog", "Close", None, QtGui.QApplication.UnicodeUTF8))
self.zoomin.setText(QtGui.QApplication.translate("Dialog", "...", None, QtGui.QApplication.UnicodeUTF8))
self.zoomout.setText(QtGui.QApplication.translate("Dialog", "...", None, QtGui.QApplication.UnicodeUTF8))
self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab), QtGui.QApplication.translate("Dialog", "Preview", None, QtGui.QApplication.UnicodeUTF8))
self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_2), QtGui.QApplication.translate("Dialog", "Output", None, QtGui.QApplication.UnicodeUTF8))
import icons_rc
if __name__ == "__main__":
import sys
app = QtGui.QApplication(sys.argv)
Dialog = QtGui.QDialog()
ui = Ui_Dialog()
ui.setupUi(Dialog)
Dialog.show()
sys.exit(app.exec_())
| shakna-israel/rst2pdf | gui/Ui_configdialog.py | Python | mit | 7,900 | 0.003418 |
from django.shortcuts import render_to_response, get_object_or_404
from django.template import RequestContext
from django.http import HttpResponseRedirect, HttpResponse
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from actstream.models import Follow, Action, user_stream, actor_stream, model_stream
@login_required
def follow_unfollow(request, content_type_id, object_id, follow=True):
"""
    Creates a follow relationship so that ``request.user`` starts following the actor defined by ``content_type_id``, ``object_id``
"""
ctype = get_object_or_404(ContentType, pk=content_type_id)
actor = get_object_or_404(ctype.model_class(), pk=object_id)
lookup = {
'user': request.user,
'content_type': ctype,
'object_id': object_id,
}
if follow:
Follow.objects.get_or_create(**lookup)
return type('Created', (HttpResponse,), {'status_code':201})()
Follow.objects.get(**lookup).delete()
return type('Deleted', (HttpResponse,), {'status_code':204})()
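# These views are typically wired up in a urls.py, e.g. (illustrative
# pattern only):
# url(r'^follow/(?P<content_type_id>\d+)/(?P<object_id>\d+)/$',
#     'actstream.views.follow_unfollow', name='actstream_follow'),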
@login_required
def stream(request):
"""
Index page for authenticated user's activity stream. (Eg: Your feed at github.com)
"""
return render_to_response('activity/actor.html', {
'ctype': ContentType.objects.get_for_model(request.user),
'actor':request.user,'action_list':user_stream(request.user)
}, context_instance=RequestContext(request))
def followers(request, content_type_id, object_id):
"""
Creates a listing of ``User``s that follow the actor defined by ``content_type_id``, ``object_id``
"""
ctype = get_object_or_404(ContentType, pk=content_type_id)
follows = Follow.objects.filter(content_type=ctype, object_id=object_id)
actor = get_object_or_404(ctype.model_class(), pk=object_id)
return render_to_response('activity/followers.html', {
'followers': [f.user for f in follows], 'actor':actor
}, context_instance=RequestContext(request))
def user(request, username):
"""
``User`` focused activity stream. (Eg: Profile page twitter.com/justquick)
"""
user = get_object_or_404(User, username=username)
return render_to_response('activity/actor.html', {
'ctype': ContentType.objects.get_for_model(User),
'actor':user,'action_list':actor_stream(user)
}, context_instance=RequestContext(request))
def detail(request, action_id):
"""
``Action`` detail view (pretty boring, mainly used for get_absolute_url)
"""
return render_to_response('activity/detail.html', {
'action': get_object_or_404(Action, pk=action_id)
}, context_instance=RequestContext(request))
def actor(request, content_type_id, object_id):
"""
``Actor`` focused activity stream for actor defined by ``content_type_id``, ``object_id``
"""
ctype = get_object_or_404(ContentType, pk=content_type_id)
actor = get_object_or_404(ctype.model_class(), pk=object_id)
return render_to_response('activity/actor.html', {
'action_list': actor_stream(actor), 'actor':actor,'ctype':ctype
}, context_instance=RequestContext(request))
def model(request, content_type_id):
"""
    Model-level activity stream for all actors of the model defined by ``content_type_id``
"""
ctype = get_object_or_404(ContentType, pk=content_type_id)
actor = ctype.model_class()
return render_to_response('activity/actor.html', {
'action_list': model_stream(actor),'ctype':ctype,'actor':ctype#._meta.verbose_name_plural.title()
}, context_instance=RequestContext(request)) | netconstructor/django-activity-stream | actstream/views.py | Python | bsd-3-clause | 3,684 | 0.011129 |
# Author: Paul Wollaston
# Contributions: Luke Mullan
#
# This client script allows connection to Deluge Daemon directly, completely
# circumventing the requirement to use the WebUI.
import json
import traceback
from base64 import b64encode
import sickbeard
from sickbeard import logger
from .generic import GenericClient
from synchronousdeluge import DelugeClient
class DelugeDAPI(GenericClient):
drpc = None
def __init__(self, host=None, username=None, password=None):
super(DelugeDAPI, self).__init__('DelugeD', host, username, password)
def _get_auth(self):
if not self.connect():
return None
return True
def connect(self, reconnect = False):
hostname = self.host.replace("/", "").split(':')
if not self.drpc or reconnect:
self.drpc = DelugeRPC(hostname[1], port = hostname[2], username = self.username, password = self.password)
return self.drpc
def _add_torrent_uri(self, result):
label = sickbeard.TORRENT_LABEL
if result.show.is_anime:
label = sickbeard.TORRENT_LABEL_ANIME
options = {
'add_paused': sickbeard.TORRENT_PAUSED
}
remote_torrent = self.drpc.add_torrent_magnet(result.url, options, result.hash)
if not remote_torrent:
return None
result.hash = remote_torrent
return remote_torrent
def _add_torrent_file(self, result):
label = sickbeard.TORRENT_LABEL
if result.show.is_anime:
label = sickbeard.TORRENT_LABEL_ANIME
        if not result.content:
            return None
options = {
'add_paused': sickbeard.TORRENT_PAUSED
}
remote_torrent = self.drpc.add_torrent_file(result.name + '.torrent', result.content, options, result.hash)
if not remote_torrent:
return None
result.hash = remote_torrent
return remote_torrent
def _set_torrent_label(self, result):
label = sickbeard.TORRENT_LABEL
if result.show.is_anime:
label = sickbeard.TORRENT_LABEL_ANIME
if ' ' in label:
logger.log(self.name + u': Invalid label. Label must not contain a space', logger.ERROR)
return False
if label:
if self.drpc.set_torrent_label(result.hash, label):
return True
return False
def _set_torrent_ratio(self, result):
return True
def _set_torrent_path(self, result):
path = sickbeard.TORRENT_PATH
if path:
if self.drpc.set_torrent_path(result.hash, path):
return True
return False
def _set_torrent_pause(self, result):
if sickbeard.TORRENT_PAUSED:
return self.drpc.pause_torrent(result.hash)
return True
def testAuthentication(self):
if self.connect(True) and self.drpc.test():
return True, 'Success: Connected and Authenticated'
else:
return False, 'Error: Unable to Authenticate! Please check your config!'
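# Illustrative sketch (not part of the original module): exercising the client
# directly. The host URL and credentials are made-up placeholders.
def _example_check_connection():  # pragma: no cover - documentation only
    client = DelugeDAPI('http://localhost:58846', 'deluge', 'secret')
    return client.testAuthentication()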
class DelugeRPC(object):
host = 'localhost'
port = 58846
username = None
password = None
client = None
    def __init__(self, host='localhost', port=58846, username=None, password=None):
super(DelugeRPC, self).__init__()
self.host = host
self.port = port
self.username = username
self.password = password
def connect(self):
self.client = DelugeClient()
self.client.connect(self.host, int(self.port), self.username, self.password)
def test(self):
try:
self.connect()
        except Exception:
return False
return True
def add_torrent_magnet(self, torrent, options, torrent_hash):
torrent_id = False
try:
self.connect()
torrent_id = self.client.core.add_torrent_magnet(torrent, options).get()
if not torrent_id:
torrent_id = self._check_torrent(torrent_hash)
except Exception as err:
return False
finally:
if self.client:
self.disconnect()
return torrent_id
def add_torrent_file(self, filename, torrent, options, torrent_hash):
torrent_id = False
try:
self.connect()
torrent_id = self.client.core.add_torrent_file(filename, b64encode(torrent), options).get()
if not torrent_id:
torrent_id = self._check_torrent(torrent_hash)
except Exception as err:
return False
finally:
if self.client:
self.disconnect()
return torrent_id
def set_torrent_label(self, torrent_id, label):
try:
self.connect()
self.client.label.set_torrent(torrent_id, label).get()
except Exception as err:
            logger.log('DelugeD: Failed to set label for torrent: ' + str(err) + ' ' + traceback.format_exc(), logger.ERROR)
return False
finally:
if self.client:
self.disconnect()
return True
def set_torrent_path(self, torrent_id, path):
try:
self.connect()
self.client.core.set_torrent_move_completed_path(torrent_id, path).get()
self.client.core.set_torrent_move_completed(torrent_id, 1).get()
except Exception as err:
            logger.log('DelugeD: Failed to set path for torrent: ' + str(err) + ' ' + traceback.format_exc(), logger.ERROR)
return False
finally:
if self.client:
self.disconnect()
return True
def pause_torrent(self, torrent_ids):
try:
self.connect()
self.client.core.pause_torrent(torrent_ids).get()
except Exception as err:
            logger.log('DelugeD: Failed to pause torrent: ' + str(err) + ' ' + traceback.format_exc(), logger.ERROR)
return False
finally:
if self.client:
self.disconnect()
return True
def disconnect(self):
self.client.disconnect()
    def _check_torrent(self, torrent_hash):
        status = self.client.core.get_torrent_status(torrent_hash, {}).get()
        if status['hash']:
            logger.log('DelugeD: Torrent already exists in Deluge', logger.DEBUG)
            return torrent_hash
        return False
api = DelugeDAPI()
| eXistenZNL/SickRage | sickbeard/clients/deluged_client.py | Python | gpl-3.0 | 6,499 | 0.005539 |
import os
from pylons import app_globals
def delete_image(image):
    """Delete every file referenced by the image's semicolon-separated path list."""
    paths = image.path.split(';')
    for p in paths:
        path = os.path.join(app_globals.image_storage, p)
        os.remove(path)
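# Illustrative note (not part of the original module): `image` is assumed to be
# a model object whose `path` attribute holds one or more storage-relative file
# names separated by semicolons, e.g. "abc123.img;abc123.img.meta".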
| hep-gc/repoman | server/repoman/repoman/lib/storage/storage.py | Python | gpl-3.0 | 204 | 0.009804 |
"""autogenerated by genpy from mapping_dlut/Map.msg. Do not edit."""
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
import mapping_dlut.msg
import std_msgs.msg
class Map(genpy.Message):
_md5sum = "e6ab6c8862bf55f4e1b5fd48f03f1a7d"
_type = "mapping_dlut/Map"
_has_header = True #flag to mark the presence of a Header object
_full_text = """Header header
###########################################################
#Elevation Map Configuration
#half size of the map;
int32 nHalfSize
#Elevation Grid Resolution, in meter;
float32 fResolution
#x coordinate of the center of the map in world frame
float32 fCenterX
#y coordinate of the center of the map in world frame
float32 fCenterY
#maximum elevation of the map in world frame
float32 fMapMaxElevation
#minimum elevation of the map in world frame
float32 fMapMinElevation
###########################################################
###########################################################
#Vehicle Status
#vehicle x in world frame, in meters
float32 fVehicleX
#vehicle y in world frame, in meters
float32 fVehicleY
#vehicle z in world frame, in meters
float32 fVehicleZ
#vehicle heading angle, in rad
float32 fVehicleHeading
###########################################################
Grid[] map
================================================================================
MSG: std_msgs/Header
# Standard metadata for higher-level stamped data types.
# This is generally used to communicate timestamped data
# in a particular coordinate frame.
#
# sequence ID: consecutively increasing ID
uint32 seq
#Two-integer timestamp that is expressed as:
# * stamp.sec: seconds (stamp_secs) since epoch (in Python the variable is called 'secs')
# * stamp.nsec: nanoseconds since stamp_secs (in Python the variable is called 'nsecs')
# time-handling sugar is provided by the client library
time stamp
#Frame this data is associated with
# 0: no frame
# 1: global frame
string frame_id
================================================================================
MSG: mapping_dlut/Grid
#Header header
#maximum elevation in this grid;
#float32 fMaxElevation
#minimum elevation in this grid;
#float32 fMinElevation
#average elevation in this grid;
#float32 fAvgElevation
#points falling in this grid;
#int32 nPointCount
#up point falling in this grid;
#int32 nUpCount
#down point falling in this grid;
#int32 nDownCount
#average elevation in this grid;
float32 fAvgElevation
#proability
int8 proability
#texture
int8 texture
"""
__slots__ = ['header','nHalfSize','fResolution','fCenterX','fCenterY','fMapMaxElevation','fMapMinElevation','fVehicleX','fVehicleY','fVehicleZ','fVehicleHeading','map']
_slot_types = ['std_msgs/Header','int32','float32','float32','float32','float32','float32','float32','float32','float32','float32','mapping_dlut/Grid[]']
def __init__(self, *args, **kwds):
"""
Constructor. Any message fields that are implicitly/explicitly
set to None will be assigned a default value. The recommend
use is keyword arguments as this is more robust to future message
changes. You cannot mix in-order arguments and keyword arguments.
The available fields are:
header,nHalfSize,fResolution,fCenterX,fCenterY,fMapMaxElevation,fMapMinElevation,fVehicleX,fVehicleY,fVehicleZ,fVehicleHeading,map
:param args: complete set of field values, in .msg order
:param kwds: use keyword arguments corresponding to message field names
to set specific fields.
"""
if args or kwds:
super(Map, self).__init__(*args, **kwds)
#message fields cannot be None, assign default values for those that are
if self.header is None:
self.header = std_msgs.msg.Header()
if self.nHalfSize is None:
self.nHalfSize = 0
if self.fResolution is None:
self.fResolution = 0.
if self.fCenterX is None:
self.fCenterX = 0.
if self.fCenterY is None:
self.fCenterY = 0.
if self.fMapMaxElevation is None:
self.fMapMaxElevation = 0.
if self.fMapMinElevation is None:
self.fMapMinElevation = 0.
if self.fVehicleX is None:
self.fVehicleX = 0.
if self.fVehicleY is None:
self.fVehicleY = 0.
if self.fVehicleZ is None:
self.fVehicleZ = 0.
if self.fVehicleHeading is None:
self.fVehicleHeading = 0.
if self.map is None:
self.map = []
else:
self.header = std_msgs.msg.Header()
self.nHalfSize = 0
self.fResolution = 0.
self.fCenterX = 0.
self.fCenterY = 0.
self.fMapMaxElevation = 0.
self.fMapMinElevation = 0.
self.fVehicleX = 0.
self.fVehicleY = 0.
self.fVehicleZ = 0.
self.fVehicleHeading = 0.
self.map = []
def _get_types(self):
"""
internal API method
"""
return self._slot_types
def serialize(self, buff):
"""
serialize message into buffer
:param buff: buffer, ``StringIO``
"""
try:
_x = self
buff.write(_struct_3I.pack(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs))
_x = self.header.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
if python3:
buff.write(struct.pack('<I%sB'%length, length, *_x))
else:
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = self
buff.write(_struct_i9f.pack(_x.nHalfSize, _x.fResolution, _x.fCenterX, _x.fCenterY, _x.fMapMaxElevation, _x.fMapMinElevation, _x.fVehicleX, _x.fVehicleY, _x.fVehicleZ, _x.fVehicleHeading))
length = len(self.map)
buff.write(_struct_I.pack(length))
for val1 in self.map:
_x = val1
buff.write(_struct_f2b.pack(_x.fAvgElevation, _x.proability, _x.texture))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(_x))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(_x))))
def deserialize(self, str):
"""
unpack serialized message in str into this message instance
:param str: byte array of serialized message, ``str``
"""
try:
if self.header is None:
self.header = std_msgs.msg.Header()
if self.map is None:
self.map = None
end = 0
_x = self
start = end
end += 12
(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.header.frame_id = str[start:end].decode('utf-8')
else:
self.header.frame_id = str[start:end]
_x = self
start = end
end += 40
(_x.nHalfSize, _x.fResolution, _x.fCenterX, _x.fCenterY, _x.fMapMaxElevation, _x.fMapMinElevation, _x.fVehicleX, _x.fVehicleY, _x.fVehicleZ, _x.fVehicleHeading,) = _struct_i9f.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.map = []
for i in range(0, length):
val1 = mapping_dlut.msg.Grid()
_x = val1
start = end
end += 6
(_x.fAvgElevation, _x.proability, _x.texture,) = _struct_f2b.unpack(str[start:end])
self.map.append(val1)
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
def serialize_numpy(self, buff, numpy):
"""
serialize message with numpy array types into buffer
:param buff: buffer, ``StringIO``
:param numpy: numpy python module
"""
try:
_x = self
buff.write(_struct_3I.pack(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs))
_x = self.header.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
if python3:
buff.write(struct.pack('<I%sB'%length, length, *_x))
else:
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = self
buff.write(_struct_i9f.pack(_x.nHalfSize, _x.fResolution, _x.fCenterX, _x.fCenterY, _x.fMapMaxElevation, _x.fMapMinElevation, _x.fVehicleX, _x.fVehicleY, _x.fVehicleZ, _x.fVehicleHeading))
length = len(self.map)
buff.write(_struct_I.pack(length))
for val1 in self.map:
_x = val1
buff.write(_struct_f2b.pack(_x.fAvgElevation, _x.proability, _x.texture))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(_x))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(_x))))
def deserialize_numpy(self, str, numpy):
"""
unpack serialized message in str into this message instance using numpy for array types
:param str: byte array of serialized message, ``str``
:param numpy: numpy python module
"""
try:
if self.header is None:
self.header = std_msgs.msg.Header()
if self.map is None:
self.map = None
end = 0
_x = self
start = end
end += 12
(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.header.frame_id = str[start:end].decode('utf-8')
else:
self.header.frame_id = str[start:end]
_x = self
start = end
end += 40
(_x.nHalfSize, _x.fResolution, _x.fCenterX, _x.fCenterY, _x.fMapMaxElevation, _x.fMapMinElevation, _x.fVehicleX, _x.fVehicleY, _x.fVehicleZ, _x.fVehicleHeading,) = _struct_i9f.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.map = []
for i in range(0, length):
val1 = mapping_dlut.msg.Grid()
_x = val1
start = end
end += 6
(_x.fAvgElevation, _x.proability, _x.texture,) = _struct_f2b.unpack(str[start:end])
self.map.append(val1)
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
_struct_I = genpy.struct_I
_struct_f2b = struct.Struct("<f2b")
_struct_3I = struct.Struct("<3I")
_struct_i9f = struct.Struct("<i9f")
| WuNL/mylaptop | install/lib/python2.7/dist-packages/mapping_dlut/msg/_Map.py | Python | bsd-3-clause | 10,485 | 0.017167 |
# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2015-2016 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""The go plugin can be used for go projects using `go get`.
This plugin uses the common plugin keywords as well as those for "sources".
For more information check the 'plugins' topic for the former and the
'sources' topic for the latter.
Additionally, this plugin uses the following plugin-specific keywords:
- go-packages:
(list of strings)
Go packages to fetch, these must be a "main" package. Dependencies
are pulled in automatically by `go get`.
Packages that are not "main" will not cause an error, but would
not be useful either.
If the package is a part of the go-importpath the local package
corresponding to those sources will be used.
- go-importpath:
(string)
This entry tells the checked out `source` to live within a certain path
within `GOPATH`.
This is not needed and does not affect `go-packages`.
- go-buildtags:
(list of strings)
Tags to use during the go build. Default is not to use any build tags.
"""
import logging
import os
import shutil
from glob import iglob
import snapcraft
from snapcraft import common
logger = logging.getLogger(__name__)
class GoPlugin(snapcraft.BasePlugin):
@classmethod
def schema(cls):
schema = super().schema()
schema['properties']['go-packages'] = {
'type': 'array',
'minitems': 1,
'uniqueItems': True,
'items': {
'type': 'string',
},
'default': [],
}
schema['properties']['go-importpath'] = {
'type': 'string',
'default': ''
}
schema['properties']['go-buildtags'] = {
'type': 'array',
'minitems': 1,
'uniqueItems': True,
'items': {
'type': 'string',
},
'default': []
}
if 'required' in schema:
del schema['required']
return schema
@classmethod
def get_build_properties(cls):
# Inform Snapcraft of the properties associated with building. If these
# change in the YAML Snapcraft will consider the build step dirty.
return ['go-packages', 'go-buildtags']
@classmethod
def get_pull_properties(cls):
# Inform Snapcraft of the properties associated with pulling. If these
# change in the YAML Snapcraft will consider the pull step dirty.
return ['go-packages']
def __init__(self, name, options, project):
super().__init__(name, options, project)
self.build_packages.append('golang-go')
self._gopath = os.path.join(self.partdir, 'go')
self._gopath_src = os.path.join(self._gopath, 'src')
self._gopath_bin = os.path.join(self._gopath, 'bin')
self._gopath_pkg = os.path.join(self._gopath, 'pkg')
def pull(self):
# use -d to only download (build will happen later)
# use -t to also get the test-deps
# since we are not using -u the sources will stick to the
# original checkout.
super().pull()
os.makedirs(self._gopath_src, exist_ok=True)
if any(iglob('{}/**/*.go'.format(self.sourcedir), recursive=True)):
go_package = self._get_local_go_package()
go_package_path = os.path.join(self._gopath_src, go_package)
if os.path.islink(go_package_path):
os.unlink(go_package_path)
os.makedirs(os.path.dirname(go_package_path), exist_ok=True)
os.symlink(self.sourcedir, go_package_path)
self._run(['go', 'get', '-t', '-d', './{}/...'.format(go_package)])
for go_package in self.options.go_packages:
self._run(['go', 'get', '-t', '-d', go_package])
def clean_pull(self):
super().clean_pull()
# Remove the gopath (if present)
if os.path.exists(self._gopath):
shutil.rmtree(self._gopath)
def _get_local_go_package(self):
if self.options.go_importpath:
go_package = self.options.go_importpath
else:
logger.warning(
'Please consider setting `go-importpath` for the {!r} '
'part'.format(self.name))
go_package = os.path.basename(os.path.abspath(self.options.source))
return go_package
def _get_local_main_packages(self):
search_path = './{}/...'.format(self._get_local_go_package())
packages = self._run_output(['go', 'list', '-f',
'{{.ImportPath}} {{.Name}}',
search_path])
packages_split = [p.split() for p in packages.splitlines()]
main_packages = [p[0] for p in packages_split if p[1] == 'main']
return main_packages
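    # Illustrative note (not part of the original code): given `go list` output
    # such as
    #     github.com/example/app/cmd/tool main
    #     github.com/example/app/pkg/util util
    # the method above returns only the first import path, because only its
    # package-name column is "main". (Paths are made up for illustration.)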
def build(self):
super().build()
tags = []
if self.options.go_buildtags:
tags = ['-tags={}'.format(','.join(self.options.go_buildtags))]
packages = self.options.go_packages
if not packages:
packages = self._get_local_main_packages()
for package in packages:
binary = os.path.join(self._gopath_bin, self._binary_name(package))
self._run(['go', 'build', '-o', binary] + tags + [package])
install_bin_path = os.path.join(self.installdir, 'bin')
os.makedirs(install_bin_path, exist_ok=True)
for binary in os.listdir(self._gopath_bin):
binary_path = os.path.join(self._gopath_bin, binary)
shutil.copy2(binary_path, install_bin_path)
def _binary_name(self, package):
package = package.replace('/...', '')
return package.split('/')[-1]
def clean_build(self):
super().clean_build()
if os.path.isdir(self._gopath_bin):
shutil.rmtree(self._gopath_bin)
if os.path.isdir(self._gopath_pkg):
shutil.rmtree(self._gopath_pkg)
def _run(self, cmd, **kwargs):
env = self._build_environment()
return self.run(cmd, cwd=self._gopath_src, env=env, **kwargs)
def _run_output(self, cmd, **kwargs):
env = self._build_environment()
return self.run_output(cmd, cwd=self._gopath_src, env=env, **kwargs)
def _build_environment(self):
env = os.environ.copy()
env['GOPATH'] = self._gopath
env['GOBIN'] = self._gopath_bin
include_paths = []
for root in [self.installdir, self.project.stage_dir]:
include_paths.extend(
common.get_library_paths(root, self.project.arch_triplet))
flags = common.combine_paths(include_paths, '-L', ' ')
env['CGO_LDFLAGS'] = '{} {} {}'.format(
env.get('CGO_LDFLAGS', ''), flags, env.get('LDFLAGS', ''))
if self.project.is_cross_compiling:
env['CC'] = '{}-gcc'.format(self.project.arch_triplet)
env['CXX'] = '{}-g++'.format(self.project.arch_triplet)
env['CGO_ENABLED'] = '1'
# See https://golang.org/doc/install/source#environment
go_archs = {
'armhf': 'arm',
'i386': '386',
'ppc64el': 'ppc64le',
}
env['GOARCH'] = go_archs.get(self.project.deb_arch,
self.project.deb_arch)
if self.project.deb_arch == 'armhf':
env['GOARM'] = '7'
return env
def enable_cross_compilation(self):
pass
| elopio/snapcraft | snapcraft/plugins/go.py | Python | gpl-3.0 | 8,148 | 0 |
from pycp2k.inputsection import InputSection
class _each286(InputSection):
def __init__(self):
InputSection.__init__(self)
self.Just_energy = None
self.Powell_opt = None
self.Qs_scf = None
self.Xas_scf = None
self.Md = None
self.Pint = None
self.Metadynamics = None
self.Geo_opt = None
self.Rot_opt = None
self.Cell_opt = None
self.Band = None
self.Ep_lin_solver = None
self.Spline_find_coeffs = None
self.Replica_eval = None
self.Bsse = None
self.Shell_opt = None
self.Tddft_scf = None
self._name = "EACH"
self._keywords = {'Bsse': 'BSSE', 'Cell_opt': 'CELL_OPT', 'Just_energy': 'JUST_ENERGY', 'Band': 'BAND', 'Xas_scf': 'XAS_SCF', 'Rot_opt': 'ROT_OPT', 'Replica_eval': 'REPLICA_EVAL', 'Tddft_scf': 'TDDFT_SCF', 'Shell_opt': 'SHELL_OPT', 'Md': 'MD', 'Pint': 'PINT', 'Metadynamics': 'METADYNAMICS', 'Geo_opt': 'GEO_OPT', 'Spline_find_coeffs': 'SPLINE_FIND_COEFFS', 'Powell_opt': 'POWELL_OPT', 'Qs_scf': 'QS_SCF', 'Ep_lin_solver': 'EP_LIN_SOLVER'}
| SINGROUP/pycp2k | pycp2k/classes/_each286.py | Python | lgpl-3.0 | 1,114 | 0.001795 |
# -*- coding: utf-8 -*-
"""
The MIT License (MIT)
Copyright (c) 2015-2016 Rapptz
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
"""
import ctypes
import ctypes.util
import array
from .errors import DiscordException
import logging
import sys
import os.path
log = logging.getLogger(__name__)
c_int_ptr = ctypes.POINTER(ctypes.c_int)
c_int16_ptr = ctypes.POINTER(ctypes.c_int16)
c_float_ptr = ctypes.POINTER(ctypes.c_float)
class EncoderStruct(ctypes.Structure):
pass
EncoderStructPtr = ctypes.POINTER(EncoderStruct)
# A list of exported functions.
# The first element is the function name,
# the second is the list of argument types it takes,
# and the third is the result type.
exported_functions = [
('opus_strerror', [ctypes.c_int], ctypes.c_char_p),
('opus_encoder_get_size', [ctypes.c_int], ctypes.c_int),
('opus_encoder_create', [ctypes.c_int, ctypes.c_int, ctypes.c_int, c_int_ptr], EncoderStructPtr),
('opus_encode', [EncoderStructPtr, c_int16_ptr, ctypes.c_int, ctypes.c_char_p, ctypes.c_int32], ctypes.c_int32),
('opus_encoder_ctl', None, ctypes.c_int32),
('opus_encoder_destroy', [EncoderStructPtr], None)
]
def libopus_loader(name):
# create the library...
lib = ctypes.cdll.LoadLibrary(name)
# register the functions...
for item in exported_functions:
try:
func = getattr(lib, item[0])
except Exception as e:
raise e
try:
if item[1]:
func.argtypes = item[1]
func.restype = item[2]
except KeyError:
pass
return lib
try:
if sys.platform == 'win32':
_basedir = os.path.dirname(os.path.abspath(__file__))
_bitness = 'x64' if sys.maxsize > 2**32 else 'x86'
_filename = os.path.join(_basedir, 'bin', 'libopus-0.{}.dll'.format(_bitness))
_lib = libopus_loader(_filename)
else:
_lib = libopus_loader(ctypes.util.find_library('opus'))
except Exception as e:
_lib = None
def load_opus(name):
"""Loads the libopus shared library for use with voice.
If this function is not called then the library uses the function
`ctypes.util.find_library`__ and then loads that one
if available.
.. _find library: https://docs.python.org/3.5/library/ctypes.html#finding-shared-libraries
__ `find library`_
Not loading a library leads to voice not working.
This function propagates the exceptions thrown.
Warning
--------
The bitness of the library must match the bitness of your python
interpreter. If the library is 64-bit then your python interpreter
must be 64-bit as well. Usually if there's a mismatch in bitness then
the load will throw an exception.
Note
----
On Windows, the .dll extension is not necessary. However, on Linux
the full extension is required to load the library, e.g. ``libopus.so.1``.
On Linux however, `find library`_ will usually find the library automatically
without you having to call this.
Parameters
----------
name: str
The filename of the shared library.
"""
global _lib
_lib = libopus_loader(name)
def is_loaded():
"""Function to check if opus lib is successfully loaded either
via the ``ctypes.util.find_library`` call of :func:`load_opus`.
This must return ``True`` for voice to work.
Returns
-------
bool
Indicates if the opus library has been loaded.
"""
global _lib
return _lib is not None
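# Illustrative sketch (not part of the original module): explicitly loading the
# library when auto-detection fails. The filename is an assumption and varies
# by platform and distro.
def _example_manual_load():  # pragma: no cover - documentation only
    if not is_loaded():
        load_opus('libopus.so.0')  # hypothetical filename; adjust for your system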
class OpusError(DiscordException):
"""An exception that is thrown for libopus related errors.
Attributes
----------
code : int
The error code returned.
"""
def __init__(self, code):
self.code = code
msg = _lib.opus_strerror(self.code).decode('utf-8')
log.info('"{}" has happened'.format(msg))
super().__init__(msg)
class OpusNotLoaded(DiscordException):
"""An exception that is thrown for when libopus is not loaded."""
pass
# Some constants...
OK = 0
APPLICATION_AUDIO = 2049
APPLICATION_VOIP = 2048
APPLICATION_LOWDELAY = 2051
CTL_SET_BITRATE = 4002
CTL_SET_BANDWIDTH = 4008
CTL_SET_FEC = 4012
CTL_SET_PLP = 4014
band_ctl = {
'narrow': 1101,
'medium': 1102,
'wide': 1103,
'superwide': 1104,
'full': 1105,
}
class Encoder:
def __init__(self, sampling, channels, application=APPLICATION_AUDIO):
self.sampling_rate = sampling
self.channels = channels
self.application = application
self.frame_length = 20
self.sample_size = 2 * self.channels # (bit_rate / 8) but bit_rate == 16
self.samples_per_frame = int(self.sampling_rate / 1000 * self.frame_length)
self.frame_size = self.samples_per_frame * self.sample_size
if not is_loaded():
raise OpusNotLoaded()
self._state = self._create_state()
self.set_bitrate(128)
self.set_fec(True)
self.set_expected_packet_loss_percent(0.15)
self.set_bandwidth('full')
def __del__(self):
if hasattr(self, '_state'):
_lib.opus_encoder_destroy(self._state)
self._state = None
def _create_state(self):
ret = ctypes.c_int()
result = _lib.opus_encoder_create(self.sampling_rate, self.channels, self.application, ctypes.byref(ret))
if ret.value != 0:
log.info('error has happened in state creation')
raise OpusError(ret.value)
return result
def set_bitrate(self, kbps):
kbps = min(128, max(16, int(kbps)))
ret = _lib.opus_encoder_ctl(self._state, CTL_SET_BITRATE, kbps * 1024)
if ret < 0:
log.info('error has happened in set_bitrate')
raise OpusError(ret)
return kbps
def set_bandwidth(self, req):
if req not in band_ctl:
raise KeyError('%r is not a valid bandwidth setting. Try one of: %s' % (req, ','.join(band_ctl)))
k = band_ctl[req]
ret = _lib.opus_encoder_ctl(self._state, CTL_SET_BANDWIDTH, k)
if ret < 0:
log.info('error has happened in set_bandwidth')
raise OpusError(ret)
def set_fec(self, enabled=True):
ret = _lib.opus_encoder_ctl(self._state, CTL_SET_FEC, 1 if enabled else 0)
if ret < 0:
log.info('error has happened in set_fec')
raise OpusError(ret)
def set_expected_packet_loss_percent(self, percentage):
ret = _lib.opus_encoder_ctl(self._state, CTL_SET_PLP, min(100, max(0, int(percentage * 100))))
if ret < 0:
log.info('error has happened in set_expected_packet_loss_percent')
raise OpusError(ret)
def encode(self, pcm, frame_size):
max_data_bytes = len(pcm)
pcm = ctypes.cast(pcm, c_int16_ptr)
data = (ctypes.c_char * max_data_bytes)()
ret = _lib.opus_encode(self._state, pcm, frame_size, data, max_data_bytes)
if ret < 0:
log.info('error has happened in encode')
raise OpusError(ret)
return array.array('b', data[:ret]).tobytes()
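# Illustrative sketch (not part of the original module): encoding one 20 ms
# frame of 16-bit stereo silence at 48 kHz, matching the defaults set up by the
# Encoder above. Requires libopus to be loaded first.
def _example_encode_silence():  # pragma: no cover - documentation only
    enc = Encoder(48000, 2)
    pcm = b'\x00' * enc.frame_size  # one frame of PCM silence
    return enc.encode(pcm, enc.samples_per_frame)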
| Aurous/Magic-Discord-Bot | discord/opus.py | Python | gpl-3.0 | 8,162 | 0.003798 |
import os
from bzrlib.branch import Branch
from charmhelpers.fetch import (
BaseFetchHandler,
UnhandledSource
)
from charmhelpers.core.host import mkdir
class BzrUrlFetchHandler(BaseFetchHandler):
"""Handler for bazaar branches via generic and lp URLs"""
def can_handle(self, source):
url_parts = self.parse_url(source)
if url_parts.scheme not in ('bzr+ssh', 'lp'):
return False
else:
return True
def branch(self, source, dest):
url_parts = self.parse_url(source)
# If we use lp:branchname scheme we need to load plugins
if not self.can_handle(source):
raise UnhandledSource("Cannot handle {}".format(source))
if url_parts.scheme == "lp":
from bzrlib.plugin import load_plugins
load_plugins()
        remote_branch = Branch.open(source)
        remote_branch.bzrdir.sprout(dest).open_branch()
def install(self, source):
url_parts = self.parse_url(source)
branch_name = url_parts.path.strip("/").split("/")[-1]
dest_dir = os.path.join(os.environ.get('CHARM_DIR'), "fetched", branch_name)
if not os.path.exists(dest_dir):
mkdir(dest_dir, perms=0755)
try:
self.branch(source, dest_dir)
except OSError as e:
raise UnhandledSource(e.strerror)
return dest_dir
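# Illustrative sketch (not part of the original module): fetching a branch
# inside a charm hook. The branch URL is a placeholder, and CHARM_DIR is
# assumed to be set in the environment.
def _example_fetch():  # pragma: no cover - documentation only
    handler = BzrUrlFetchHandler()
    source = 'lp:charm-helpers'
    if handler.can_handle(source):
        return handler.install(source)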
| SaMnCo/charm-dashing | lib/charmhelpers/fetch/bzrurl.py | Python | agpl-3.0 | 1,463 | 0.001367 |
#! /usr/bin/python
# -*- coding: utf-8 -*-
import requests
import wikipedia_template_parser as wtp
from lxml import etree
import re
import json
def templates_including_coords():
LINKSCOORD = "http://it.wikipedia.org/w/index.php?title="\
"Speciale:PuntanoQui/Template:Coord&namespace=10&limit=500"
req = requests.get(LINKSCOORD)
templates = list()
if req.ok:
doc = etree.fromstring(req.text)
tmpl_list = doc.xpath('//div[@id="mw-content-text"]//li/a')
templates = [tmpl.text
for tmpl in tmpl_list
if not tmpl.text.lower().endswith('/man')
]
return templates
def get_parameters(template):
try:
data = wtp.data_from_templates(template+'/man', "it")
except ValueError:
return []
try:
        tabella = [d for d in data
                   if d['name'] == 'TabellaTemplate'][0]
except IndexError:
return []
stringa_parametri = tabella['data']['parametri']
parametri = [p.replace('{{', '').replace('}}', '').split('~')[1]
for p in re.findall("{{[^{}]+}}", stringa_parametri)
]
return parametri
def write(outfile, addfile=None):
templates = templates_including_coords()
twparslist = []
for t in templates:
parametri = get_parameters(t)
twpars = {'name': t.replace('Template:', ''),
'parameters': [p for p in parametri
if 'lat' in p.lower() or 'lon' in p.lower()]
}
twparslist.append(twpars)
# with open(args.outfile, 'wb') as output:
# # Pickle the list using the highest protocol available.
# pickle.dump(twpars, output, -1)
addtwp = None
if addfile is not None:
addtwp = read(addfile)
for t in addtwp:
repeat = [(id_, tmp)
for (id_, tmp) in enumerate(twparslist)
if tmp['name'] == t['name']
]
if repeat:
id_ = repeat[0][0]
twparslist[id_] = t
addtwp.remove(t)
twparslist = twparslist + addtwp
with open(outfile, 'w+') as out:
for twp in twparslist:
out.write('{}\n'.format(json.dumps(twp)))
def read(infile):
with open(infile, 'r') as in_:
twp = [json.loads(l.strip()) for l in in_.readlines()]
return twp
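# Illustrative sketch (not part of the original module): writing the template
# list and reading it back. The path is a placeholder; note that write()
# queries the live it.wikipedia.org API.
def _example_roundtrip():  # pragma: no cover - documentation only
    write('templates_including_coords.txt')
    return read('templates_including_coords.txt')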
def main():
# Options
    text = 'Description'
parser = argparse.ArgumentParser(description=text)
parser.add_argument("-f", "--file",
                        help='Name of the output file for the data '
'[default: '
'./data/wikipedia'
'/coords/templates_with_coords.txt]',
default=os.path.join("data",
"wikipedia",
"coords",
"templates_including_coords.txt"
),
action="store"
)
parser.add_argument("-a", "--add",
                        help='Name of a file with a list of templates '
                             '(used to add some templates "by hand") '
'[default: '
'./data/wikipedia'
'/coords/add_templates_with_coords.txt]',
default=os.path.join("data",
"wikipedia",
"coords",
"add_templates"
"_including_coords.txt"
),
action="store"
)
parser.add_argument("--no-add",
help="Non aggiungere la lista dei template",
dest='no_add',
action="store_true"
)
parser.add_argument("-r", "--read",
help="leggi il file invece di scriverlo",
action="store_true"
)
args = parser.parse_args()
if args.read:
read(args.file)
else:
if args.no_add:
write(args.file)
else:
write(args.file, args.add)
if __name__ == '__main__':
import argparse
import os
main()
| CristianCantoro/wikipedia-tags-in-osm | extract_templates.py | Python | gpl-3.0 | 4,620 | 0.000433 |
if __name__ == '__main__':
import sys
import os
pkg_dir = (os.path.split(
os.path.split(
os.path.split(
os.path.abspath(__file__))[0])[0])[0])
parent_dir, pkg_name = os.path.split(pkg_dir)
is_pygame_pkg = (pkg_name == 'tests' and
os.path.split(parent_dir)[1] == 'pygame')
if not is_pygame_pkg:
sys.path.insert(0, parent_dir)
else:
is_pygame_pkg = __name__.startswith('pygame.tests.')
import unittest
class KeyModuleTest(unittest.TestCase):
def test_get_focused(self):
self.assert_(True)
    def test_get_mods(self):
        # intentional infinite loop: this fake test exercises the test
        # runner's hang detection (note the "infinite_loop" directory name)
        while True:
            pass
def test_get_pressed(self):
self.assert_(True)
def test_name(self):
self.assert_(True)
def test_set_mods(self):
self.assert_(True)
def test_set_repeat(self):
self.assert_(True)
if __name__ == '__main__':
unittest.main()
| bhansa/fireball | pyvenv/Lib/site-packages/pygame/tests/run_tests__tests/infinite_loop/fake_1_test.py | Python | gpl-3.0 | 977 | 0.008188 |
# -*- coding: utf-8 -*-
"""QGIS Unit tests for QgsVectorLayer.
From build dir, run:
ctest -R PyQgsVectorLayer -V
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'Tim Sutton'
__date__ = '20/08/2012'
__copyright__ = 'Copyright 2012, The QGIS Project'
import qgis # NOQA
import os
import tempfile
import shutil
from qgis.PyQt.QtCore import QDate, QDateTime, QVariant, Qt, QDateTime, QDate, QTime
from qgis.PyQt.QtGui import QPainter, QColor
from qgis.PyQt.QtXml import QDomDocument
from qgis.core import (Qgis,
QgsWkbTypes,
QgsAction,
QgsAuxiliaryStorage,
QgsCoordinateTransformContext,
QgsDataProvider,
QgsDefaultValue,
QgsEditorWidgetSetup,
QgsMapLayer,
QgsVectorLayer,
QgsRectangle,
QgsFeature,
QgsFeatureRequest,
QgsGeometry,
QgsPointXY,
QgsField,
QgsFieldConstraints,
QgsFields,
QgsVectorLayerJoinInfo,
QgsSymbol,
QgsSingleSymbolRenderer,
QgsCoordinateReferenceSystem,
QgsVectorLayerCache,
QgsReadWriteContext,
QgsProject,
QgsUnitTypes,
QgsAggregateCalculator,
QgsPoint,
QgsExpressionContext,
QgsExpressionContextScope,
QgsExpressionContextUtils,
QgsLineSymbol,
QgsMapLayerServerProperties,
QgsMapLayerStyle,
QgsMapLayerDependency,
QgsRenderContext,
QgsPalLayerSettings,
QgsVectorLayerSimpleLabeling,
QgsSingleCategoryDiagramRenderer,
QgsDiagramLayerSettings,
QgsTextFormat,
QgsVectorLayerSelectedFeatureSource,
QgsExpression,
QgsLayerMetadata,
NULL)
from qgis.gui import (QgsAttributeTableModel,
QgsGui
)
from qgis.PyQt.QtTest import QSignalSpy
from qgis.testing import start_app, unittest
from featuresourcetestbase import FeatureSourceTestCase
from utilities import unitTestDataPath
TEST_DATA_DIR = unitTestDataPath()
start_app()
def createEmptyLayer():
layer = QgsVectorLayer("Point", "addfeat", "memory")
assert layer.featureCount() == 0
return layer
def createEmptyLayerWithFields():
layer = QgsVectorLayer("Point?field=fldtxt:string&field=fldint:integer", "addfeat", "memory")
assert layer.featureCount() == 0
return layer
def createLayerWithOnePoint():
layer = QgsVectorLayer("Point?field=fldtxt:string&field=fldint:integer",
"addfeat", "memory")
pr = layer.dataProvider()
f = QgsFeature()
f.setAttributes(["test", 123])
f.setGeometry(QgsGeometry.fromPointXY(QgsPointXY(100, 200)))
assert pr.addFeatures([f])
assert layer.featureCount() == 1
return layer
def createLayerWithTwoPoints():
layer = QgsVectorLayer("Point?field=fldtxt:string&field=fldint:integer",
"addfeat", "memory")
pr = layer.dataProvider()
f = QgsFeature()
f.setAttributes(["test", 123])
f.setGeometry(QgsGeometry.fromPointXY(QgsPointXY(100, 200)))
f2 = QgsFeature()
f2.setAttributes(["test2", 457])
f2.setGeometry(QgsGeometry.fromPointXY(QgsPointXY(100, 200)))
assert pr.addFeatures([f, f2])
assert layer.featureCount() == 2
return layer
def createLayerWithFivePoints():
layer = QgsVectorLayer("Point?field=fldtxt:string&field=fldint:integer",
"addfeat", "memory")
pr = layer.dataProvider()
f = QgsFeature()
f.setAttributes(["test", 123])
f.setGeometry(QgsGeometry.fromPointXY(QgsPointXY(100, 200)))
f2 = QgsFeature()
f2.setAttributes(["test2", 457])
f2.setGeometry(QgsGeometry.fromPointXY(QgsPointXY(200, 200)))
f3 = QgsFeature()
f3.setAttributes(["test2", 888])
f3.setGeometry(QgsGeometry.fromPointXY(QgsPointXY(300, 200)))
f4 = QgsFeature()
f4.setAttributes(["test3", -1])
f4.setGeometry(QgsGeometry.fromPointXY(QgsPointXY(400, 300)))
f5 = QgsFeature()
f5.setAttributes(["test4", 0])
f5.setGeometry(QgsGeometry.fromPointXY(QgsPointXY(0, 0)))
assert pr.addFeatures([f, f2, f3, f4, f5])
assert layer.featureCount() == 5
return layer
def createJoinLayer():
joinLayer = QgsVectorLayer(
"Point?field=x:string&field=y:integer&field=z:integer&field=date:datetime",
"joinlayer", "memory")
pr = joinLayer.dataProvider()
f1 = QgsFeature()
f1.setAttributes(["foo", 123, 321, QDateTime(QDate(2010, 1, 1))])
f1.setGeometry(QgsGeometry.fromPointXY(QgsPointXY(1, 1)))
f2 = QgsFeature()
f2.setAttributes(["bar", 456, 654, QDateTime(QDate(2020, 1, 1))])
f2.setGeometry(QgsGeometry.fromPointXY(QgsPointXY(2, 2)))
f3 = QgsFeature()
f3.setAttributes(["qar", 457, 111, None])
f3.setGeometry(QgsGeometry.fromPointXY(QgsPointXY(2, 2)))
f4 = QgsFeature()
f4.setAttributes(["a", 458, 19, QDateTime(QDate(2012, 1, 1))])
f4.setGeometry(QgsGeometry.fromPointXY(QgsPointXY(2, 2)))
assert pr.addFeatures([f1, f2, f3, f4])
assert joinLayer.featureCount() == 4
return joinLayer
def dumpFeature(f):
print("--- FEATURE DUMP ---")
print(("valid: %d | id: %d" % (f.isValid(), f.id())))
geom = f.geometry()
if geom:
print(("geometry wkb: %d" % geom.wkbType()))
else:
print("no geometry")
print(("attrs: %s" % str(f.attributes())))
def formatAttributes(attrs):
return repr([str(a) for a in attrs])
def dumpEditBuffer(layer):
editBuffer = layer.editBuffer()
if not editBuffer:
print("NO EDITING!")
return
print("ADDED:")
for fid, f in editBuffer.addedFeatures().items():
print(("%d: %s | %s" % (
f.id(), formatAttributes(f.attributes()),
f.geometry().asWkt())))
print("CHANGED GEOM:")
for fid, geom in editBuffer.changedGeometries().items():
print(("%d | %s" % (f.id(), f.geometry().asWkt())))
class TestQgsVectorLayer(unittest.TestCase, FeatureSourceTestCase):
@classmethod
def getSource(cls):
vl = QgsVectorLayer(
'Point?crs=epsg:4326&field=pk:integer&field=cnt:integer&field=name:string(0)&field=name2:string(0)&field=num_char:string&field=dt:datetime&field=date:date&field=time:time&key=pk',
'test', 'memory')
assert (vl.isValid())
f1 = QgsFeature()
f1.setAttributes([5, -200, NULL, 'NuLl', '5', QDateTime(QDate(2020, 5, 4), QTime(12, 13, 14)), QDate(2020, 5, 2), QTime(12, 13, 1)])
f1.setGeometry(QgsGeometry.fromWkt('Point (-71.123 78.23)'))
f2 = QgsFeature()
f2.setAttributes([3, 300, 'Pear', 'PEaR', '3', NULL, NULL, NULL])
f3 = QgsFeature()
f3.setAttributes([1, 100, 'Orange', 'oranGe', '1', QDateTime(QDate(2020, 5, 3), QTime(12, 13, 14)), QDate(2020, 5, 3), QTime(12, 13, 14)])
f3.setGeometry(QgsGeometry.fromWkt('Point (-70.332 66.33)'))
f4 = QgsFeature()
f4.setAttributes([2, 200, 'Apple', 'Apple', '2', QDateTime(QDate(2020, 5, 4), QTime(12, 14, 14)), QDate(2020, 5, 4), QTime(12, 14, 14)])
f4.setGeometry(QgsGeometry.fromWkt('Point (-68.2 70.8)'))
f5 = QgsFeature()
f5.setAttributes([4, 400, 'Honey', 'Honey', '4', QDateTime(QDate(2021, 5, 4), QTime(13, 13, 14)), QDate(2021, 5, 4), QTime(13, 13, 14)])
f5.setGeometry(QgsGeometry.fromWkt('Point (-65.32 78.3)'))
vl.dataProvider().addFeatures([f1, f2, f3, f4, f5])
return vl
@classmethod
def setUpClass(cls):
"""Run before all tests"""
QgsGui.editorWidgetRegistry().initEditors()
# Create test layer for FeatureSourceTestCase
cls.source = cls.getSource()
def testGetFeaturesSubsetAttributes2(self):
""" Override and skip this QgsFeatureSource test. We are using a memory provider, and it's actually more efficient for the memory provider to return
its features as direct copies (due to implicit sharing of QgsFeature)
"""
pass
def testGetFeaturesNoGeometry(self):
""" Override and skip this QgsFeatureSource test. We are using a memory provider, and it's actually more efficient for the memory provider to return
its features as direct copies (due to implicit sharing of QgsFeature)
"""
pass
def test_FeatureCount(self):
myPath = os.path.join(unitTestDataPath(), 'lines.shp')
myLayer = QgsVectorLayer(myPath, 'Lines', 'ogr')
myCount = myLayer.featureCount()
self.assertEqual(myCount, 6)
# undo stack
def testUndoStack(self):
layer = createLayerWithOnePoint()
layer.startEditing()
self.assertEqual(layer.undoStack().count(), 0)
self.assertEqual(layer.undoStack().index(), 0)
f = QgsFeature()
f.setAttributes(["test", 123])
f.setGeometry(QgsGeometry.fromPointXY(QgsPointXY(100, 200)))
self.assertTrue(layer.addFeatures([f]))
self.assertEqual(layer.undoStack().count(), 1)
self.assertEqual(layer.undoStack().index(), 1)
self.assertEqual(layer.featureCount(), 2)
layer.undoStack().undo()
self.assertEqual(layer.undoStack().count(), 1)
self.assertEqual(layer.undoStack().index(), 0)
self.assertEqual(layer.featureCount(), 1)
layer.undoStack().redo()
self.assertEqual(layer.undoStack().count(), 1)
self.assertEqual(layer.undoStack().index(), 1)
self.assertEqual(layer.featureCount(), 2)
# macro commands
layer.beginEditCommand("Test command 1")
self.assertTrue(layer.addFeatures([f]))
self.assertTrue(layer.addFeatures([f]))
layer.endEditCommand()
self.assertEqual(layer.undoStack().count(), 2)
self.assertEqual(layer.undoStack().index(), 2)
self.assertEqual(layer.featureCount(), 4)
layer.undoStack().undo()
self.assertEqual(layer.undoStack().count(), 2)
self.assertEqual(layer.undoStack().index(), 1)
self.assertEqual(layer.featureCount(), 2)
layer.undoStack().redo()
self.assertEqual(layer.undoStack().count(), 2)
self.assertEqual(layer.undoStack().index(), 2)
self.assertEqual(layer.featureCount(), 4)
# throw away a macro command
layer.beginEditCommand("Test command 1")
self.assertTrue(layer.addFeatures([f]))
self.assertTrue(layer.addFeatures([f]))
self.assertEqual(layer.featureCount(), 6)
layer.destroyEditCommand()
self.assertEqual(layer.undoStack().count(), 2)
self.assertEqual(layer.undoStack().index(), 2)
self.assertEqual(layer.featureCount(), 4)
def testSetDataSource(self):
"""
Test changing a layer's data source
"""
layer = createLayerWithOnePoint()
layer.setCrs(QgsCoordinateReferenceSystem("epsg:3111"))
r = QgsSingleSymbolRenderer(QgsSymbol.defaultSymbol(QgsWkbTypes.PointGeometry))
layer.setRenderer(r)
self.assertEqual(layer.renderer().symbol().type(), QgsSymbol.Marker)
spy = QSignalSpy(layer.dataSourceChanged)
options = QgsDataProvider.ProviderOptions()
# change with layer of same type
points_path = os.path.join(unitTestDataPath(), 'points.shp')
layer.setDataSource(points_path, 'new name', 'ogr', options)
self.assertTrue(layer.isValid())
self.assertEqual(layer.name(), 'new name')
self.assertEqual(layer.wkbType(), QgsWkbTypes.Point)
self.assertEqual(layer.crs().authid(), 'EPSG:4326')
self.assertIn(points_path, layer.dataProvider().dataSourceUri())
self.assertEqual(len(spy), 1)
# should have kept the same renderer!
self.assertEqual(layer.renderer(), r)
# layer with different type
lines_path = os.path.join(unitTestDataPath(), 'rectangles.shp')
layer.setDataSource(lines_path, 'new name2', 'ogr', options)
self.assertTrue(layer.isValid())
self.assertEqual(layer.name(), 'new name2')
self.assertEqual(layer.wkbType(), QgsWkbTypes.MultiPolygon)
self.assertEqual(layer.crs().authid(), 'EPSG:4326')
self.assertIn(lines_path, layer.dataProvider().dataSourceUri())
self.assertEqual(len(spy), 2)
# should have reset renderer!
self.assertNotEqual(layer.renderer(), r)
self.assertEqual(layer.renderer().symbol().type(), QgsSymbol.Fill)
def testSetDataSourceInvalidToValid(self):
"""
Test that changing an invalid layer path to valid maintains the renderer
"""
layer = createLayerWithOnePoint()
layer.setCrs(QgsCoordinateReferenceSystem("epsg:3111"))
r = QgsSingleSymbolRenderer(QgsSymbol.defaultSymbol(QgsWkbTypes.PointGeometry))
layer.setRenderer(r)
self.assertEqual(layer.renderer().symbol().type(), QgsSymbol.Marker)
# change to invalid path
options = QgsDataProvider.ProviderOptions()
layer.setDataSource('nothing', 'new name', 'ogr', options)
self.assertFalse(layer.isValid())
# these properties should be kept intact!
self.assertEqual(layer.name(), 'new name')
self.assertEqual(layer.wkbType(), QgsWkbTypes.Point)
self.assertEqual(layer.crs().authid(), 'EPSG:3111')
# should have kept the same renderer!
self.assertEqual(layer.renderer(), r)
# set to a valid path
points_path = os.path.join(unitTestDataPath(), 'points.shp')
layer.setDataSource(points_path, 'new name2', 'ogr', options)
self.assertTrue(layer.isValid())
self.assertEqual(layer.name(), 'new name2')
self.assertEqual(layer.wkbType(), QgsWkbTypes.Point)
self.assertEqual(layer.crs().authid(), 'EPSG:4326')
self.assertIn(points_path, layer.dataProvider().dataSourceUri())
# should STILL have kept renderer!
self.assertEqual(layer.renderer(), r)
def testSetCustomProperty(self):
"""
Test setting a custom property of the layer
"""
layer = createLayerWithOnePoint()
layer.setCustomProperty('Key_0', 'Value_0')
layer.setCustomProperty('Key_1', 'Value_1')
spy = QSignalSpy(layer.customPropertyChanged)
# change nothing by setting the same value
layer.setCustomProperty('Key_0', 'Value_0')
layer.setCustomProperty('Key_1', 'Value_1')
self.assertEqual(len(spy), 0)
# change one
layer.setCustomProperty('Key_0', 'Value zero')
self.assertEqual(len(spy), 1)
# add one
layer.setCustomProperty('Key_2', 'Value two')
self.assertEqual(len(spy), 2)
# add a null one and an empty one
layer.setCustomProperty('Key_3', None)
layer.setCustomProperty('Key_4', '')
self.assertEqual(len(spy), 4)
# remove one
layer.removeCustomProperty('Key_0')
self.assertEqual(len(spy), 5)
self.assertEqual(layer.customProperty('Key_0', 'no value'), 'no value')
self.assertEqual(layer.customProperty('Key_1', 'no value'), 'Value_1')
self.assertEqual(layer.customProperty('Key_2', 'no value'), 'Value two')
self.assertEqual(layer.customProperty('Key_3', 'no value'), None)
self.assertEqual(layer.customProperty('Key_4', 'no value'), '')
self.assertEqual(len(spy), 5)
def testStoreWkbTypeInvalidLayers(self):
"""
Test that layer wkb types are restored for projects with invalid layer paths
"""
layer = createLayerWithOnePoint()
layer.setName('my test layer')
r = QgsSingleSymbolRenderer(QgsSymbol.defaultSymbol(QgsWkbTypes.PointGeometry))
r.symbol().setColor(QColor('#123456'))
layer.setRenderer(r)
self.assertEqual(layer.renderer().symbol().color().name(), '#123456')
p = QgsProject()
p.addMapLayer(layer)
# reset layer to a bad path
options = QgsDataProvider.ProviderOptions()
layer.setDataSource('nothing', 'new name', 'ogr', options)
# should have kept the same renderer and wkb type!
self.assertEqual(layer.wkbType(), QgsWkbTypes.Point)
self.assertEqual(layer.renderer().symbol().color().name(), '#123456')
# save project to a temporary file
temp_path = tempfile.mkdtemp()
temp_project_path = os.path.join(temp_path, 'temp.qgs')
self.assertTrue(p.write(temp_project_path))
# restore project
p2 = QgsProject()
self.assertTrue(p2.read(temp_project_path))
l2 = p2.mapLayersByName('new name')[0]
self.assertFalse(l2.isValid())
# should have kept the same renderer and wkb type!
self.assertEqual(l2.wkbType(), QgsWkbTypes.Point)
self.assertEqual(l2.renderer().symbol().color().name(), '#123456')
shutil.rmtree(temp_path, True)
def testFallbackCrsWkbType(self):
"""
Test fallback CRS and WKB types are used when layer path is invalid
"""
vl = QgsVectorLayer('this is an outrage!!!')
self.assertFalse(vl.isValid()) # I'd certainly hope so...
self.assertEqual(vl.wkbType(), QgsWkbTypes.Unknown)
self.assertFalse(vl.crs().isValid())
# with fallback
options = QgsVectorLayer.LayerOptions()
options.fallbackWkbType = QgsWkbTypes.CircularString
options.fallbackCrs = QgsCoordinateReferenceSystem.fromEpsgId(3111)
vl = QgsVectorLayer("i'm the moon", options=options)
self.assertFalse(vl.isValid())
self.assertEqual(vl.wkbType(), QgsWkbTypes.CircularString)
self.assertEqual(vl.crs().authid(), 'EPSG:3111')
def test_layer_crs(self):
"""
Test that spatial layers have CRS, and non-spatial don't
"""
vl = QgsVectorLayer('Point?crs=epsg:3111&field=pk:integer', 'test', 'memory')
self.assertTrue(vl.isSpatial())
self.assertTrue(vl.crs().isValid())
self.assertEqual(vl.crs().authid(), 'EPSG:3111')
vl = QgsVectorLayer('None?field=pk:integer', 'test', 'memory')
self.assertFalse(vl.isSpatial())
self.assertFalse(vl.crs().isValid())
# even if provider has a crs - we don't respect it for non-spatial layers!
vl = QgsVectorLayer('None?crs=epsg:3111&field=pk:integer', 'test', 'memory')
self.assertFalse(vl.isSpatial())
self.assertFalse(vl.crs().isValid())
def test_wgs84Extent(self):
# We use this particular shapefile because we need a layer with an
# epsg != 4326
p = os.path.join(unitTestDataPath(), 'bug5598.shp')
vl0 = QgsVectorLayer(p, 'test', 'ogr')
extent = vl0.extent()
wgs84_extent = vl0.wgs84Extent()
# write xml document where the wgs84 extent will be stored
doc = QDomDocument("testdoc")
elem = doc.createElement("maplayer")
self.assertTrue(vl0.writeLayerXml(elem, doc, QgsReadWriteContext()))
# create a 2nd layer and read the xml document WITHOUT trust
vl1 = QgsVectorLayer()
flags = QgsMapLayer.ReadFlags()
vl1.readLayerXml(elem, QgsReadWriteContext(), flags)
self.assertTrue(extent == vl1.extent())
self.assertTrue(wgs84_extent == vl1.wgs84Extent())
# we add a feature and check that the original extent has been
# updated (the extent is bigger with the new feature)
vl1.startEditing()
f = QgsFeature()
f.setAttributes([0, "", "", 0.0, 0.0, 0.0, 0.0])
f.setGeometry(QgsGeometry.fromPolygonXY([[QgsPointXY(2484588, 2425732), QgsPointXY(2482767, 2398853),
QgsPointXY(2520109, 2397715), QgsPointXY(2520792, 2425494),
QgsPointXY(2484588, 2425732)]]))
vl1.addFeature(f)
vl1.updateExtents()
self.assertTrue(extent != vl1.extent())
# trust is not activated so the wgs84 extent is updated
# accordingly
self.assertTrue(wgs84_extent != vl1.wgs84Extent())
vl1.rollBack()
# create a 3rd layer and read the xml document WITH trust
vl2 = QgsVectorLayer()
flags = QgsMapLayer.ReadFlags()
flags |= QgsMapLayer.FlagTrustLayerMetadata
vl2.readLayerXml(elem, QgsReadWriteContext(), flags)
self.assertTrue(extent == vl2.extent())
self.assertTrue(wgs84_extent == vl2.wgs84Extent())
# we add a feature and check that the original extent has been
# updated (the extent is bigger with the new feature)
vl2.startEditing()
f = QgsFeature()
f.setAttributes([0, "", "", 0.0, 0.0, 0.0, 0.0])
f.setGeometry(QgsGeometry.fromPolygonXY([[QgsPointXY(2484588, 2425732), QgsPointXY(2482767, 2398853),
QgsPointXY(2520109, 2397715), QgsPointXY(2520792, 2425494),
QgsPointXY(2484588, 2425732)]]))
vl2.addFeature(f)
vl2.updateExtents()
self.assertTrue(extent != vl2.extent())
# trust is activated so the wgs84 extent is not updated
self.assertTrue(wgs84_extent == vl2.wgs84Extent())
# but we can still retrieve the current wgs84 xtent with the force
# parameter
self.assertTrue(wgs84_extent != vl2.wgs84Extent(True))
vl2.rollBack()
# ADD FEATURE
def test_AddFeature(self):
layer = createEmptyLayerWithFields()
feat = QgsFeature(layer.fields())
feat.setGeometry(QgsGeometry.fromPointXY(QgsPointXY(1, 2)))
def checkAfter():
self.assertEqual(layer.featureCount(), 1)
# check select+nextFeature
f = next(layer.getFeatures())
self.assertEqual(f.geometry().asPoint(), QgsPointXY(1, 2))
# check feature at id
f2 = next(layer.getFeatures(QgsFeatureRequest(f.id())))
self.assertEqual(f2.geometry().asPoint(), QgsPointXY(1, 2))
def checkBefore():
self.assertEqual(layer.featureCount(), 0)
# check select+nextFeature
with self.assertRaises(StopIteration):
next(layer.getFeatures())
checkBefore()
# try to add feature without editing mode
self.assertFalse(layer.addFeature(feat))
# add feature
layer.startEditing()
# try adding feature with incorrect number of fields
bad_feature = QgsFeature()
self.assertFalse(layer.addFeature(bad_feature))
# add good feature
self.assertTrue(layer.addFeature(feat))
checkAfter()
self.assertEqual(layer.dataProvider().featureCount(), 0)
# now try undo/redo
layer.undoStack().undo()
checkBefore()
layer.undoStack().redo()
checkAfter()
self.assertTrue(layer.commitChanges())
checkAfter()
self.assertEqual(layer.dataProvider().featureCount(), 1)
# ADD FEATURES
def test_AddFeatures(self):
layer = createEmptyLayerWithFields()
feat1 = QgsFeature(layer.fields())
feat1.setGeometry(QgsGeometry.fromPointXY(QgsPointXY(1, 2)))
feat2 = QgsFeature(layer.fields())
feat2.setGeometry(QgsGeometry.fromPointXY(QgsPointXY(11, 12)))
def checkAfter():
self.assertEqual(layer.featureCount(), 2)
# check select+nextFeature
it = layer.getFeatures()
f1 = next(it)
self.assertEqual(f1.geometry().asPoint(), QgsPointXY(1, 2))
f2 = next(it)
self.assertEqual(f2.geometry().asPoint(), QgsPointXY(11, 12))
# check feature at id
f1_1 = next(layer.getFeatures(QgsFeatureRequest(f1.id())))
self.assertEqual(f1_1.geometry().asPoint(), QgsPointXY(1, 2))
f2_1 = next(layer.getFeatures(QgsFeatureRequest(f2.id())))
self.assertEqual(f2_1.geometry().asPoint(), QgsPointXY(11, 12))
def checkBefore():
self.assertEqual(layer.featureCount(), 0)
# check select+nextFeature
with self.assertRaises(StopIteration):
next(layer.getFeatures())
checkBefore()
# try to add feature without editing mode
self.assertFalse(layer.addFeatures([feat1, feat2]))
# add feature
layer.startEditing()
# try adding feature with incorrect number of fields
bad_feature = QgsFeature()
self.assertFalse(layer.addFeatures([bad_feature]))
# add good features
self.assertTrue(layer.addFeatures([feat1, feat2]))
checkAfter()
self.assertEqual(layer.dataProvider().featureCount(), 0)
# now try undo/redo
layer.undoStack().undo()
layer.undoStack().undo()
checkBefore()
layer.undoStack().redo()
layer.undoStack().redo()
checkAfter()
self.assertTrue(layer.commitChanges())
checkAfter()
self.assertEqual(layer.dataProvider().featureCount(), 2)
# DELETE FEATURE
def test_DeleteFeature(self):
layer = createLayerWithOnePoint()
fid = 1
def checkAfter():
self.assertEqual(layer.featureCount(), 0)
# check select+nextFeature
with self.assertRaises(StopIteration):
next(layer.getFeatures())
# check feature at id
with self.assertRaises(StopIteration):
next(layer.getFeatures(QgsFeatureRequest(fid)))
def checkBefore():
self.assertEqual(layer.featureCount(), 1)
# check select+nextFeature
fi = layer.getFeatures()
f = next(fi)
self.assertEqual(f.geometry().asPoint(), QgsPointXY(100, 200))
with self.assertRaises(StopIteration):
next(fi)
# check feature at id
f2 = next(layer.getFeatures(QgsFeatureRequest(fid)))
self.assertEqual(f2.id(), fid)
checkBefore()
# try to delete feature without editing mode
self.assertFalse(layer.deleteFeature(fid))
# delete feature
layer.startEditing()
self.assertTrue(layer.deleteFeature(fid))
checkAfter()
# make sure calling it twice does not work
self.assertFalse(layer.deleteFeature(fid))
# now try undo/redo
layer.undoStack().undo()
checkBefore()
layer.undoStack().redo()
checkAfter()
self.assertEqual(layer.dataProvider().featureCount(), 1)
self.assertTrue(layer.commitChanges())
checkAfter()
self.assertEqual(layer.dataProvider().featureCount(), 0)
def test_DeleteFeatureAfterAddFeature(self):
layer = createEmptyLayer()
feat = QgsFeature()
feat.setGeometry(QgsGeometry.fromPointXY(QgsPointXY(1, 2)))
def checkBefore():
self.assertEqual(layer.featureCount(), 0)
# check select+nextFeature
with self.assertRaises(StopIteration):
next(layer.getFeatures())
def checkAfter1():
self.assertEqual(layer.featureCount(), 1)
def checkAfter2():
checkBefore() # should be the same state: no features
checkBefore()
# add feature
layer.startEditing()
self.assertTrue(layer.addFeature(feat))
checkAfter1()
fid = feat.id()
self.assertTrue(layer.deleteFeature(fid))
checkAfter2()
# now try undo/redo
layer.undoStack().undo()
checkAfter1()
layer.undoStack().undo()
checkBefore()
layer.undoStack().redo()
checkAfter1()
layer.undoStack().redo()
checkAfter2()
self.assertTrue(layer.commitChanges())
checkAfter2()
self.assertEqual(layer.dataProvider().featureCount(), 0)
def test_DeleteJoinedFeature(self):
joinLayer = createJoinLayer()
joinLayer2 = createJoinLayer()
QgsProject.instance().addMapLayers([joinLayer, joinLayer2])
layer = createLayerWithOnePoint()
join = QgsVectorLayerJoinInfo()
join.setTargetFieldName("fldint")
join.setJoinLayer(joinLayer)
join.setJoinFieldName("y")
join.setUsingMemoryCache(True)
join.setEditable(True)
join.setCascadedDelete(True)
layer.addJoin(join)
join2 = QgsVectorLayerJoinInfo()
join2.setTargetFieldName("fldint")
join2.setJoinLayer(joinLayer2)
join2.setJoinFieldName("y")
join2.setUsingMemoryCache(True)
join2.setPrefix("custom-prefix_")
join2.setEditable(True)
join2.setCascadedDelete(False)
layer.addJoin(join2)
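# the first join cascades deletes to its join layer, the second does not; the assertions below check both behaviours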
# check number of features
self.assertEqual(layer.featureCount(), 1)
self.assertEqual(joinLayer.featureCount(), 4)
self.assertEqual(joinLayer2.featureCount(), 4)
# delete a feature which is also in joined layers
layer.startEditing()
joinLayer.startEditing()
joinLayer2.startEditing()
filter_expression = QgsExpression.createFieldEqualityExpression('fldint', '123')
feature = next(layer.getFeatures(QgsFeatureRequest().setFilterExpression(filter_expression)))
layer.deleteFeature(feature.id())
# check number of features
self.assertEqual(layer.featureCount(), 0)
self.assertEqual(joinLayer.featureCount(), 3) # deleteCascade activated
self.assertEqual(joinLayer2.featureCount(), 4) # deleteCascade deactivated
# CHANGE ATTRIBUTE
def test_ChangeAttribute(self):
layer = createLayerWithOnePoint()
fid = 1
def checkAfter():
# check select+nextFeature
fi = layer.getFeatures()
f = next(fi)
self.assertEqual(f[0], "good")
# check feature at id
f2 = next(layer.getFeatures(QgsFeatureRequest(f.id())))
self.assertEqual(f2[0], "good")
def checkBefore():
# check select+nextFeature
f = next(layer.getFeatures())
self.assertEqual(f[0], "test")
checkBefore()
# try to change attribute without editing mode
self.assertFalse(layer.changeAttributeValue(fid, 0, "good"))
# change attribute
layer.startEditing()
self.assertTrue(layer.changeAttributeValue(fid, 0, "good"))
checkAfter()
# now try undo/redo
layer.undoStack().undo()
checkBefore()
layer.undoStack().redo()
checkAfter()
self.assertTrue(layer.commitChanges())
checkAfter()
def test_ChangeAttributeAfterAddFeature(self):
layer = createLayerWithOnePoint()
layer.dataProvider().deleteFeatures([1]) # no need for this feature
newF = QgsFeature()
newF.setGeometry(QgsGeometry.fromPointXY(QgsPointXY(1, 1)))
newF.setAttributes(["hello", 42])
def checkAfter():
self.assertEqual(len(layer.fields()), 2)
# check feature
fi = layer.getFeatures()
f = next(fi)
attrs = f.attributes()
self.assertEqual(len(attrs), 2)
self.assertEqual(attrs[0], "hello")
self.assertEqual(attrs[1], 12)
with self.assertRaises(StopIteration):
next(fi)
# check feature at id
f2 = next(layer.getFeatures(QgsFeatureRequest(f.id())))
self.assertEqual(f2[0], "hello")
self.assertEqual(f2[1], 12)
def checkBefore():
# check feature
with self.assertRaises(StopIteration):
next(layer.getFeatures())
checkBefore()
layer.startEditing()
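# group both edits into a single edit command so undo/redo treats them as one atomic step
# (a single undo below reverts both operations)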
layer.beginEditCommand("AddFeature + ChangeAttribute")
self.assertTrue(layer.addFeature(newF))
self.assertTrue(layer.changeAttributeValue(newF.id(), 1, 12))
layer.endEditCommand()
checkAfter()
# now try undo/redo
layer.undoStack().undo()
checkBefore()
layer.undoStack().redo()
checkAfter()
self.assertTrue(layer.commitChanges())
checkAfter()
# print "COMMIT ERRORS:"
# for item in list(layer.commitErrors()): print item
# CHANGE GEOMETRY
def test_ChangeGeometry(self):
layer = createLayerWithOnePoint()
fid = 1
def checkAfter():
# check select+nextFeature
f = next(layer.getFeatures())
self.assertEqual(f.geometry().asPoint(), QgsPointXY(300, 400))
# check feature at id
f2 = next(layer.getFeatures(QgsFeatureRequest(f.id())))
self.assertEqual(f2.geometry().asPoint(), QgsPointXY(300, 400))
def checkBefore():
# check select+nextFeature
f = next(layer.getFeatures())
self.assertEqual(f.geometry().asPoint(), QgsPointXY(100, 200))
# try to change geometry without editing mode
self.assertFalse(layer.changeGeometry(fid, QgsGeometry.fromPointXY(QgsPointXY(300, 400))))
checkBefore()
# change geometry
layer.startEditing()
layer.beginEditCommand("ChangeGeometry")
self.assertTrue(layer.changeGeometry(fid, QgsGeometry.fromPointXY(QgsPointXY(300, 400))))
layer.endEditCommand()
checkAfter()
# now try undo/redo
layer.undoStack().undo()
checkBefore()
layer.undoStack().redo()
checkAfter()
self.assertTrue(layer.commitChanges())
checkAfter()
def test_ChangeGeometryAfterChangeAttribute(self):
layer = createLayerWithOnePoint()
fid = 1
def checkAfter():
# check select+nextFeature
f = next(layer.getFeatures())
self.assertEqual(f.geometry().asPoint(), QgsPointXY(300, 400))
self.assertEqual(f[0], "changed")
# check feature at id
f2 = next(layer.getFeatures(QgsFeatureRequest(f.id())))
self.assertEqual(f2.geometry().asPoint(), QgsPointXY(300, 400))
self.assertEqual(f2[0], "changed")
def checkBefore():
# check select+nextFeature
f = next(layer.getFeatures())
self.assertEqual(f.geometry().asPoint(), QgsPointXY(100, 200))
self.assertEqual(f[0], "test")
checkBefore()
# change geometry
layer.startEditing()
layer.beginEditCommand("ChangeGeometry + ChangeAttribute")
self.assertTrue(layer.changeAttributeValue(fid, 0, "changed"))
self.assertTrue(layer.changeGeometry(fid, QgsGeometry.fromPointXY(QgsPointXY(300, 400))))
layer.endEditCommand()
checkAfter()
# now try undo/redo
layer.undoStack().undo()
checkBefore()
layer.undoStack().redo()
checkAfter()
self.assertTrue(layer.commitChanges())
checkAfter()
def test_ChangeGeometryAfterAddFeature(self):
layer = createLayerWithOnePoint()
layer.dataProvider().deleteFeatures([1]) # no need for this feature
newF = QgsFeature()
newF.setGeometry(QgsGeometry.fromPointXY(QgsPointXY(1, 1)))
newF.setAttributes(["hello", 42])
def checkAfter():
self.assertEqual(len(layer.fields()), 2)
# check feature
f = next(layer.getFeatures())
self.assertEqual(f.geometry().asPoint(), QgsPointXY(2, 2))
# check feature at id
f2 = next(layer.getFeatures(QgsFeatureRequest(f.id())))
self.assertEqual(f2.geometry().asPoint(), QgsPointXY(2, 2))
def checkBefore():
# check feature
with self.assertRaises(StopIteration):
next(layer.getFeatures())
checkBefore()
layer.startEditing()
layer.beginEditCommand("AddFeature+ChangeGeometry")
self.assertTrue(layer.addFeature(newF))
self.assertTrue(layer.changeGeometry(newF.id(), QgsGeometry.fromPointXY(QgsPointXY(2, 2))))
layer.endEditCommand()
checkAfter()
# now try undo/redo
layer.undoStack().undo()
checkBefore()
layer.undoStack().redo()
checkAfter()
self.assertTrue(layer.commitChanges())
checkAfter()
# print "COMMIT ERRORS:"
# for item in list(layer.commitErrors()): print item
# updateFeature
def testUpdateFeature(self):
layer = createLayerWithFivePoints()
features = list(layer.getFeatures())
# try to change feature without editing mode
self.assertFalse(layer.updateFeature(features[0]))
layer.startEditing()
# no matching feature
f = QgsFeature(1123)
self.assertFalse(layer.updateFeature(f))
# change geometry and attributes
f = features[0]
f.setAttributes(['new', 321])
f.setGeometry(QgsGeometry.fromPointXY(QgsPointXY(-200, -200)))
self.assertTrue(layer.updateFeature(f))
new_feature = next(layer.getFeatures(QgsFeatureRequest(f.id())))
self.assertEqual(new_feature.attributes(), ['new', 321])
self.assertEqual(new_feature.geometry().asPoint(), QgsPointXY(-200, -200))
# add feature with no geometry
f6 = QgsFeature()
f6.setAttributes(["test6", 555])
self.assertTrue(layer.dataProvider().addFeatures([f6]))
features = list(layer.getFeatures())
# update feature with no geometry -> have geometry
f = features[-1]
f.setGeometry(QgsGeometry.fromPointXY(QgsPointXY(-350, -250)))
self.assertTrue(layer.updateFeature(f))
new_feature = next(layer.getFeatures(QgsFeatureRequest(f.id())))
self.assertEqual(new_feature.attributes(), ['test6', 555])
self.assertTrue(new_feature.hasGeometry())
self.assertEqual(new_feature.geometry().asPoint(), QgsPointXY(-350, -250))
# update feature from geometry -> no geometry
f = features[1]
f.clearGeometry()
self.assertTrue(layer.updateFeature(f))
new_feature = next(layer.getFeatures(QgsFeatureRequest(f.id())))
self.assertEqual(new_feature.attributes(), ['test2', 457])
self.assertFalse(new_feature.hasGeometry())
# ADD ATTRIBUTE
def test_AddAttribute(self):
layer = createLayerWithOnePoint()
fld1 = QgsField("fld1", QVariant.Int, "integer")
def checkBefore():
# check fields
flds = layer.fields()
self.assertEqual(len(flds), 2)
self.assertEqual(flds[0].name(), "fldtxt")
self.assertEqual(flds[1].name(), "fldint")
# check feature
f = next(layer.getFeatures())
attrs = f.attributes()
self.assertEqual(len(attrs), 2)
self.assertEqual(attrs[0], "test")
self.assertEqual(attrs[1], 123)
def checkAfter():
# check fields
flds = layer.fields()
self.assertEqual(len(flds), 3)
self.assertEqual(flds[0].name(), "fldtxt")
self.assertEqual(flds[1].name(), "fldint")
self.assertEqual(flds[2].name(), "fld1")
# check feature
f = next(layer.getFeatures())
attrs = f.attributes()
self.assertEqual(len(attrs), 3)
self.assertEqual(attrs[0], "test")
self.assertEqual(attrs[1], 123)
self.assertIsNone(attrs[2])
# check feature at id
f2 = next(layer.getFeatures(QgsFeatureRequest(f.id())))
self.assertEqual(f2[0], "test")
self.assertEqual(f2[1], 123)
self.assertIsNone(f2[2])
# for nt in layer.dataProvider().nativeTypes():
#     print(nt.mTypeDesc, nt.mTypeName, nt.mType, nt.mMinLen,
#           nt.mMaxLen, nt.mMinPrec, nt.mMaxPrec)
self.assertTrue(layer.dataProvider().supportedType(fld1))
# without editing mode
self.assertFalse(layer.addAttribute(fld1))
layer.startEditing()
checkBefore()
self.assertTrue(layer.addAttribute(fld1))
checkAfter()
# now try undo/redo
layer.undoStack().undo()
checkBefore()
layer.undoStack().redo()
checkAfter()
layer.commitChanges()
checkAfter()
def test_AddAttributeAfterAddFeature(self):
layer = createLayerWithOnePoint()
layer.dataProvider().deleteFeatures([1]) # no need for this feature
newF = QgsFeature()
newF.setGeometry(QgsGeometry.fromPointXY(QgsPointXY(1, 1)))
newF.setAttributes(["hello", 42])
fld1 = QgsField("fld1", QVariant.Int, "integer")
def checkBefore():
self.assertEqual(len(layer.fields()), 2)
# check feature
with self.assertRaises(StopIteration):
next(layer.getFeatures())
def checkAfter():
self.assertEqual(len(layer.fields()), 3)
# check feature
f = next(layer.getFeatures())
attrs = f.attributes()
self.assertEqual(len(attrs), 3)
self.assertEqual(attrs[0], "hello")
self.assertEqual(attrs[1], 42)
self.assertIsNone(attrs[2])
# check feature at id
f2 = next(layer.getFeatures(QgsFeatureRequest(f.id())))
self.assertEqual(f2[0], "hello")
self.assertEqual(f2[1], 42)
self.assertIsNone(f2[2])
layer.startEditing()
checkBefore()
layer.beginEditCommand("AddFeature + AddAttribute")
self.assertTrue(layer.addFeature(newF))
self.assertTrue(layer.addAttribute(fld1))
layer.endEditCommand()
checkAfter()
# now try undo/redo
layer.undoStack().undo()
checkBefore()
layer.undoStack().redo()
checkAfter()
layer.commitChanges()
checkAfter()
# print "COMMIT ERRORS:"
# for item in list(layer.commitErrors()): print item
def test_AddAttributeAfterChangeValue(self):
pass  # nothing specific to test here
def test_AddAttributeAfterDeleteAttribute(self):
pass  # TODO: this combination would be worth testing
# DELETE ATTRIBUTE
def test_DeleteAttribute(self):
layer = createLayerWithOnePoint()
layer.dataProvider().addAttributes(
[QgsField("flddouble", QVariant.Double, "double")])
layer.dataProvider().changeAttributeValues(
{1: {2: 5.5}})
# without editing mode
self.assertFalse(layer.deleteAttribute(0))
def checkBefore():
flds = layer.fields()
self.assertEqual(len(flds), 3)
self.assertEqual(flds[0].name(), "fldtxt")
self.assertEqual(flds[1].name(), "fldint")
self.assertEqual(flds[2].name(), "flddouble")
f = next(layer.getFeatures())
attrs = f.attributes()
self.assertEqual(len(attrs), 3)
self.assertEqual(attrs[0], "test")
self.assertEqual(attrs[1], 123)
self.assertEqual(attrs[2], 5.5)
layer.startEditing()
checkBefore()
self.assertTrue(layer.deleteAttribute(0))
def checkAfterOneDelete():
flds = layer.fields()
# for fld in flds: print("FLD", fld.name())
self.assertEqual(len(flds), 2)
self.assertEqual(flds[0].name(), "fldint")
self.assertEqual(flds[1].name(), "flddouble")
self.assertEqual(layer.attributeList(), [0, 1])
f = next(layer.getFeatures())
attrs = f.attributes()
self.assertEqual(len(attrs), 2)
self.assertEqual(attrs[0], 123)
self.assertEqual(attrs[1], 5.5)
checkAfterOneDelete()
# delete last attribute
self.assertTrue(layer.deleteAttribute(0))
def checkAfterTwoDeletes():
self.assertEqual(layer.attributeList(), [0])
flds = layer.fields()
# for fld in flds: print("FLD", fld.name())
self.assertEqual(len(flds), 1)
self.assertEqual(flds[0].name(), "flddouble")
f = next(layer.getFeatures())
attrs = f.attributes()
self.assertEqual(len(attrs), 1)
self.assertEqual(attrs[0], 5.5)
# check feature at id
f2 = next(layer.getFeatures(QgsFeatureRequest(f.id())))
self.assertEqual(len(f2.attributes()), 1)
self.assertEqual(f2[0], 5.5)
checkAfterTwoDeletes()
layer.undoStack().undo()
checkAfterOneDelete()
layer.undoStack().undo()
checkBefore()
layer.undoStack().redo()
checkAfterOneDelete()
layer.undoStack().redo()
checkAfterTwoDeletes()
self.assertTrue(layer.commitChanges()) # COMMIT!
checkAfterTwoDeletes()
def test_DeleteAttributeAfterAddAttribute(self):
layer = createLayerWithOnePoint()
fld1 = QgsField("fld1", QVariant.Int, "integer")
def checkAfter(): # layer should be unchanged
flds = layer.fields()
self.assertEqual(len(flds), 2)
self.assertEqual(flds[0].name(), "fldtxt")
self.assertEqual(flds[1].name(), "fldint")
# check feature
f = next(layer.getFeatures())
attrs = f.attributes()
self.assertEqual(len(attrs), 2)
self.assertEqual(attrs[0], "test")
self.assertEqual(attrs[1], 123)
# check feature at id
f2 = next(layer.getFeatures(QgsFeatureRequest(f.id())))
self.assertEqual(len(f2.attributes()), 2)
self.assertEqual(f2[0], "test")
self.assertEqual(f2[1], 123)
checkAfter()
layer.startEditing()
layer.beginEditCommand("AddAttribute + DeleteAttribute")
self.assertTrue(layer.addAttribute(fld1))
self.assertTrue(layer.deleteAttribute(2))
layer.endEditCommand()
checkAfter()
# now try undo/redo
layer.undoStack().undo()
checkAfter()
layer.undoStack().redo()
checkAfter()
layer.commitChanges()
checkAfter()
def test_DeleteAttributeAfterAddFeature(self):
layer = createLayerWithOnePoint()
layer.dataProvider().deleteFeatures([1]) # no need for this feature
newF = QgsFeature()
newF.setGeometry(QgsGeometry.fromPointXY(QgsPointXY(1, 1)))
newF.setAttributes(["hello", 42])
def checkBefore():
self.assertEqual(len(layer.fields()), 2)
# check feature
with self.assertRaises(StopIteration):
next(layer.getFeatures())
def checkAfter1():
self.assertEqual(len(layer.fields()), 2)
# check feature
f = next(layer.getFeatures())
attrs = f.attributes()
self.assertEqual(len(attrs), 2)
self.assertEqual(attrs[0], "hello")
self.assertEqual(attrs[1], 42)
def checkAfter2():
self.assertEqual(len(layer.fields()), 1)
# check feature
f = next(layer.getFeatures())
attrs = f.attributes()
self.assertEqual(len(attrs), 1)
self.assertEqual(attrs[0], 42)
layer.startEditing()
checkBefore()
layer.addFeature(newF)
checkAfter1()
layer.deleteAttribute(0)
checkAfter2()
# now try undo/redo
layer.undoStack().undo()
checkAfter1()
layer.undoStack().undo()
checkBefore()
layer.undoStack().redo()
checkAfter1()
layer.undoStack().redo()
checkAfter2()
layer.commitChanges()
checkAfter2()
def test_DeleteAttributeAfterChangeValue(self):
layer = createLayerWithOnePoint()
def checkBefore():
# check feature
f = next(layer.getFeatures())
attrs = f.attributes()
self.assertEqual(len(attrs), 2)
self.assertEqual(attrs[0], "test")
self.assertEqual(attrs[1], 123)
def checkAfter1():
# check feature
f = next(layer.getFeatures())
attrs = f.attributes()
self.assertEqual(len(attrs), 2)
self.assertEqual(attrs[0], "changed")
self.assertEqual(attrs[1], 123)
def checkAfter2():
# check feature
f = next(layer.getFeatures())
attrs = f.attributes()
self.assertEqual(len(attrs), 1)
self.assertEqual(attrs[0], 123)
layer.startEditing()
checkBefore()
self.assertTrue(layer.changeAttributeValue(1, 0, "changed"))
checkAfter1()
self.assertTrue(layer.deleteAttribute(0))
checkAfter2()
# now try undo/redo
layer.undoStack().undo()
checkAfter1()
layer.undoStack().undo()
checkBefore()
layer.undoStack().redo()
checkAfter1()
layer.undoStack().redo()
checkAfter2()
layer.commitChanges()
checkAfter2()
# RENAME ATTRIBUTE
def test_RenameAttribute(self):
layer = createLayerWithOnePoint()
# without editing mode
self.assertFalse(layer.renameAttribute(0, 'renamed'))
def checkFieldNames(names):
flds = layer.fields()
f = next(layer.getFeatures())
self.assertEqual(flds.count(), len(names))
self.assertEqual(f.fields().count(), len(names))
for idx, expected_name in enumerate(names):
self.assertEqual(flds[idx].name(), expected_name)
self.assertEqual(f.fields().at(idx).name(), expected_name)
layer.startEditing()
checkFieldNames(['fldtxt', 'fldint'])
self.assertFalse(layer.renameAttribute(-1, 'fldtxt2'))
self.assertFalse(layer.renameAttribute(10, 'fldtxt2'))
self.assertFalse(layer.renameAttribute(0, 'fldint')) # duplicate name
self.assertTrue(layer.renameAttribute(0, 'fldtxt2'))
checkFieldNames(['fldtxt2', 'fldint'])
layer.undoStack().undo()
checkFieldNames(['fldtxt', 'fldint'])
layer.undoStack().redo()
checkFieldNames(['fldtxt2', 'fldint'])
# change two fields
self.assertTrue(layer.renameAttribute(1, 'fldint2'))
checkFieldNames(['fldtxt2', 'fldint2'])
layer.undoStack().undo()
checkFieldNames(['fldtxt2', 'fldint'])
layer.undoStack().undo()
checkFieldNames(['fldtxt', 'fldint'])
layer.undoStack().redo()
checkFieldNames(['fldtxt2', 'fldint'])
layer.undoStack().redo()
checkFieldNames(['fldtxt2', 'fldint2'])
# two renames
self.assertTrue(layer.renameAttribute(0, 'fldtxt3'))
checkFieldNames(['fldtxt3', 'fldint2'])
self.assertTrue(layer.renameAttribute(0, 'fldtxt4'))
checkFieldNames(['fldtxt4', 'fldint2'])
layer.undoStack().undo()
checkFieldNames(['fldtxt3', 'fldint2'])
layer.undoStack().undo()
checkFieldNames(['fldtxt2', 'fldint2'])
layer.undoStack().redo()
checkFieldNames(['fldtxt3', 'fldint2'])
layer.undoStack().redo()
checkFieldNames(['fldtxt4', 'fldint2'])
def test_RenameAttributeAfterAdd(self):
layer = createLayerWithOnePoint()
def checkFieldNames(names):
flds = layer.fields()
f = next(layer.getFeatures())
self.assertEqual(flds.count(), len(names))
self.assertEqual(f.fields().count(), len(names))
for idx, expected_name in enumerate(names):
self.assertEqual(flds[idx].name(), expected_name)
self.assertEqual(f.fields().at(idx).name(), expected_name)
layer.startEditing()
checkFieldNames(['fldtxt', 'fldint'])
self.assertTrue(layer.renameAttribute(1, 'fldint2'))
checkFieldNames(['fldtxt', 'fldint2'])
# add an attribute
self.assertTrue(layer.addAttribute(QgsField("flddouble", QVariant.Double, "double")))
checkFieldNames(['fldtxt', 'fldint2', 'flddouble'])
# rename it
self.assertTrue(layer.renameAttribute(2, 'flddouble2'))
checkFieldNames(['fldtxt', 'fldint2', 'flddouble2'])
self.assertTrue(layer.addAttribute(QgsField("flddate", QVariant.Date, "date")))
checkFieldNames(['fldtxt', 'fldint2', 'flddouble2', 'flddate'])
self.assertTrue(layer.renameAttribute(2, 'flddouble3'))
checkFieldNames(['fldtxt', 'fldint2', 'flddouble3', 'flddate'])
self.assertTrue(layer.renameAttribute(3, 'flddate2'))
checkFieldNames(['fldtxt', 'fldint2', 'flddouble3', 'flddate2'])
layer.undoStack().undo()
checkFieldNames(['fldtxt', 'fldint2', 'flddouble3', 'flddate'])
layer.undoStack().undo()
checkFieldNames(['fldtxt', 'fldint2', 'flddouble2', 'flddate'])
layer.undoStack().undo()
checkFieldNames(['fldtxt', 'fldint2', 'flddouble2'])
layer.undoStack().undo()
checkFieldNames(['fldtxt', 'fldint2', 'flddouble'])
layer.undoStack().undo()
checkFieldNames(['fldtxt', 'fldint2'])
layer.undoStack().undo()
checkFieldNames(['fldtxt', 'fldint'])
layer.undoStack().redo()
checkFieldNames(['fldtxt', 'fldint2'])
layer.undoStack().redo()
checkFieldNames(['fldtxt', 'fldint2', 'flddouble'])
layer.undoStack().redo()
checkFieldNames(['fldtxt', 'fldint2', 'flddouble2'])
layer.undoStack().redo()
checkFieldNames(['fldtxt', 'fldint2', 'flddouble2', 'flddate'])
layer.undoStack().redo()
checkFieldNames(['fldtxt', 'fldint2', 'flddouble3', 'flddate'])
layer.undoStack().redo()
checkFieldNames(['fldtxt', 'fldint2', 'flddouble3', 'flddate2'])
def test_RenameAttributeAndDelete(self):
layer = createLayerWithOnePoint()
layer.dataProvider().addAttributes(
[QgsField("flddouble", QVariant.Double, "double")])
layer.updateFields()
def checkFieldNames(names):
flds = layer.fields()
f = next(layer.getFeatures())
self.assertEqual(flds.count(), len(names))
self.assertEqual(f.fields().count(), len(names))
for idx, expected_name in enumerate(names):
self.assertEqual(flds[idx].name(), expected_name)
self.assertEqual(f.fields().at(idx).name(), expected_name)
layer.startEditing()
checkFieldNames(['fldtxt', 'fldint', 'flddouble'])
self.assertTrue(layer.renameAttribute(0, 'fldtxt2'))
checkFieldNames(['fldtxt2', 'fldint', 'flddouble'])
self.assertTrue(layer.renameAttribute(2, 'flddouble2'))
checkFieldNames(['fldtxt2', 'fldint', 'flddouble2'])
# delete an attribute
self.assertTrue(layer.deleteAttribute(0))
checkFieldNames(['fldint', 'flddouble2'])
# rename remaining
self.assertTrue(layer.renameAttribute(0, 'fldint2'))
checkFieldNames(['fldint2', 'flddouble2'])
self.assertTrue(layer.renameAttribute(1, 'flddouble3'))
checkFieldNames(['fldint2', 'flddouble3'])
# delete an attribute
self.assertTrue(layer.deleteAttribute(0))
checkFieldNames(['flddouble3'])
self.assertTrue(layer.renameAttribute(0, 'flddouble4'))
checkFieldNames(['flddouble4'])
layer.undoStack().undo()
checkFieldNames(['flddouble3'])
layer.undoStack().undo()
checkFieldNames(['fldint2', 'flddouble3'])
layer.undoStack().undo()
checkFieldNames(['fldint2', 'flddouble2'])
layer.undoStack().undo()
checkFieldNames(['fldint', 'flddouble2'])
layer.undoStack().undo()
checkFieldNames(['fldtxt2', 'fldint', 'flddouble2'])
layer.undoStack().undo()
checkFieldNames(['fldtxt2', 'fldint', 'flddouble'])
layer.undoStack().undo()
checkFieldNames(['fldtxt', 'fldint', 'flddouble'])
def test_RenameExpressionField(self):
layer = createLayerWithOnePoint()
exp_field_idx = layer.addExpressionField('1+1', QgsField('math_is_hard', QVariant.Int))
# rename and check
self.assertTrue(layer.renameAttribute(exp_field_idx, 'renamed'))
self.assertEqual(layer.fields()[exp_field_idx].name(), 'renamed')
f = next(layer.getFeatures())
self.assertEqual(f.fields()[exp_field_idx].name(), 'renamed')
def test_fields(self):
layer = createLayerWithOnePoint()
flds = layer.fields()
self.assertEqual(flds.indexFromName("fldint"), 1)
self.assertEqual(flds.indexFromName("fldXXX"), -1)
def test_getFeatures(self):
layer = createLayerWithOnePoint()
f = QgsFeature()
fi = layer.getFeatures()
self.assertTrue(fi.nextFeature(f))
self.assertTrue(f.isValid())
self.assertEqual(f.id(), 1)
self.assertEqual(f.geometry().asPoint(), QgsPointXY(100, 200))
self.assertEqual(f["fldtxt"], "test")
self.assertEqual(f["fldint"], 123)
self.assertFalse(fi.nextFeature(f))
layer2 = createLayerWithFivePoints()
# getFeature(fid)
feat = layer2.getFeature(4)
self.assertTrue(feat.isValid())
self.assertEqual(feat['fldtxt'], 'test3')
self.assertEqual(feat['fldint'], -1)
feat = layer2.getFeature(10)
self.assertFalse(feat.isValid())
# getFeatures(expression)
it = layer2.getFeatures("fldint <= 0")
fids = [f.id() for f in it]
self.assertEqual(set(fids), set([4, 5]))
# getFeatures(fids)
it = layer2.getFeatures([1, 2])
fids = [f.id() for f in it]
self.assertEqual(set(fids), set([1, 2]))
# getFeatures(rect)
it = layer2.getFeatures(QgsRectangle(99, 99, 201, 201))
fids = [f.id() for f in it]
self.assertEqual(set(fids), set([1, 2]))
def test_join(self):
joinLayer = createJoinLayer()
joinLayer2 = createJoinLayer()
QgsProject.instance().addMapLayers([joinLayer, joinLayer2])
layer = createLayerWithOnePoint()
join = QgsVectorLayerJoinInfo()
join.setTargetFieldName("fldint")
join.setJoinLayer(joinLayer)
join.setJoinFieldName("y")
join.setUsingMemoryCache(True)
layer.addJoin(join)
join2 = QgsVectorLayerJoinInfo()
join2.setTargetFieldName("fldint")
join2.setJoinLayer(joinLayer2)
join2.setJoinFieldName("y")
join2.setUsingMemoryCache(True)
join2.setPrefix("custom-prefix_")
layer.addJoin(join2)
flds = layer.fields()
self.assertEqual(len(flds), 8)
self.assertEqual(flds[2].name(), "joinlayer_x")
self.assertEqual(flds[3].name(), "joinlayer_z")
self.assertEqual(flds[5].name(), "custom-prefix_x")
self.assertEqual(flds[6].name(), "custom-prefix_z")
self.assertEqual(flds.fieldOrigin(0), QgsFields.OriginProvider)
self.assertEqual(flds.fieldOrigin(2), QgsFields.OriginJoin)
self.assertEqual(flds.fieldOrigin(3), QgsFields.OriginJoin)
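# fieldOriginIndex() maps a layer field back to its index within its origin: 0 for the
# provider field, and the source field's position in the join layer for joined fields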
self.assertEqual(flds.fieldOriginIndex(0), 0)
self.assertEqual(flds.fieldOriginIndex(2), 0)
self.assertEqual(flds.fieldOriginIndex(3), 2)
f = QgsFeature()
fi = layer.getFeatures()
self.assertTrue(fi.nextFeature(f))
attrs = f.attributes()
self.assertEqual(len(attrs), 8)
self.assertEqual(attrs[0], "test")
self.assertEqual(attrs[1], 123)
self.assertEqual(attrs[2], "foo")
self.assertEqual(attrs[3], 321)
self.assertFalse(fi.nextFeature(f))
f2 = next(layer.getFeatures(QgsFeatureRequest(f.id())))
self.assertEqual(len(f2.attributes()), 8)
self.assertEqual(f2[2], "foo")
self.assertEqual(f2[3], 321)
def test_JoinStats(self):
""" test calculating min/max/uniqueValues on joined field """
joinLayer = createJoinLayer()
layer = createLayerWithTwoPoints()
QgsProject.instance().addMapLayers([joinLayer, layer])
join = QgsVectorLayerJoinInfo()
join.setTargetFieldName("fldint")
join.setJoinLayer(joinLayer)
join.setJoinFieldName("y")
join.setUsingMemoryCache(True)
layer.addJoin(join)
# stats on joined fields should only include values actually present via the join
# strings
self.assertEqual(layer.minimumValue(2), "foo")
self.assertEqual(layer.maximumValue(2), "qar")
self.assertEqual(layer.minimumAndMaximumValue(2), ("foo", "qar"))
# numbers
self.assertEqual(layer.minimumValue(3), 111)
self.assertEqual(layer.maximumValue(3), 321)
self.assertEqual(layer.minimumAndMaximumValue(3), (111, 321))
# dates (maximumValue also checks that null values are properly skipped)
self.assertEqual(layer.minimumValue(4), QDateTime(QDate(2010, 1, 1)))
self.assertEqual(layer.maximumValue(4), QDateTime(QDate(2010, 1, 1)))
self.assertEqual(layer.minimumAndMaximumValue(4), (QDateTime(QDate(2010, 1, 1)), QDateTime(QDate(2010, 1, 1))))
self.assertEqual(set(layer.uniqueValues(3)), set([111, 321]))
def test_valid_join_when_opening_project(self):
join_field = "id"
fid = 4
attr_idx = 4
join_attr_idx = 1
new_value = 33.0
# read project and get layers
myPath = os.path.join(unitTestDataPath(), 'joins.qgs')
self.assertTrue(QgsProject.instance().read(myPath))
layer = QgsProject.instance().mapLayersByName("polys_with_id")[0]
join_layer = QgsProject.instance().mapLayersByName("polys_overlapping_with_id")[0]
# create an attribute table for the main_layer and the
# joined layer
cache = QgsVectorLayerCache(layer, 100)
am = QgsAttributeTableModel(cache)
am.loadLayer()
join_cache = QgsVectorLayerCache(join_layer, 100)
join_am = QgsAttributeTableModel(join_cache)
join_am.loadLayer()
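# attr_idx (4) on the target layer is the joined copy of join_attr_idx (1) on the join layer, as the next assertion shows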
# check feature value of a joined field from the attribute model
model_index = am.idToIndex(fid)
feature_model = am.feature(model_index)
join_model_index = join_am.idToIndex(fid)
join_feature_model = join_am.feature(join_model_index)
self.assertEqual(feature_model.attribute(attr_idx), join_feature_model.attribute(join_attr_idx))
# change attribute value for a feature of the joined layer
join_layer.startEditing()
join_layer.changeAttributeValue(fid, join_attr_idx, new_value)
join_layer.commitChanges()
# check the feature previously modified
join_model_index = join_am.idToIndex(fid)
join_feature_model = join_am.feature(join_model_index)
self.assertEqual(join_feature_model.attribute(join_attr_idx), new_value)
# recreate a new cache and model to simulate the opening of
# a new attribute table
cache = QgsVectorLayerCache(layer, 100)
am = QgsAttributeTableModel(cache)
am.loadLayer()
# test that the model is up to date with the joined layer
model_index = am.idToIndex(fid)
feature_model = am.feature(model_index)
self.assertEqual(feature_model.attribute(attr_idx), new_value)
# restore value
join_layer.startEditing()
join_layer.changeAttributeValue(fid, join_attr_idx, 7.0)
join_layer.commitChanges()
def testUniqueValue(self):
""" test retrieving unique values """
layer = createLayerWithFivePoints()
# test layer with just provider features
self.assertEqual(set(layer.uniqueValues(1)), set([123, 457, 888, -1, 0]))
# add feature with new value
layer.startEditing()
f1 = QgsFeature()
f1.setAttributes(["test2", 999])
self.assertTrue(layer.addFeature(f1))
# should be included in unique values
self.assertEqual(set(layer.uniqueValues(1)), set([123, 457, 888, -1, 0, 999]))
# add it again, should be no change
f2 = QgsFeature()
f2.setAttributes(["test2", 999])
self.assertTrue(layer.addFeature(f2))
self.assertEqual(set(layer.uniqueValues(1)), set([123, 457, 888, -1, 0, 999]))
# add another feature
f3 = QgsFeature()
f3.setAttributes(["test2", 9999])
self.assertTrue(layer.addFeature(f3))
self.assertEqual(set(layer.uniqueValues(1)), set([123, 457, 888, -1, 0, 999, 9999]))
# change an attribute value to a new unique value
f1_id = next(layer.getFeatures()).id()
self.assertTrue(layer.changeAttributeValue(f1_id, 1, 481523))
# note - this isn't 100% accurate, since 123 no longer exists - but it avoids looping through all features
self.assertEqual(set(layer.uniqueValues(1)), set([123, 457, 888, -1, 0, 999, 9999, 481523]))
def testUniqueStringsMatching(self):
""" test retrieving unique strings matching subset """
layer = QgsVectorLayer("Point?field=fldtxt:string", "addfeat", "memory")
pr = layer.dataProvider()
f = QgsFeature()
f.setAttributes(["apple"])
f2 = QgsFeature()
f2.setAttributes(["orange"])
f3 = QgsFeature()
f3.setAttributes(["pear"])
f4 = QgsFeature()
f4.setAttributes(["BanaNa"])
f5 = QgsFeature()
f5.setAttributes(["ApriCot"])
assert pr.addFeatures([f, f2, f3, f4, f5])
assert layer.featureCount() == 5
# test layer with just provider features
self.assertEqual(set(layer.uniqueStringsMatching(0, 'N')), set(['orange', 'BanaNa']))
# add feature with new value
layer.startEditing()
f1 = QgsFeature()
f1.setAttributes(["waterMelon"])
self.assertTrue(layer.addFeature(f1))
# should be included in unique values
self.assertEqual(set(layer.uniqueStringsMatching(0, 'N')), set(['orange', 'BanaNa', 'waterMelon']))
# add it again, should be no change
f2 = QgsFeature()
f2.setAttributes(["waterMelon"])
self.assertTrue(layer.addFeature(f2))
self.assertEqual(set(layer.uniqueStringsMatching(0, 'N')), set(['orange', 'BanaNa', 'waterMelon']))
self.assertEqual(set(layer.uniqueStringsMatching(0, 'aN')), set(['orange', 'BanaNa']))
# add another feature
f3 = QgsFeature()
f3.setAttributes(["pineapple"])
self.assertTrue(layer.addFeature(f3))
self.assertEqual(set(layer.uniqueStringsMatching(0, 'n')), set(['orange', 'BanaNa', 'waterMelon', 'pineapple']))
# change an attribute value to a new unique value
f1_id = next(layer.getFeatures()).id()
self.assertTrue(layer.changeAttributeValue(f1_id, 0, 'coconut'))
# note - this isn't 100% accurate, since orange no longer exists - but it avoids looping through all features
self.assertEqual(set(layer.uniqueStringsMatching(0, 'n')),
set(['orange', 'BanaNa', 'waterMelon', 'pineapple', 'coconut']))
def test_subsetString(self):
subset_string_changed = False
def onSubsetStringChanged():
nonlocal subset_string_changed
subset_string_changed = True
path = os.path.join(unitTestDataPath(), 'lines.shp')
layer = QgsVectorLayer(path, 'test', 'ogr')
layer.subsetStringChanged.connect(onSubsetStringChanged)
layer.setSubsetString("\"Name\" = 'Highway'")
self.assertTrue(subset_string_changed)
self.assertEqual(layer.featureCount(), 2)
def testMinValue(self):
""" test retrieving minimum values """
layer = createLayerWithFivePoints()
# test layer with just provider features
self.assertEqual(layer.minimumValue(1), -1)
# add feature with new value
layer.startEditing()
f1 = QgsFeature()
f1.setAttributes(["test2", -999])
self.assertTrue(layer.addFeature(f1))
# should be new minimum value
self.assertEqual(layer.minimumValue(1), -999)
# add it again, should be no change
f2 = QgsFeature()
f2.setAttributes(["test2", -999])
self.assertTrue(layer.addFeature(f2))
self.assertEqual(layer.minimumValue(1), -999)
# add another feature
f3 = QgsFeature()
f3.setAttributes(["test2", -1000])
self.assertTrue(layer.addFeature(f3))
self.assertEqual(layer.minimumValue(1), -1000)
# change an attribute value to a new minimum value
f1_id = next(layer.getFeatures()).id()
self.assertTrue(layer.changeAttributeValue(f1_id, 1, -1001))
self.assertEqual(layer.minimumValue(1), -1001)
def testMaxValue(self):
""" test retrieving maximum values """
layer = createLayerWithFivePoints()
# test layer with just provider features
self.assertEqual(layer.maximumValue(1), 888)
# add feature with new value
layer.startEditing()
f1 = QgsFeature()
f1.setAttributes(["test2", 999])
self.assertTrue(layer.addFeature(f1))
# should be new maximum value
self.assertEqual(layer.maximumValue(1), 999)
# add it again, should be no change
f2 = QgsFeature()
f2.setAttributes(["test2", 999])
self.assertTrue(layer.addFeature(f2))
self.assertEqual(layer.maximumValue(1), 999)
# add another feature
f3 = QgsFeature()
f3.setAttributes(["test2", 1000])
self.assertTrue(layer.addFeature(f3))
self.assertEqual(layer.maximumValue(1), 1000)
# change an attribute value to a new maximum value
f1_id = next(layer.getFeatures()).id()
self.assertTrue(layer.changeAttributeValue(f1_id, 1, 1001))
self.assertEqual(layer.maximumValue(1), 1001)
def testMinAndMaxValue(self):
""" test retrieving minimum and maximum values at once"""
layer = createLayerWithFivePoints()
# test layer with just provider features
self.assertEqual(layer.minimumAndMaximumValue(1), (-1, 888))
# add feature with new value
layer.startEditing()
f1 = QgsFeature()
f1.setAttributes(["test2", 999])
self.assertTrue(layer.addFeature(f1))
# should be new maximum value
self.assertEqual(layer.minimumAndMaximumValue(1), (-1, 999))
# add it again, should be no change
f2 = QgsFeature()
f2.setAttributes(["test2", 999])
self.assertTrue(layer.addFeature(f2))
self.assertEqual(layer.minimumAndMaximumValue(1), (-1, 999))
# add another feature
f3 = QgsFeature()
f3.setAttributes(["test2", 1000])
self.assertTrue(layer.addFeature(f3))
self.assertEqual(layer.minimumAndMaximumValue(1), (-1, 1000))
# add feature with new minimum value
layer.startEditing()
f1 = QgsFeature()
f1.setAttributes(["test2", -999])
self.assertTrue(layer.addFeature(f1))
# should be new minimum value
self.assertEqual(layer.minimumAndMaximumValue(1), (-999, 1000))
# add it again, should be no change
f2 = QgsFeature()
f2.setAttributes(["test2", -999])
self.assertTrue(layer.addFeature(f2))
self.assertEqual(layer.minimumAndMaximumValue(1), (-999, 1000))
# add another feature
f3 = QgsFeature()
f3.setAttributes(["test2", -1000])
self.assertTrue(layer.addFeature(f3))
self.assertEqual(layer.minimumAndMaximumValue(1), (-1000, 1000))
# change an attribute value to a new maximum value
it = layer.getFeatures()
f1_id = next(it).id()
self.assertTrue(layer.changeAttributeValue(f1_id, 1, 1001))
self.assertEqual(layer.minimumAndMaximumValue(1), (-1000, 1001))
f1_id = next(it).id()
self.assertTrue(layer.changeAttributeValue(f1_id, 1, -1001))
self.assertEqual(layer.minimumAndMaximumValue(1), (-1001, 1001))
def testMinMaxInVirtualField(self):
"""
Test minimum and maximum values in a virtual field
"""
layer = QgsVectorLayer("Point?field=fldstr:string", "layer", "memory")
pr = layer.dataProvider()
date_strings = ['2010-01-01', None, '2020-01-01']
features = []
for s in date_strings:
f = QgsFeature()
f.setFields(layer.fields())
f.setAttributes([s])
features.append(f)
assert pr.addFeatures(features)
field = QgsField('virtual', QVariant.Date)
layer.addExpressionField('to_date("fldstr")', field)
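# the expression field is appended after the provider field, so it gets index 1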
self.assertEqual(len(layer.getFeature(1).attributes()), 2)
self.assertEqual(layer.minimumValue(1), QDate(2010, 1, 1))
self.assertEqual(layer.maximumValue(1), QDate(2020, 1, 1))
self.assertEqual(layer.minimumAndMaximumValue(1), (QDate(2010, 1, 1), QDate(2020, 1, 1)))
def test_InvalidOperations(self):
layer = createLayerWithOnePoint()
layer.startEditing()
# ADD FEATURE
newF1 = QgsFeature()
self.assertFalse(layer.addFeature(newF1))  # needs attributes matching the layer's fields
# DELETE FEATURE
self.assertFalse(layer.deleteFeature(-333))
# we do not check for the existence of the feature id if it's
# not a newly added feature
# self.assertFalse(layer.deleteFeature(333))
# CHANGE GEOMETRY
self.assertFalse(layer.changeGeometry(
-333, QgsGeometry.fromPointXY(QgsPointXY(1, 1))))
# CHANGE VALUE
self.assertFalse(layer.changeAttributeValue(-333, 0, 1))
self.assertFalse(layer.changeAttributeValue(1, -1, 1))
# ADD ATTRIBUTE
self.assertFalse(layer.addAttribute(QgsField()))
# DELETE ATTRIBUTE
self.assertFalse(layer.deleteAttribute(-1))
def onBlendModeChanged(self, mode):
self.blendModeTest = mode
def test_setBlendMode(self):
layer = createLayerWithOnePoint()
self.blendModeTest = 0
layer.blendModeChanged.connect(self.onBlendModeChanged)
layer.setBlendMode(QPainter.CompositionMode_Screen)
self.assertEqual(self.blendModeTest, QPainter.CompositionMode_Screen)
self.assertEqual(layer.blendMode(), QPainter.CompositionMode_Screen)
def test_setFeatureBlendMode(self):
layer = createLayerWithOnePoint()
self.blendModeTest = 0
layer.featureBlendModeChanged.connect(self.onBlendModeChanged)
layer.setFeatureBlendMode(QPainter.CompositionMode_Screen)
self.assertEqual(self.blendModeTest, QPainter.CompositionMode_Screen)
self.assertEqual(layer.featureBlendMode(), QPainter.CompositionMode_Screen)
def test_ExpressionField(self):
layer = createLayerWithOnePoint()
cnt = layer.fields().count()
idx = layer.addExpressionField('5', QgsField('test', QVariant.LongLong))
fet = next(layer.getFeatures())
self.assertEqual(fet[idx], 5)
# check fields
self.assertEqual(layer.fields().count(), cnt + 1)
self.assertEqual(fet.fields(), layer.fields())
# retrieve single feature and check fields
fet = next(layer.getFeatures(QgsFeatureRequest().setFilterFid(1)))
self.assertEqual(fet.fields(), layer.fields())
layer.updateExpressionField(idx, '9')
self.assertEqual(next(layer.getFeatures())[idx], 9)
layer.removeExpressionField(idx)
self.assertEqual(layer.fields().count(), cnt)
# expression field which references itself
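# (such an expression cannot be evaluated meaningfully, so it is expected to yield 0)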
idx = layer.addExpressionField('sum(test2)', QgsField('test2', QVariant.LongLong))
fet = next(layer.getFeatures())
self.assertEqual(fet['test2'], 0)
def test_ExpressionFieldEllipsoidLengthCalculation(self):
# create a temporary layer
temp_layer = QgsVectorLayer("LineString?crs=epsg:3111&field=pk:int", "vl", "memory")
self.assertTrue(temp_layer.isValid())
f1 = QgsFeature(temp_layer.dataProvider().fields(), 1)
f1.setAttribute("pk", 1)
f1.setGeometry(QgsGeometry.fromPolylineXY([QgsPointXY(2484588, 2425722), QgsPointXY(2482767, 2398853)]))
temp_layer.dataProvider().addFeatures([f1])
# set project CRS and ellipsoid
srs = QgsCoordinateReferenceSystem.fromEpsgId(3111)
QgsProject.instance().setCrs(srs)
QgsProject.instance().setEllipsoid("WGS84")
QgsProject.instance().setDistanceUnits(QgsUnitTypes.DistanceMeters)
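# $length is computed on the project ellipsoid in the project's distance units,
# so changing the unit below changes the reported value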
idx = temp_layer.addExpressionField('$length', QgsField('length', QVariant.Double)) # NOQA
# check value
f = next(temp_layer.getFeatures())
expected = 26932.156
self.assertAlmostEqual(f['length'], expected, 3)
# change project length unit, check calculation respects unit
QgsProject.instance().setDistanceUnits(QgsUnitTypes.DistanceFeet)
f = next(temp_layer.getFeatures())
expected = 88360.0918635
self.assertAlmostEqual(f['length'], expected, 3)
def test_ExpressionFieldEllipsoidAreaCalculation(self):
# create a temporary layer
temp_layer = QgsVectorLayer("Polygon?crs=epsg:3111&field=pk:int", "vl", "memory")
self.assertTrue(temp_layer.isValid())
f1 = QgsFeature(temp_layer.dataProvider().fields(), 1)
f1.setAttribute("pk", 1)
f1.setGeometry(QgsGeometry.fromPolygonXY([[QgsPointXY(2484588, 2425722), QgsPointXY(2482767, 2398853),
QgsPointXY(2520109, 2397715), QgsPointXY(2520792, 2425494),
QgsPointXY(2484588, 2425722)]]))
temp_layer.dataProvider().addFeatures([f1])
# set project CRS and ellipsoid
srs = QgsCoordinateReferenceSystem.fromEpsgId(3111)
QgsProject.instance().setCrs(srs)
QgsProject.instance().setEllipsoid("WGS84")
QgsProject.instance().setAreaUnits(QgsUnitTypes.AreaSquareMeters)
idx = temp_layer.addExpressionField('$area', QgsField('area', QVariant.Double)) # NOQA
# check value
f = next(temp_layer.getFeatures())
expected = 1005755617.8191342
self.assertAlmostEqual(f['area'], expected, delta=1.0)
# change project area unit, check calculation respects unit
QgsProject.instance().setAreaUnits(QgsUnitTypes.AreaSquareMiles)
f = next(temp_layer.getFeatures())
expected = 388.3244150061589
self.assertAlmostEqual(f['area'], expected, 3)
def test_ExpressionFilter(self):
layer = createLayerWithOnePoint()
idx = layer.addExpressionField('5', QgsField('test', QVariant.LongLong)) # NOQA
features = layer.getFeatures(QgsFeatureRequest().setFilterExpression('"test" = 6'))
self.assertEqual(len(list(features)), 0)
features = layer.getFeatures(QgsFeatureRequest().setFilterExpression('"test" = 5'))
self.assertEqual(len(list(features)), 1)
def testSelectByIds(self):
""" Test selecting by ID"""
layer = QgsVectorLayer(os.path.join(unitTestDataPath(), 'points.shp'), 'Points', 'ogr')
# SetSelection
layer.selectByIds([1, 3, 5, 7], QgsVectorLayer.SetSelection)
self.assertEqual(set(layer.selectedFeatureIds()), set([1, 3, 5, 7]))
# check that existing selection is cleared
layer.selectByIds([2, 4, 6], QgsVectorLayer.SetSelection)
self.assertEqual(set(layer.selectedFeatureIds()), set([2, 4, 6]))
# AddToSelection
layer.selectByIds([3, 5], QgsVectorLayer.AddToSelection)
self.assertEqual(set(layer.selectedFeatureIds()), set([2, 3, 4, 5, 6]))
layer.selectByIds([1], QgsVectorLayer.AddToSelection)
self.assertEqual(set(layer.selectedFeatureIds()), set([1, 2, 3, 4, 5, 6]))
# IntersectSelection
layer.selectByIds([1, 3, 5, 6], QgsVectorLayer.IntersectSelection)
self.assertEqual(set(layer.selectedFeatureIds()), set([1, 3, 5, 6]))
layer.selectByIds([1, 2, 5, 6], QgsVectorLayer.IntersectSelection)
self.assertEqual(set(layer.selectedFeatureIds()), set([1, 5, 6]))
# RemoveFromSelection
layer.selectByIds([2, 6, 7], QgsVectorLayer.RemoveFromSelection)
self.assertEqual(set(layer.selectedFeatureIds()), set([1, 5]))
layer.selectByIds([1, 5], QgsVectorLayer.RemoveFromSelection)
self.assertEqual(set(layer.selectedFeatureIds()), set([]))
def testSelectByExpression(self):
""" Test selecting by expression """
layer = QgsVectorLayer(os.path.join(unitTestDataPath(), 'points.shp'), 'Points', 'ogr')
# SetSelection
layer.selectByExpression('"Class"=\'B52\' and "Heading" > 10 and "Heading" <70', QgsVectorLayer.SetSelection)
self.assertEqual(set(layer.selectedFeatureIds()), set([10, 11]))
# check that existing selection is cleared
layer.selectByExpression('"Class"=\'Biplane\'', QgsVectorLayer.SetSelection)
self.assertEqual(set(layer.selectedFeatureIds()), set([1, 5, 6, 7, 8]))
# SetSelection no matching
layer.selectByExpression('"Class"=\'A380\'', QgsVectorLayer.SetSelection)
self.assertEqual(set(layer.selectedFeatureIds()), set([]))
# AddToSelection
layer.selectByExpression('"Importance"=3', QgsVectorLayer.AddToSelection)
self.assertEqual(set(layer.selectedFeatureIds()), set([0, 2, 3, 4, 14]))
layer.selectByExpression('"Importance"=4', QgsVectorLayer.AddToSelection)
self.assertEqual(set(layer.selectedFeatureIds()), set([0, 2, 3, 4, 13, 14]))
# IntersectSelection
layer.selectByExpression('"Heading"<100', QgsVectorLayer.IntersectSelection)
self.assertEqual(set(layer.selectedFeatureIds()), set([0, 2, 3, 4]))
layer.selectByExpression('"Cabin Crew"=1', QgsVectorLayer.IntersectSelection)
self.assertEqual(set(layer.selectedFeatureIds()), set([2, 3]))
# RemoveFromSelection
layer.selectByExpression('"Heading"=85', QgsVectorLayer.RemoveFromSelection)
self.assertEqual(set(layer.selectedFeatureIds()), set([3]))
layer.selectByExpression('"Heading"=95', QgsVectorLayer.RemoveFromSelection)
self.assertEqual(set(layer.selectedFeatureIds()), set([]))
def testSelectByRect(self):
""" Test selecting by rectangle """
layer = QgsVectorLayer(os.path.join(unitTestDataPath(), 'points.shp'), 'Points', 'ogr')
# SetSelection
layer.selectByRect(QgsRectangle(-112, 30, -94, 45), QgsVectorLayer.SetSelection)
self.assertEqual(set(layer.selectedFeatureIds()), set([2, 3, 7, 10, 11, 15]))
# check that existing selection is cleared
layer.selectByRect(QgsRectangle(-112, 30, -94, 37), QgsVectorLayer.SetSelection)
self.assertEqual(set(layer.selectedFeatureIds()), set([2, 3, 10, 15]))
# SetSelection no matching
layer.selectByRect(QgsRectangle(112, 30, 115, 45), QgsVectorLayer.SetSelection)
self.assertEqual(set(layer.selectedFeatureIds()), set([]))
# AddToSelection
layer.selectByRect(QgsRectangle(-112, 30, -94, 37), QgsVectorLayer.AddToSelection)
self.assertEqual(set(layer.selectedFeatureIds()), set([2, 3, 10, 15]))
layer.selectByRect(QgsRectangle(-112, 37, -94, 45), QgsVectorLayer.AddToSelection)
self.assertEqual(set(layer.selectedFeatureIds()), set([2, 3, 7, 10, 11, 15]))
# IntersectSelection
layer.selectByRect(QgsRectangle(-112, 30, -94, 37), QgsVectorLayer.IntersectSelection)
self.assertEqual(set(layer.selectedFeatureIds()), set([2, 3, 10, 15]))
layer.selectByIds([2, 10, 13])
layer.selectByRect(QgsRectangle(-112, 30, -94, 37), QgsVectorLayer.IntersectSelection)
self.assertEqual(set(layer.selectedFeatureIds()), set([2, 10]))
# RemoveFromSelection
layer.selectByRect(QgsRectangle(-112, 30, -94, 45), QgsVectorLayer.SetSelection)
layer.selectByRect(QgsRectangle(-112, 30, -94, 37), QgsVectorLayer.RemoveFromSelection)
self.assertEqual(set(layer.selectedFeatureIds()), set([7, 11]))
layer.selectByRect(QgsRectangle(-112, 30, -94, 45), QgsVectorLayer.RemoveFromSelection)
self.assertEqual(set(layer.selectedFeatureIds()), set([]))
def testReselect(self):
layer = QgsVectorLayer(os.path.join(unitTestDataPath(), 'points.shp'), 'Points', 'ogr')
layer.selectByIds([1, 3, 5, 7], QgsVectorLayer.SetSelection)
self.assertCountEqual(layer.selectedFeatureIds(), [1, 3, 5, 7])
layer.reselect() # no effect, selection has not been cleared
self.assertCountEqual(layer.selectedFeatureIds(), [1, 3, 5, 7])
# clear selection
layer.removeSelection()
self.assertCountEqual(layer.selectedFeatureIds(), [])
# reselect should bring this back
layer.reselect()
self.assertCountEqual(layer.selectedFeatureIds(), [1, 3, 5, 7])
layer.reselect() # no change
self.assertCountEqual(layer.selectedFeatureIds(), [1, 3, 5, 7])
# change an existing selection
layer.selectByIds([1, 3, 5], QgsVectorLayer.SetSelection)
self.assertCountEqual(layer.selectedFeatureIds(), [1, 3, 5])
layer.reselect() # no change
self.assertCountEqual(layer.selectedFeatureIds(), [1, 3, 5])
layer.removeSelection()
self.assertCountEqual(layer.selectedFeatureIds(), [])
# reselect should bring this back
layer.reselect()
self.assertCountEqual(layer.selectedFeatureIds(), [1, 3, 5])
layer.select(7)
self.assertCountEqual(layer.selectedFeatureIds(), [1, 3, 5, 7])
layer.reselect()
self.assertCountEqual(layer.selectedFeatureIds(), [1, 3, 5, 7])
layer.removeSelection()
layer.select([3, 5])
self.assertCountEqual(layer.selectedFeatureIds(), [3, 5])
layer.reselect()
self.assertCountEqual(layer.selectedFeatureIds(), [3, 5])
layer.deselect([5])
self.assertCountEqual(layer.selectedFeatureIds(), [3])
layer.reselect()
self.assertCountEqual(layer.selectedFeatureIds(), [3])
layer.modifySelection([5], [3])
self.assertCountEqual(layer.selectedFeatureIds(), [5])
layer.reselect()
self.assertCountEqual(layer.selectedFeatureIds(), [5])
def testAggregate(self):
""" Test aggregate calculation """
layer = QgsVectorLayer("Point?field=fldint:integer", "layer", "memory")
pr = layer.dataProvider()
int_values = [4, 2, 3, 2, 5, None, 8]
features = []
for i in int_values:
f = QgsFeature()
f.setFields(layer.fields())
f.setAttributes([i])
features.append(f)
assert pr.addFeatures(features)
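# None is skipped by most aggregates (Count is 6, not 7) but is picked up by CountMissing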
tests = [[QgsAggregateCalculator.Count, 6],
[QgsAggregateCalculator.Sum, 24],
[QgsAggregateCalculator.Mean, 4],
[QgsAggregateCalculator.StDev, 2.0816],
[QgsAggregateCalculator.StDevSample, 2.2803],
[QgsAggregateCalculator.Min, 2],
[QgsAggregateCalculator.Max, 8],
[QgsAggregateCalculator.Range, 6],
[QgsAggregateCalculator.Median, 3.5],
[QgsAggregateCalculator.CountDistinct, 5],
[QgsAggregateCalculator.CountMissing, 1],
[QgsAggregateCalculator.FirstQuartile, 2],
[QgsAggregateCalculator.ThirdQuartile, 5.0],
[QgsAggregateCalculator.InterQuartileRange, 3.0]
]
for t in tests:
val, ok = layer.aggregate(t[0], 'fldint')
self.assertTrue(ok)
if isinstance(t[1], int):
self.assertEqual(val, t[1])
else:
self.assertAlmostEqual(val, t[1], 3)
# test with parameters
layer = QgsVectorLayer("Point?field=fldstring:string", "layer", "memory")
pr = layer.dataProvider()
string_values = ['this', 'is', 'a', 'test', 'a', 'nice', 'test']
features = []
for s in string_values:
f = QgsFeature()
f.setFields(layer.fields())
f.setAttributes([s])
features.append(f)
assert pr.addFeatures(features)
params = QgsAggregateCalculator.AggregateParameters()
params.delimiter = ' '
val, ok = layer.aggregate(QgsAggregateCalculator.StringConcatenate, 'fldstring', params)
self.assertTrue(ok)
self.assertEqual(val, 'this is a test a nice test')
val, ok = layer.aggregate(QgsAggregateCalculator.StringConcatenateUnique, 'fldstring', params)
self.assertTrue(ok)
self.assertEqual(val, 'this is a test nice')
def testAggregateInVirtualField(self):
"""
Test aggregates in a virtual field
"""
layer = QgsVectorLayer("Point?field=fldint:integer", "layer", "memory")
pr = layer.dataProvider()
int_values = [4, 2, 3, 2, 5, None, 8]
features = []
for i in int_values:
f = QgsFeature()
f.setFields(layer.fields())
f.setAttributes([i])
features.append(f)
assert pr.addFeatures(features)
field = QgsField('virtual', QVariant.Double)
layer.addExpressionField('sum(fldint*2)', field)
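# sum() aggregates over the whole layer, so every feature gets the same value: (4 + 2 + 3 + 2 + 5 + 8) * 2 = 48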
vals = [f['virtual'] for f in layer.getFeatures()]
self.assertEqual(vals, [48, 48, 48, 48, 48, 48, 48])
def testAggregateFilter(self):
""" Test aggregate calculation """
layer = QgsVectorLayer("Point?field=fldint:integer", "layer", "memory")
pr = layer.dataProvider()
int_values = [4, 2, 3, 2, 5, None, 8]
features = []
for i in int_values:
f = QgsFeature()
f.setFields(layer.fields())
f.setAttributes([i])
features.append(f)
assert pr.addFeatures(features)
val, ok = layer.aggregate(QgsAggregateCalculator.Sum, 'fldint', fids=[1, 2])
self.assertTrue(ok)
self.assertEqual(val, 6.0)
def onLayerOpacityChanged(self, tr):
self.opacityTest = tr
def test_setLayerOpacity(self):
layer = createLayerWithOnePoint()
self.opacityTest = 0
layer.opacityChanged.connect(self.onLayerOpacityChanged)
layer.setOpacity(0.5)
self.assertEqual(self.opacityTest, 0.5)
self.assertEqual(layer.opacity(), 0.5)
def onRendererChanged(self):
self.rendererChanged = True
def test_setRenderer(self):
layer = createLayerWithOnePoint()
self.rendererChanged = False
layer.rendererChanged.connect(self.onRendererChanged)
r = QgsSingleSymbolRenderer(QgsSymbol.defaultSymbol(QgsWkbTypes.PointGeometry))
layer.setRenderer(r)
self.assertTrue(self.rendererChanged)
self.assertEqual(layer.renderer(), r)
def testGetSetAliases(self):
""" test getting and setting aliases """
layer = createLayerWithOnePoint()
self.assertEqual(len(layer.attributeAliases()), 2)
self.assertFalse(layer.attributeAlias(0))
self.assertFalse(layer.attributeAlias(1))
self.assertFalse(layer.attributeAlias(2))
layer.setFieldAlias(0, "test")
self.assertEqual(layer.attributeAlias(0), "test")
self.assertFalse(layer.attributeAlias(1))
self.assertFalse(layer.attributeAlias(2))
self.assertEqual(layer.fields().at(0).alias(), "test")
layer.setFieldAlias(1, "test2")
self.assertEqual(layer.attributeAlias(0), "test")
self.assertEqual(layer.attributeAlias(1), "test2")
self.assertFalse(layer.attributeAlias(2))
self.assertEqual(layer.fields().at(0).alias(), "test")
self.assertEqual(layer.fields().at(1).alias(), "test2")
layer.setFieldAlias(1, None)
self.assertEqual(layer.attributeAlias(0), "test")
self.assertFalse(layer.attributeAlias(1))
self.assertFalse(layer.attributeAlias(2))
self.assertEqual(layer.fields().at(0).alias(), "test")
self.assertFalse(layer.fields().at(1).alias())
layer.removeFieldAlias(0)
self.assertFalse(layer.attributeAlias(0))
self.assertFalse(layer.attributeAlias(1))
self.assertFalse(layer.attributeAlias(2))
self.assertFalse(layer.fields().at(0).alias())
self.assertFalse(layer.fields().at(1).alias())
def testSaveRestoreAliases(self):
""" test saving and restoring aliases from xml"""
layer = createLayerWithOnePoint()
# no default expressions
doc = QDomDocument("testdoc")
elem = doc.createElement("maplayer")
self.assertTrue(layer.writeXml(elem, doc, QgsReadWriteContext()))
layer2 = createLayerWithOnePoint()
self.assertTrue(layer2.readXml(elem, QgsReadWriteContext()))
self.assertFalse(layer2.attributeAlias(0))
self.assertFalse(layer2.attributeAlias(1))
# set some aliases
layer.setFieldAlias(0, "test")
layer.setFieldAlias(1, "test2")
doc = QDomDocument("testdoc")
elem = doc.createElement("maplayer")
self.assertTrue(layer.writeXml(elem, doc, QgsReadWriteContext()))
layer3 = createLayerWithOnePoint()
self.assertTrue(layer3.readXml(elem, QgsReadWriteContext()))
self.assertEqual(layer3.attributeAlias(0), "test")
self.assertEqual(layer3.attributeAlias(1), "test2")
self.assertEqual(layer3.fields().at(0).alias(), "test")
self.assertEqual(layer3.fields().at(1).alias(), "test2")
def testGetSetDefaults(self):
""" test getting and setting default expressions """
layer = createLayerWithOnePoint()
self.assertFalse(layer.defaultValueDefinition(0))
self.assertFalse(layer.defaultValueDefinition(0).expression())
self.assertFalse(layer.defaultValueDefinition(0).applyOnUpdate())
self.assertFalse(layer.defaultValueDefinition(1))
self.assertFalse(layer.defaultValueDefinition(2))
layer.setDefaultValueDefinition(0, QgsDefaultValue("'test'"))
self.assertTrue(layer.defaultValueDefinition(0))
self.assertEqual(layer.defaultValueDefinition(0).expression(), "'test'")
self.assertFalse(layer.defaultValueDefinition(0).applyOnUpdate())
self.assertFalse(layer.defaultValueDefinition(1))
self.assertFalse(layer.defaultValueDefinition(1).applyOnUpdate())
self.assertFalse(layer.defaultValueDefinition(2))
self.assertFalse(layer.defaultValueDefinition(2).applyOnUpdate())
self.assertEqual(layer.fields().at(0).defaultValueDefinition().expression(), "'test'")
layer.setDefaultValueDefinition(1, QgsDefaultValue("2+2"))
self.assertEqual(layer.defaultValueDefinition(0).expression(), "'test'")
self.assertFalse(layer.defaultValueDefinition(0).applyOnUpdate())
self.assertEqual(layer.defaultValueDefinition(1).expression(), "2+2")
self.assertFalse(layer.defaultValueDefinition(1).applyOnUpdate())
self.assertFalse(layer.defaultValueDefinition(2))
self.assertFalse(layer.defaultValueDefinition(2).applyOnUpdate())
self.assertEqual(layer.fields().at(0).defaultValueDefinition().expression(), "'test'")
self.assertEqual(layer.fields().at(1).defaultValueDefinition().expression(), "2+2")
layer.setDefaultValueDefinition(1, QgsDefaultValue("2+2", True))
self.assertEqual(layer.defaultValueDefinition(0).expression(), "'test'")
self.assertFalse(layer.defaultValueDefinition(0).applyOnUpdate())
self.assertEqual(layer.defaultValueDefinition(1).expression(), "2+2")
self.assertTrue(layer.defaultValueDefinition(1).applyOnUpdate())
self.assertEqual(layer.fields().at(0).defaultValueDefinition().expression(), "'test'")
self.assertEqual(layer.fields().at(1).defaultValueDefinition().expression(), "2+2")
def testSaveRestoreDefaults(self):
""" test saving and restoring default expressions from xml"""
layer = createLayerWithOnePoint()
# no default expressions
doc = QDomDocument("testdoc")
elem = doc.createElement("maplayer")
self.assertTrue(layer.writeXml(elem, doc, QgsReadWriteContext()))
layer2 = createLayerWithOnePoint()
self.assertTrue(layer2.readXml(elem, QgsReadWriteContext()))
self.assertFalse(layer2.defaultValueDefinition(0))
self.assertFalse(layer2.defaultValueDefinition(1))
# set some default expressions
layer.setDefaultValueDefinition(0, QgsDefaultValue("'test'"))
layer.setDefaultValueDefinition(1, QgsDefaultValue("2+2"))
doc = QDomDocument("testdoc")
elem = doc.createElement("maplayer")
self.assertTrue(layer.writeXml(elem, doc, QgsReadWriteContext()))
layer3 = createLayerWithOnePoint()
self.assertTrue(layer3.readXml(elem, QgsReadWriteContext()))
self.assertEqual(layer3.defaultValueDefinition(0).expression(), "'test'")
self.assertEqual(layer3.defaultValueDefinition(1).expression(), "2+2")
self.assertEqual(layer3.fields().at(0).defaultValueDefinition().expression(), "'test'")
self.assertEqual(layer3.fields().at(1).defaultValueDefinition().expression(), "2+2")
def testEvaluatingDefaultExpressions(self):
""" tests calculation of default values"""
layer = createLayerWithOnePoint()
layer.setDefaultValueDefinition(0, QgsDefaultValue("'test'"))
layer.setDefaultValueDefinition(1, QgsDefaultValue("2+2"))
self.assertEqual(layer.defaultValue(0), 'test')
self.assertEqual(layer.defaultValue(1), 4)
# using feature
layer.setDefaultValueDefinition(1, QgsDefaultValue('$id * 2'))
feature = QgsFeature(4)
feature.setValid(True)
feature.setFields(layer.fields())
# no feature:
self.assertFalse(layer.defaultValue(1))
# with feature:
self.assertEqual(layer.defaultValue(0, feature), 'test')
self.assertEqual(layer.defaultValue(1, feature), 8)
# using feature geometry
layer.setDefaultValueDefinition(1, QgsDefaultValue('$x * 2'))
feature.setGeometry(QgsGeometry(QgsPoint(6, 7)))
self.assertEqual(layer.defaultValue(1, feature), 12)
# using contexts
scope = QgsExpressionContextScope()
scope.setVariable('var1', 16)
context = QgsExpressionContext()
context.appendScope(scope)
layer.setDefaultValueDefinition(1, QgsDefaultValue('$id + @var1'))
self.assertEqual(layer.defaultValue(1, feature, context), 20)
# if no scope passed, should use a default constructed one including layer variables
QgsExpressionContextUtils.setLayerVariable(layer, 'var2', 4)
QgsExpressionContextUtils.setProjectVariable(QgsProject.instance(), 'var3', 8)
layer.setDefaultValueDefinition(1, QgsDefaultValue('to_int(@var2) + to_int(@var3) + $id'))
self.assertEqual(layer.defaultValue(1, feature), 16)
# bad expression
layer.setDefaultValueDefinition(1, QgsDefaultValue('not a valid expression'))
self.assertFalse(layer.defaultValue(1))
def testApplyOnUpdateDefaultExpressions(self):
"""tests apply on update of default values"""
layer = createLayerWithOnePoint()
layer.setDefaultValueDefinition(0, QgsDefaultValue("CONCAT('l: ', @number, ',f: ', \"fldint\" )", True))
layer.setDefaultValueDefinition(1, QgsDefaultValue("1 * @number", False))
QgsExpressionContextUtils.setLayerVariable(layer, 'number', 4)
layer.startEditing()
feature = QgsFeature()
feature.setFields(layer.fields())
feature.setValid(True)
# Both default values should be set on feature create
feature.setAttribute(1, layer.defaultValue(1, feature))
feature.setAttribute(0, layer.defaultValue(0, feature))
self.assertTrue(layer.addFeature(feature))
fid = feature.id()
self.assertEqual(layer.getFeature(fid)['fldtxt'], 'l: 4,f: 4')
self.assertEqual(layer.getFeature(fid)['fldint'], 4)
# ApplyOnUpdateDefaultValue should be set on changeAttributeValue
layer.changeAttributeValue(fid, 1, 20)
self.assertEqual(layer.getFeature(fid)['fldtxt'], 'l: 4,f: 20')
self.assertEqual(layer.getFeature(fid)['fldint'], 20)
# When changing the value of the "derived" attribute, only this one
# should be updated
QgsExpressionContextUtils.setLayerVariable(layer, 'number', 8)
layer.changeAttributeValue(fid, 0, 0)
self.assertEqual(layer.getFeature(fid)['fldtxt'], 'l: 8,f: 20')
self.assertEqual(layer.getFeature(fid)['fldint'], 20)
# Check update on geometry change
layer.setDefaultValueDefinition(1, QgsDefaultValue("x($geometry)", True))
layer.changeGeometry(fid, QgsGeometry.fromPointXY(QgsPointXY(300, 200)))
self.assertEqual(layer.getFeature(fid)['fldint'], 300)
def testGetSetConstraints(self):
""" test getting and setting field constraints """
layer = createLayerWithOnePoint()
self.assertFalse(layer.fieldConstraints(0))
self.assertFalse(layer.fieldConstraints(1))
self.assertFalse(layer.fieldConstraints(2))
layer.setFieldConstraint(0, QgsFieldConstraints.ConstraintNotNull)
self.assertEqual(layer.fieldConstraints(0), QgsFieldConstraints.ConstraintNotNull)
self.assertFalse(layer.fieldConstraints(1))
self.assertFalse(layer.fieldConstraints(2))
self.assertEqual(layer.fields().at(0).constraints().constraints(), QgsFieldConstraints.ConstraintNotNull)
self.assertEqual(layer.fields().at(0).constraints().constraintOrigin(QgsFieldConstraints.ConstraintNotNull),
QgsFieldConstraints.ConstraintOriginLayer)
self.assertEqual(layer.fields().at(0).constraints().constraintStrength(QgsFieldConstraints.ConstraintNotNull),
QgsFieldConstraints.ConstraintStrengthHard)
layer.setFieldConstraint(1, QgsFieldConstraints.ConstraintNotNull)
layer.setFieldConstraint(1, QgsFieldConstraints.ConstraintUnique)
self.assertEqual(layer.fieldConstraints(0), QgsFieldConstraints.ConstraintNotNull)
self.assertEqual(layer.fieldConstraints(1),
QgsFieldConstraints.ConstraintNotNull | QgsFieldConstraints.ConstraintUnique)
self.assertFalse(layer.fieldConstraints(2))
self.assertEqual(layer.fields().at(0).constraints().constraints(), QgsFieldConstraints.ConstraintNotNull)
self.assertEqual(layer.fields().at(0).constraints().constraintOrigin(QgsFieldConstraints.ConstraintNotNull),
QgsFieldConstraints.ConstraintOriginLayer)
self.assertEqual(layer.fields().at(0).constraints().constraintStrength(QgsFieldConstraints.ConstraintNotNull),
QgsFieldConstraints.ConstraintStrengthHard)
self.assertEqual(layer.fields().at(1).constraints().constraints(),
QgsFieldConstraints.ConstraintNotNull | QgsFieldConstraints.ConstraintUnique)
self.assertEqual(layer.fields().at(1).constraints().constraintOrigin(QgsFieldConstraints.ConstraintNotNull),
QgsFieldConstraints.ConstraintOriginLayer)
self.assertEqual(layer.fields().at(1).constraints().constraintOrigin(QgsFieldConstraints.ConstraintUnique),
QgsFieldConstraints.ConstraintOriginLayer)
self.assertEqual(layer.fields().at(1).constraints().constraintStrength(QgsFieldConstraints.ConstraintNotNull),
QgsFieldConstraints.ConstraintStrengthHard)
self.assertEqual(layer.fields().at(1).constraints().constraintStrength(QgsFieldConstraints.ConstraintUnique),
QgsFieldConstraints.ConstraintStrengthHard)
layer.removeFieldConstraint(1, QgsFieldConstraints.ConstraintNotNull)
layer.removeFieldConstraint(1, QgsFieldConstraints.ConstraintUnique)
self.assertEqual(layer.fieldConstraints(0), QgsFieldConstraints.ConstraintNotNull)
self.assertFalse(layer.fieldConstraints(1))
self.assertFalse(layer.fieldConstraints(2))
self.assertEqual(layer.fields().at(0).constraints().constraints(), QgsFieldConstraints.ConstraintNotNull)
self.assertEqual(layer.fields().at(0).constraints().constraintOrigin(QgsFieldConstraints.ConstraintNotNull),
QgsFieldConstraints.ConstraintOriginLayer)
self.assertEqual(layer.fields().at(0).constraints().constraintStrength(QgsFieldConstraints.ConstraintNotNull),
QgsFieldConstraints.ConstraintStrengthHard)
self.assertFalse(layer.fields().at(1).constraints().constraints())
self.assertEqual(layer.fields().at(1).constraints().constraintOrigin(QgsFieldConstraints.ConstraintNotNull),
QgsFieldConstraints.ConstraintOriginNotSet)
self.assertEqual(layer.fields().at(1).constraints().constraintStrength(QgsFieldConstraints.ConstraintNotNull),
QgsFieldConstraints.ConstraintStrengthNotSet)
def testSaveRestoreConstraints(self):
""" test saving and restoring constraints from xml"""
layer = createLayerWithOnePoint()
# no constraints
doc = QDomDocument("testdoc")
elem = doc.createElement("maplayer")
self.assertTrue(layer.writeXml(elem, doc, QgsReadWriteContext()))
layer2 = createLayerWithOnePoint()
self.assertTrue(layer2.readXml(elem, QgsReadWriteContext()))
self.assertFalse(layer2.fieldConstraints(0))
self.assertFalse(layer2.fieldConstraints(1))
# set some constraints
layer.setFieldConstraint(0, QgsFieldConstraints.ConstraintNotNull)
layer.setFieldConstraint(1, QgsFieldConstraints.ConstraintNotNull, QgsFieldConstraints.ConstraintStrengthSoft)
layer.setFieldConstraint(1, QgsFieldConstraints.ConstraintUnique)
doc = QDomDocument("testdoc")
elem = doc.createElement("maplayer")
self.assertTrue(layer.writeXml(elem, doc, QgsReadWriteContext()))
layer3 = createLayerWithOnePoint()
self.assertTrue(layer3.readXml(elem, QgsReadWriteContext()))
self.assertEqual(layer3.fieldConstraints(0), QgsFieldConstraints.ConstraintNotNull)
self.assertEqual(layer3.fieldConstraints(1),
QgsFieldConstraints.ConstraintNotNull | QgsFieldConstraints.ConstraintUnique)
self.assertEqual(layer3.fields().at(0).constraints().constraints(), QgsFieldConstraints.ConstraintNotNull)
self.assertEqual(layer3.fields().at(0).constraints().constraintOrigin(QgsFieldConstraints.ConstraintNotNull),
QgsFieldConstraints.ConstraintOriginLayer)
self.assertEqual(layer.fields().at(0).constraints().constraintStrength(QgsFieldConstraints.ConstraintNotNull),
QgsFieldConstraints.ConstraintStrengthHard)
self.assertEqual(layer3.fields().at(1).constraints().constraints(),
QgsFieldConstraints.ConstraintNotNull | QgsFieldConstraints.ConstraintUnique)
self.assertEqual(layer3.fields().at(1).constraints().constraintOrigin(QgsFieldConstraints.ConstraintNotNull),
QgsFieldConstraints.ConstraintOriginLayer)
self.assertEqual(layer3.fields().at(1).constraints().constraintOrigin(QgsFieldConstraints.ConstraintUnique),
QgsFieldConstraints.ConstraintOriginLayer)
self.assertEqual(layer.fields().at(1).constraints().constraintStrength(QgsFieldConstraints.ConstraintNotNull),
QgsFieldConstraints.ConstraintStrengthSoft)
self.assertEqual(layer.fields().at(1).constraints().constraintStrength(QgsFieldConstraints.ConstraintUnique),
QgsFieldConstraints.ConstraintStrengthHard)
def testGetSetConstraintExpressions(self):
""" test getting and setting field constraint expressions """
layer = createLayerWithOnePoint()
self.assertFalse(layer.constraintExpression(0))
self.assertFalse(layer.constraintExpression(1))
self.assertFalse(layer.constraintExpression(2))
layer.setConstraintExpression(0, '1+2')
self.assertEqual(layer.constraintExpression(0), '1+2')
self.assertFalse(layer.constraintExpression(1))
self.assertFalse(layer.constraintExpression(2))
self.assertEqual(layer.fields().at(0).constraints().constraintExpression(), '1+2')
layer.setConstraintExpression(1, '3+4', 'desc')
self.assertEqual(layer.constraintExpression(0), '1+2')
self.assertEqual(layer.constraintExpression(1), '3+4')
self.assertEqual(layer.constraintDescription(1), 'desc')
self.assertFalse(layer.constraintExpression(2))
self.assertEqual(layer.fields().at(0).constraints().constraintExpression(), '1+2')
self.assertEqual(layer.fields().at(1).constraints().constraintExpression(), '3+4')
self.assertEqual(layer.fields().at(1).constraints().constraintDescription(), 'desc')
layer.setConstraintExpression(1, None)
self.assertEqual(layer.constraintExpression(0), '1+2')
self.assertFalse(layer.constraintExpression(1))
self.assertFalse(layer.constraintExpression(2))
self.assertEqual(layer.fields().at(0).constraints().constraintExpression(), '1+2')
self.assertFalse(layer.fields().at(1).constraints().constraintExpression())
def testSaveRestoreConstraintExpressions(self):
""" test saving and restoring constraint expressions from xml"""
layer = createLayerWithOnePoint()
# no constraints
doc = QDomDocument("testdoc")
elem = doc.createElement("maplayer")
self.assertTrue(layer.writeXml(elem, doc, QgsReadWriteContext()))
layer2 = createLayerWithOnePoint()
self.assertTrue(layer2.readXml(elem, QgsReadWriteContext()))
self.assertFalse(layer2.constraintExpression(0))
self.assertFalse(layer2.constraintExpression(1))
# set some constraints
layer.setConstraintExpression(0, '1+2')
layer.setConstraintExpression(1, '3+4', 'desc')
doc = QDomDocument("testdoc")
elem = doc.createElement("maplayer")
self.assertTrue(layer.writeXml(elem, doc, QgsReadWriteContext()))
layer3 = createLayerWithOnePoint()
self.assertTrue(layer3.readXml(elem, QgsReadWriteContext()))
self.assertEqual(layer3.constraintExpression(0), '1+2')
self.assertEqual(layer3.constraintExpression(1), '3+4')
self.assertEqual(layer3.constraintDescription(1), 'desc')
self.assertEqual(layer3.fields().at(0).constraints().constraintExpression(), '1+2')
self.assertEqual(layer3.fields().at(1).constraints().constraintExpression(), '3+4')
self.assertEqual(layer3.fields().at(1).constraints().constraintDescription(), 'desc')
self.assertEqual(layer3.fields().at(0).constraints().constraints(), QgsFieldConstraints.ConstraintExpression)
self.assertEqual(layer3.fields().at(1).constraints().constraints(), QgsFieldConstraints.ConstraintExpression)
self.assertEqual(layer3.fields().at(0).constraints().constraintOrigin(QgsFieldConstraints.ConstraintExpression),
QgsFieldConstraints.ConstraintOriginLayer)
self.assertEqual(layer3.fields().at(1).constraints().constraintOrigin(QgsFieldConstraints.ConstraintExpression),
QgsFieldConstraints.ConstraintOriginLayer)
def testGetFeatureLimitWithEdits(self):
""" test getting features with a limit, when edits are present """
layer = createLayerWithOnePoint()
# now has one feature with id 0
pr = layer.dataProvider()
f1 = QgsFeature(1)
f1.setAttributes(["test", 3])
f1.setGeometry(QgsGeometry.fromPointXY(QgsPointXY(300, 200)))
f2 = QgsFeature(2)
f2.setAttributes(["test", 3])
f2.setGeometry(QgsGeometry.fromPointXY(QgsPointXY(100, 200)))
f3 = QgsFeature(3)
f3.setAttributes(["test", 3])
f3.setGeometry(QgsGeometry.fromPointXY(QgsPointXY(100, 200)))
self.assertTrue(pr.addFeatures([f1, f2, f3]))
req = QgsFeatureRequest().setLimit(2)
self.assertEqual(len(list(layer.getFeatures(req))), 2)
# now delete feature f1
layer.startEditing()
self.assertTrue(layer.deleteFeature(1))
req = QgsFeatureRequest().setLimit(2)
self.assertEqual(len(list(layer.getFeatures(req))), 2)
layer.rollBack()
# change an attribute value required by filter
layer.startEditing()
req = QgsFeatureRequest().setFilterExpression('fldint=3').setLimit(2)
self.assertTrue(layer.changeAttributeValue(2, 1, 4))
self.assertEqual(len(list(layer.getFeatures(req))), 2)
layer.rollBack()
layer.startEditing()
req = QgsFeatureRequest().setFilterRect(QgsRectangle(50, 100, 150, 300)).setLimit(2)
self.assertTrue(layer.changeGeometry(2, QgsGeometry.fromPointXY(QgsPointXY(500, 600))))
self.assertEqual(len(list(layer.getFeatures(req))), 2)
layer.rollBack()
def test_server_properties(self):
""" Test server properties. """
layer = QgsVectorLayer('Point?field=fldtxt:string', 'layer_1', 'memory')
self.assertIsInstance(layer.serverProperties(), QgsMapLayerServerProperties)
def testClone(self):
# init crs
srs = QgsCoordinateReferenceSystem.fromEpsgId(3111)
# init map layer styles
tmplayer = createLayerWithTwoPoints()
sym1 = QgsLineSymbol()
sym1.setColor(Qt.magenta)
tmplayer.setRenderer(QgsSingleSymbolRenderer(sym1))
style0 = QgsMapLayerStyle()
style0.readFromLayer(tmplayer)
style1 = QgsMapLayerStyle()
style1.readFromLayer(tmplayer)
# init dependencies layers
ldep = createLayerWithTwoPoints()
dep = QgsMapLayerDependency(ldep.id())
# init layer
layer = createLayerWithTwoPoints()
layer.setBlendMode(QPainter.CompositionMode_Screen)
layer.styleManager().addStyle('style0', style0)
layer.styleManager().addStyle('style1', style1)
layer.setName('MyName')
layer.setShortName('MyShortName')
layer.setMaximumScale(0.5)
layer.setMinimumScale(1.5)
layer.setScaleBasedVisibility(True)
layer.setTitle('MyTitle')
layer.setAbstract('MyAbstract')
layer.setKeywordList('MyKeywordList')
layer.setDataUrl('MyDataUrl')
layer.setDataUrlFormat('MyDataUrlFormat')
layer.setAttribution('MyAttribution')
layer.setAttributionUrl('MyAttributionUrl')
layer.setMetadataUrl('MyMetadataUrl')
layer.setMetadataUrlType('MyMetadataUrlType')
layer.setMetadataUrlFormat('MyMetadataUrlFormat')
layer.setLegendUrl('MyLegendUrl')
layer.setLegendUrlFormat('MyLegendUrlFormat')
layer.setDependencies([dep])
layer.setCrs(srs)
layer.setCustomProperty('MyKey0', 'MyValue0')
layer.setCustomProperty('MyKey1', 'MyValue1')
layer.setOpacity(0.66)
layer.setProviderEncoding('latin9')
layer.setDisplayExpression('MyDisplayExpression')
layer.setMapTipTemplate('MyMapTipTemplate')
layer.setExcludeAttributesWfs(['MyExcludeAttributeWFS'])
layer.setExcludeAttributesWms(['MyExcludeAttributeWMS'])
layer.setFeatureBlendMode(QPainter.CompositionMode_Xor)
sym = QgsLineSymbol()
sym.setColor(Qt.magenta)
layer.setRenderer(QgsSingleSymbolRenderer(sym))
simplify = layer.simplifyMethod()
simplify.setTolerance(33.3)
simplify.setThreshold(0.333)
layer.setSimplifyMethod(simplify)
layer.setFieldAlias(0, 'MyAlias0')
layer.setFieldAlias(1, 'MyAlias1')
jl0 = createLayerWithTwoPoints()
j0 = QgsVectorLayerJoinInfo()
j0.setJoinLayer(jl0)
jl1 = createLayerWithTwoPoints()
j1 = QgsVectorLayerJoinInfo()
j1.setJoinLayer(jl1)
layer.addJoin(j0)
layer.addJoin(j1)
fids = layer.allFeatureIds()
selected_fids = fids[0:3]
layer.selectByIds(selected_fids)
cfg = layer.attributeTableConfig()
cfg.setSortOrder(Qt.DescendingOrder) # by default AscendingOrder
layer.setAttributeTableConfig(cfg)
pal = QgsPalLayerSettings()
text_format = QgsTextFormat()
text_format.setSize(33)
text_format.setColor(Qt.magenta)
pal.setFormat(text_format)
labeling = QgsVectorLayerSimpleLabeling(pal)
layer.setLabeling(labeling)
diag_renderer = QgsSingleCategoryDiagramRenderer()
diag_renderer.setAttributeLegend(False) # true by default
layer.setDiagramRenderer(diag_renderer)
diag_settings = QgsDiagramLayerSettings()
diag_settings.setPriority(3)
diag_settings.setZIndex(0.33)
layer.setDiagramLayerSettings(diag_settings)
edit_form_config = layer.editFormConfig()
edit_form_config.setUiForm("MyUiForm")
edit_form_config.setInitFilePath("MyInitFilePath")
layer.setEditFormConfig(edit_form_config)
widget_setup = QgsEditorWidgetSetup("MyWidgetSetupType", {})
layer.setEditorWidgetSetup(0, widget_setup)
layer.setConstraintExpression(0, "MyFieldConstraintExpression")
layer.setFieldConstraint(0, QgsFieldConstraints.ConstraintUnique, QgsFieldConstraints.ConstraintStrengthHard)
layer.setDefaultValueDefinition(0, QgsDefaultValue("MyDefaultValueExpression"))
action = QgsAction(QgsAction.Unix, "MyActionDescription", "MyActionCmd")
layer.actions().addAction(action)
metadata = QgsLayerMetadata()
metadata.setFees('a handful of roos')
layer.setMetadata(metadata)
# clone layer
clone = layer.clone()
self.assertEqual(layer.metadata().fees(), 'a handful of roos')
# generate xml from layer
layer_doc = QDomDocument("doc")
layer_elem = layer_doc.createElement("maplayer")
layer.writeLayerXml(layer_elem, layer_doc, QgsReadWriteContext())
# generate xml from clone
clone_doc = QDomDocument("doc")
clone_elem = clone_doc.createElement("maplayer")
clone.writeLayerXml(clone_elem, clone_doc, QgsReadWriteContext())
# replace id within xml of clone
clone_id_elem = clone_elem.firstChildElement("id")
clone_id_elem_patch = clone_doc.createElement("id")
clone_id_elem_patch_value = clone_doc.createTextNode(layer.id())
clone_id_elem_patch.appendChild(clone_id_elem_patch_value)
clone_elem.replaceChild(clone_id_elem_patch, clone_id_elem)
# update doc
clone_doc.appendChild(clone_elem)
layer_doc.appendChild(layer_elem)
# compare xml documents
self.assertEqual(layer_doc.toString(), clone_doc.toString())
def testQgsVectorLayerSelectedFeatureSource(self):
"""
test QgsVectorLayerSelectedFeatureSource
"""
layer = QgsVectorLayer("Point?crs=epsg:3111&field=fldtxt:string&field=fldint:integer",
"addfeat", "memory")
pr = layer.dataProvider()
f1 = QgsFeature(1)
f1.setAttributes(["test", 123])
f1.setGeometry(QgsGeometry.fromPointXY(QgsPointXY(100, 200)))
f2 = QgsFeature(2)
f2.setAttributes(["test2", 457])
f2.setGeometry(QgsGeometry.fromPointXY(QgsPointXY(200, 200)))
f3 = QgsFeature(3)
f3.setAttributes(["test2", 888])
f3.setGeometry(QgsGeometry.fromPointXY(QgsPointXY(300, 200)))
f4 = QgsFeature(4)
f4.setAttributes(["test3", -1])
f4.setGeometry(QgsGeometry.fromPointXY(QgsPointXY(400, 300)))
f5 = QgsFeature(5)
f5.setAttributes(["test4", 0])
f5.setGeometry(QgsGeometry.fromPointXY(QgsPointXY(0, 0)))
self.assertTrue(pr.addFeatures([f1, f2, f3, f4, f5]))
self.assertEqual(layer.featureCount(), 5)
source = QgsVectorLayerSelectedFeatureSource(layer)
self.assertEqual(source.sourceCrs().authid(), 'EPSG:3111')
self.assertEqual(source.wkbType(), QgsWkbTypes.Point)
self.assertEqual(source.fields(), layer.fields())
# no selection
self.assertEqual(source.featureCount(), 0)
it = source.getFeatures()
f = QgsFeature()
self.assertFalse(it.nextFeature(f))
# with selection
layer.selectByIds([f1.id(), f3.id(), f5.id()])
source = QgsVectorLayerSelectedFeatureSource(layer)
self.assertEqual(source.featureCount(), 3)
ids = set([f.id() for f in source.getFeatures()])
self.assertEqual(ids, {f1.id(), f3.id(), f5.id()})
# test that requesting subset of ids intersects this request with the selected ids
ids = set([f.id() for f in source.getFeatures(QgsFeatureRequest().setFilterFids([f1.id(), f2.id(), f5.id()]))])
self.assertEqual(ids, {f1.id(), f5.id()})
# test that requesting id works
ids = set([f.id() for f in source.getFeatures(QgsFeatureRequest().setFilterFid(f1.id()))])
self.assertEqual(ids, {f1.id()})
ids = set([f.id() for f in source.getFeatures(QgsFeatureRequest().setFilterFid(f5.id()))])
self.assertEqual(ids, {f5.id()})
# test that source has stored snapshot of selected features
layer.selectByIds([f2.id(), f4.id()])
self.assertEqual(source.featureCount(), 3)
ids = set([f.id() for f in source.getFeatures()])
self.assertEqual(ids, {f1.id(), f3.id(), f5.id()})
# test that source is not dependent on layer
del layer
ids = set([f.id() for f in source.getFeatures()])
self.assertEqual(ids, {f1.id(), f3.id(), f5.id()})
def testFeatureRequestWithReprojectionAndVirtualFields(self):
layer = self.getSource()
field = QgsField('virtual', QVariant.Double)
layer.addExpressionField('$x', field)
virtual_values = [f['virtual'] for f in layer.getFeatures()]
self.assertAlmostEqual(virtual_values[0], -71.123, 2)
self.assertEqual(virtual_values[1], NULL)
self.assertAlmostEqual(virtual_values[2], -70.332, 2)
self.assertAlmostEqual(virtual_values[3], -68.2, 2)
self.assertAlmostEqual(virtual_values[4], -65.32, 2)
# repeat, with reprojection on request
request = QgsFeatureRequest().setDestinationCrs(QgsCoordinateReferenceSystem.fromEpsgId(3785),
QgsProject.instance().transformContext())
features = [f for f in layer.getFeatures(request)]
# virtual field value should not change, even though geometry has
self.assertAlmostEqual(features[0]['virtual'], -71.123, 2)
self.assertAlmostEqual(features[0].geometry().constGet().x(), -7917376, -5)
self.assertEqual(features[1]['virtual'], NULL)
self.assertFalse(features[1].hasGeometry())
self.assertAlmostEqual(features[2]['virtual'], -70.332, 2)
self.assertAlmostEqual(features[2].geometry().constGet().x(), -7829322, -5)
self.assertAlmostEqual(features[3]['virtual'], -68.2, 2)
self.assertAlmostEqual(features[3].geometry().constGet().x(), -7591989, -5)
self.assertAlmostEqual(features[4]['virtual'], -65.32, 2)
self.assertAlmostEqual(features[4].geometry().constGet().x(), -7271389, -5)
def testPrecision(self):
layer = QgsVectorLayer("Polygon?crs=epsg:2056&field=pk:int", "vl", "memory")
layer.geometryOptions().setGeometryPrecision(10)
geom = QgsGeometry.fromWkt('Polygon ((2596411 1224654, 2596400 1224652, 2596405 1224640, 2596410 1224641, 2596411 1224654))')
feature = QgsFeature(layer.fields())
feature.setGeometry(geom)
layer.startEditing()
layer.addFeature(feature)
self.assertGeometriesEqual(QgsGeometry.fromWkt('Polygon ((2596410 1224650, 2596400 1224650, 2596410 1224640, 2596410 1224650))'), feature.geometry(), 'geometry with unsnapped nodes', 'fixed geometry')
layer.geometryOptions().setGeometryPrecision(0.0)
feature.setGeometry(QgsGeometry.fromWkt('Polygon ((2596411 1224654, 2596400 1224652, 2596405 1224640, 2596410 1224641, 2596411 1224654))'))
layer.addFeature(feature)
self.assertGeometriesEqual(QgsGeometry.fromWkt('Polygon ((2596411 1224654, 2596400 1224652, 2596405 1224640, 2596410 1224641, 2596411 1224654))'), feature.geometry(), 'geometry with duplicates', 'unchanged geometry')
def testRemoveDuplicateNodes(self):
layer = QgsVectorLayer("Polygon?crs=epsg:2056&field=pk:int", "vl", "memory")
layer.geometryOptions().setRemoveDuplicateNodes(True)
geom = QgsGeometry.fromWkt('Polygon ((70 80, 80 90, 80 90, 60 50, 70 80))')
feature = QgsFeature(layer.fields())
feature.setGeometry(geom)
layer.startEditing()
layer.addFeature(feature)
self.assertGeometriesEqual(feature.geometry(), QgsGeometry.fromWkt('Polygon ((70 80, 80 90, 60 50, 70 80))'), 'fixed geometry', 'geometry with duplicates')
layer.geometryOptions().setRemoveDuplicateNodes(False)
feature.setGeometry(QgsGeometry.fromWkt('Polygon ((70 80, 80 90, 80 90, 60 50, 70 80))'))
layer.addFeature(feature)
self.assertGeometriesEqual(feature.geometry(), QgsGeometry.fromWkt('Polygon ((70 80, 80 90, 80 90, 60 50, 70 80))'), 'unchanged geometry', 'geometry with duplicates')
def testPrecisionAndDuplicateNodes(self):
layer = QgsVectorLayer("Polygon?crs=epsg:2056&field=pk:int", "vl", "memory")
layer.geometryOptions().setGeometryPrecision(10)
layer.geometryOptions().setRemoveDuplicateNodes(True)
geom = QgsGeometry.fromWkt('Polygon ((2596411 1224654, 2596400 1224652, 2596402 1224653, 2596405 1224640, 2596410 1224641, 2596411 1224654))')
feature = QgsFeature(layer.fields())
feature.setGeometry(geom)
layer.startEditing()
layer.addFeature(feature)
self.assertGeometriesEqual(QgsGeometry.fromWkt('Polygon ((2596410 1224650, 2596400 1224650, 2596410 1224640, 2596410 1224650))'), feature.geometry(), 'geometry with unsnapped nodes', 'fixed geometry')
def testDefaultDisplayExpression(self):
"""
        Test that the default display expression gravitates to the most interesting column names
"""
layer = QgsVectorLayer("Polygon?crs=epsg:2056&field=pk:int", "vl", "memory")
self.assertEqual(layer.displayExpression(), '"pk"')
self.assertEqual(layer.displayField(), 'pk')
layer = QgsVectorLayer("Polygon?crs=epsg:2056&field=pk:int&field=DESCRIPTION:string&field=fid:int", "vl", "memory")
self.assertEqual(layer.displayExpression(), '"DESCRIPTION"')
self.assertEqual(layer.displayField(), 'DESCRIPTION')
layer = QgsVectorLayer("Polygon?crs=epsg:2056&field=pk:int&field=DESCRIPTION:string&field=fid:int&field=NAME:string", "vl", "memory")
self.assertEqual(layer.displayExpression(), '"NAME"')
self.assertEqual(layer.displayField(), 'NAME')
layer = QgsVectorLayer("Polygon?crs=epsg:2056&field=pk:int&field=DESCRIPTION:string&field=fid:int&field=BETTER_NAME:string&field=NAME:string", "vl", "memory")
self.assertEqual(layer.displayExpression(), '"BETTER_NAME"')
self.assertEqual(layer.displayField(), 'BETTER_NAME')
class TestQgsVectorLayerSourceAddedFeaturesInBuffer(unittest.TestCase, FeatureSourceTestCase):
@classmethod
def getSource(cls):
vl = QgsVectorLayer(
'Point?crs=epsg:4326&field=pk:integer&field=cnt:integer&field=name:string(0)&field=name2:string(0)&field=num_char:string&field=dt:datetime&field=date:date&field=time:time&key=pk',
'test', 'memory')
assert (vl.isValid())
f1 = QgsFeature()
f1.setAttributes([5, -200, NULL, 'NuLl', '5', QDateTime(QDate(2020, 5, 4), QTime(12, 13, 14)), QDate(2020, 5, 2), QTime(12, 13, 1)])
f1.setGeometry(QgsGeometry.fromWkt('Point (-71.123 78.23)'))
f2 = QgsFeature()
f2.setAttributes([3, 300, 'Pear', 'PEaR', '3', NULL, NULL, NULL])
f3 = QgsFeature()
f3.setAttributes([1, 100, 'Orange', 'oranGe', '1', QDateTime(QDate(2020, 5, 3), QTime(12, 13, 14)), QDate(2020, 5, 3), QTime(12, 13, 14)])
f3.setGeometry(QgsGeometry.fromWkt('Point (-70.332 66.33)'))
f4 = QgsFeature()
f4.setAttributes([2, 200, 'Apple', 'Apple', '2', QDateTime(QDate(2020, 5, 4), QTime(12, 14, 14)), QDate(2020, 5, 4), QTime(12, 14, 14)])
f4.setGeometry(QgsGeometry.fromWkt('Point (-68.2 70.8)'))
f5 = QgsFeature()
f5.setAttributes([4, 400, 'Honey', 'Honey', '4', QDateTime(QDate(2021, 5, 4), QTime(13, 13, 14)), QDate(2021, 5, 4), QTime(13, 13, 14)])
f5.setGeometry(QgsGeometry.fromWkt('Point (-65.32 78.3)'))
# create a layer with features only in the added features buffer - not the provider
vl.startEditing()
vl.addFeatures([f1, f2, f3, f4, f5])
return vl
@classmethod
def setUpClass(cls):
"""Run before all tests"""
# Create test layer for FeatureSourceTestCase
cls.source = cls.getSource()
def testGetFeaturesSubsetAttributes2(self):
""" Override and skip this QgsFeatureSource test. We are using a memory provider, and it's actually more efficient for the memory provider to return
its features as direct copies (due to implicit sharing of QgsFeature)
"""
pass
def testGetFeaturesNoGeometry(self):
""" Override and skip this QgsFeatureSource test. We are using a memory provider, and it's actually more efficient for the memory provider to return
its features as direct copies (due to implicit sharing of QgsFeature)
"""
pass
def testOrderBy(self):
""" Skip order by tests - edited features are not sorted in iterators.
(Maybe they should be??)
"""
pass
def testMinimumValue(self):
""" Skip min values test - due to inconsistencies in how null values are treated by providers.
They are included here, but providers don't include them.... which is right?
"""
pass
class TestQgsVectorLayerSourceChangedGeometriesInBuffer(unittest.TestCase, FeatureSourceTestCase):
@classmethod
def getSource(cls):
vl = QgsVectorLayer(
'Point?crs=epsg:4326&field=pk:integer&field=cnt:integer&field=name:string(0)&field=name2:string(0)&field=num_char:string&field=dt:datetime&field=date:date&field=time:time&key=pk',
'test', 'memory')
assert (vl.isValid())
f1 = QgsFeature()
f1.setAttributes([5, -200, NULL, 'NuLl', '5', QDateTime(QDate(2020, 5, 4), QTime(12, 13, 14)), QDate(2020, 5, 2), QTime(12, 13, 1)])
f2 = QgsFeature()
f2.setAttributes([3, 300, 'Pear', 'PEaR', '3', NULL, NULL, NULL])
f2.setGeometry(QgsGeometry.fromWkt('Point (-70.5 65.2)'))
f3 = QgsFeature()
f3.setAttributes([1, 100, 'Orange', 'oranGe', '1', QDateTime(QDate(2020, 5, 3), QTime(12, 13, 14)), QDate(2020, 5, 3), QTime(12, 13, 14)])
f4 = QgsFeature()
f4.setAttributes([2, 200, 'Apple', 'Apple', '2', QDateTime(QDate(2020, 5, 4), QTime(12, 14, 14)), QDate(2020, 5, 4), QTime(12, 14, 14)])
f5 = QgsFeature()
f5.setAttributes([4, 400, 'Honey', 'Honey', '4', QDateTime(QDate(2021, 5, 4), QTime(13, 13, 14)), QDate(2021, 5, 4), QTime(13, 13, 14)])
vl.dataProvider().addFeatures([f1, f2, f3, f4, f5])
ids = {f['pk']: f.id() for f in vl.getFeatures()}
# modify geometries in buffer
vl.startEditing()
vl.changeGeometry(ids[5], QgsGeometry.fromWkt('Point (-71.123 78.23)'))
vl.changeGeometry(ids[3], QgsGeometry())
vl.changeGeometry(ids[1], QgsGeometry.fromWkt('Point (-70.332 66.33)'))
vl.changeGeometry(ids[2], QgsGeometry.fromWkt('Point (-68.2 70.8)'))
vl.changeGeometry(ids[4], QgsGeometry.fromWkt('Point (-65.32 78.3)'))
return vl
@classmethod
def setUpClass(cls):
"""Run before all tests"""
# Create test layer for FeatureSourceTestCase
cls.source = cls.getSource()
def testGetFeaturesSubsetAttributes2(self):
""" Override and skip this QgsFeatureSource test. We are using a memory provider, and it's actually more efficient for the memory provider to return
its features as direct copies (due to implicit sharing of QgsFeature)
"""
pass
def testGetFeaturesNoGeometry(self):
""" Override and skip this QgsFeatureSource test. We are using a memory provider, and it's actually more efficient for the memory provider to return
its features as direct copies (due to implicit sharing of QgsFeature)
"""
pass
def testOrderBy(self):
""" Skip order by tests - edited features are not sorted in iterators.
(Maybe they should be??)
"""
pass
class TestQgsVectorLayerSourceChangedAttributesInBuffer(unittest.TestCase, FeatureSourceTestCase):
@classmethod
def getSource(cls):
vl = QgsVectorLayer(
'Point?crs=epsg:4326&field=pk:integer&field=cnt:integer&field=name:string(0)&field=name2:string(0)&field=num_char:string&field=dt:datetime&field=date:date&field=time:time&key=pk',
'test', 'memory')
assert (vl.isValid())
f1 = QgsFeature()
f1.setAttributes([5, 200, 'a', 'b', 'c', QDateTime(2020, 4, 5, 1, 2, 3), QDate(2020, 4, 5), QTime(1, 2, 3)])
f1.setGeometry(QgsGeometry.fromWkt('Point (-71.123 78.23)'))
f2 = QgsFeature()
f2.setAttributes([3, -200, 'd', 'e', 'f', QDateTime(2020, 4, 5, 1, 2, 3), QDate(2020, 4, 5), QTime(1, 2, 3)])
f3 = QgsFeature()
f3.setAttributes([1, -100, 'g', 'h', 'i', QDateTime(2020, 4, 5, 1, 2, 3), QDate(2020, 4, 5), QTime(1, 2, 3)])
f3.setGeometry(QgsGeometry.fromWkt('Point (-70.332 66.33)'))
f4 = QgsFeature()
f4.setAttributes([2, -200, 'j', 'k', 'l', QDateTime(2020, 4, 5, 1, 2, 3), QDate(2020, 4, 5), QTime(1, 2, 3)])
f4.setGeometry(QgsGeometry.fromWkt('Point (-68.2 70.8)'))
f5 = QgsFeature()
f5.setAttributes([4, 400, 'm', 'n', 'o', QDateTime(2020, 4, 5, 1, 2, 3), QDate(2020, 4, 5), QTime(1, 2, 3)])
f5.setGeometry(QgsGeometry.fromWkt('Point (-65.32 78.3)'))
vl.dataProvider().addFeatures([f1, f2, f3, f4, f5])
ids = {f['pk']: f.id() for f in vl.getFeatures()}
# modify geometries in buffer
vl.startEditing()
vl.changeAttributeValue(ids[5], 1, -200)
vl.changeAttributeValue(ids[5], 2, NULL)
vl.changeAttributeValue(ids[5], 3, 'NuLl')
vl.changeAttributeValue(ids[5], 4, '5')
vl.changeAttributeValue(ids[5], 5, QDateTime(QDate(2020, 5, 4), QTime(12, 13, 14)))
vl.changeAttributeValue(ids[5], 6, QDate(2020, 5, 2))
vl.changeAttributeValue(ids[5], 7, QTime(12, 13, 1))
vl.changeAttributeValue(ids[3], 1, 300)
vl.changeAttributeValue(ids[3], 2, 'Pear')
vl.changeAttributeValue(ids[3], 3, 'PEaR')
vl.changeAttributeValue(ids[3], 4, '3')
vl.changeAttributeValue(ids[3], 5, NULL)
vl.changeAttributeValue(ids[3], 6, NULL)
vl.changeAttributeValue(ids[3], 7, NULL)
vl.changeAttributeValue(ids[1], 1, 100)
vl.changeAttributeValue(ids[1], 2, 'Orange')
vl.changeAttributeValue(ids[1], 3, 'oranGe')
vl.changeAttributeValue(ids[1], 4, '1')
vl.changeAttributeValue(ids[1], 5, QDateTime(QDate(2020, 5, 3), QTime(12, 13, 14)))
vl.changeAttributeValue(ids[1], 6, QDate(2020, 5, 3))
vl.changeAttributeValue(ids[1], 7, QTime(12, 13, 14))
vl.changeAttributeValue(ids[2], 1, 200)
vl.changeAttributeValue(ids[2], 2, 'Apple')
vl.changeAttributeValue(ids[2], 3, 'Apple')
vl.changeAttributeValue(ids[2], 4, '2')
vl.changeAttributeValue(ids[2], 5, QDateTime(QDate(2020, 5, 4), QTime(12, 14, 14)))
vl.changeAttributeValue(ids[2], 6, QDate(2020, 5, 4))
vl.changeAttributeValue(ids[2], 7, QTime(12, 14, 14))
vl.changeAttributeValue(ids[4], 1, 400)
vl.changeAttributeValue(ids[4], 2, 'Honey')
vl.changeAttributeValue(ids[4], 3, 'Honey')
vl.changeAttributeValue(ids[4], 4, '4')
vl.changeAttributeValue(ids[4], 5, QDateTime(QDate(2021, 5, 4), QTime(13, 13, 14)))
vl.changeAttributeValue(ids[4], 6, QDate(2021, 5, 4))
vl.changeAttributeValue(ids[4], 7, QTime(13, 13, 14))
return vl
@classmethod
def setUpClass(cls):
"""Run before all tests"""
# Create test layer for FeatureSourceTestCase
cls.source = cls.getSource()
def testGetFeaturesSubsetAttributes2(self):
""" Override and skip this QgsFeatureSource test. We are using a memory provider, and it's actually more efficient for the memory provider to return
its features as direct copies (due to implicit sharing of QgsFeature)
"""
pass
def testGetFeaturesNoGeometry(self):
""" Override and skip this QgsFeatureSource test. We are using a memory provider, and it's actually more efficient for the memory provider to return
its features as direct copies (due to implicit sharing of QgsFeature)
"""
pass
def testOrderBy(self):
""" Skip order by tests - edited features are not sorted in iterators.
(Maybe they should be??)
"""
pass
def testUniqueValues(self):
""" Skip unique values test - as noted in the docs this is unreliable when features are in the buffer
"""
pass
def testMinimumValue(self):
""" Skip min values test - as noted in the docs this is unreliable when features are in the buffer
"""
pass
def testMaximumValue(self):
""" Skip max values test - as noted in the docs this is unreliable when features are in the buffer
"""
pass
class TestQgsVectorLayerSourceDeletedFeaturesInBuffer(unittest.TestCase, FeatureSourceTestCase):
@classmethod
def getSource(cls):
vl = QgsVectorLayer(
            'Point?crs=epsg:4326&field=pk:integer&field=cnt:integer&field=name:string(0)&field=name2:string(0)&field=num_char:string&field=dt:datetime&field=date:date&field=time:time&key=pk',
'test', 'memory')
assert (vl.isValid())
# add a bunch of similar features to the provider
b1 = QgsFeature()
b1.setAttributes([5, -300, 'Apple', 'PEaR', '1', QDateTime(QDate(2020, 5, 5), QTime(12, 11, 14)), QDate(2020, 5, 1), QTime(10, 13, 1)])
b1.setGeometry(QgsGeometry.fromWkt('Point (-70.332 66.33)'))
b2 = QgsFeature()
b2.setAttributes([3, 100, 'Orange', 'NuLl', '2', QDateTime(QDate(2020, 5, 1), QTime(12, 13, 14)), QDate(2020, 5, 9), QTime(9, 13, 1)])
b2.setGeometry(QgsGeometry.fromWkt('Point (-71.123 78.23)'))
b3 = QgsFeature()
b3.setAttributes([1, -200, 'Honey', 'oranGe', '5', QDateTime(QDate(2020, 5, 1), QTime(12, 13, 14)), QDate(2020, 5, 19), QTime(2, 13, 1)])
b4 = QgsFeature()
b4.setAttributes([2, 400, 'Pear', 'Honey', '3', QDateTime(QDate(2020, 4, 4), QTime(12, 13, 14)), QDate(2020, 4, 2), QTime(4, 13, 1)])
b4.setGeometry(QgsGeometry.fromWkt('Point (-65.32 78.3)'))
b5 = QgsFeature()
b5.setAttributes([4, 200, NULL, 'oranGe', '3', QDateTime(QDate(2019, 5, 4), QTime(12, 13, 14)), QDate(2019, 5, 2), QTime(1, 13, 1)])
b5.setGeometry(QgsGeometry.fromWkt('Point (-68.2 70.8)'))
vl.dataProvider().addFeatures([b1, b2, b3, b4, b5])
bad_ids = [f['pk'] for f in vl.getFeatures()]
# here's our good features
f1 = QgsFeature()
f1.setAttributes([5, -200, NULL, 'NuLl', '5', QDateTime(QDate(2020, 5, 4), QTime(12, 13, 14)), QDate(2020, 5, 2), QTime(12, 13, 1)])
f1.setGeometry(QgsGeometry.fromWkt('Point (-71.123 78.23)'))
f2 = QgsFeature()
f2.setAttributes([3, 300, 'Pear', 'PEaR', '3', NULL, NULL, NULL])
f3 = QgsFeature()
f3.setAttributes([1, 100, 'Orange', 'oranGe', '1', QDateTime(QDate(2020, 5, 3), QTime(12, 13, 14)), QDate(2020, 5, 3), QTime(12, 13, 14)])
f3.setGeometry(QgsGeometry.fromWkt('Point (-70.332 66.33)'))
f4 = QgsFeature()
f4.setAttributes([2, 200, 'Apple', 'Apple', '2', QDateTime(QDate(2020, 5, 4), QTime(12, 14, 14)), QDate(2020, 5, 4), QTime(12, 14, 14)])
f4.setGeometry(QgsGeometry.fromWkt('Point (-68.2 70.8)'))
f5 = QgsFeature()
f5.setAttributes([4, 400, 'Honey', 'Honey', '4', QDateTime(QDate(2021, 5, 4), QTime(13, 13, 14)), QDate(2021, 5, 4), QTime(13, 13, 14)])
f5.setGeometry(QgsGeometry.fromWkt('Point (-65.32 78.3)'))
vl.dataProvider().addFeatures([f1, f2, f3, f4, f5])
# delete the bad features, but don't commit
vl.startEditing()
vl.deleteFeatures(bad_ids)
return vl
@classmethod
def setUpClass(cls):
"""Run before all tests"""
# Create test layer for FeatureSourceTestCase
cls.source = cls.getSource()
def testGetFeaturesSubsetAttributes2(self):
""" Override and skip this QgsFeatureSource test. We are using a memory provider, and it's actually more efficient for the memory provider to return
its features as direct copies (due to implicit sharing of QgsFeature)
"""
pass
def testGetFeaturesNoGeometry(self):
""" Override and skip this QgsFeatureSource test. We are using a memory provider, and it's actually more efficient for the memory provider to return
its features as direct copies (due to implicit sharing of QgsFeature)
"""
pass
def testOrderBy(self):
""" Skip order by tests - edited features are not sorted in iterators.
(Maybe they should be??)
"""
pass
def testUniqueValues(self):
""" Skip unique values test - as noted in the docs this is unreliable when features are in the buffer
"""
pass
def testMinimumValue(self):
""" Skip min values test - as noted in the docs this is unreliable when features are in the buffer
"""
pass
def testMaximumValue(self):
""" Skip max values test - as noted in the docs this is unreliable when features are in the buffer
"""
pass
class TestQgsVectorLayerTransformContext(unittest.TestCase):
def setUp(self):
"""Prepare tc"""
super(TestQgsVectorLayerTransformContext, self).setUp()
self.ctx = QgsCoordinateTransformContext()
self.ctx.addCoordinateOperation(QgsCoordinateReferenceSystem.fromEpsgId(4326),
QgsCoordinateReferenceSystem.fromEpsgId(3857), 'test')
def testTransformContextIsSetInCtor(self):
"""Test transform context can be set from ctor"""
vl = QgsVectorLayer(
'Point?crs=epsg:4326&field=pk:integer&field=cnt:integer&field=name:string(0)&field=name2:string(0)&field=num_char:string&key=pk',
'test', 'memory')
self.assertFalse(vl.transformContext().hasTransform(QgsCoordinateReferenceSystem.fromEpsgId(4326), QgsCoordinateReferenceSystem.fromEpsgId(3857)))
options = QgsVectorLayer.LayerOptions(self.ctx)
vl = QgsVectorLayer(
'Point?crs=epsg:4326&field=pk:integer&field=cnt:integer&field=name:string(0)&field=name2:string(0)&field=num_char:string&key=pk',
'test', 'memory', options)
self.assertTrue(vl.transformContext().hasTransform(QgsCoordinateReferenceSystem.fromEpsgId(4326), QgsCoordinateReferenceSystem.fromEpsgId(3857)))
def testTransformContextInheritsFromProject(self):
"""Test that when a layer is added to a project it inherits its context"""
vl = QgsVectorLayer(
'Point?crs=epsg:4326&field=pk:integer&field=cnt:integer&field=name:string(0)&field=name2:string(0)&field=num_char:string&key=pk',
'test', 'memory')
self.assertFalse(vl.transformContext().hasTransform(QgsCoordinateReferenceSystem.fromEpsgId(4326), QgsCoordinateReferenceSystem.fromEpsgId(3857)))
p = QgsProject()
self.assertFalse(p.transformContext().hasTransform(QgsCoordinateReferenceSystem.fromEpsgId(4326), QgsCoordinateReferenceSystem.fromEpsgId(3857)))
p.setTransformContext(self.ctx)
self.assertTrue(p.transformContext().hasTransform(QgsCoordinateReferenceSystem.fromEpsgId(4326), QgsCoordinateReferenceSystem.fromEpsgId(3857)))
p.addMapLayers([vl])
self.assertTrue(vl.transformContext().hasTransform(QgsCoordinateReferenceSystem.fromEpsgId(4326), QgsCoordinateReferenceSystem.fromEpsgId(3857)))
def testTransformContextIsSyncedFromProject(self):
"""Test that when a layer is synced when project context changes"""
vl = QgsVectorLayer(
'Point?crs=epsg:4326&field=pk:integer&field=cnt:integer&field=name:string(0)&field=name2:string(0)&field=num_char:string&key=pk',
'test', 'memory')
self.assertFalse(vl.transformContext().hasTransform(QgsCoordinateReferenceSystem.fromEpsgId(4326), QgsCoordinateReferenceSystem.fromEpsgId(3857)))
p = QgsProject()
self.assertFalse(p.transformContext().hasTransform(QgsCoordinateReferenceSystem.fromEpsgId(4326), QgsCoordinateReferenceSystem.fromEpsgId(3857)))
p.setTransformContext(self.ctx)
self.assertTrue(p.transformContext().hasTransform(QgsCoordinateReferenceSystem.fromEpsgId(4326), QgsCoordinateReferenceSystem.fromEpsgId(3857)))
p.addMapLayers([vl])
self.assertTrue(vl.transformContext().hasTransform(QgsCoordinateReferenceSystem.fromEpsgId(4326), QgsCoordinateReferenceSystem.fromEpsgId(3857)))
# Now change the project context
tc2 = QgsCoordinateTransformContext()
p.setTransformContext(tc2)
self.assertFalse(p.transformContext().hasTransform(QgsCoordinateReferenceSystem.fromEpsgId(4326), QgsCoordinateReferenceSystem.fromEpsgId(3857)))
self.assertFalse(vl.transformContext().hasTransform(QgsCoordinateReferenceSystem.fromEpsgId(4326), QgsCoordinateReferenceSystem.fromEpsgId(3857)))
p.setTransformContext(self.ctx)
self.assertTrue(p.transformContext().hasTransform(QgsCoordinateReferenceSystem.fromEpsgId(4326), QgsCoordinateReferenceSystem.fromEpsgId(3857)))
self.assertTrue(vl.transformContext().hasTransform(QgsCoordinateReferenceSystem.fromEpsgId(4326), QgsCoordinateReferenceSystem.fromEpsgId(3857)))
def testDeletedFeaturesAreNotSelected(self):
"""Test that when features are deleted are also removed from selected before
featuresDeleted is emitted"""
layer = QgsVectorLayer("point?crs=epsg:4326&field=id:integer", "Scratch point layer", "memory")
layer.startEditing()
layer.addFeature(QgsFeature(layer.fields()))
layer.commitChanges()
self.assertEqual(layer.featureCount(), 1)
test_errors = []
def onFeaturesDeleted(deleted_fids):
selected = layer.selectedFeatureIds()
for fid in selected:
test_errors.append(f'Feature with id {fid} was deleted but is still selected')
layer.featuresDeleted.connect(onFeaturesDeleted)
layer.startEditing()
layer.selectAll()
layer.deleteSelectedFeatures()
layer.commitChanges()
self.assertEqual(test_errors, [], test_errors)
self.assertEqual(layer.featureCount(), 0)
self.assertEqual(layer.selectedFeatureIds(), [])
def testCommitChangesReportsDeletedFeatureIDs(self):
"""
        Tests that commitChanges emits "featuresDeleted" with all deleted feature IDs,
        e.g. when (negative) temporary FIDs are converted into (positive) persistent FIDs.
"""
temp_fids = []
def onFeaturesDeleted(deleted_fids):
            self.assertEqual(len(deleted_fids), len(temp_fids),
                             msg=f'featuresDeleted returned {len(deleted_fids)} instead of '
                                 f'{len(temp_fids)} deleted feature IDs: {deleted_fids}')
for d in deleted_fids:
self.assertTrue(d in temp_fids)
layer = QgsVectorLayer("point?crs=epsg:4326&field=name:string", "Scratch point layer", "memory")
layer.featuresDeleted.connect(onFeaturesDeleted)
layer.startEditing()
layer.beginEditCommand('add 2 features')
layer.addFeature(QgsFeature(layer.fields()))
layer.addFeature(QgsFeature(layer.fields()))
layer.endEditCommand()
temp_fids.extend(layer.allFeatureIds())
layer.commitChanges()
def testSubsetStringInvalidLayer(self):
"""
Test that subset strings can be set on invalid layers, and retrieved later...
"""
vl = QgsVectorLayer(
'nope',
'test', 'no')
self.assertFalse(vl.isValid())
self.assertIsNone(vl.dataProvider())
vl.setSubsetString('xxxxxxxxx')
self.assertEqual(vl.subsetString(), 'xxxxxxxxx')
# invalid layer subset strings must be persisted via xml
doc = QDomDocument("testdoc")
elem = doc.createElement("maplayer")
self.assertTrue(vl.writeXml(elem, doc, QgsReadWriteContext()))
vl2 = QgsVectorLayer(
'nope',
'test', 'no')
vl2.readXml(elem, QgsReadWriteContext())
self.assertEqual(vl2.subsetString(), 'xxxxxxxxx')
def testLayerTypeFlags(self):
"""Basic API test, DB providers that support query layers should test the flag individually"""
layer = QgsVectorLayer("point?crs=epsg:4326&field=name:string", "Scratch point layer", "memory")
self.assertEqual(layer.vectorLayerTypeFlags(), Qgis.VectorLayerTypeFlags())
def testLayerWithoutProvider(self):
"""Test that we don't crash when invoking methods on a layer with a broken provider"""
layer = QgsVectorLayer("test", "test", "broken_provider")
layer.clone()
layer.storageType()
layer.capabilitiesString()
layer.dataComment()
layer.displayField()
layer.setDisplayExpression('')
layer.displayExpression()
layer.dataProvider()
layer.temporalProperties()
layer.setProviderEncoding('utf-8')
layer.setCoordinateSystem()
layer.addJoin(QgsVectorLayerJoinInfo())
layer.removeJoin('id')
layer.joinBuffer()
layer.vectorJoins()
layer.setDependencies([])
layer.dependencies()
idx = layer.addExpressionField('1+1', QgsField('foo'))
# layer.expressionField(idx)
# layer.updateExpressionField(idx, '')
# layer.removeExpressionField(idx)
layer.actions()
layer.serverProperties()
layer.selectedFeatureCount()
layer.selectByRect(QgsRectangle())
layer.selectByExpression('1')
layer.selectByIds([0])
layer.modifySelection([], [])
layer.invertSelection()
layer.selectAll()
layer.invertSelectionInRectangle(QgsRectangle())
layer.selectedFeatures()
layer.getSelectedFeatures()
layer.selectedFeatureIds()
layer.boundingBoxOfSelected()
layer.labelsEnabled()
layer.setLabelsEnabled(False)
layer.diagramsEnabled()
layer.setDiagramRenderer(None)
layer.diagramRenderer()
layer.diagramLayerSettings()
layer.setDiagramLayerSettings(QgsDiagramLayerSettings())
layer.renderer()
layer.setRenderer(None)
layer.addFeatureRendererGenerator(None)
layer.removeFeatureRendererGenerator(None)
layer.featureRendererGenerators()
layer.geometryType()
layer.wkbType()
layer.sourceCrs()
layer.sourceName()
doc = QDomDocument("testdoc")
elem = doc.createElement("maplayer")
layer.writeXml(elem, doc, QgsReadWriteContext())
layer.readXml(elem, QgsReadWriteContext())
layer.encodedSource('', QgsReadWriteContext())
layer.decodedSource('', 'invalid_provider', QgsReadWriteContext())
layer.resolveReferences(QgsProject())
layer.saveStyleToDatabase('name', 'description', False, 'uiFileContent')
layer.listStylesInDatabase()
layer.getStyleFromDatabase('id')
layer.deleteStyleFromDatabase('id')
layer.loadNamedStyle('uri', False)
layer.loadAuxiliaryLayer(QgsAuxiliaryStorage())
layer.setAuxiliaryLayer(None)
layer.auxiliaryLayer()
# layer.readSymbology()
# layer.readStyle()
# layer.writeSymbology()
# layer.writeStyle()
# layer.writeSld()
# layer.readSld()
layer.featureCount(None)
layer.symbolFeatureIds(None)
layer.hasFeatures()
layer.loadDefaultStyle()
layer.countSymbolFeatures()
layer.setSubsetString(None)
layer.subsetString()
layer.getFeatures()
layer.getFeature(0)
layer.getGeometry(0)
layer.getFeatures([0])
layer.getFeatures(QgsRectangle())
layer.addFeature(QgsFeature())
layer.updateFeature(QgsFeature())
layer.insertVertex(0, 0, 0, False)
layer.moveVertex(0, 0, 0, False)
layer.moveVertexV2(QgsPoint(), 0, False)
layer.deleteVertex(0, 0)
layer.deleteSelectedFeatures()
layer.addRing([QgsPointXY()])
# layer.addRing(QgsPointSequence())
# layer.addRing(QgsCurve())
# layer.addPart()
layer.translateFeature(0, 0, 0)
layer.splitParts([])
layer.splitFeatures([])
layer.addTopologicalPoints(QgsPoint())
layer.labeling()
layer.setLabeling(None)
layer.isEditable()
layer.isSpatial()
layer.isModified()
layer.isAuxiliaryField(0)
layer.reload()
layer.createMapRenderer(QgsRenderContext())
layer.extent()
layer.sourceExtent()
layer.fields()
layer.attributeList()
layer.primaryKeyAttributes()
layer.featureCount()
layer.setReadOnly(False)
layer.supportsEditing()
layer.changeGeometry(0, QgsGeometry())
layer.changeAttributeValue(0, 0, '')
layer.changeAttributeValues(0, {})
layer.addAttribute(QgsField('foo'))
layer.setFieldAlias(0, 'bar')
layer.removeFieldAlias(0)
layer.renameAttribute(0, 'bar')
layer.attributeAlias(0)
layer.attributeDisplayName(0)
layer.attributeAliases()
layer.deleteAttribute(0)
layer.deleteAttributes([])
layer.deleteFeature(0)
layer.deleteFeatures([])
layer.commitChanges()
layer.commitErrors()
layer.rollBack()
layer.referencingRelations(0)
layer.editBuffer()
layer.beginEditCommand('foo')
layer.endEditCommand()
layer.destroyEditCommand()
layer.updateFields()
layer.defaultValue(0)
layer.setDefaultValueDefinition(0, layer.defaultValueDefinition(0))
layer.fieldConstraints(0)
layer.fieldConstraintsAndStrength(0)
layer.setFieldConstraint(0, QgsFieldConstraints.ConstraintUnique)
layer.removeFieldConstraint(0, QgsFieldConstraints.ConstraintUnique)
layer.constraintExpression(0)
layer.constraintDescription(0)
layer.setConstraintExpression(0, '1')
layer.setEditorWidgetSetup(0, QgsEditorWidgetSetup('Hidden', {}))
layer.editorWidgetSetup(0)
layer.uniqueValues(0)
layer.uniqueStringsMatching(0, None)
layer.minimumValue(0)
layer.maximumValue(0)
layer.minimumAndMaximumValue(0)
layer.aggregate(QgsAggregateCalculator.Count, 'foo')
layer.setFeatureBlendMode(QPainter.CompositionMode_Screen)
layer.featureBlendMode()
layer.htmlMetadata()
layer.setSimplifyMethod(layer.simplifyMethod())
# layer.simplifyDrawingCanbeApplied()
layer.conditionalStyles()
layer.attributeTableConfig()
layer.setAttributeTableConfig(layer.attributeTableConfig())
layer.mapTipTemplate()
layer.setMapTipTemplate('')
layer.createExpressionContext()
layer.editFormConfig()
layer.setEditFormConfig(layer.editFormConfig())
layer.setReadExtentFromXml(False)
layer.readExtentFromXml()
layer.isEditCommandActive()
layer.storedExpressionManager()
layer.select(0)
layer.select([])
layer.deselect(0)
layer.deselect([])
layer.removeSelection()
layer.reselect()
layer.updateExtents()
layer.startEditing()
layer.setTransformContext(QgsCoordinateTransformContext())
layer.hasSpatialIndex()
# layer.accept(QgsStyleEntityVisitorInterface())
# TODO:
# - fetch rect: feat with changed geometry: 1. in rect, 2. out of rect
# - more join tests
# - import
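# A sketch for the first TODO above (hypothetical, not yet wired into the suite);
# it reuses only APIs already exercised in this file (setFilterRect, changeGeometry):
#   layer.startEditing()
#   layer.changeGeometry(fid, QgsGeometry.fromPointXY(QgsPointXY(5, 5)))
#   inside = QgsFeatureRequest().setFilterRect(QgsRectangle(0, 0, 10, 10))
#   assert fid in [f.id() for f in layer.getFeatures(inside)]       # 1. in rect
#   outside = QgsFeatureRequest().setFilterRect(QgsRectangle(50, 50, 60, 60))
#   assert fid not in [f.id() for f in layer.getFeatures(outside)]  # 2. out of rect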
if __name__ == '__main__':
unittest.main()
| jgrocha/QGIS | tests/src/python/test_qgsvectorlayer.py | Python | gpl-2.0 | 159,533 | 0.001793 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('telerivet', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='incomingrequest',
name='secret',
field=models.CharField(max_length=255, null=True, db_index=True),
preserve_default=True,
),
]
| qedsoftware/commcare-hq | corehq/messaging/smsbackends/telerivet/migrations/0002_add_index_on_webhook_secret.py | Python | bsd-3-clause | 464 | 0 |
from NodeDefender.manage.setup import (manager, print_message, print_topic,
print_info)
from flask_script import prompt
import NodeDefender
@manager.command
def database():
print_topic('Database')
print_info("Database is used to store presistant data.")
print_info("By having it disabled the data will be store in run-time RAM for the\
session")
enabled = None
while enabled is None:
enabled = prompt("Enable Database(Y/N)").upper()
if 'Y' in enabled:
enabled = True
elif 'N' in enabled:
enabled = False
else:
enabled = None
if not enabled:
        NodeDefender.config.database.set(enabled=False)
        if NodeDefender.config.database.write():
            print_info("Database config successfully written")
return False
supported_databases = ['mysql', 'sqlite']
engine = None
while engine is None:
engine = prompt("Enter DB Engine(SQLite, MySQL)").lower()
if engine not in supported_databases:
engine = None
host = None
port = None
username = None
password = None
database = None
if engine == "mysql":
while not host:
host = prompt('Enter Server Address')
while not port:
port = prompt('Enter Server Port')
while not username:
username = prompt('Enter Username')
while not password:
password = prompt('Enter Password')
while not database:
database = prompt("Enter Database Name")
filepath = None
if engine == "sqlite":
while not filepath:
print_info("Filename for SQLite Database")
print_info("SQLite will be stored as file in data- folder")
print_info(NodeDefender.config.datafolder)
print_info("Do not use any slashes in the filename")
filepath = prompt("Enter File Path")
NodeDefender.config.database.set(enabled=True,
engine=engine,
host=host,
port=port,
username=username,
password=password,
database=database,
filepath=filepath)
if NodeDefender.config.database.write():
print_info("Database- config successfully written")
return True
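
# Invocation sketch (an assumption for illustration: this module is imported so
# that the @manager.command decorator above registers the command on the
# Flask-Script manager, typically exposed through a manage.py entry point):
#
#   $ python manage.py database
#
# Answering the prompts then persists the settings via NodeDefender.config.database.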
| CTSNE/NodeDefender | NodeDefender/manage/setup/database.py | Python | mit | 2,533 | 0.001974 |
from qtpy.QtCore import Qt, QPoint, QObject, Signal
from qtpy.QtGui import QColor
from qtpy.QtWidgets import QWidget, QVBoxLayout, QSizePolicy, QFrame, QLabel
import html
class ErrorPopup(QWidget):
error_template = (
"<html>"
"<table style='background-color: #ffdfdf;'width='100%%'>"
"<tr><td style='font-weight: bold; padding-left: 5px;'>Warning:</td></tr>"
"%s"
"</table>"
"</html>"
)
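    # Formatting sketch: '%s' receives the (HTML-escaped) message row and '%%'
    # keeps a literal percent sign under old-style string formatting, so
    # (hypothetically)
    #   ErrorPopup.error_template % "x must be > 0"
    # yields a table with width='100%' containing the warning header plus the message.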
def __init__(self):
QWidget.__init__(self, None, Qt.ToolTip)
self.resize(300, 50)
self.setContentsMargins(0, 0, 0, 0)
layout = QVBoxLayout()
layout.setContentsMargins(0, 0, 0, 0)
self._error_widget = QLabel("")
self._error_widget.setSizePolicy(QSizePolicy.Preferred, QSizePolicy.Minimum)
self._error_widget.setFrameStyle(QFrame.Box)
self._error_widget.setWordWrap(True)
self._error_widget.setScaledContents(True)
        # self._error_widget.setAlignment(Qt.AlignHCenter)
self._error_widget.setTextFormat(Qt.RichText)
layout.addWidget(self._error_widget)
self.setLayout(layout)
def presentError(self, widget, error):
assert isinstance(widget, QWidget)
self._error_widget.setText(ErrorPopup.error_template % html.escape(error))
self.show()
size_hint = self.sizeHint()
rect = widget.rect()
p = widget.mapToGlobal(QPoint(rect.left(), rect.top()))
self.setGeometry(
p.x(), p.y() - size_hint.height() - 5, size_hint.width(), size_hint.height()
)
self.raise_()
class ValidationSupport(QObject):
STRONG_ERROR_COLOR = QColor(255, 215, 215)
ERROR_COLOR = QColor(255, 235, 235)
INVALID_COLOR = QColor(235, 235, 255)
WARNING = "warning"
EXCLAMATION = "ide/small/exclamation"
validationChanged = Signal(bool)
def __init__(self, validation_target):
"""@type validation_target: QWidget"""
QObject.__init__(self)
self._validation_target = validation_target
self._validation_message = None
self._validation_type = None
self._error_popup = ErrorPopup()
self._originalEnterEvent = validation_target.enterEvent
self._originalLeaveEvent = validation_target.leaveEvent
self._originalHideEvent = validation_target.hideEvent
def enterEvent(event):
self._originalEnterEvent(event)
if not self.isValid():
self._error_popup.presentError(
self._validation_target, self._validation_message
)
validation_target.enterEvent = enterEvent
def leaveEvent(event):
self._originalLeaveEvent(event)
if self._error_popup is not None:
self._error_popup.hide()
validation_target.leaveEvent = leaveEvent
def hideEvent(hide_event):
self._error_popup.hide()
self._originalHideEvent(hide_event)
validation_target.hideEvent = hideEvent
def setValidationMessage(self, message, validation_type=WARNING):
"""Add a warning or information icon to the widget with a tooltip"""
message = message.strip()
if message == "":
self._validation_type = None
self._validation_message = None
self._error_popup.hide()
self.validationChanged.emit(True)
else:
self._validation_type = validation_type
self._validation_message = message
if (
self._validation_target.hasFocus()
or self._validation_target.underMouse()
):
self._error_popup.presentError(
self._validation_target, self._validation_message
)
self.validationChanged.emit(False)
def isValid(self):
return self._validation_message is None
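

# A minimal usage sketch (an assumption for illustration, not part of the ert API
# tested elsewhere): attach ValidationSupport to any QWidget and feed it messages;
# an empty message marks the widget valid again.
if __name__ == "__main__":
    import sys
    from qtpy.QtWidgets import QApplication, QLineEdit

    app = QApplication(sys.argv)
    edit = QLineEdit()
    support = ValidationSupport(edit)
    # Report validity changes as the message is set and cleared
    support.validationChanged.connect(lambda valid: print("valid:", valid))
    support.setValidationMessage("Value must be numeric")  # popup shows on hover/focus
    support.setValidationMessage("")  # clears the message -> widget is valid again
    edit.show()
    sys.exit(app.exec_())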
| joakim-hove/ert | ert_gui/ertwidgets/validationsupport.py | Python | gpl-3.0 | 3,928 | 0.001018 |
# -*- coding: utf-8 -*-
#
# Copyright (C) 2015 European Synchrotron Radiation Facility, Grenoble, France
#
# Principal author: Wout De Nolf (wout.de_nolf@esrf.eu)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import unittest
from . import test_classa
def test_suite():
"""Test suite including all test suites"""
testSuite = unittest.TestSuite()
testSuite.addTest(test_classa.test_suite())
return testSuite
if __name__ == '__main__':
import sys
mysuite = test_suite()
runner = unittest.TextTestRunner()
if not runner.run(mysuite).wasSuccessful():
sys.exit(1)
| woutdenolf/wdncrunch | wdncrunch/modulea/tests/test_all.py | Python | mit | 1,623 | 0.002465 |