Dataset schema (one record per source file):

    column     type           min        max
    text       stringlengths  6          947k
    repo_name  stringlengths  5          100
    path       stringlengths  4          231
    language   stringclasses  1 value
    license    stringclasses  15 values
    size       int64          6          947k
    score      float64        0          0.34
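The schema above maps one-to-one onto the per-record fields shown below. As a minimal sketch of consuming such a dump, assuming it originates from a Hugging Face dataset (the dataset name here is a hypothetical placeholder):

from datasets import load_dataset

# Stream the records rather than downloading the full dump up front.
ds = load_dataset("user/python-code-dump", split="train", streaming=True)
for row in ds.take(3):
    # Each record carries the seven columns described in the schema.
    print(row["repo_name"], row["path"], row["license"], row["size"], row["score"])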
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging

# from datetime import timedelta, datetime

from yamo import (
    Connection,
    EmbeddedDocument, Document,
    IDFormatter, Index,
    StringField, IntField, FloatField, BooleanField,
    ListField, EmbeddedField, DateTimeField,
)

conn = Connection('mongodb://localhost/zg')

log = logging.getLogger('zg')


class User(Document):

    """ Dieling (跌零) user """

    class Meta:
        idf = IDFormatter('{mobile}')
        idx1 = Index('mobile', unique=True)
        idx2 = Index('username')

    mobile = StringField(required=True)
    username = StringField(required=True)
    password = StringField(required=True)

    paid = FloatField(default=0)
    total_money = FloatField(default=0)
    total_capital = FloatField(default=0)
    total_profit = FloatField(default=0)

    _is_admin = BooleanField(default=False)

    def get_id(self):
        return self._id

    def is_active(self):
        return True

    def is_anonymous(self):
        return False

    def is_authenticated(self):
        return True

    def is_admin(self):
        return self._is_admin


class Account(Document):

    """ Dieling user's account """

    class Meta:
        idf = IDFormatter('{user_id}_{login_name}')

    user_id = StringField(required=True)
    login_name = StringField(required=True)
    login_password = StringField(required=True)


class MyPosition(EmbeddedDocument):

    """ Position summary """

    name = StringField(required=True)
    symbol = StringField(required=True)
    average_price = FloatField(required=True)
    quantity = IntField(required=True)
    price = FloatField(required=True)

    sellable = IntField()
    profit = FloatField()

    @property
    def increase(self):
        if self.price > 0:
            return '{:4.2f}%'.format(
                (self.price / self.average_price - 1) * 100)
        else:
            return '0%'


class MyOrder(EmbeddedDocument):

    """ Filled-order summary """

    type_ = StringField(required=True)
    name = StringField(required=True)
    symbol = StringField(required=True)
    price = FloatField(required=True)
    current_price = FloatField(required=True)
    quantity = IntField(required=True)
    commision = FloatField(required=True)
    profit = FloatField(required=True)


class MyStatus(EmbeddedDocument):

    """ Pending-order status """

    order = StringField(required=True)
    order_at = StringField(required=True)
    type_ = StringField(required=True)
    name = StringField(required=True)
    symbol = StringField(required=True)
    price = FloatField(required=True)
    quantity = IntField(required=True)
    pending_quantity = IntField(required=True)
    status = StringField(required=True)


class Position(Document):

    """ Daily position summary """

    class Meta:
        idf = IDFormatter('{user_id}_{date}')
        idx1 = Index(['user_id', 'date'], unique=True)

    user_id = StringField(required=True)
    date = DateTimeField(required=True)
    position_list = ListField(EmbeddedField(MyPosition))


class Order(Document):

    """ Daily order summary """

    class Meta:
        idf = IDFormatter('{user_id}_{date}')
        idx1 = Index(['user_id', 'date'], unique=True)

    user_id = StringField(required=True)
    date = DateTimeField(required=True)
    order_list = ListField(EmbeddedField(MyOrder))


class Status(Document):

    """ Daily pending-order summary """

    class Meta:
        idf = IDFormatter('{user_id}_{date}')
        idx1 = Index(['user_id', 'date'], unique=True)

    user_id = StringField(required=True)
    date = DateTimeField(required=True)
    status_list = ListField(EmbeddedField(MyStatus))


conn.register_all()

repo_name: yxdong/ybk
path: zg/models.py
language: Python
license: mit
size: 3,701
score: 0.000277
from .core import ImageOptim, NoImagesOptimizedError
repo_name: derrickorama/image_optim
path: image_optim/__init__.py
language: Python
license: mit
size: 52
score: 0.019231
import logging
import datetime
import pytz

from django.conf import settings
from django.contrib.auth.models import Group
from django.http import (HttpResponse, HttpResponseRedirect, HttpResponseNotFound,
                         HttpResponseBadRequest, HttpResponseForbidden, Http404)
from django.utils.translation import ugettext as _
from django.views.decorators.http import require_POST
from django.core.urlresolvers import reverse
from django.views.decorators.csrf import csrf_exempt
from django.contrib.auth.decorators import login_required

from edxmako.shortcuts import render_to_response
from xmodule.modulestore.locations import SlashSeparatedCourseKey

from shoppingcart.reports import RefundReport, ItemizedPurchaseReport, UniversityRevenueShareReport, CertificateStatusReport
from student.models import CourseEnrollment
from .exceptions import ItemAlreadyInCartException, AlreadyEnrolledInCourseException, CourseDoesNotExistException, ReportTypeDoesNotExistException
from .models import Order, PaidCourseRegistration, OrderItem
from .processors import process_postpay_callback, render_purchase_form_html

log = logging.getLogger("shoppingcart")

EVENT_NAME_USER_UPGRADED = 'edx.course.enrollment.upgrade.succeeded'

REPORT_TYPES = [
    ("refund_report", RefundReport),
    ("itemized_purchase_report", ItemizedPurchaseReport),
    ("university_revenue_share", UniversityRevenueShareReport),
    ("certificate_status", CertificateStatusReport),
]


def initialize_report(report_type, start_date, end_date, start_letter=None, end_letter=None):
    """
    Creates the appropriate type of Report object based on the string report_type.
    """
    for item in REPORT_TYPES:
        if report_type in item:
            return item[1](start_date, end_date, start_letter, end_letter)
    raise ReportTypeDoesNotExistException


@require_POST
def add_course_to_cart(request, course_id):
    """
    Adds course specified by course_id to the cart.  The model function
    add_to_order does all the heavy lifting (logging, error checking, etc)
    """
    assert isinstance(course_id, basestring)
    if not request.user.is_authenticated():
        log.info("Anon user trying to add course {} to cart".format(course_id))
        return HttpResponseForbidden(_('You must be logged-in to add to a shopping cart'))
    cart = Order.get_cart_for_user(request.user)
    course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    # All logging from here handled by the model
    try:
        PaidCourseRegistration.add_to_order(cart, course_key)
    except CourseDoesNotExistException:
        return HttpResponseNotFound(_('The course you requested does not exist.'))
    except ItemAlreadyInCartException:
        return HttpResponseBadRequest(_('The course {0} is already in your cart.'.format(course_id)))
    except AlreadyEnrolledInCourseException:
        return HttpResponseBadRequest(_('You are already registered in course {0}.'.format(course_id)))
    return HttpResponse(_("Course added to cart."))


@login_required
def show_cart(request):
    cart = Order.get_cart_for_user(request.user)
    total_cost = cart.total_cost
    cart_items = cart.orderitem_set.all()
    form_html = render_purchase_form_html(cart)
    return render_to_response("shoppingcart/list.html",
                              {'shoppingcart_items': cart_items,
                               'amount': total_cost,
                               'form_html': form_html,
                               })


@login_required
def clear_cart(request):
    cart = Order.get_cart_for_user(request.user)
    cart.clear()
    return HttpResponse('Cleared')


@login_required
def remove_item(request):
    item_id = request.REQUEST.get('id', '-1')
    try:
        item = OrderItem.objects.get(id=item_id, status='cart')
        if item.user == request.user:
            item.delete()
    except OrderItem.DoesNotExist:
        log.exception('Cannot remove cart OrderItem id={0}. DoesNotExist or item is already purchased'.format(item_id))
    return HttpResponse('OK')


@csrf_exempt
@require_POST
def postpay_callback(request):
    """
    Receives the POST-back from processor.
    Mainly this calls the processor-specific code to check if the payment was accepted, and to record
    the order if it was, and to generate an error page.
    If successful this function should have the side effect of changing the "cart" into a full "order"
    in the DB.  The cart can then render a success page which links to receipt pages.
    If unsuccessful the order will be left untouched and HTML messages giving more detailed error info
    will be returned.
    """
    params = request.POST.dict()
    result = process_postpay_callback(params)
    if result['success']:
        return HttpResponseRedirect(reverse('shoppingcart.views.show_receipt', args=[result['order'].id]))
    else:
        return render_to_response('shoppingcart/error.html', {'order': result['order'],
                                                              'error_html': result['error_html']})


@login_required
def show_receipt(request, ordernum):
    """
    Displays a receipt for a particular order.
    404 if order is not yet purchased or request.user != order.user
    """
    try:
        order = Order.objects.get(id=ordernum)
    except Order.DoesNotExist:
        raise Http404('Order not found!')

    if order.user != request.user or order.status != 'purchased':
        raise Http404('Order not found!')

    order_items = OrderItem.objects.filter(order=order).select_subclasses()
    any_refunds = any(i.status == "refunded" for i in order_items)
    receipt_template = 'shoppingcart/receipt.html'
    __, instructions = order.generate_receipt_instructions()
    # we want to have the ability to override the default receipt page when
    # there is only one item in the order
    context = {
        'order': order,
        'order_items': order_items,
        'any_refunds': any_refunds,
        'instructions': instructions,
    }

    if order_items.count() == 1:
        receipt_template = order_items[0].single_item_receipt_template
        context.update(order_items[0].single_item_receipt_context)

    # Only orders where order_items.count() == 1 might be attempting to upgrade
    attempting_upgrade = request.session.get('attempting_upgrade', False)
    if attempting_upgrade:
        course_enrollment = CourseEnrollment.get_or_create_enrollment(request.user, order_items[0].course_id)
        course_enrollment.emit_event(EVENT_NAME_USER_UPGRADED)
        request.session['attempting_upgrade'] = False

    return render_to_response(receipt_template, context)


def _can_download_report(user):
    """
    Tests if the user can download the payments report, based on membership in a group whose name
    is determined in settings.  If the group does not exist, denies all access
    """
    try:
        access_group = Group.objects.get(name=settings.PAYMENT_REPORT_GENERATOR_GROUP)
    except Group.DoesNotExist:
        return False
    return access_group in user.groups.all()


def _get_date_from_str(date_input):
    """
    Gets date from the date input string.  Lets the ValueError raised by invalid strings be
    processed by the caller
    """
    return datetime.datetime.strptime(date_input.strip(), "%Y-%m-%d").replace(tzinfo=pytz.UTC)


def _render_report_form(start_str, end_str, start_letter, end_letter, report_type, total_count_error=False, date_fmt_error=False):
    """
    Helper function that renders the purchase form.  Reduces repetition
    """
    context = {
        'total_count_error': total_count_error,
        'date_fmt_error': date_fmt_error,
        'start_date': start_str,
        'end_date': end_str,
        'start_letter': start_letter,
        'end_letter': end_letter,
        'requested_report': report_type,
    }
    return render_to_response('shoppingcart/download_report.html', context)


@login_required
def csv_report(request):
    """
    Downloads csv reporting of orderitems
    """
    if not _can_download_report(request.user):
        return HttpResponseForbidden(_('You do not have permission to view this page.'))

    if request.method == 'POST':
        start_date = request.POST.get('start_date', '')
        end_date = request.POST.get('end_date', '')
        start_letter = request.POST.get('start_letter', '')
        end_letter = request.POST.get('end_letter', '')
        report_type = request.POST.get('requested_report', '')
        try:
            start_date = _get_date_from_str(start_date) + datetime.timedelta(days=0)
            end_date = _get_date_from_str(end_date) + datetime.timedelta(days=1)
        except ValueError:
            # Error case: there was a badly formatted user-input date string
            return _render_report_form(start_date, end_date, start_letter, end_letter, report_type, date_fmt_error=True)

        report = initialize_report(report_type, start_date, end_date, start_letter, end_letter)
        items = report.rows()

        response = HttpResponse(mimetype='text/csv')
        filename = "purchases_report_{}.csv".format(datetime.datetime.now(pytz.UTC).strftime("%Y-%m-%d-%H-%M-%S"))
        response['Content-Disposition'] = 'attachment; filename="{}"'.format(filename)
        report.write_csv(response)
        return response

    elif request.method == 'GET':
        end_date = datetime.datetime.now(pytz.UTC)
        start_date = end_date - datetime.timedelta(days=30)
        start_letter = ""
        end_letter = ""
        return _render_report_form(start_date.strftime("%Y-%m-%d"), end_date.strftime("%Y-%m-%d"), start_letter, end_letter, report_type="")

    else:
        return HttpResponseBadRequest("HTTP Method Not Supported")

repo_name: nanolearning/edx-platform
path: lms/djangoapps/shoppingcart/views.py
language: Python
license: agpl-3.0
size: 9,715
score: 0.0035
# vim: tabstop=4 shiftwidth=4 softtabstop=4

# Copyright 2011 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2011 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from cloudfiles.errors import ContainerNotEmpty
from django import http
from django.contrib import messages
from django.core.urlresolvers import reverse

from django_openstack import api
from django_openstack.tests.view_tests import base

from mox import IgnoreArg, IsA


class ContainerViewTests(base.BaseViewTests):
    def setUp(self):
        super(ContainerViewTests, self).setUp()
        self.container = self.mox.CreateMock(api.Container)
        self.container.name = 'containerName'

    def test_index(self):
        self.mox.StubOutWithMock(api, 'swift_get_containers')
        api.swift_get_containers(
            IsA(http.HttpRequest)).AndReturn([self.container])

        self.mox.ReplayAll()

        res = self.client.get(reverse('dash_containers', args=['tenant']))

        self.assertTemplateUsed(res,
                                'django_openstack/dash/containers/index.html')
        self.assertIn('containers', res.context)
        containers = res.context['containers']

        self.assertEqual(len(containers), 1)
        self.assertEqual(containers[0].name, 'containerName')

        self.mox.VerifyAll()

    def test_delete_container(self):
        formData = {'container_name': 'containerName',
                    'method': 'DeleteContainer'}

        self.mox.StubOutWithMock(api, 'swift_delete_container')
        api.swift_delete_container(IsA(http.HttpRequest), 'containerName')

        self.mox.ReplayAll()

        res = self.client.post(reverse('dash_containers', args=['tenant']),
                               formData)

        self.assertRedirectsNoFollow(res, reverse('dash_containers',
                                                  args=['tenant']))

        self.mox.VerifyAll()

    def test_delete_container_nonempty(self):
        formData = {'container_name': 'containerName',
                    'method': 'DeleteContainer'}
        exception = ContainerNotEmpty('containerNotEmpty')

        self.mox.StubOutWithMock(api, 'swift_delete_container')
        api.swift_delete_container(
            IsA(http.HttpRequest), 'containerName').AndRaise(exception)

        self.mox.StubOutWithMock(messages, 'error')
        messages.error(IgnoreArg(), IsA(unicode))

        self.mox.ReplayAll()

        res = self.client.post(reverse('dash_containers', args=['tenant']),
                               formData)

        self.assertRedirectsNoFollow(res, reverse('dash_containers',
                                                  args=['tenant']))

        self.mox.VerifyAll()

    def test_create_container_get(self):
        res = self.client.get(reverse('dash_containers_create',
                                      args=['tenant']))

        self.assertTemplateUsed(res,
                                'django_openstack/dash/containers/create.html')

    def test_create_container_post(self):
        formData = {'name': 'containerName',
                    'method': 'CreateContainer'}

        self.mox.StubOutWithMock(api, 'swift_create_container')
        api.swift_create_container(
            IsA(http.HttpRequest), 'CreateContainer')

        self.mox.StubOutWithMock(messages, 'success')
        messages.success(IgnoreArg(), IsA(str))

        res = self.client.post(reverse('dash_containers_create',
                                       args=[self.request.user.tenant]),
                               formData)

        self.assertRedirectsNoFollow(res, reverse('dash_containers',
                                                  args=[self.request.user.tenant]))

repo_name: zen/openstack-dashboard
path: django-openstack/django_openstack/tests/view_tests/dash/container_tests.py
language: Python
license: apache-2.0
size: 4,367
score: 0.001145
import numpy as np
import os
import pickle
import pytest
import re
import time
import shutil
from copy import deepcopy
from numpy import allclose, isclose

from flare import struc, env, gp
from flare.parameters import Parameters
from flare.mgp import MappedGaussianProcess
from flare.lammps import lammps_calculator
from flare.utils.element_coder import _Z_to_mass, _Z_to_element, _element_to_Z
from flare.ase.calculator import FLARE_Calculator
from flare.ase.atoms import FLARE_Atoms
from ase.calculators.lammpsrun import LAMMPS

from .fake_gp import get_gp, get_random_structure
from .mgp_test import clean, compare_triplet, predict_atom_diag_var

body_list = ["2", "3"]
multi_list = [True, False]
force_block_only = False
curr_path = os.getcwd()


@pytest.mark.skipif(
    not os.environ.get("lmp", False),
    reason=(
        "lmp not found "
        "in environment: Please install LAMMPS "
        "and set the $lmp env. "
        "variable to point to the executable."
    ),
)
@pytest.fixture(scope="module")
def all_gp():
    allgp_dict = {}
    np.random.seed(123)
    for bodies in body_list:
        for multihyps in multi_list:
            gp_model = get_gp(
                bodies,
                "mc",
                multihyps,
                cellabc=[1.5, 1, 2],
                force_only=force_block_only,
                noa=5,
            )
            gp_model.parallel = True
            gp_model.n_cpus = 2

            allgp_dict[f"{bodies}{multihyps}"] = gp_model

    yield allgp_dict
    del allgp_dict


@pytest.fixture(scope="module")
def all_mgp():
    allmgp_dict = {}
    for bodies in ["2", "3", "2+3"]:
        for multihyps in [False, True]:
            allmgp_dict[f"{bodies}{multihyps}"] = None

    yield allmgp_dict
    del allmgp_dict


@pytest.fixture(scope="module")
def all_lmp():
    all_lmp_dict = {}
    species = ["H", "He"]
    specie_symbol_list = " ".join(species)
    masses = [
        f"{i} {_Z_to_mass[_element_to_Z[species[i]]]}" for i in range(len(species))
    ]
    parameters = {
        "command": os.environ.get("lmp"),  # set up executable for ASE
        "newton": "off",
        "pair_style": "mgp",
        "mass": masses,
    }

    # set up input params
    for bodies in body_list:
        for multihyps in multi_list:
            # create ASE calc
            label = f"{bodies}{multihyps}"
            files = [f"{label}.mgp"]
            by = "yes" if bodies == "2" else "no"
            ty = "yes" if bodies == "3" else "no"
            parameters["pair_coeff"] = [
                f"* * {label}.mgp {specie_symbol_list} {by} {ty}"
            ]

            lmp_calc = LAMMPS(
                label=label,
                keep_tmp_files=True,
                tmp_dir="./tmp/",
                parameters=parameters,
                files=files,
                specorder=species,
            )
            all_lmp_dict[f"{bodies}{multihyps}"] = lmp_calc

    yield all_lmp_dict
    del all_lmp_dict


@pytest.mark.parametrize("bodies", body_list)
@pytest.mark.parametrize("multihyps", multi_list)
def test_init(bodies, multihyps, all_mgp, all_gp):
    """
    test the init function
    """
    clean()

    gp_model = all_gp[f"{bodies}{multihyps}"]

    # grid parameters
    grid_params = {}
    if "2" in bodies:
        grid_params["twobody"] = {"grid_num": [160], "lower_bound": [0.02]}
    if "3" in bodies:
        grid_params["threebody"] = {"grid_num": [31, 32, 33], "lower_bound": [0.02] * 3}

    lammps_location = f"{bodies}{multihyps}"
    data = gp_model.training_statistics

    try:
        mgp_model = MappedGaussianProcess(
            grid_params=grid_params,
            unique_species=data["species"],
            n_cpus=1,
            lmp_file_name=lammps_location,
            var_map="simple",
        )
    except:
        mgp_model = MappedGaussianProcess(
            grid_params=grid_params,
            unique_species=data["species"],
            n_cpus=1,
            lmp_file_name=lammps_location,
            var_map=None,
        )

    all_mgp[f"{bodies}{multihyps}"] = mgp_model


@pytest.mark.parametrize("bodies", body_list)
@pytest.mark.parametrize("multihyps", multi_list)
def test_build_map(all_gp, all_mgp, bodies, multihyps):
    """
    test the mapping for mc_simple kernel
    """
    gp_model = all_gp[f"{bodies}{multihyps}"]
    mgp_model = all_mgp[f"{bodies}{multihyps}"]
    mgp_model.build_map(gp_model)

    # with open(f'grid_{bodies}_{multihyps}.pickle', 'wb') as f:
    #     pickle.dump(mgp_model, f)


@pytest.mark.parametrize("bodies", body_list)
@pytest.mark.parametrize("multihyps", multi_list)
def test_write_model(all_mgp, bodies, multihyps):
    """
    test the mapping for mc_simple kernel
    """
    mgp_model = all_mgp[f"{bodies}{multihyps}"]
    mgp_model.write_model(f"my_mgp_{bodies}_{multihyps}")

    mgp_model.write_model(f"my_mgp_{bodies}_{multihyps}", format="pickle")

    # Ensure that user is warned when a non-mean_only
    # model is serialized into a Dictionary
    with pytest.warns(Warning):
        mgp_model.var_map = "pca"
        mgp_model.as_dict()

    mgp_model.var_map = "simple"
    mgp_model.as_dict()


@pytest.mark.parametrize("bodies", body_list)
@pytest.mark.parametrize("multihyps", multi_list)
def test_load_model(all_mgp, bodies, multihyps):
    """
    test the mapping for mc_simple kernel
    """
    name = f"my_mgp_{bodies}_{multihyps}.json"
    all_mgp[f"{bodies}{multihyps}"] = MappedGaussianProcess.from_file(name)
    os.remove(name)

    name = f"my_mgp_{bodies}_{multihyps}.pickle"
    all_mgp[f"{bodies}{multihyps}"] = MappedGaussianProcess.from_file(name)
    os.remove(name)


@pytest.mark.parametrize("bodies", body_list)
@pytest.mark.parametrize("multihyps", multi_list)
def test_cubic_spline(all_gp, all_mgp, bodies, multihyps):
    """
    test the predict for mc_simple kernel
    """
    mgp_model = all_mgp[f"{bodies}{multihyps}"]
    delta = 1e-4

    if "3" in bodies:
        body_name = "threebody"
    elif "2" in bodies:
        body_name = "twobody"

    nmap = len(mgp_model.maps[body_name].maps)
    print("nmap", nmap)
    for i in range(nmap):
        maxvalue = np.max(np.abs(mgp_model.maps[body_name].maps[i].mean.__coeffs__))
        if maxvalue > 0:
            comp_code = mgp_model.maps[body_name].maps[i].species_code

            if "3" in bodies:
                c_pt = np.array([[0.3, 0.4, 0.5]])
                c, cderv = (
                    mgp_model.maps[body_name].maps[i].mean(c_pt, with_derivatives=True)
                )
                cderv = cderv.reshape([-1])

                for j in range(3):
                    a_pt = deepcopy(c_pt)
                    b_pt = deepcopy(c_pt)
                    a_pt[0][j] += delta
                    b_pt[0][j] -= delta
                    a = mgp_model.maps[body_name].maps[i].mean(a_pt)[0]
                    b = mgp_model.maps[body_name].maps[i].mean(b_pt)[0]
                    num_derv = (a - b) / (2 * delta)
                    print("spline", comp_code, num_derv, cderv[j])
                    assert np.isclose(num_derv, cderv[j], rtol=1e-2)

            elif "2" in bodies:
                center = np.sum(mgp_model.maps[body_name].maps[i].bounds) / 2.0
                a_pt = np.array([[center + delta]])
                b_pt = np.array([[center - delta]])
                c_pt = np.array([[center]])
                a = mgp_model.maps[body_name].maps[i].mean(a_pt)[0]
                b = mgp_model.maps[body_name].maps[i].mean(b_pt)[0]
                c, cderv = (
                    mgp_model.maps[body_name].maps[i].mean(c_pt, with_derivatives=True)
                )
                cderv = cderv.reshape([-1])[0]

                num_derv = (a - b) / (2 * delta)
                print("spline", num_derv, cderv)
                assert np.isclose(num_derv, cderv, rtol=1e-2)


@pytest.mark.parametrize("bodies", body_list)
@pytest.mark.parametrize("multihyps", multi_list)
def test_predict(all_gp, all_mgp, bodies, multihyps):
    """
    test the predict for mc_simple kernel
    """
    gp_model = all_gp[f"{bodies}{multihyps}"]
    mgp_model = all_mgp[f"{bodies}{multihyps}"]

    # # debug
    # filename = f'grid_{bodies}_{multihyps}.pickle'
    # with open(filename, 'rb') as f:
    #     mgp_model = pickle.load(f)

    nenv = 6
    cell = 1.0 * np.eye(3)
    cutoffs = gp_model.cutoffs
    unique_species = gp_model.training_statistics["species"]
    struc_test, f = get_random_structure(cell, unique_species, nenv)
    test_envi = env.AtomicEnvironment(
        struc_test, 0, cutoffs, cutoffs_mask=gp_model.hyps_mask
    )

    if "2" in bodies:
        kernel_name = "twobody"
    elif "3" in bodies:
        kernel_name = "threebody"
        # compare_triplet(mgp_model.maps['threebody'],
        #                 gp_model, test_envi)

    mgp_f, mgp_e_var, mgp_s, mgp_e = mgp_model.predict(test_envi)

    assert Parameters.compare_dict(
        gp_model.hyps_mask, mgp_model.maps[kernel_name].hyps_mask
    )

    if multihyps:
        gp_e, gp_e_var = gp_model.predict_local_energy_and_var(test_envi)
        gp_f, gp_f_var = gp_model.predict_force_xyz(test_envi)
    else:
        gp_e, gp_f, gp_s, gp_e_var, _, _ = gp_model.predict_efs(test_envi)
        gp_s = -gp_s[[0, 3, 5, 4, 2, 1]]

        # check stress
        assert np.allclose(mgp_s, gp_s, rtol=1e-2)

    # check mgp is within 2 meV/A of the gp
    print("mgp_en, gp_en", mgp_e, gp_e)
    assert np.allclose(mgp_e, gp_e, rtol=2e-3), (
        f"{bodies} body" f" energy mapping is wrong"
    )

    # check forces
    print("isclose?", mgp_f - gp_f, gp_f)
    assert np.allclose(mgp_f, gp_f, atol=1e-3), f"{bodies} body force mapping is wrong"

    if mgp_model.var_map == "simple":
        print(bodies, multihyps)
        for i in range(struc_test.nat):
            test_envi = env.AtomicEnvironment(
                struc_test, i, cutoffs, cutoffs_mask=gp_model.hyps_mask
            )
            mgp_pred = mgp_model.predict(test_envi)
            mgp_var = mgp_pred[1]
            gp_var = predict_atom_diag_var(test_envi, gp_model, kernel_name)
            print("mgp_var, gp_var", mgp_var, gp_var)
            assert np.allclose(mgp_var, gp_var, rtol=1e-2)

    print("struc_test positions", struc_test.positions, struc_test.species_labels)


@pytest.mark.skipif(
    not os.environ.get("lmp", False),
    reason=(
        "lmp not found "
        "in environment: Please install LAMMPS "
        "and set the $lmp env. "
        "variable to point to the executable."
    ),
)
@pytest.mark.parametrize("bodies", body_list)
@pytest.mark.parametrize("multihyps", multi_list)
def test_lmp_predict(all_lmp, all_gp, all_mgp, bodies, multihyps):
    """
    test the lammps implementation
    """
    # pytest.skip()

    prefix = f"{bodies}{multihyps}"

    mgp_model = all_mgp[prefix]
    gp_model = all_gp[prefix]
    lmp_calculator = all_lmp[prefix]
    ase_calculator = FLARE_Calculator(gp_model, mgp_model, par=False, use_mapping=True)

    # create test structure
    np.random.seed(1)
    cell = np.diag(np.array([1, 1, 1])) * 4
    nenv = 10
    unique_species = gp_model.training_statistics["species"]
    cutoffs = gp_model.cutoffs
    struc_test, f = get_random_structure(cell, unique_species, nenv)

    # build ase atom from struc
    ase_atoms_flare = struc_test.to_ase_atoms()
    ase_atoms_flare = FLARE_Atoms.from_ase_atoms(ase_atoms_flare)
    ase_atoms_flare.calc = ase_calculator

    ase_atoms_lmp = deepcopy(struc_test).to_ase_atoms()
    ase_atoms_lmp.calc = lmp_calculator

    try:
        lmp_en = ase_atoms_lmp.get_potential_energy()
        flare_en = ase_atoms_flare.get_potential_energy()

        lmp_stress = ase_atoms_lmp.get_stress()
        flare_stress = ase_atoms_flare.get_stress()

        lmp_forces = ase_atoms_lmp.get_forces()
        flare_forces = ase_atoms_flare.get_forces()
    except Exception as e:
        os.chdir(curr_path)
        print(e)
        raise e

    os.chdir(curr_path)

    # check that lammps agrees with mgp to within 1 meV/A
    print("energy", lmp_en - flare_en, flare_en)
    assert np.isclose(lmp_en, flare_en, atol=1e-3)
    print("force", lmp_forces - flare_forces, flare_forces)
    assert np.isclose(lmp_forces, flare_forces, atol=1e-3).all()
    print("stress", lmp_stress - flare_stress, flare_stress)
    assert np.isclose(lmp_stress, flare_stress, atol=1e-3).all()

    # check the lmp var
    # mgp_std = np.sqrt(mgp_pred[1])
    # print("isclose? diff:", lammps_stds[atom_num]-mgp_std, "mgp value", mgp_std)
    # assert np.isclose(lammps_stds[atom_num], mgp_std, rtol=1e-2)

    clean(prefix=prefix)

repo_name: mir-group/flare
path: tests/test_mgp.py
language: Python
license: mit
size: 12,643
score: 0.000791
#!/usr/bin/env python
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
#
#
# Script which generates a collage of provider logos from multiple provider
# logo files.
#
# It works in two steps:
#
# 1. Resize all the provider logo files (reduce the dimensions)
# 2. Assemble a final image from the resized images

import os
import sys
import argparse
import subprocess
import random

from os.path import join as pjoin

DIMENSIONS = '150x150'  # Dimensions of the resized image (<width>x<height>)
GEOMETRY = '+4+4'  # How to arrange images (+<rows>+<columns>)

TO_CREATE_DIRS = ['resized/', 'final/']


def setup(output_path):
    """
    Create missing directories.
    """
    for directory in TO_CREATE_DIRS:
        final_path = pjoin(output_path, directory)

        if not os.path.exists(final_path):
            os.makedirs(final_path)


def get_logo_files(input_path):
    logo_files = os.listdir(input_path)
    logo_files = [name for name in logo_files if 'resized' not in name and name.endswith('png')]
    logo_files = [pjoin(input_path, name) for name in logo_files]

    return logo_files


def resize_images(logo_files, output_path):
    resized_images = []

    for logo_file in logo_files:
        name, ext = os.path.splitext(os.path.basename(logo_file))
        new_name = '%s%s' % (name, ext)
        out_name = pjoin(output_path, 'resized/', new_name)

        print('Resizing image: %(name)s' % {'name': logo_file})

        values = {'name': logo_file, 'out_name': out_name,
                  'dimensions': DIMENSIONS}
        cmd = 'convert %(name)s -resize %(dimensions)s %(out_name)s'
        cmd = cmd % values
        subprocess.call(cmd, shell=True)
        resized_images.append(out_name)

    return resized_images


def assemble_final_image(resized_images, output_path):
    final_name = pjoin(output_path, 'final/logos.png')
    random.shuffle(resized_images)

    values = {'images': ' '.join(resized_images), 'geometry': GEOMETRY,
              'out_name': final_name}
    cmd = 'montage %(images)s -geometry %(geometry)s %(out_name)s'
    cmd = cmd % values

    print('Generating final image: %(name)s' % {'name': final_name})
    subprocess.call(cmd, shell=True)


def main(input_path, output_path):
    if not os.path.exists(input_path):
        print('Path doesn\'t exist: %s' % (input_path))
        sys.exit(2)

    if not os.path.exists(output_path):
        print('Path doesn\'t exist: %s' % (output_path))
        sys.exit(2)

    logo_files = get_logo_files(input_path=input_path)

    setup(output_path=output_path)
    resized_images = resize_images(logo_files=logo_files, output_path=output_path)
    assemble_final_image(resized_images=resized_images, output_path=output_path)


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Assemble provider logos '
                                                 ' in a single image')
    parser.add_argument('--input-path', action='store',
                        help='Path to directory which contains provider '
                             'logo files')
    parser.add_argument('--output-path', action='store',
                        help='Path where the new files will be written')
    args = parser.parse_args()

    input_path = os.path.abspath(args.input_path)
    output_path = os.path.abspath(args.output_path)

    main(input_path=input_path, output_path=output_path)

repo_name: Kami/libcloud
path: contrib/generate_provider_logos_collage_image.py
language: Python
license: apache-2.0
size: 4,224
score: 0
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
import warnings

from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from msrest import Serializer

from .. import models as _models
from .._vendor import _convert_request, _format_url_section

T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]

_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False


def build_reimage_request_initial(
    resource_group_name: str,
    vm_scale_set_name: str,
    instance_id: str,
    subscription_id: str,
    **kwargs: Any
) -> HttpRequest:
    api_version = "2016-03-30"
    accept = "application/json"
    # Construct URL
    url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/reimage')
    path_format_arguments = {
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
        "vmScaleSetName": _SERIALIZER.url("vm_scale_set_name", vm_scale_set_name, 'str'),
        "instanceId": _SERIALIZER.url("instance_id", instance_id, 'str'),
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
    }

    url = _format_url_section(url, **path_format_arguments)

    # Construct parameters
    query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
    query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')

    # Construct headers
    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
    header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')

    return HttpRequest(
        method="POST",
        url=url,
        params=query_parameters,
        headers=header_parameters,
        **kwargs
    )


def build_deallocate_request_initial(
    resource_group_name: str,
    vm_scale_set_name: str,
    instance_id: str,
    subscription_id: str,
    **kwargs: Any
) -> HttpRequest:
    api_version = "2016-03-30"
    accept = "application/json"
    # Construct URL
    url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/deallocate')
    path_format_arguments = {
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
        "vmScaleSetName": _SERIALIZER.url("vm_scale_set_name", vm_scale_set_name, 'str'),
        "instanceId": _SERIALIZER.url("instance_id", instance_id, 'str'),
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
    }

    url = _format_url_section(url, **path_format_arguments)

    # Construct parameters
    query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
    query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')

    # Construct headers
    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
    header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')

    return HttpRequest(
        method="POST",
        url=url,
        params=query_parameters,
        headers=header_parameters,
        **kwargs
    )


def build_delete_request_initial(
    resource_group_name: str,
    vm_scale_set_name: str,
    instance_id: str,
    subscription_id: str,
    **kwargs: Any
) -> HttpRequest:
    api_version = "2016-03-30"
    accept = "application/json"
    # Construct URL
    url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}')
    path_format_arguments = {
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
        "vmScaleSetName": _SERIALIZER.url("vm_scale_set_name", vm_scale_set_name, 'str'),
        "instanceId": _SERIALIZER.url("instance_id", instance_id, 'str'),
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
    }

    url = _format_url_section(url, **path_format_arguments)

    # Construct parameters
    query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
    query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')

    # Construct headers
    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
    header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')

    return HttpRequest(
        method="DELETE",
        url=url,
        params=query_parameters,
        headers=header_parameters,
        **kwargs
    )


def build_get_request(
    resource_group_name: str,
    vm_scale_set_name: str,
    instance_id: str,
    subscription_id: str,
    **kwargs: Any
) -> HttpRequest:
    api_version = "2016-03-30"
    accept = "application/json"
    # Construct URL
    url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}')
    path_format_arguments = {
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
        "vmScaleSetName": _SERIALIZER.url("vm_scale_set_name", vm_scale_set_name, 'str'),
        "instanceId": _SERIALIZER.url("instance_id", instance_id, 'str'),
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
    }

    url = _format_url_section(url, **path_format_arguments)

    # Construct parameters
    query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
    query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')

    # Construct headers
    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
    header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')

    return HttpRequest(
        method="GET",
        url=url,
        params=query_parameters,
        headers=header_parameters,
        **kwargs
    )


def build_get_instance_view_request(
    resource_group_name: str,
    vm_scale_set_name: str,
    instance_id: str,
    subscription_id: str,
    **kwargs: Any
) -> HttpRequest:
    api_version = "2016-03-30"
    accept = "application/json"
    # Construct URL
    url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/instanceView')
    path_format_arguments = {
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
        "vmScaleSetName": _SERIALIZER.url("vm_scale_set_name", vm_scale_set_name, 'str'),
        "instanceId": _SERIALIZER.url("instance_id", instance_id, 'str'),
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
    }

    url = _format_url_section(url, **path_format_arguments)

    # Construct parameters
    query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
    query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')

    # Construct headers
    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
    header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')

    return HttpRequest(
        method="GET",
        url=url,
        params=query_parameters,
        headers=header_parameters,
        **kwargs
    )


def build_list_request(
    resource_group_name: str,
    virtual_machine_scale_set_name: str,
    subscription_id: str,
    *,
    filter: Optional[str] = None,
    select: Optional[str] = None,
    expand: Optional[str] = None,
    **kwargs: Any
) -> HttpRequest:
    api_version = "2016-03-30"
    accept = "application/json"
    # Construct URL
    url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{virtualMachineScaleSetName}/virtualMachines')
    path_format_arguments = {
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
        "virtualMachineScaleSetName": _SERIALIZER.url("virtual_machine_scale_set_name", virtual_machine_scale_set_name, 'str'),
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
    }

    url = _format_url_section(url, **path_format_arguments)

    # Construct parameters
    query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
    if filter is not None:
        query_parameters['$filter'] = _SERIALIZER.query("filter", filter, 'str')
    if select is not None:
        query_parameters['$select'] = _SERIALIZER.query("select", select, 'str')
    if expand is not None:
        query_parameters['$expand'] = _SERIALIZER.query("expand", expand, 'str')
    query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')

    # Construct headers
    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
    header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')

    return HttpRequest(
        method="GET",
        url=url,
        params=query_parameters,
        headers=header_parameters,
        **kwargs
    )


def build_power_off_request_initial(
    resource_group_name: str,
    vm_scale_set_name: str,
    instance_id: str,
    subscription_id: str,
    **kwargs: Any
) -> HttpRequest:
    api_version = "2016-03-30"
    accept = "application/json"
    # Construct URL
    url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/poweroff')
    path_format_arguments = {
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
        "vmScaleSetName": _SERIALIZER.url("vm_scale_set_name", vm_scale_set_name, 'str'),
        "instanceId": _SERIALIZER.url("instance_id", instance_id, 'str'),
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
    }

    url = _format_url_section(url, **path_format_arguments)

    # Construct parameters
    query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
    query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')

    # Construct headers
    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
    header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')

    return HttpRequest(
        method="POST",
        url=url,
        params=query_parameters,
        headers=header_parameters,
        **kwargs
    )


def build_restart_request_initial(
    resource_group_name: str,
    vm_scale_set_name: str,
    instance_id: str,
    subscription_id: str,
    **kwargs: Any
) -> HttpRequest:
    api_version = "2016-03-30"
    accept = "application/json"
    # Construct URL
    url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/restart')
    path_format_arguments = {
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
        "vmScaleSetName": _SERIALIZER.url("vm_scale_set_name", vm_scale_set_name, 'str'),
        "instanceId": _SERIALIZER.url("instance_id", instance_id, 'str'),
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
    }

    url = _format_url_section(url, **path_format_arguments)

    # Construct parameters
    query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
    query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')

    # Construct headers
    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
    header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')

    return HttpRequest(
        method="POST",
        url=url,
        params=query_parameters,
        headers=header_parameters,
        **kwargs
    )


def build_start_request_initial(
    resource_group_name: str,
    vm_scale_set_name: str,
    instance_id: str,
    subscription_id: str,
    **kwargs: Any
) -> HttpRequest:
    api_version = "2016-03-30"
    accept = "application/json"
    # Construct URL
    url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/start')
    path_format_arguments = {
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
        "vmScaleSetName": _SERIALIZER.url("vm_scale_set_name", vm_scale_set_name, 'str'),
        "instanceId": _SERIALIZER.url("instance_id", instance_id, 'str'),
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
    }

    url = _format_url_section(url, **path_format_arguments)

    # Construct parameters
    query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
    query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')

    # Construct headers
    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
    header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')

    return HttpRequest(
        method="POST",
        url=url,
        params=query_parameters,
        headers=header_parameters,
        **kwargs
    )


class VirtualMachineScaleSetVMsOperations(object):
    """VirtualMachineScaleSetVMsOperations operations.

    You should not instantiate this class directly. Instead, you should create a Client instance
    that instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.compute.v2016_03_30.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    models = _models

    def __init__(self, client, config, serializer, deserializer):
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config

    def _reimage_initial(
        self,
        resource_group_name: str,
        vm_scale_set_name: str,
        instance_id: str,
        **kwargs: Any
    ) -> Optional["_models.OperationStatusResponse"]:
        cls = kwargs.pop('cls', None)  # type: ClsType[Optional["_models.OperationStatusResponse"]]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        request = build_reimage_request_initial(
            resource_group_name=resource_group_name,
            vm_scale_set_name=vm_scale_set_name,
            instance_id=instance_id,
            subscription_id=self._config.subscription_id,
            template_url=self._reimage_initial.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('OperationStatusResponse', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    _reimage_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/reimage'}  # type: ignore

    @distributed_trace
    def begin_reimage(
        self,
        resource_group_name: str,
        vm_scale_set_name: str,
        instance_id: str,
        **kwargs: Any
    ) -> LROPoller["_models.OperationStatusResponse"]:
        """Reimages (upgrade the operating system) a specific virtual machine in a VM scale set.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param vm_scale_set_name: The name of the VM scale set.
        :type vm_scale_set_name: str
        :param instance_id: The instance ID of the virtual machine.
        :type instance_id: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
         operation to not poll, or pass in your own initialized polling object for a personal polling
         strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of LROPoller that returns either OperationStatusResponse or the result of
         cls(response)
        :rtype:
         ~azure.core.polling.LROPoller[~azure.mgmt.compute.v2016_03_30.models.OperationStatusResponse]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, azure.core.polling.PollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.OperationStatusResponse"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        if cont_token is None:
            raw_result = self._reimage_initial(
                resource_group_name=resource_group_name,
                vm_scale_set_name=vm_scale_set_name,
                instance_id=instance_id,
                cls=lambda x,y,z: x,
                **kwargs
            )
        kwargs.pop('error_map', None)

        def get_long_running_output(pipeline_response):
            response = pipeline_response.http_response
            deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized

        if polling is True:
            polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, **kwargs)
        elif polling is False:
            polling_method = NoPolling()
        else:
            polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)

    begin_reimage.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/reimage'}  # type: ignore

    def _deallocate_initial(
        self,
        resource_group_name: str,
        vm_scale_set_name: str,
        instance_id: str,
        **kwargs: Any
    ) -> Optional["_models.OperationStatusResponse"]:
        cls = kwargs.pop('cls', None)  # type: ClsType[Optional["_models.OperationStatusResponse"]]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        request = build_deallocate_request_initial(
            resource_group_name=resource_group_name,
            vm_scale_set_name=vm_scale_set_name,
            instance_id=instance_id,
            subscription_id=self._config.subscription_id,
            template_url=self._deallocate_initial.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('OperationStatusResponse', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    _deallocate_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/deallocate'}  # type: ignore

    @distributed_trace
    def begin_deallocate(
        self,
        resource_group_name: str,
        vm_scale_set_name: str,
        instance_id: str,
        **kwargs: Any
    ) -> LROPoller["_models.OperationStatusResponse"]:
        """Deallocates a specific virtual machine in a VM scale set. Shuts down the virtual machine and
        releases the compute resources it uses. You are not billed for the compute resources of this
        virtual machine once it is deallocated.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param vm_scale_set_name: The name of the VM scale set.
        :type vm_scale_set_name: str
        :param instance_id: The instance ID of the virtual machine.
        :type instance_id: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
         operation to not poll, or pass in your own initialized polling object for a personal polling
         strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of LROPoller that returns either OperationStatusResponse or the result of
         cls(response)
        :rtype:
         ~azure.core.polling.LROPoller[~azure.mgmt.compute.v2016_03_30.models.OperationStatusResponse]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, azure.core.polling.PollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.OperationStatusResponse"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        if cont_token is None:
            raw_result = self._deallocate_initial(
                resource_group_name=resource_group_name,
                vm_scale_set_name=vm_scale_set_name,
                instance_id=instance_id,
                cls=lambda x,y,z: x,
                **kwargs
            )
        kwargs.pop('error_map', None)

        def get_long_running_output(pipeline_response):
            response = pipeline_response.http_response
            deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized

        if polling is True:
            polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, **kwargs)
        elif polling is False:
            polling_method = NoPolling()
        else:
            polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)

    begin_deallocate.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/deallocate'}  # type: ignore

    def _delete_initial(
        self,
        resource_group_name: str,
        vm_scale_set_name: str,
        instance_id: str,
        **kwargs: Any
    ) -> Optional["_models.OperationStatusResponse"]:
        cls = kwargs.pop('cls', None)  # type: ClsType[Optional["_models.OperationStatusResponse"]]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        request = build_delete_request_initial(
            resource_group_name=resource_group_name,
            vm_scale_set_name=vm_scale_set_name,
            instance_id=instance_id,
            subscription_id=self._config.subscription_id,
            template_url=self._delete_initial.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 202, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('OperationStatusResponse', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}'}  # type: ignore

    @distributed_trace
    def begin_delete(
        self,
        resource_group_name: str,
        vm_scale_set_name: str,
        instance_id: str,
        **kwargs: Any
    ) -> LROPoller["_models.OperationStatusResponse"]:
        """Deletes a virtual machine from a VM scale set.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param vm_scale_set_name: The name of the VM scale set.
        :type vm_scale_set_name: str
        :param instance_id: The instance ID of the virtual machine.
        :type instance_id: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
         operation to not poll, or pass in your own initialized polling object for a personal polling
         strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of LROPoller that returns either OperationStatusResponse or the result of
         cls(response)
        :rtype:
         ~azure.core.polling.LROPoller[~azure.mgmt.compute.v2016_03_30.models.OperationStatusResponse]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, azure.core.polling.PollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.OperationStatusResponse"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        if cont_token is None:
            raw_result = self._delete_initial(
                resource_group_name=resource_group_name,
                vm_scale_set_name=vm_scale_set_name,
                instance_id=instance_id,
                cls=lambda x,y,z: x,
                **kwargs
            )
        kwargs.pop('error_map', None)

        def get_long_running_output(pipeline_response):
            response = pipeline_response.http_response
            deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized

        if polling is True:
            polling_method = ARMPolling(lro_delay, **kwargs)
        elif polling is False:
            polling_method = NoPolling()
        else:
            polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)

    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}'}  # type: ignore

    @distributed_trace
    def get(
        self,
        resource_group_name: str,
        vm_scale_set_name: str,
        instance_id: str,
        **kwargs: Any
    ) -> "_models.VirtualMachineScaleSetVM":
        """Gets a virtual machine from a VM scale set.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param vm_scale_set_name: The name of the VM scale set.
        :type vm_scale_set_name: str
        :param instance_id: The instance ID of the virtual machine.
        :type instance_id: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: VirtualMachineScaleSetVM, or the result of cls(response)
        :rtype: ~azure.mgmt.compute.v2016_03_30.models.VirtualMachineScaleSetVM
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.VirtualMachineScaleSetVM"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        request = build_get_request(
            resource_group_name=resource_group_name,
            vm_scale_set_name=vm_scale_set_name,
            instance_id=instance_id,
            subscription_id=self._config.subscription_id,
            template_url=self.get.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize('VirtualMachineScaleSetVM', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}'}  # type: ignore

    @distributed_trace
    def get_instance_view(
        self,
        resource_group_name: str,
        vm_scale_set_name: str,
        instance_id: str,
        **kwargs: Any
    ) -> "_models.VirtualMachineScaleSetVMInstanceView":
        """Gets the status of a virtual machine from a VM scale set.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param vm_scale_set_name: The name of the VM scale set.
        :type vm_scale_set_name: str
        :param instance_id: The instance ID of the virtual machine.
        :type instance_id: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: VirtualMachineScaleSetVMInstanceView, or the result of cls(response)
        :rtype: ~azure.mgmt.compute.v2016_03_30.models.VirtualMachineScaleSetVMInstanceView
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.VirtualMachineScaleSetVMInstanceView"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        request = build_get_instance_view_request(
            resource_group_name=resource_group_name,
            vm_scale_set_name=vm_scale_set_name,
            instance_id=instance_id,
            subscription_id=self._config.subscription_id,
            template_url=self.get_instance_view.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize('VirtualMachineScaleSetVMInstanceView', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    get_instance_view.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/instanceView'}  # type: ignore

    @distributed_trace
    def list(
        self,
        resource_group_name: str,
        virtual_machine_scale_set_name: str,
        filter: Optional[str] = None,
        select: Optional[str] = None,
        expand: Optional[str] = None,
        **kwargs: Any
    ) -> Iterable["_models.VirtualMachineScaleSetVMListResult"]:
        """Gets a list of all virtual machines in a VM scale sets.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param virtual_machine_scale_set_name: The name of the VM scale set.
        :type virtual_machine_scale_set_name: str
        :param filter: The filter to apply to the operation. Allowed values are
         'startswith(instanceView/statuses/code, 'PowerState') eq true', 'properties/latestModelApplied
         eq true', 'properties/latestModelApplied eq false'.
        :type filter: str
        :param select: The list parameters. Allowed values are 'instanceView',
         'instanceView/statuses'.
        :type select: str
        :param expand: The expand expression to apply to the operation. Allowed values are
         'instanceView'.
:type expand: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either VirtualMachineScaleSetVMListResult or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.compute.v2016_03_30.models.VirtualMachineScaleSetVMListResult] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualMachineScaleSetVMListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) def prepare_request(next_link=None): if not next_link: request = build_list_request( resource_group_name=resource_group_name, virtual_machine_scale_set_name=virtual_machine_scale_set_name, subscription_id=self._config.subscription_id, filter=filter, select=select, expand=expand, template_url=self.list.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: request = build_list_request( resource_group_name=resource_group_name, virtual_machine_scale_set_name=virtual_machine_scale_set_name, subscription_id=self._config.subscription_id, filter=filter, select=select, expand=expand, template_url=next_link, ) request = _convert_request(request) request.url = self._client.format_url(request.url) request.method = "GET" return request def extract_data(pipeline_response): deserialized = self._deserialize("VirtualMachineScaleSetVMListResult", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) return pipeline_response return ItemPaged( get_next, extract_data ) list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{virtualMachineScaleSetName}/virtualMachines'} # type: ignore def _power_off_initial( self, resource_group_name: str, vm_scale_set_name: str, instance_id: str, **kwargs: Any ) -> Optional["_models.OperationStatusResponse"]: cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.OperationStatusResponse"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_power_off_request_initial( resource_group_name=resource_group_name, vm_scale_set_name=vm_scale_set_name, instance_id=instance_id, subscription_id=self._config.subscription_id, template_url=self._power_off_initial.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) deserialized = None if response.status_code == 200: deserialized = self._deserialize('OperationStatusResponse', pipeline_response) if cls: return 
cls(pipeline_response, deserialized, {}) return deserialized _power_off_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/poweroff'} # type: ignore @distributed_trace def begin_power_off( self, resource_group_name: str, vm_scale_set_name: str, instance_id: str, **kwargs: Any ) -> LROPoller["_models.OperationStatusResponse"]: """Power off (stop) a virtual machine in a VM scale set. Note that resources are still attached and you are getting charged for the resources. Instead, use deallocate to release resources and avoid charges. :param resource_group_name: The name of the resource group. :type resource_group_name: str :param vm_scale_set_name: The name of the VM scale set. :type vm_scale_set_name: str :param instance_id: The instance ID of the virtual machine. :type instance_id: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either OperationStatusResponse or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.compute.v2016_03_30.models.OperationStatusResponse] :raises: ~azure.core.exceptions.HttpResponseError """ polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.PollingMethod] cls = kwargs.pop('cls', None) # type: ClsType["_models.OperationStatusResponse"] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval ) cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] if cont_token is None: raw_result = self._power_off_initial( resource_group_name=resource_group_name, vm_scale_set_name=vm_scale_set_name, instance_id=instance_id, cls=lambda x,y,z: x, **kwargs ) kwargs.pop('error_map', None) def get_long_running_output(pipeline_response): response = pipeline_response.http_response deserialized = self._deserialize('OperationStatusResponse', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output ) else: return LROPoller(self._client, raw_result, get_long_running_output, polling_method) begin_power_off.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/poweroff'} # type: ignore def _restart_initial( self, resource_group_name: str, vm_scale_set_name: str, instance_id: str, **kwargs: Any ) -> Optional["_models.OperationStatusResponse"]: cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.OperationStatusResponse"]] error_map = { 401: 
ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_restart_request_initial( resource_group_name=resource_group_name, vm_scale_set_name=vm_scale_set_name, instance_id=instance_id, subscription_id=self._config.subscription_id, template_url=self._restart_initial.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) deserialized = None if response.status_code == 200: deserialized = self._deserialize('OperationStatusResponse', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized _restart_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/restart'} # type: ignore @distributed_trace def begin_restart( self, resource_group_name: str, vm_scale_set_name: str, instance_id: str, **kwargs: Any ) -> LROPoller["_models.OperationStatusResponse"]: """Restarts a virtual machine in a VM scale set. :param resource_group_name: The name of the resource group. :type resource_group_name: str :param vm_scale_set_name: The name of the VM scale set. :type vm_scale_set_name: str :param instance_id: The instance ID of the virtual machine. :type instance_id: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of LROPoller that returns either OperationStatusResponse or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.compute.v2016_03_30.models.OperationStatusResponse] :raises: ~azure.core.exceptions.HttpResponseError """ polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.PollingMethod] cls = kwargs.pop('cls', None) # type: ClsType["_models.OperationStatusResponse"] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval ) cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] if cont_token is None: raw_result = self._restart_initial( resource_group_name=resource_group_name, vm_scale_set_name=vm_scale_set_name, instance_id=instance_id, cls=lambda x,y,z: x, **kwargs ) kwargs.pop('error_map', None) def get_long_running_output(pipeline_response): response = pipeline_response.http_response deserialized = self._deserialize('OperationStatusResponse', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output ) else: return LROPoller(self._client, raw_result, get_long_running_output, polling_method) begin_restart.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/restart'} # type: ignore def _start_initial( self, resource_group_name: str, vm_scale_set_name: str, instance_id: str, **kwargs: Any ) -> Optional["_models.OperationStatusResponse"]: cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.OperationStatusResponse"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_start_request_initial( resource_group_name=resource_group_name, vm_scale_set_name=vm_scale_set_name, instance_id=instance_id, subscription_id=self._config.subscription_id, template_url=self._start_initial.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) deserialized = None if response.status_code == 200: deserialized = self._deserialize('OperationStatusResponse', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized _start_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/start'} # type: ignore @distributed_trace def begin_start( self, resource_group_name: str, vm_scale_set_name: str, instance_id: str, **kwargs: Any ) -> LROPoller["_models.OperationStatusResponse"]: """Starts a virtual machine in a VM scale set. :param resource_group_name: The name of the resource group. 
:type resource_group_name: str :param vm_scale_set_name: The name of the VM scale set. :type vm_scale_set_name: str :param instance_id: The instance ID of the virtual machine. :type instance_id: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either OperationStatusResponse or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.compute.v2016_03_30.models.OperationStatusResponse] :raises: ~azure.core.exceptions.HttpResponseError """ polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.PollingMethod] cls = kwargs.pop('cls', None) # type: ClsType["_models.OperationStatusResponse"] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval ) cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] if cont_token is None: raw_result = self._start_initial( resource_group_name=resource_group_name, vm_scale_set_name=vm_scale_set_name, instance_id=instance_id, cls=lambda x,y,z: x, **kwargs ) kwargs.pop('error_map', None) def get_long_running_output(pipeline_response): response = pipeline_response.http_response deserialized = self._deserialize('OperationStatusResponse', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output ) else: return LROPoller(self._client, raw_result, get_long_running_output, polling_method) begin_start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/start'} # type: ignore
Azure/azure-sdk-for-python
sdk/compute/azure-mgmt-compute/azure/mgmt/compute/v2016_03_30/operations/_virtual_machine_scale_set_vms_operations.py
Python
mit
56,685
0.004869
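The operations file above exposes long-running ARM calls through LROPoller and paged listing through ItemPaged. A minimal usage sketch, assuming azure-identity and a recent azure-mgmt-compute client; the subscription, resource-group and scale-set names are placeholders:

# Hedged usage sketch: subscription/resource names are placeholders.
from azure.identity import DefaultAzureCredential
from azure.mgmt.compute import ComputeManagementClient

client = ComputeManagementClient(DefaultAzureCredential(), "<subscription-id>")

# begin_* methods return an LROPoller; .result() blocks until the LRO finishes.
poller = client.virtual_machine_scale_set_vms.begin_restart(
    "my-rg", "my-vmss", instance_id="0")
poller.result()

# list() returns an ItemPaged iterator that fetches pages lazily.
for vm in client.virtual_machine_scale_set_vms.list("my-rg", "my-vmss"):
    print(vm.instance_id)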
import os
import datetime
import pymysql
import lib.maglib as MSG

# A library for the initial processing of crawl results.
# It separates the crawled results into content, author and post time.
# Crawl results should be stored in the client root directory and named "result".
# In the test setup, the crawl result file is results.txt

# Important globals
PATH_SUFFIX = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
print(PATH_SUFFIX)
PATH_SUFFIX = PATH_SUFFIX[::-1]
PATH_SUFFIX = PATH_SUFFIX[PATH_SUFFIX.find('\\'):]
PATH_SUFFIX = PATH_SUFFIX[::-1]
print(PATH_SUFFIX)
PATH_RESULT_FILE = PATH_SUFFIX + "\\datasource.ini"

DBSETTINGS = {'H': '',  # HOST
              'U': '',  # USER
              'P': '',  # PASSWORD
              'D': ''}  # DATABASE_NAME


# Reads the data source configuration.
# Returns: the raw configuration text.
def loadDataSource():
    print("Loading data source configuration:", PATH_RESULT_FILE)
    f = open(PATH_RESULT_FILE, 'rb')
    data = f.read()
    f.close()
    data = data.decode('gbk', 'ignore')
    dbl = data.split("\r\n")
    for db in dbl:
        if not db:
            continue  # skip blank lines (e.g. a trailing newline)
        DBSETTINGS[db[0]] = db[db.find('=') + 1:].replace('\'', '').replace(' ', '')
    return data

loadDataSource()
DBCONN = pymysql.connect(host=DBSETTINGS['H'], port=3306, user=DBSETTINGS['U'],
                         passwd=DBSETTINGS['P'], db=DBSETTINGS['D'], charset='UTF8')
DBCUR = DBCONN.cursor()


# Queries the database for all records whose content contains the given word.
# Returns: a list of matching records.
def queryWordContainPostListbyKeyword(word):
    SEL = "select CONTENT from `postdata` where CONTENT like('%" + word + "%')"
    DBCUR.execute("SET names 'utf8mb4'")
    DBCUR.execute(SEL)
    DBCONN.commit()
    datalist = DBCUR.fetchall()
    return datalist


# Queries the database for all posts by the given author.
# Returns: all reply records for that author, as
#   [[thread URL, forum name, author, content, post time, replied-to, page], ...]
def queryPostdataListbyAuthor(author):
    SEL = "select * from `postdata` where AUTHOR=\"" + author + "\""
    DBCUR.execute("SET names 'utf8mb4'")
    DBCUR.execute(SEL)
    DBCONN.commit()
    datalist = DBCUR.fetchall()
    return datalist


# Queries the database for the latest date.
# Returns: the maximum DATE value.
def queryDatasourceLatestTime():
    SEL = "select MAX(DATE) from `postdata`"
    DBCUR.execute("SET names 'utf8mb4'")
    DBCUR.execute(SEL)
    DBCONN.commit()
    datalist = DBCUR.fetchall()
    return datalist[0][0]


# Queries the database for the earliest date.
# Returns: the minimum DATE value.
def queryDatasourceEarlyTime():
    SEL = "select MIN(DATE) from `postdata`"
    DBCUR.execute("SET names 'utf8mb4'")
    DBCUR.execute(SEL)
    DBCONN.commit()
    datalist = DBCUR.fetchall()
    return datalist[0][0]


# Queries the database for an author's records after the given date.
# Returns: records between the given dates, as
#   [[thread URL, forum name, author, content, post time, replied-to, page], ...]
def queryPostdataListAfterTime(author, earlydatestr):
    SEL = "select * from `postdata` where AUTHOR=\"" + author + "\" and DATE>'" + earlydatestr + "'"
    DBCUR.execute("SET names 'utf8mb4'")
    DBCUR.execute(SEL)
    DBCONN.commit()
    datalist = DBCUR.fetchall()
    print(len(datalist))
    return datalist
ankanch/tieba-zhuaqu
DSV-user-application-plugin-dev-kit/lib/result_functions_file.py
Python
gpl-3.0
3,240
0.01997
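A usage sketch for the query helpers above; it assumes the module is importable, datasource.ini is configured, and the MySQL server is reachable (the author name is a placeholder):

# Hedged usage sketch; the import path mirrors the file name and may need adjusting.
import result_functions_file as rf

print("data covers", rf.queryDatasourceEarlyTime(), "to", rf.queryDatasourceLatestTime())

# Each row follows the documented layout:
# [thread URL, forum name, author, content, post time, replied-to, page]
for row in rf.queryPostdataListbyAuthor("some_author"):
    print(row[4], row[3][:40])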
# Copyright 2017,2018 IBM Corp. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import json import mock import unittest from zvmsdk.sdkwsgi.handlers import version from zvmsdk import version as sdk_version class HandlersRootTest(unittest.TestCase): def setUp(self): pass def test_version(self): req = mock.Mock() ver_str = {"rc": 0, "overallRC": 0, "errmsg": "", "modID": None, "output": {"api_version": version.APIVERSION, "min_version": version.APIVERSION, "version": sdk_version.__version__, "max_version": version.APIVERSION, }, "rs": 0} res = version.version(req) self.assertEqual('application/json', req.response.content_type) # version_json = json.dumps(ver_res) # version_str = utils.to_utf8(version_json) ver_res = json.loads(req.response.body.decode('utf-8')) self.assertEqual(ver_str, ver_res) self.assertEqual('application/json', res.content_type)
mfcloud/python-zvm-sdk
zvmsdk/tests/unit/sdkwsgi/handlers/test_version.py
Python
apache-2.0
1,674
0
#!/usr/bin/python
import socket
import sys

HOST, PORT = "24.21.106.140", 8080

# Create a socket (SOCK_STREAM means a TCP socket)
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)

try:
    # Connect to the server before prompting for input
    sock.connect((HOST, PORT))
    print "Connected to", HOST, ":", PORT, "\nAwaiting input\n"
    data = sys.stdin.readline()
    while True:
        # readline() keeps the trailing newline, so no extra "\n" is needed
        sock.sendall(data)
        # Receive the server's reply, then either stop on 'bye' or read the next line
        received = sock.recv(1024)
        print "Sent:", data
        print "Received:", received
        if data.strip() == 'bye':
            break
        data = sys.stdin.readline()
# Close the socket whether or not an error occurred
finally:
    sock.close()
hillst/RnaMaker
bin/daemons/testclient.py
Python
mit
789
0.010139
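The client above assumes a line-oriented server listening on port 8080. A minimal sketch of a compatible single-connection echo server, written in Python 2 to match the client:

# Hedged sketch of a compatible echo server; handles one client, then exits.
import socket

srv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
srv.bind(("", 8080))
srv.listen(1)

conn, addr = srv.accept()
print "Client connected:", addr
while True:
    data = conn.recv(1024)
    if not data:
        break
    conn.sendall(data)  # echo each line back
    if data.strip() == 'bye':
        break
conn.close()
srv.close()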
from django.contrib import admin from survey.actions import make_published from survey.exporter.csv import Survey2Csv from survey.exporter.tex import Survey2Tex from survey.models import Answer, Category, Question, Response, Survey class QuestionInline(admin.StackedInline): model = Question ordering = ("order", "category") extra = 1 def get_formset(self, request, survey_obj, *args, **kwargs): formset = super().get_formset(request, survey_obj, *args, **kwargs) if survey_obj: formset.form.base_fields["category"].queryset = survey_obj.categories.all() return formset class CategoryInline(admin.TabularInline): model = Category extra = 0 class SurveyAdmin(admin.ModelAdmin): list_display = ("name", "is_published", "need_logged_user", "template") list_filter = ("is_published", "need_logged_user") inlines = [CategoryInline, QuestionInline] actions = [make_published, Survey2Csv.export_as_csv, Survey2Tex.export_as_tex] class AnswerBaseInline(admin.StackedInline): fields = ("question", "body") readonly_fields = ("question",) extra = 0 model = Answer class ResponseAdmin(admin.ModelAdmin): list_display = ("interview_uuid", "survey", "created", "user") list_filter = ("survey", "created") date_hierarchy = "created" inlines = [AnswerBaseInline] # specifies the order as well as which fields to act on readonly_fields = ("survey", "created", "updated", "interview_uuid", "user") # admin.site.register(Question, QuestionInline) # admin.site.register(Category, CategoryInline) admin.site.register(Survey, SurveyAdmin) admin.site.register(Response, ResponseAdmin)
Pierre-Sassoulas/django-survey
survey/admin.py
Python
agpl-3.0
1,693
0.001772
# -*- coding: utf-8 -*-
from app import app
import os

if __name__ == "__main__":
    port = int(os.environ.get('PORT', 33507))
    app.run(port=port, debug=True)
jorgegarciadev/estacaidooque
run.py
Python
mit
166
0.012048
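The runner above imports `app` from an app module it does not show. A hypothetical minimal app.py that would satisfy the import; the real project defines its own routes:

# Hypothetical app.py; everything here is an assumption except the name `app`.
from flask import Flask

app = Flask(__name__)

@app.route('/')
def index():
    return 'Hello'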
import configparser from unittest import TestCase import os import subprocess import tempfile from test import subman_marker_functional, subman_marker_needs_envvars, subman_marker_zypper @subman_marker_functional @subman_marker_zypper @subman_marker_needs_envvars('RHSM_USER', 'RHSM_PASSWORD', 'RHSM_URL', 'RHSM_POOL', 'RHSM_TEST_REPO', 'RHSM_TEST_PACKAGE') class TestServicePlugin(TestCase): SUB_MAN = "PYTHONPATH=./src python -m subscription_manager.scripts.subscription_manager" def setUp(self): # start in a non-registered state subprocess.call('{sub_man} unregister'.format(sub_man=self.SUB_MAN), shell=True) def has_subman_repos(self): repos = configparser.ConfigParser() with tempfile.NamedTemporaryFile(suffix='.repo') as repofile: subprocess.call('zypper lr -e {0}'.format(repofile.name), shell=True) repos.read(repofile.name) for repo in repos.sections(): repo_info = dict(repos.items(repo)) service = repo_info.get('service', None) if service == 'rhsm': return True return False def test_provides_no_subman_repos_if_unregistered(self): self.assertFalse(self.has_subman_repos()) def test_provides_subman_repos_if_registered_and_subscribed(self): subprocess.call('{sub_man} register --username={RHSM_USER} --password={RHSM_PASSWORD} --serverurl={RHSM_URL}'.format(sub_man=self.SUB_MAN, **os.environ), shell=True) subprocess.call('{sub_man} attach --pool={RHSM_POOL}'.format(sub_man=self.SUB_MAN, **os.environ), shell=True) self.assertTrue(self.has_subman_repos()) def test_can_download_rpm(self): subprocess.check_call('{sub_man} register --username={RHSM_USER} --password={RHSM_PASSWORD} --serverurl={RHSM_URL}'.format(sub_man=self.SUB_MAN, **os.environ), shell=True) subprocess.check_call('{sub_man} attach --pool={RHSM_POOL}'.format(sub_man=self.SUB_MAN, **os.environ), shell=True) subprocess.check_call('{sub_man} repos --enable={RHSM_TEST_REPO}'.format(sub_man=self.SUB_MAN, **os.environ), shell=True) # remove cached subman packages subprocess.call('rm -rf /var/cache/zypp/packages/subscription-manager*', shell=True) # remove test package if installed subprocess.call('PYTHONPATH=./src zypper --non-interactive rm {RHSM_TEST_PACKAGE}'.format(**os.environ), shell=True) subprocess.call('PYTHONPATH=./src zypper --non-interactive --no-gpg-checks in --download-only {RHSM_TEST_PACKAGE}'.format(**os.environ), shell=True) subprocess.check_call('test "$(find /var/cache/zypp/packages/ -name \'{RHSM_TEST_PACKAGE}*.rpm\' | wc -l)" -gt 0'.format(**os.environ), shell=True)
candlepin/subscription-manager
test/zypper_test/test_serviceplugin.py
Python
gpl-2.0
2,745
0.0051
from collections import deque, OrderedDict from django.conf import settings from elasticsearch_dsl import Document, Date, Integer, Keyword, Text, Search, Index, Boolean, Completion, \ SearchAsYouType, normalizer, analyzer from elasticsearch_dsl.connections import connections from tqdm import tqdm from elasticsearch.helpers import bulk, parallel_bulk, BulkIndexError from .utils import normalise_lemma, normalise_form, get_ascii_from_unicode ''' http://localhost:9200/_cat/indices # TODO: print -> log or sys.out ''' # Define a default Elasticsearch client from . import utils c = connections.configure(**settings.ELASTICSEARCH_DSL) # https://github.com/elastic/elasticsearch-dsl-py/issues/669 # https://sunscrapers.com/blog/elasticsearch-with-python-7-tips-and-best-practices/ normalizer_insensitive = normalizer( 'insensitive_normalizer', filter=['lowercase', 'asciifolding'] ) analyzer_insensitive = analyzer( 'insensitive_analyzer', tokenizer='standard', filter=['lowercase', 'asciifolding'] ) class KeywordInsensitive(Keyword): '''A ES Keyword field with a .insensitive subfield which is case-insensitive''' def __init__(self, *args, **kwargs): kwargs['fields'] = { 'insensitive': Keyword(normalizer=normalizer_insensitive) } super().__init__(*args, **kwargs) class AnnotatedToken(Document): '''An ElasticSearch document for an annotated token in the text. The tokens and annotations come from the kwic file. Constraints: Text(): searchable (any token), case-insensitive but can't be sorted (unless we use fielddata=True which is not recommended) accent-sensitive by default Keyword(): exact search only ('julius cesar' won't match 'cesar') accent-sensitive & case-sensitive search ''' # string token = Keyword() form = KeywordInsensitive() lemma = Keyword() # searchable searchable = Text(analyzer=analyzer_insensitive) pos = Keyword() lemmapos = Keyword() speech_cat = Keyword() verse_cat = Keyword() manuscript_number = Integer() section_number = Keyword() is_rubric = Boolean() preceding = Text() following = Text() para_number = Integer() seg_number = Integer() # n token_number = Integer() previous_word = KeywordInsensitive() next_word = KeywordInsensitive() # the seq order of appearance in the text # for efficient sorting. seq_order = Integer() class Index: name = 'tokens' def set_derived_fields(self): self.form = self.token if self.pos != 'nom propre': # capital in first letter may be due to: # . proper name (form should preserve it) # . 
capital at beginning of sentence (we lowercase it) self.form = self.form.lower() self.searchable = '{} {}'.format( self.form, self.lemma ) @classmethod def new_from_token_element(cls, token_element, parsing_context): attrib = utils.get_data_from_kwik_item(None, token_element) ret = cls() # print(attrib) for k, v in attrib.items(): field = None if k == 'string': field = 'token' v = (v or '').strip() elif k == 'n': field = 'token_number' elif k == 'type': field = 'is_rubric' v = v == 'rubric_item' elif k == 'location': parts = v.split('_') ret.manuscript_number = int('edRoyal20D1' in v) ret.seg_number = parts[2] if len(parts) > 2 else 0 field = 'para_number' v = int(parts[1]) elif k == 'following': field = k ret.next_word = v.split(' ')[0] elif k == 'preceding': field = k ret.previous_word = v.split(' ')[-1] if field is None and hasattr(ret, k): field = k if field: setattr(ret, field, v) else: # print('WARNING: no field mapped to kwic attribute: {}'.format(k)) # TODO: sp pass ret.meta.id = '{}.{:03d}'.format(attrib['location'], int(attrib['n'])) ret.set_derived_fields() if not str(getattr(ret, 'lemma', '') or '').strip(): # we don't index unlemmatised tokens (asked by partners, 01/2021) return [] return [ret] class LemmaDocument(Document): '''Indexing model for a lemma''' # for as-is lemma = Keyword( fields={ # for search 'searchable': Text(analyzer=analyzer_insensitive), # for sorting 'insensitive': Keyword(normalizer=normalizer_insensitive) } ) # TODO? forms = Keyword() pos = Keyword() name_type = Keyword() class Index: name = 'lemmata' @classmethod def new_from_token_element(cls, token_element, parsing_context): tokenised_names = parsing_context.get('tokenised_names', None) if tokenised_names is None: tokenised_names = utils.read_tokenised_name_types() parsing_context['tokenised_names'] = tokenised_names ret = [] lemma = normalise_lemma(token_element.attrib.get('lemma', '')) if lemma: location_full = '__'.join([ token_element.attrib.get('location', ''), token_element.attrib.get('n', '') ]) doc = cls( lemma=lemma, # lemma_sort=lemma.split(',')[0].strip().lower(), pos=token_element.attrib.get('pos', 'Other').strip(), name_type=tokenised_names.get( location_full, 'Other' ) ) if 0 and lemma == 'Ester': print(location_full, doc.name_type) # ES won't produce duplicates thanks to that id doc.meta.id = lemma ret.append(doc) # doc.set_derived_fields() return ret class AutocompleteDocument(Document): '''Indexing model for a form or lemma TODO: check if there's a more standard or efficient way of doing such suggestions. ''' # autocomplete = Completion() # For searching only, not displayed # Basically we want a field with multiple tokens that can be search # by prefix (e.g. par*) and we can sort by the first token. 
autocomplete = SearchAsYouType() # I don't think SearchAsYouType can be sorted or accepts sub-fields # so we define a sortable version separately just for sorting purpose autocomplete_sortable = Keyword() # for display form = Keyword() lemma = Keyword() class Index: name = 'autocomplete' @classmethod def new_from_token_element(cls, token_element, parsing_context): ret = [] lemma = normalise_lemma(token_element.attrib.get('lemma', '')) if lemma: form = normalise_form(token_element) for i in [0, 2]: autocomplete = '{} {}'.format(form or lemma, lemma) doc = cls( lemma=lemma, form=form, autocomplete=get_ascii_from_unicode(autocomplete).lower(), autocomplete_sortable=get_ascii_from_unicode(autocomplete).lower(), ) doc.meta.id = autocomplete + ' ' + str(i) ret.append(doc) form = '' return ret class Indexer: ''' Manages the search indexes. The index_names argument used in the methods is a list of index names the action should work on. If empty or None the action will apply to all available indexes. ''' # available indexes indexes = OrderedDict([ ['tokens', { 'document_class': AnnotatedToken, }], ['lemmata', { 'document_class': LemmaDocument, }], ['autocomplete', { 'document_class': AutocompleteDocument, }] ]) def list(self, index_names=None): '''Retrieves stats about indexes''' # todo: return list and move actual display to textsearch import time titles = { 'name': 'Name', 'size': 'Docs', 'created': 'Created', 'disk': 'MBytes', } print('{name:15} | {size:7} | {disk:8} | {created:19}'.format(**titles)) print('-' * 62) for index_name, index_data in self._get_selected_indexes(index_names): info = { 'name': index_name, 'size': 0, 'disk': 0, 'created': 'absent', } index = Index(name=index_name) if index.exists(): info['size'] = 'exists' get_res = index.get() info['created'] = int(get_res[index_name]['settings']['index']['creation_date'])/1000 info['created'] = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(info['created'])) stats = index.stats() info['size'] = stats['_all']['total']['docs']['count'] info['disk'] = stats['_all']['total']['store']['size_in_bytes'] / 1024 / 1024 print('{name:15} | {size:>7} | {disk:>8.2f} | {created:>19}'.format(**info)) def clear(self, index_names=None): '''Recreate the tokens index''' for index_name, index_data in self._get_selected_indexes(index_names): index = Index(name=index_name) if index.exists(): index.delete() # recreate index with imposed schema. # without this the index schema would have text instead of keywords, etc. index_data['document_class'].init() print('cleared {}'.format(index_name)) def rebuild(self, index_names=None, cap=-1): '''index all the tokens from the kwic''' self.clear(index_names) # https://github.com/elastic/elasticsearch-py/blob/master/examples/bulk-ingest/bulk-ingest.py stats = {} # set this to 1 to debug the indexing. # it seems the parallel version will just silence errors! 
debug_bulk = 1 for index_name, index_data in self._get_selected_indexes(index_names): options = { 'client': connections.get_connection(), 'actions': self._bulk_actions(index_name, index_data, cap), 'chunk_size': settings.SEARCH_INDEX_CHUNK_SIZE } try: if debug_bulk: bulk(**options) else: deque(parallel_bulk(**options), maxlen=0) except BulkIndexError as e: print('Errors while indexing: {}'.format(e)) def _bulk_actions(self, index_name, index_data, cap=-1): '''elasticsearch-dsl bulk_actions callback''' # https://elasticsearch-py.readthedocs.io/en/master/helpers.html # #bulk-helpers count = 0 if cap > -1: total = cap else: print('Read token counts') total = utils.KwicParser.read_token_count() print('Indexing {} {}...'.format(total, index_name)) # A working area for the callback that persists across multiple calls. # Used for caching, etc. parsing_context = {} def parsing_callback(token_element): return index_data['document_class'].new_from_token_element(token_element, parsing_context) with tqdm(total=total) as t: for document in utils.KwicParser(parsing_callback): if -1 < cap <= count: break yield document.to_dict(True) t.update() count += 1 if total != count: print('WARNING: {} indexed != {} expected'.format(count, total)) def _get_selected_indexes(self, index_names=None): '''Returns a list of index information. One item per name in index_names. Thrown Exception if name not found.''' ret = [ (name, data) for name, data in self.indexes.items() if (not index_names) or (name in index_names) ] if index_names and len(index_names) != len(ret): raise Exception( 'Index name not found ({}). Possible names are: {}.'.format( ', '.join(index_names), ', '.join(self.indexes.keys()) ) ) return ret
kingsdigitallab/tvof-django
tvof/text_search/es_indexes.py
Python
mit
12,832
0.000935
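A usage sketch for the Indexer class above, e.g. from a Django shell; it assumes ELASTICSEARCH_DSL and SEARCH_INDEX_CHUNK_SIZE are configured in settings and an Elasticsearch node is running:

# Hedged usage sketch; the import path follows the repo layout and may need adjusting.
from text_search.es_indexes import Indexer

indexer = Indexer()
indexer.clear(['tokens'])              # recreate the tokens index with its mapping
indexer.rebuild(['tokens'], cap=1000)  # cap keeps a smoke test fast
indexer.list()                         # print doc counts and disk usage per index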
import unittest from pyshould import * from pyshould.expect import expect, expect_all, expect_any, expect_none class ExpectTestCase(unittest.TestCase): """ Simple tests for the expect based api """ def test_expect(self): expect(1).to_equal(1) expect(1).to_not_equal(0) def test_expect_all(self): expect_all([1, 2]).to_be_integer() expect_all(1, 2).to_be_integer() def test_expect_any(self): expect_any([1, 2]).to_equal(2) expect_any(1, 2).to_equal(2) def test_expect_none(self): expect_none([1, 2]).to_equal(0) expect_none(1, 2).to_equal(0) def test_expect_quantifiers(self): expect(all_of(1, 2)).to_be_integer() expect(any_of([1, 2])).to_eq(1) def test_ignore_keywords(self): it(1).should.be_an_int() expect(1).to.equal(1)
drslump/pyshould
tests/expect.py
Python
mit
856
0
import os
import sys
import re


# Extracts the included filename from a "#:include <file>.kv" directive line.
def read_text_from_include(line):
    match = re.search(r"^#:include *(.*\.kv)", line)
    filename = match.group(1)
    return open(filename, 'r').read()


def main(combine):
    if combine:
        if not os.path.isfile('shugou_original.kv'):
            os.rename('shugou.kv', 'shugou_original.kv')
        infile = open('shugou_original.kv', 'r')
        outfile = open('shugou.kv', 'w')
        for line in infile:
            if '#:include' in line:
                text_from_include = read_text_from_include(line)
                outfile.write(text_from_include + '\n')
            else:
                outfile.write(line)
        infile.close()
        outfile.close()
        print("Files successfully concatenated.")
    else:
        try:
            os.rename('shugou_original.kv', 'shugou.kv')
            print("Original file restored.")
        except OSError:
            print("No backup file. "
                  "Maybe the original file has been restored already?")


if __name__ == '__main__':
    if len(sys.argv) == 1:
        print("Missing necessary argument. Use \n"
              "'combine' to concatenate the include files into shugou.kv \n"
              "'clean' to restore the original shugou.kv file")
    else:
        if sys.argv[1] in ('Combine', 'combine', 'True'):
            main(True)
        elif sys.argv[1] in ('Clean', 'clean', 'False'):
            main(False)
        else:
            print("Cannot understand the argument. "
                  "Call this file again with no arguments to see possible arguments.")
IsabellKonrad/Shugou
combineFiles.py
Python
mit
1,675
0.001194
"""Tests of email marketing signal handlers.""" import logging import ddt from django.test import TestCase from django.test.utils import override_settings from mock import patch from util.json_request import JsonResponse from email_marketing.signals import handle_unenroll_done, \ email_marketing_register_user, \ email_marketing_user_field_changed, \ add_email_marketing_cookies from email_marketing.tasks import update_user, update_user_email from email_marketing.models import EmailMarketingConfiguration from django.test.client import RequestFactory from student.tests.factories import UserFactory, UserProfileFactory from sailthru.sailthru_client import SailthruClient from sailthru.sailthru_response import SailthruResponse from sailthru.sailthru_error import SailthruClientError log = logging.getLogger(__name__) TEST_EMAIL = "test@edx.org" def update_email_marketing_config(enabled=False, key='badkey', secret='badsecret', new_user_list='new list', template='Activation'): """ Enable / Disable Sailthru integration """ EmailMarketingConfiguration.objects.create( enabled=enabled, sailthru_key=key, sailthru_secret=secret, sailthru_new_user_list=new_user_list, sailthru_activation_template=template ) @ddt.ddt class EmailMarketingTests(TestCase): """ Tests for the EmailMarketing signals and tasks classes. """ def setUp(self): self.request_factory = RequestFactory() self.user = UserFactory.create(username='test', email=TEST_EMAIL) self.profile = self.user.profile self.request = self.request_factory.get("foo") update_email_marketing_config(enabled=True) super(EmailMarketingTests, self).setUp() @patch('email_marketing.signals.SailthruClient.api_post') def test_drop_cookie(self, mock_sailthru): """ Test add_email_marketing_cookies """ response = JsonResponse({ "success": True, "redirect_url": 'test.com/test', }) mock_sailthru.return_value = SailthruResponse(JsonResponse({'keys': {'cookie': 'test_cookie'}})) add_email_marketing_cookies(None, response=response, user=self.user) self.assertTrue('sailthru_hid' in response.cookies) self.assertEquals(mock_sailthru.call_args[0][0], "user") userparms = mock_sailthru.call_args[0][1] self.assertEquals(userparms['fields']['keys'], 1) self.assertEquals(userparms['id'], TEST_EMAIL) self.assertEquals(response.cookies['sailthru_hid'].value, "test_cookie") @patch('email_marketing.signals.SailthruClient.api_post') def test_drop_cookie_error_path(self, mock_sailthru): """ test that error paths return no cookie """ response = JsonResponse({ "success": True, "redirect_url": 'test.com/test', }) mock_sailthru.return_value = SailthruResponse(JsonResponse({'keys': {'cookiexx': 'test_cookie'}})) add_email_marketing_cookies(None, response=response, user=self.user) self.assertFalse('sailthru_hid' in response.cookies) mock_sailthru.return_value = SailthruResponse(JsonResponse({'error': "error", "errormsg": "errormsg"})) add_email_marketing_cookies(None, response=response, user=self.user) self.assertFalse('sailthru_hid' in response.cookies) mock_sailthru.side_effect = SailthruClientError add_email_marketing_cookies(None, response=response, user=self.user) self.assertFalse('sailthru_hid' in response.cookies) @patch('email_marketing.tasks.log.error') @patch('email_marketing.tasks.SailthruClient.api_post') def test_add_user(self, mock_sailthru, mock_log_error): """ test async method in tasks that actually updates Sailthru """ mock_sailthru.return_value = SailthruResponse(JsonResponse({'ok': True})) update_user.delay(self.user.username, new_user=True) 
self.assertFalse(mock_log_error.called) self.assertEquals(mock_sailthru.call_args[0][0], "user") userparms = mock_sailthru.call_args[0][1] self.assertEquals(userparms['key'], "email") self.assertEquals(userparms['id'], TEST_EMAIL) self.assertEquals(userparms['vars']['gender'], "m") self.assertEquals(userparms['vars']['username'], "test") self.assertEquals(userparms['vars']['activated'], 1) self.assertEquals(userparms['lists']['new list'], 1) @patch('email_marketing.tasks.SailthruClient.api_post') def test_activation(self, mock_sailthru): """ test send of activation template """ mock_sailthru.return_value = SailthruResponse(JsonResponse({'ok': True})) update_user.delay(self.user.username, new_user=True, activation=True) # look for call args for 2nd call self.assertEquals(mock_sailthru.call_args[0][0], "send") userparms = mock_sailthru.call_args[0][1] self.assertEquals(userparms['email'], TEST_EMAIL) self.assertEquals(userparms['template'], "Activation") @patch('email_marketing.tasks.log.error') @patch('email_marketing.tasks.SailthruClient.api_post') def test_error_logging(self, mock_sailthru, mock_log_error): """ Ensure that error returned from Sailthru api is logged """ mock_sailthru.return_value = SailthruResponse(JsonResponse({'error': 100, 'errormsg': 'Got an error'})) update_user.delay(self.user.username) self.assertTrue(mock_log_error.called) @patch('email_marketing.tasks.log.error') @patch('email_marketing.tasks.SailthruClient.api_post') def test_just_return(self, mock_sailthru, mock_log_error): """ Ensure that disabling Sailthru just returns """ update_email_marketing_config(enabled=False) update_user.delay(self.user.username) self.assertFalse(mock_log_error.called) self.assertFalse(mock_sailthru.called) update_user_email.delay(self.user.username, "newemail2@test.com") self.assertFalse(mock_log_error.called) self.assertFalse(mock_sailthru.called) update_email_marketing_config(enabled=True) @patch('email_marketing.tasks.SailthruClient.api_post') def test_change_email(self, mock_sailthru): """ test async method in task that changes email in Sailthru """ mock_sailthru.return_value = SailthruResponse(JsonResponse({'ok': True})) #self.user.email = "newemail@test.com" update_user_email.delay(self.user.username, "old@edx.org") self.assertEquals(mock_sailthru.call_args[0][0], "user") userparms = mock_sailthru.call_args[0][1] self.assertEquals(userparms['key'], "email") self.assertEquals(userparms['id'], "old@edx.org") self.assertEquals(userparms['keys']['email'], TEST_EMAIL) @patch('email_marketing.tasks.log.error') @patch('email_marketing.tasks.SailthruClient.api_post') def test_error_logging1(self, mock_sailthru, mock_log_error): """ Ensure that error returned from Sailthru api is logged """ mock_sailthru.return_value = SailthruResponse(JsonResponse({'error': 100, 'errormsg': 'Got an error'})) update_user_email.delay(self.user.username, "newemail2@test.com") self.assertTrue(mock_log_error.called) @patch('lms.djangoapps.email_marketing.tasks.update_user.delay') def test_register_user(self, mock_update_user): """ make sure register user call invokes update_user """ email_marketing_register_user(None, user=self.user, profile=self.profile) self.assertTrue(mock_update_user.called) @patch('lms.djangoapps.email_marketing.tasks.update_user.delay') @ddt.data(('auth_userprofile', 'gender', 'f', True), ('auth_user', 'is_active', 1, True), ('auth_userprofile', 'shoe_size', 1, False)) @ddt.unpack def test_modify_field(self, table, setting, value, result, mock_update_user): """ Test that correct fields 
call update_user """ email_marketing_user_field_changed(None, self.user, table=table, setting=setting, new_value=value) self.assertEqual(mock_update_user.called, result)
waheedahmed/edx-platform
lms/djangoapps/email_marketing/tests/test_signals.py
Python
agpl-3.0
8,373
0.001672
#!/usr/bin/env python ################################################## ## DEPENDENCIES import sys import os import os.path try: import builtins as builtin except ImportError: import __builtin__ as builtin from os.path import getmtime, exists import time import types from Cheetah.Version import MinCompatibleVersion as RequiredCheetahVersion from Cheetah.Version import MinCompatibleVersionTuple as RequiredCheetahVersionTuple from Cheetah.Template import Template from Cheetah.DummyTransaction import * from Cheetah.NameMapper import NotFound, valueForName, valueFromSearchList, valueFromFrameOrSearchList from Cheetah.CacheRegion import CacheRegion import Cheetah.Filters as Filters import Cheetah.ErrorCatchers as ErrorCatchers ################################################## ## MODULE CONSTANTS VFFSL=valueFromFrameOrSearchList VFSL=valueFromSearchList VFN=valueForName currentTime=time.time __CHEETAH_version__ = '2.4.4' __CHEETAH_versionTuple__ = (2, 4, 4, 'development', 0) __CHEETAH_genTime__ = 1447321436.264256 __CHEETAH_genTimestamp__ = 'Thu Nov 12 18:43:56 2015' __CHEETAH_src__ = '/home/knuth/openpli-oe-core/build/tmp/work/fusionhd-oe-linux/enigma2-plugin-extensions-openwebif/1+gitAUTOINC+5837c87afc-r0/git/plugin/controllers/views/web/getservices.tmpl' __CHEETAH_srcLastModified__ = 'Thu Nov 12 18:43:41 2015' __CHEETAH_docstring__ = 'Autogenerated by Cheetah: The Python-Powered Template Engine' if __CHEETAH_versionTuple__ < RequiredCheetahVersionTuple: raise AssertionError( 'This template was compiled with Cheetah version' ' %s. Templates compiled before version %s must be recompiled.'%( __CHEETAH_version__, RequiredCheetahVersion)) ################################################## ## CLASSES class getservices(Template): ################################################## ## CHEETAH GENERATED METHODS def __init__(self, *args, **KWs): super(getservices, self).__init__(*args, **KWs) if not self._CHEETAH__instanceInitialized: cheetahKWArgs = {} allowedKWs = 'searchList namespaces filter filtersLib errorCatcher'.split() for k,v in KWs.items(): if k in allowedKWs: cheetahKWArgs[k] = v self._initCheetahInstance(**cheetahKWArgs) def respond(self, trans=None): ## CHEETAH: main method generated for this template if (not trans and not self._CHEETAH__isBuffering and not callable(self.transaction)): trans = self.transaction # is None unless self.awake() was called if not trans: trans = DummyTransaction() _dummyTrans = True else: _dummyTrans = False write = trans.response().write SL = self._CHEETAH__searchList _filter = self._CHEETAH__currentFilter ######################################## ## START - generated method body _orig_filter_18987506 = _filter filterName = u'WebSafe' if self._CHEETAH__filters.has_key("WebSafe"): _filter = self._CHEETAH__currentFilter = self._CHEETAH__filters[filterName] else: _filter = self._CHEETAH__currentFilter = \ self._CHEETAH__filters[filterName] = getattr(self._CHEETAH__filtersLib, filterName)(self).filter write(u'''<?xml version="1.0" encoding="UTF-8"?> <e2servicelist> ''') for service in VFFSL(SL,"services",True): # generated from line 4, col 2 write(u'''\t<e2service> \t\t<e2servicereference>''') _v = VFFSL(SL,"service.servicereference",True) # u'$service.servicereference' on line 6, col 23 if _v is not None: write(_filter(_v, rawExpr=u'$service.servicereference')) # from line 6, col 23. 
write(u'''</e2servicereference> \t\t<e2servicename>''') _v = VFFSL(SL,"service.servicename",True) # u'$service.servicename' on line 7, col 18 if _v is not None: write(_filter(_v, rawExpr=u'$service.servicename')) # from line 7, col 18. write(u'''</e2servicename> \t</e2service> ''') write(u'''</e2servicelist> ''') _filter = self._CHEETAH__currentFilter = _orig_filter_18987506 ######################################## ## END - generated method body return _dummyTrans and trans.response().getvalue() or "" ################################################## ## CHEETAH GENERATED ATTRIBUTES _CHEETAH__instanceInitialized = False _CHEETAH_version = __CHEETAH_version__ _CHEETAH_versionTuple = __CHEETAH_versionTuple__ _CHEETAH_genTime = __CHEETAH_genTime__ _CHEETAH_genTimestamp = __CHEETAH_genTimestamp__ _CHEETAH_src = __CHEETAH_src__ _CHEETAH_srcLastModified = __CHEETAH_srcLastModified__ _mainCheetahMethod_for_getservices= 'respond' ## END CLASS DEFINITION if not hasattr(getservices, '_initCheetahAttributes'): templateAPIClass = getattr(getservices, '_CHEETAH_templateClass', Template) templateAPIClass._addCheetahPlumbingCodeToClass(getservices) # CHEETAH was developed by Tavis Rudd and Mike Orr # with code, advice and input from many other volunteers. # For more information visit http://www.CheetahTemplate.org/ ################################################## ## if run from command line: if __name__ == '__main__': from Cheetah.TemplateCmdLineIface import CmdLineIface CmdLineIface(templateObj=getservices()).run()
pli3/e2-openwbif
plugin/controllers/views/web/getservices.py
Python
gpl-2.0
5,478
0.011683
# Copyright (c) 2011 Nokia
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.

import Common
from common import Log

# Try to import matplotlib for charting
try:
    import matplotlib
    matplotlib.use("Agg")
    import pylab
except ImportError, e:
    matplotlib = None
    pylab = None
    Log.warn("Matplotlib or one of its dependencies not found (%s). Charts will not be generated." % e)

def slicePlot(x, y, sliceLength = 100, style = "line", *args, **kwargs):
    assert len(x) == len(y)

    if style == "line":
        plotFunc = pylab.plot
    elif style == "bar":
        plotFunc = pylab.bar
    else:
        raise RuntimeError("Unknown plotting style: %s" % style)

    if len(x) < sliceLength:
        plotFunc(x, y, *args, **kwargs)
        return

    # Note: any trailing remainder shorter than sliceLength is not plotted
    slices = int(len(x) / sliceLength)
    pylab.figure(figsize = (8, slices * 1))

    for i in range(slices):
        pylab.subplot(slices, 1, i + 1)
        plotFunc(x[i * sliceLength: (i + 1) * sliceLength],
                 y[i * sliceLength: (i + 1) * sliceLength],
                 *args, **kwargs)
skyostil/tracy
src/analyzer/Charting.py
Python
mit
2,058
0.014091
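A usage sketch for slicePlot, assuming matplotlib is installed (the module logs a warning and disables charting otherwise) and that the analyzer source tree is on the path so `Common` and `Log` resolve:

# Hedged usage sketch; the Agg backend writes the figure to a file.
import Charting

xs = range(250)
ys = [i % 17 for i in xs]
Charting.slicePlot(xs, ys, sliceLength=100, style="line")
Charting.pylab.savefig("slices.png")  # 2 slices of 100; the last 50 points are dropped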
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2015, Numenta, Inc.  Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program.  If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------

import numpy

from nupic.bindings.regions.PyRegion import PyRegion

try:
  import capnp
except ImportError:
  capnp = None
if capnp:
  from nupic.proto.TestNodeProto_capnp import TestNodeProto


class TestNode(PyRegion):


  @classmethod
  def getSpec(cls):
    if hasattr(TestNode, '_failIngetSpec'):
      assert False, 'Failing in TestNode.getSpec() as requested'

    result = dict(
        description='The node spec of the NuPIC 2 Python TestNode',
        singleNodeOnly=False,
        inputs=dict(
            bottomUpIn=dict(
                description='Primary input for the node',
                dataType='Real64',
                count=0,
                required=True,
                regionLevel=False,
                isDefaultInput=True,
                requireSplitterMap=False)),
        outputs=dict(
            bottomUpOut=dict(
                description='Primary output for the node',
                dataType='Real64',
                count=0,
                regionLevel=False,
                isDefaultOutput=True)),
        parameters=dict(
            int32Param=dict(
                description='Int32 scalar parameter',
                dataType='Int32',
                count=1,
                constraints='',
                defaultValue='32',
                accessMode='ReadWrite'),
            uint32Param=dict(
                description='UInt32 scalar parameter',
                dataType='UInt32',
                count=1,
                constraints='',
                defaultValue='33',
                accessMode='ReadWrite'),
            int64Param=dict(
                description='Int64 scalar parameter',
                dataType='Int64',
                count=1,
                constraints='',
                defaultValue='64',
                accessMode='ReadWrite'),
            uint64Param=dict(
                description='UInt64 scalar parameter',
                dataType='UInt64',
                count=1,
                constraints='',
                defaultValue='65',
                accessMode='ReadWrite'),
            real32Param=dict(
                description='Real32 scalar parameter',
                dataType='Real32',
                count=1,
                constraints='',
                defaultValue='32.1',
                accessMode='ReadWrite'),
            real64Param=dict(
                description='Real64 scalar parameter',
                dataType='Real64',
                count=1,
                constraints='',
                defaultValue='64.1',
                accessMode='ReadWrite'),
            boolParam=dict(
                description='bool parameter',
                dataType='Bool',
                count=1,
                constraints='',
                defaultValue='false',
                accessMode='ReadWrite'),
            real32arrayParam=dict(
                description='Real32 array parameter',
                dataType='Real32',
                count=0,  # array
                constraints='',
                defaultValue='',
                accessMode='ReadWrite'),
            int64arrayParam=dict(
                description='Int64 array parameter',
                dataType='Int64',
                count=0,  # array
                constraints='',
                defaultValue='',
                accessMode='ReadWrite'),
            boolArrayParam=dict(
                description='bool array parameter',
                dataType='Bool',
                count=0,  # array
                constraints='',
                defaultValue='',
                accessMode='ReadWrite'),
            stringParam=dict(
                description='String parameter',
                dataType='Byte',
                count=0,  # string is conventionally Byte/0
                constraints='',
                defaultValue='nodespec value',
                accessMode='ReadWrite'),
            failInInit=dict(
                description='For testing failure in __init__()',
                dataType='Int32',
                count=1,
                constraints='',
                defaultValue='0',
                accessMode='ReadWrite'),
            failInCompute=dict(
                description='For testing failure in compute()',
                dataType='Int32',
                count=1,
                constraints='',
                defaultValue='0',
                accessMode='ReadWrite')),
        commands=dict())

    print result
    return result


  def __init__(self, *args, **kwargs):
    """ """
    # Facilitate failing in __init__ to test error handling
    if 'failInInit' in kwargs:
      assert False, 'TestNode.__init__() Failing on purpose as requested'

    # Check if should fail in compute to test error handling
    self._failInCompute = kwargs.pop('failInCompute', False)

    # set these to a bunch of incorrect values, just to make
    # sure they are set correctly by the nodespec.
    self.parameters = dict(
        int32Param=32,
        uint32Param=33,
        int64Param=64,
        uint64Param=65,
        real32Param=32.1,
        real64Param=64.1,
        boolParam=False,
        real32ArrayParam=numpy.arange(10).astype('float32'),
        real64ArrayParam=numpy.arange(10).astype('float64'),
        # Construct int64 array in the same way as in C++
        int64ArrayParam=numpy.arange(4).astype('int64'),
        boolArrayParam=numpy.array([False] * 4),
        stringParam="nodespec value")

    for key in kwargs:
      if not key in self.parameters:
        raise Exception("TestNode found keyword %s but there is no "
                        "parameter with that name" % key)
      self.parameters[key] = kwargs[key]

    self.outputElementCount = 2  # used for computation
    self._delta = 1
    self._iter = 0
    for i in xrange(0, 4):
      self.parameters["int64ArrayParam"][i] = i * 64


  def getParameter(self, name, index):
    assert name in self.parameters
    return self.parameters[name]


  def setParameter(self, name, index, value):
    assert name in self.parameters
    self.parameters[name] = value


  def initialize(self):
    print 'TestNode.initialize() here.'


  def compute(self, inputs, outputs):
    if self._failInCompute:
      assert False, 'TestNode.compute() Failing on purpose as requested'


  def getOutputElementCount(self, name):
    assert name == 'bottomUpOut'
    return self.outputElementCount


  def getParameterArrayCount(self, name, index):
    assert name.endswith('ArrayParam')
    print 'len(self.parameters[%s]) = %d' % (name, len(self.parameters[name]))
    return len(self.parameters[name])


  def getParameterArray(self, name, index, array):
    assert name.endswith('ArrayParam')
    assert name in self.parameters
    v = self.parameters[name]
    assert len(array) == len(v)
    assert array.dtype == v.dtype
    array[:] = v


  def setParameterArray(self, name, index, array):
    assert name.endswith('ArrayParam')
    assert name in self.parameters
    assert array.dtype == self.parameters[name].dtype
    self.parameters[name] = numpy.array(array)


  @staticmethod
  def getSchema():
    return None


  def writeArray(self, regionImpl, name, dtype, castFn):
    count = self.getParameterArrayCount(name, 0)
    param = numpy.zeros(count, dtype=dtype)
    self.getParameterArray(name, 0, param)
    field = regionImpl.init(name, count)
    for i in range(count):
      field[i] = castFn(param[i])


  def write(self, proto):
    regionImpl = proto.regionImpl.as_struct(TestNodeProto)
    regionImpl.int32Param = self.getParameter("int32Param", 0)
    regionImpl.uint32Param = self.getParameter("uint32Param", 0)
    regionImpl.int64Param = self.getParameter("int64Param", 0)
    regionImpl.uint64Param = self.getParameter("uint64Param", 0)
    regionImpl.real32Param = self.getParameter("real32Param", 0)
    regionImpl.real64Param = self.getParameter("real64Param", 0)
    regionImpl.boolParam = self.getParameter("boolParam", 0)
    regionImpl.stringParam = self.getParameter("stringParam", 0)
    regionImpl.delta = self._delta
    regionImpl.iterations = self._iter
    self.writeArray(regionImpl, "int64ArrayParam", "Int64", lambda x: int(x))
    self.writeArray(regionImpl, "real32ArrayParam", "Float32",
                    lambda x: float(x))
    self.writeArray(regionImpl, "boolArrayParam", "Bool", lambda x: bool(x))


  def readArray(self, regionImpl, name, dtype):
    field = getattr(regionImpl, name)
    count = len(field)
    param = numpy.zeros(count, dtype=dtype)
    for i in range(count):
      param[i] = field[i]
    self.setParameter(name, 0, param)


  @classmethod
  def read(cls, proto):
    instance = cls()
    regionImpl = proto.regionImpl.as_struct(TestNodeProto)
    instance.setParameter("int32Param", 0, regionImpl.int32Param)
    instance.setParameter("uint32Param", 0, regionImpl.uint32Param)
    instance.setParameter("int64Param", 0, regionImpl.int64Param)
    instance.setParameter("uint64Param", 0, regionImpl.uint64Param)
    instance.setParameter("real32Param", 0, regionImpl.real32Param)
    instance.setParameter("real64Param", 0, regionImpl.real64Param)
    instance.setParameter("boolParam", 0, regionImpl.boolParam)
    instance.setParameter("stringParam", 0, regionImpl.stringParam)
    instance._delta = regionImpl.delta
    instance._iter = regionImpl.iterations
    instance.readArray(regionImpl, "int64ArrayParam", "Int64")
    instance.readArray(regionImpl, "real32ArrayParam", "Float32")
    instance.readArray(regionImpl, "boolArrayParam", "Bool")
    return instance
rhyolight/nupic.core
bindings/py/src/nupic/bindings/regions/TestNode.py
Python
agpl-3.0
10,011
0.005894
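A brief illustrative sketch (not part of the repository above) of how the TestNode region's parameter plumbing fits together when the class is used directly, outside the NuPIC engine; the value 42 is arbitrary:

# Illustrative only -- assumes TestNode from the file above is importable.
node = TestNode(int32Param=42)             # overrides the placeholder default
print node.getParameter('int32Param', 0)   # -> 42
node.setParameter('boolParam', 0, True)
node.compute(inputs={}, outputs={})        # no-op unless failInCompute was set
print node.getOutputElementCount('bottomUpOut')  # -> 2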
import pyaf.Bench.TS_datasets as tsds

import tests.artificial.process_artificial_dataset as art


art.process_dataset(N=1024, FREQ='D', seed=0, trendtype="PolyTrend",
                    cycle_length=7, transform="Logit", sigma=0.0,
                    exog_count=20, ar_order=12)
antoinecarme/pyaf
tests/artificial/transf_Logit/trend_PolyTrend/cycle_7/ar_12/test_artificial_1024_Logit_PolyTrend_7_12_20.py
Python
bsd-3-clause
262
0.087786
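For orientation, a rough standalone sketch (an assumption, not pyaf's implementation) of the kind of series those arguments describe: a polynomial trend plus a length-7 cycle, with sigma = 0.0 meaning no added noise, pushed through a logit transform:

import numpy as np

N = 1024
t = np.arange(N, dtype=float) / N
trend = 0.5 * t + 0.25 * t ** 2                       # "PolyTrend"
cycle = 0.1 * np.sin(2 * np.pi * np.arange(N) / 7.0)  # cycle_length = 7
signal = trend + cycle                                # sigma = 0.0 -> no noise term
# rescale into (0, 1) so the logit transform is defined
p = 0.05 + 0.9 * (signal - signal.min()) / (signal.max() - signal.min())
logit = np.log(p / (1.0 - p))                         # transform = "Logit"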
"""WebSocket-specific events.""" import mitmproxy.http import mitmproxy.websocket class Events: # Websocket lifecycle def websocket_handshake(self, flow: mitmproxy.http.HTTPFlow): """ Called when a client wants to establish a WebSocket connection. The WebSocket-specific headers can be manipulated to alter the handshake. The flow object is guaranteed to have a non-None request attribute. """ def websocket_start(self, flow: mitmproxy.websocket.WebSocketFlow): """ A websocket connection has commenced. """ def websocket_message(self, flow: mitmproxy.websocket.WebSocketFlow): """ Called when a WebSocket message is received from the client or server. The most recent message will be flow.messages[-1]. The message is user-modifiable. Currently there are two types of messages, corresponding to the BINARY and TEXT frame types. """ def websocket_error(self, flow: mitmproxy.websocket.WebSocketFlow): """ A websocket connection has had an error. """ def websocket_end(self, flow: mitmproxy.websocket.WebSocketFlow): """ A websocket connection has ended. """
vhaupert/mitmproxy
examples/addons/events-websocket-specific.py
Python
mit
1,300
0
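A minimal addon sketch (not part of the file above) using the websocket_message hook it documents; the class name and the masked substring are made-up examples, and the script would be loaded with mitmproxy -s:

import mitmproxy.websocket


class MaskSecrets:
    def websocket_message(self, flow: mitmproxy.websocket.WebSocketFlow):
        # The most recent message is flow.messages[-1] and is user-modifiable.
        message = flow.messages[-1]
        # TEXT frames carry str content; skip BINARY frames for simplicity.
        if message.from_client and isinstance(message.content, str):
            message.content = message.content.replace("secret", "******")


addons = [MaskSecrets()]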
__version__ = "master"
jvandijk/pla
pla/version.py
Python
mit
23
0
""" WSGI config for example_project project. This module contains the WSGI application used by Django's development server and any production WSGI deployments. It should expose a module-level variable named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover this application via the ``WSGI_APPLICATION`` setting. Usually you will have the standard Django WSGI application here, but it also might make sense to replace the whole Django WSGI application with a custom one that later delegates to the Django one. For example, you could introduce WSGI middleware here, or combine a Django application with an application of another framework. """ import os # We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks # if running multiple sites in the same mod_wsgi process. To fix this, use # mod_wsgi daemon mode with each site in its own daemon process, or use # os.environ["DJANGO_SETTINGS_MODULE"] = "example_project.settings" os.environ.setdefault("DJANGO_SETTINGS_MODULE", "example_project.settings") # This application object is used by any WSGI server configured to use this # file. This includes Django's development server, if the WSGI_APPLICATION # setting points here. from django.core.wsgi import get_wsgi_application application = get_wsgi_application() # Apply WSGI middleware here. # from helloworld.wsgi import HelloWorldApplication # application = HelloWorldApplication(application)
yetty/django-embed-video
example_project/example_project/wsgi.py
Python
mit
1,447
0.000691
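As a concrete version of the middleware idea the comments above gesture at, here is a minimal sketch (an assumption, not project code) that wraps application so every response carries one extra, hypothetical header:

def add_header_middleware(app):
    """Wrap a WSGI app so every response carries an extra header."""
    def wrapped(environ, start_response):
        def custom_start_response(status, headers, exc_info=None):
            headers.append(("X-Example", "1"))  # hypothetical header
            return start_response(status, headers, exc_info)
        return app(environ, custom_start_response)
    return wrapped

# application = add_header_middleware(application)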
# -*- coding: utf-8 -*-

from collections.abc import MutableMapping

import numpy as np
import scipy.io as sio

from ..structures import PLSResults

_result_mapping = (
    ('u', 'x_weights'),
    ('s', 'singvals'),
    ('v', 'y_weights'),
    ('usc', 'x_scores'),
    ('vsc', 'y_scores'),
    ('lvcorrs', 'y_loadings'),
    # permres
    ('perm_result_sprob', 'pvals'),
    ('perm_result_permsamp', 'permsamples'),
    # bootres
    ('boot_result_compare_u', 'x_weights_normed'),
    ('boot_result_u_se', 'x_weights_stderr'),
    ('boot_result_bootsamp', 'bootsamples'),
    # splitres; ul / ll are the upper / lower split-half confidence limits
    ('perm_splithalf_orig_ucorr', 'ucorr'),
    ('perm_splithalf_orig_vcorr', 'vcorr'),
    ('perm_splithalf_ucorr_prob', 'ucorr_pvals'),
    ('perm_splithalf_vcorr_prob', 'vcorr_pvals'),
    ('perm_splithalf_ucorr_ul', 'ucorr_uplim'),
    ('perm_splithalf_vcorr_ul', 'vcorr_uplim'),
    ('perm_splithalf_ucorr_ll', 'ucorr_lolim'),
    ('perm_splithalf_vcorr_ll', 'vcorr_lolim'),
    # inputs
    ('inputs_X', 'X'),
    ('stacked_behavdata', 'Y'),
    ('num_subj_lst', 'groups'),
    ('num_conditions', 'n_cond'),
    ('perm_result_num_perm', 'n_perm'),
    ('boot_result_num_boot', 'n_boot'),
    ('perm_splithalf_num_split', 'n_split'),
    ('boot_result_clim', 'ci'),
    ('other_input_meancentering_type', 'mean_centering'),
    ('method', 'method')
)

_mean_centered_mapping = (
    ('boot_result_orig_usc', 'contrast'),
    ('boot_result_distrib', 'contrast_boot'),
    ('boot_result_ulusc', 'contrast_ci_up'),
    ('boot_result_llusc', 'contrast_ci_lo'),
)

_behavioral_mapping = (
    ('boot_result_orig_corr', 'y_loadings'),
    ('boot_result_distrib', 'y_loadings_boot'),
    ('boot_result_ulcorr', 'y_loadings_ci_up'),
    ('boot_result_llcorr', 'y_loadings_ci_lo'),
)


def _coerce_void(value):
    """
    Converts `value` to `value.dtype`

    Parameters
    ----------
    value : array_like

    Returns
    -------
    value : dtype
        `Value` coerced to `dtype`
    """

    if np.squeeze(value).ndim == 0:
        return value.dtype.type(value.squeeze())
    else:
        return np.squeeze(value)


def _flatten(d, parent_key='', sep='_'):
    """
    Flattens nested dictionary `d` into single dictionary with new keyset

    Parameters
    ----------
    d : dict
        Dictionary to be flattened
    parent_key : str, optional
        Key of parent dictionary of `d`. Default: ''
    sep : str, optional
        How to join keys of `d` with `parent_key`, if provided. Default: '_'

    Returns
    -------
    flat : dict
        Flattened input dictionary `d`

    Notes
    -----
    Taken directly from https://stackoverflow.com/a/6027615
    """

    items = []
    for k, v in d.items():
        new_key = parent_key + sep + k if parent_key else k
        if isinstance(v, MutableMapping):
            items.extend(_flatten(v, new_key, sep=sep).items())
        else:
            items.append((new_key, v))

    return dict(items)


def _rename_keys(d, mapping):
    """
    Renames keys in dictionary `d` based on tuples in `mapping`

    Parameters
    ----------
    d : dict
        Dictionary with keys to be renamed
    mapping : list of tuples
        List of (oldkey, newkey) pairs to rename entries in `d`

    Returns
    -------
    renamed : dict
        Input dictionary `d` with keys renamed
    """

    new_dict = d.copy()
    for oldkey, newkey in mapping:
        try:
            new_dict[newkey] = new_dict.pop(oldkey)
        except KeyError:
            pass

    return new_dict


def import_matlab_result(fname, datamat='datamat_lst'):
    """
    Imports `fname` PLS result from Matlab

    Parameters
    ----------
    fname : str
        Filepath to output mat file obtained from Matlab PLS toolbox. Should
        contain at least a result struct object.
    datamat : str, optional
        Variable name of datamat ('X' array) provided to original PLS if it
        exists in `fname`. By default the datamat is not stored in the PLS
        results structure, but if it was saved in `fname` it can be loaded
        and cached in the returned results object. Default: 'datamat_lst'

    Returns
    -------
    results : :obj:`~.structures.PLSResults`
        Matlab results in a Python-friendly format
    """

    def get_labels(fields):
        labels = [k for k, v in sorted(fields.items(),
                                       key=lambda x: x[-1][-1])]
        return labels

    # load mat file using scipy.io
    matfile = sio.loadmat(fname)

    # if 'result' key is missing then consider this a malformed PLS result mat
    try:
        result = matfile.get('result')[0, 0]
    except (IndexError, TypeError):
        raise ValueError('Cannot get result struct from provided mat file')

    # convert result structure to a dictionary using dtypes as keys
    labels = get_labels(result.dtype.fields)
    result = {labels[n]: value for n, value in enumerate(result)}

    # convert sub-structures to dictionaries using dtypes as keys
    struct = ['boot_result', 'perm_result', 'perm_splithalf', 'other_input']
    for attr in struct:
        if result.get(attr) is not None:
            labels = get_labels(result[attr].dtype.fields)
            result[attr] = {labels[n]: _coerce_void(value) for n, value
                            in enumerate(result[attr][0, 0])}

    # get input data from results file, if it exists
    X = matfile.get(datamat)
    result['inputs'] = dict(X=np.vstack(X[:, 0])) if X is not None else dict()

    # squeeze all the values so they're a bit more interpretable
    for key, val in result.items():
        if isinstance(val, np.ndarray):
            result[key] = _coerce_void(val)

    # flatten the dictionary and rename the keys according to our mapping
    result = _rename_keys(_flatten(result), _result_mapping)
    if result['method'] == 3:
        result = _rename_keys(result, _behavioral_mapping)
        if 'y_loadings_ci_up' in result:
            result['y_loadings_ci'] = np.stack([
                result['y_loadings_ci_lo'],
                result['y_loadings_ci_up']
            ], axis=-1)
    else:
        result = _rename_keys(result, _mean_centered_mapping)
        if 'contrast_ci_up' in result:
            result['contrast_ci'] = np.stack([
                result['contrast_ci_lo'],
                result['contrast_ci_up']
            ], axis=-1)

    # index arrays - 1 to account for Matlab vs Python 1- vs 0-indexing
    for key in ['bootsamples', 'permsamples']:
        try:
            result[key] -= 1
        except KeyError:
            continue

    if result.get('n_split', None) is None:
        result['n_split'] = None

    # pack it into a `PLSResults` class instance for easy attribute access
    results = PLSResults(**result)

    return results
rmarkello/pyls
pyls/matlab/io.py
Python
gpl-2.0
6,754
0
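A short usage sketch for the importer above; the output file name is hypothetical, and the import path is taken from the file's location in pyls/matlab/io.py (the package may also re-export it elsewhere):

from pyls.matlab.io import import_matlab_result  # module path per the file above

res = import_matlab_result('pls_result.mat')  # hypothetical Matlab output file
print(res.singvals)  # singular values, renamed from Matlab's 's' field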
# This file was automatically generated by SWIG (http://www.swig.org). # Version 1.3.36 # # Don't modify this file, modify the SWIG interface instead. # This file is compatible with both classic and new-style classes. import _AAPI import new new_instancemethod = new.instancemethod try: _swig_property = property except NameError: pass # Python < 2.2 doesn't have 'property'. def _swig_setattr_nondynamic(self,class_type,name,value,static=1): if (name == "thisown"): return self.this.own(value) if (name == "this"): if type(value).__name__ == 'PySwigObject': self.__dict__[name] = value return method = class_type.__swig_setmethods__.get(name,None) if method: return method(self,value) if (not static) or hasattr(self,name): self.__dict__[name] = value else: raise AttributeError("You cannot add attributes to %s" % self) def _swig_setattr(self,class_type,name,value): return _swig_setattr_nondynamic(self,class_type,name,value,0) def _swig_getattr(self,class_type,name): if (name == "thisown"): return self.this.own() method = class_type.__swig_getmethods__.get(name,None) if method: return method(self) raise AttributeError,name def _swig_repr(self): try: strthis = "proxy of " + self.this.__repr__() except: strthis = "" return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,) import types try: _object = types.ObjectType _newclass = 1 except AttributeError: class _object : pass _newclass = 0 del types class intp(_object): __swig_setmethods__ = {} __setattr__ = lambda self, name, value: _swig_setattr(self, intp, name, value) __swig_getmethods__ = {} __getattr__ = lambda self, name: _swig_getattr(self, intp, name) __repr__ = _swig_repr def __init__(self, *args): this = _AAPI.new_intp(*args) try: self.this.append(this) except: self.this = this __swig_destroy__ = _AAPI.delete_intp __del__ = lambda self : None; def assign(*args): return _AAPI.intp_assign(*args) def value(*args): return _AAPI.intp_value(*args) def cast(*args): return _AAPI.intp_cast(*args) __swig_getmethods__["frompointer"] = lambda x: _AAPI.intp_frompointer if _newclass:frompointer = staticmethod(_AAPI.intp_frompointer) intp_swigregister = _AAPI.intp_swigregister intp_swigregister(intp) intp_frompointer = _AAPI.intp_frompointer class floatp(_object): __swig_setmethods__ = {} __setattr__ = lambda self, name, value: _swig_setattr(self, floatp, name, value) __swig_getmethods__ = {} __getattr__ = lambda self, name: _swig_getattr(self, floatp, name) __repr__ = _swig_repr def __init__(self, *args): this = _AAPI.new_floatp(*args) try: self.this.append(this) except: self.this = this __swig_destroy__ = _AAPI.delete_floatp __del__ = lambda self : None; def assign(*args): return _AAPI.floatp_assign(*args) def value(*args): return _AAPI.floatp_value(*args) def cast(*args): return _AAPI.floatp_cast(*args) __swig_getmethods__["frompointer"] = lambda x: _AAPI.floatp_frompointer if _newclass:frompointer = staticmethod(_AAPI.floatp_frompointer) floatp_swigregister = _AAPI.floatp_swigregister floatp_swigregister(floatp) floatp_frompointer = _AAPI.floatp_frompointer class doublep(_object): __swig_setmethods__ = {} __setattr__ = lambda self, name, value: _swig_setattr(self, doublep, name, value) __swig_getmethods__ = {} __getattr__ = lambda self, name: _swig_getattr(self, doublep, name) __repr__ = _swig_repr def __init__(self, *args): this = _AAPI.new_doublep(*args) try: self.this.append(this) except: self.this = this __swig_destroy__ = _AAPI.delete_doublep __del__ = lambda self : None; def assign(*args): return 
_AAPI.doublep_assign(*args) def value(*args): return _AAPI.doublep_value(*args) def cast(*args): return _AAPI.doublep_cast(*args) __swig_getmethods__["frompointer"] = lambda x: _AAPI.doublep_frompointer if _newclass:frompointer = staticmethod(_AAPI.doublep_frompointer) doublep_swigregister = _AAPI.doublep_swigregister doublep_swigregister(doublep) doublep_frompointer = _AAPI.doublep_frompointer class boolp(_object): __swig_setmethods__ = {} __setattr__ = lambda self, name, value: _swig_setattr(self, boolp, name, value) __swig_getmethods__ = {} __getattr__ = lambda self, name: _swig_getattr(self, boolp, name) __repr__ = _swig_repr def __init__(self, *args): this = _AAPI.new_boolp(*args) try: self.this.append(this) except: self.this = this __swig_destroy__ = _AAPI.delete_boolp __del__ = lambda self : None; def assign(*args): return _AAPI.boolp_assign(*args) def value(*args): return _AAPI.boolp_value(*args) def cast(*args): return _AAPI.boolp_cast(*args) __swig_getmethods__["frompointer"] = lambda x: _AAPI.boolp_frompointer if _newclass:frompointer = staticmethod(_AAPI.boolp_frompointer) boolp_swigregister = _AAPI.boolp_swigregister boolp_swigregister(boolp) boolp_frompointer = _AAPI.boolp_frompointer AKIActionAddSpeedAction = _AAPI.AKIActionAddSpeedAction AKIActionAddDetailedSpeedAction = _AAPI.AKIActionAddDetailedSpeedAction AKIActionCloseLaneAction = _AAPI.AKIActionCloseLaneAction AKIActionCloseLaneActionBySegment = _AAPI.AKIActionCloseLaneActionBySegment AKIActionCloseLaneDetailedAction = _AAPI.AKIActionCloseLaneDetailedAction AKIActionAddNextTurningODAction = _AAPI.AKIActionAddNextTurningODAction AKIActionAddNextTurningResultAction = _AAPI.AKIActionAddNextTurningResultAction AKIActionAddChangeDestAction = _AAPI.AKIActionAddChangeDestAction AKIActionChangeTurningProbAction = _AAPI.AKIActionChangeTurningProbAction AKIActionDisableReservedLaneAction = _AAPI.AKIActionDisableReservedLaneAction AKIActionCongestionPricingODAction = _AAPI.AKIActionCongestionPricingODAction AKIActionRemoveAction = _AAPI.AKIActionRemoveAction AKIActionReset = _AAPI.AKIActionReset AKIActionAddNextSubPathODAction = _AAPI.AKIActionAddNextSubPathODAction AKIActionAddNextSubPathResultAction = _AAPI.AKIActionAddNextSubPathResultAction AKIActionAddNextSubPathPTAction = _AAPI.AKIActionAddNextSubPathPTAction AKIActionModifyNextTurningODAction = _AAPI.AKIActionModifyNextTurningODAction AKIActionModifyNextTurningResultAction = _AAPI.AKIActionModifyNextTurningResultAction AKIActionModifyChangeDestAction = _AAPI.AKIActionModifyChangeDestAction AKIActionModifyNextSubPathResultAction = _AAPI.AKIActionModifyNextSubPathResultAction AKIActionModifyNextSubPathODAction = _AAPI.AKIActionModifyNextSubPathODAction AKIActionModifyCloseTurningODAction = _AAPI.AKIActionModifyCloseTurningODAction class InfVeh(_object): __swig_setmethods__ = {} __setattr__ = lambda self, name, value: _swig_setattr(self, InfVeh, name, value) __swig_getmethods__ = {} __getattr__ = lambda self, name: _swig_getattr(self, InfVeh, name) __repr__ = _swig_repr __swig_setmethods__["report"] = _AAPI.InfVeh_report_set __swig_getmethods__["report"] = _AAPI.InfVeh_report_get if _newclass:report = _swig_property(_AAPI.InfVeh_report_get, _AAPI.InfVeh_report_set) __swig_setmethods__["idVeh"] = _AAPI.InfVeh_idVeh_set __swig_getmethods__["idVeh"] = _AAPI.InfVeh_idVeh_get if _newclass:idVeh = _swig_property(_AAPI.InfVeh_idVeh_get, _AAPI.InfVeh_idVeh_set) __swig_setmethods__["type"] = _AAPI.InfVeh_type_set __swig_getmethods__["type"] = _AAPI.InfVeh_type_get if _newclass:type = 
_swig_property(_AAPI.InfVeh_type_get, _AAPI.InfVeh_type_set) __swig_setmethods__["idSection"] = _AAPI.InfVeh_idSection_set __swig_getmethods__["idSection"] = _AAPI.InfVeh_idSection_get if _newclass:idSection = _swig_property(_AAPI.InfVeh_idSection_get, _AAPI.InfVeh_idSection_set) __swig_setmethods__["segment"] = _AAPI.InfVeh_segment_set __swig_getmethods__["segment"] = _AAPI.InfVeh_segment_get if _newclass:segment = _swig_property(_AAPI.InfVeh_segment_get, _AAPI.InfVeh_segment_set) __swig_setmethods__["numberLane"] = _AAPI.InfVeh_numberLane_set __swig_getmethods__["numberLane"] = _AAPI.InfVeh_numberLane_get if _newclass:numberLane = _swig_property(_AAPI.InfVeh_numberLane_get, _AAPI.InfVeh_numberLane_set) __swig_setmethods__["idJunction"] = _AAPI.InfVeh_idJunction_set __swig_getmethods__["idJunction"] = _AAPI.InfVeh_idJunction_get if _newclass:idJunction = _swig_property(_AAPI.InfVeh_idJunction_get, _AAPI.InfVeh_idJunction_set) __swig_setmethods__["idSectionFrom"] = _AAPI.InfVeh_idSectionFrom_set __swig_getmethods__["idSectionFrom"] = _AAPI.InfVeh_idSectionFrom_get if _newclass:idSectionFrom = _swig_property(_AAPI.InfVeh_idSectionFrom_get, _AAPI.InfVeh_idSectionFrom_set) __swig_setmethods__["idLaneFrom"] = _AAPI.InfVeh_idLaneFrom_set __swig_getmethods__["idLaneFrom"] = _AAPI.InfVeh_idLaneFrom_get if _newclass:idLaneFrom = _swig_property(_AAPI.InfVeh_idLaneFrom_get, _AAPI.InfVeh_idLaneFrom_set) __swig_setmethods__["idSectionTo"] = _AAPI.InfVeh_idSectionTo_set __swig_getmethods__["idSectionTo"] = _AAPI.InfVeh_idSectionTo_get if _newclass:idSectionTo = _swig_property(_AAPI.InfVeh_idSectionTo_get, _AAPI.InfVeh_idSectionTo_set) __swig_setmethods__["idLaneTo"] = _AAPI.InfVeh_idLaneTo_set __swig_getmethods__["idLaneTo"] = _AAPI.InfVeh_idLaneTo_get if _newclass:idLaneTo = _swig_property(_AAPI.InfVeh_idLaneTo_get, _AAPI.InfVeh_idLaneTo_set) __swig_setmethods__["CurrentPos"] = _AAPI.InfVeh_CurrentPos_set __swig_getmethods__["CurrentPos"] = _AAPI.InfVeh_CurrentPos_get if _newclass:CurrentPos = _swig_property(_AAPI.InfVeh_CurrentPos_get, _AAPI.InfVeh_CurrentPos_set) __swig_setmethods__["distance2End"] = _AAPI.InfVeh_distance2End_set __swig_getmethods__["distance2End"] = _AAPI.InfVeh_distance2End_get if _newclass:distance2End = _swig_property(_AAPI.InfVeh_distance2End_get, _AAPI.InfVeh_distance2End_set) __swig_setmethods__["xCurrentPos"] = _AAPI.InfVeh_xCurrentPos_set __swig_getmethods__["xCurrentPos"] = _AAPI.InfVeh_xCurrentPos_get if _newclass:xCurrentPos = _swig_property(_AAPI.InfVeh_xCurrentPos_get, _AAPI.InfVeh_xCurrentPos_set) __swig_setmethods__["yCurrentPos"] = _AAPI.InfVeh_yCurrentPos_set __swig_getmethods__["yCurrentPos"] = _AAPI.InfVeh_yCurrentPos_get if _newclass:yCurrentPos = _swig_property(_AAPI.InfVeh_yCurrentPos_get, _AAPI.InfVeh_yCurrentPos_set) __swig_setmethods__["zCurrentPos"] = _AAPI.InfVeh_zCurrentPos_set __swig_getmethods__["zCurrentPos"] = _AAPI.InfVeh_zCurrentPos_get if _newclass:zCurrentPos = _swig_property(_AAPI.InfVeh_zCurrentPos_get, _AAPI.InfVeh_zCurrentPos_set) __swig_setmethods__["xCurrentPosBack"] = _AAPI.InfVeh_xCurrentPosBack_set __swig_getmethods__["xCurrentPosBack"] = _AAPI.InfVeh_xCurrentPosBack_get if _newclass:xCurrentPosBack = _swig_property(_AAPI.InfVeh_xCurrentPosBack_get, _AAPI.InfVeh_xCurrentPosBack_set) __swig_setmethods__["yCurrentPosBack"] = _AAPI.InfVeh_yCurrentPosBack_set __swig_getmethods__["yCurrentPosBack"] = _AAPI.InfVeh_yCurrentPosBack_get if _newclass:yCurrentPosBack = _swig_property(_AAPI.InfVeh_yCurrentPosBack_get, 
_AAPI.InfVeh_yCurrentPosBack_set) __swig_setmethods__["zCurrentPosBack"] = _AAPI.InfVeh_zCurrentPosBack_set __swig_getmethods__["zCurrentPosBack"] = _AAPI.InfVeh_zCurrentPosBack_get if _newclass:zCurrentPosBack = _swig_property(_AAPI.InfVeh_zCurrentPosBack_get, _AAPI.InfVeh_zCurrentPosBack_set) __swig_setmethods__["CurrentSpeed"] = _AAPI.InfVeh_CurrentSpeed_set __swig_getmethods__["CurrentSpeed"] = _AAPI.InfVeh_CurrentSpeed_get if _newclass:CurrentSpeed = _swig_property(_AAPI.InfVeh_CurrentSpeed_get, _AAPI.InfVeh_CurrentSpeed_set) __swig_setmethods__["PreviousSpeed"] = _AAPI.InfVeh_PreviousSpeed_set __swig_getmethods__["PreviousSpeed"] = _AAPI.InfVeh_PreviousSpeed_get if _newclass:PreviousSpeed = _swig_property(_AAPI.InfVeh_PreviousSpeed_get, _AAPI.InfVeh_PreviousSpeed_set) __swig_setmethods__["TotalDistance"] = _AAPI.InfVeh_TotalDistance_set __swig_getmethods__["TotalDistance"] = _AAPI.InfVeh_TotalDistance_get if _newclass:TotalDistance = _swig_property(_AAPI.InfVeh_TotalDistance_get, _AAPI.InfVeh_TotalDistance_set) __swig_setmethods__["SystemEntranceT"] = _AAPI.InfVeh_SystemEntranceT_set __swig_getmethods__["SystemEntranceT"] = _AAPI.InfVeh_SystemEntranceT_get if _newclass:SystemEntranceT = _swig_property(_AAPI.InfVeh_SystemEntranceT_get, _AAPI.InfVeh_SystemEntranceT_set) __swig_setmethods__["SectionEntranceT"] = _AAPI.InfVeh_SectionEntranceT_set __swig_getmethods__["SectionEntranceT"] = _AAPI.InfVeh_SectionEntranceT_get if _newclass:SectionEntranceT = _swig_property(_AAPI.InfVeh_SectionEntranceT_get, _AAPI.InfVeh_SectionEntranceT_set) __swig_setmethods__["CurrentStopTime"] = _AAPI.InfVeh_CurrentStopTime_set __swig_getmethods__["CurrentStopTime"] = _AAPI.InfVeh_CurrentStopTime_get if _newclass:CurrentStopTime = _swig_property(_AAPI.InfVeh_CurrentStopTime_get, _AAPI.InfVeh_CurrentStopTime_set) def __init__(self, *args): this = _AAPI.new_InfVeh(*args) try: self.this.append(this) except: self.this = this __swig_destroy__ = _AAPI.delete_InfVeh __del__ = lambda self : None; InfVeh_swigregister = _AAPI.InfVeh_swigregister InfVeh_swigregister(InfVeh) AKIActionAddCloseTurningODAction = _AAPI.AKIActionAddCloseTurningODAction class StaticInfVeh(_object): __swig_setmethods__ = {} __setattr__ = lambda self, name, value: _swig_setattr(self, StaticInfVeh, name, value) __swig_getmethods__ = {} __getattr__ = lambda self, name: _swig_getattr(self, StaticInfVeh, name) __repr__ = _swig_repr __swig_setmethods__["report"] = _AAPI.StaticInfVeh_report_set __swig_getmethods__["report"] = _AAPI.StaticInfVeh_report_get if _newclass:report = _swig_property(_AAPI.StaticInfVeh_report_get, _AAPI.StaticInfVeh_report_set) __swig_setmethods__["idVeh"] = _AAPI.StaticInfVeh_idVeh_set __swig_getmethods__["idVeh"] = _AAPI.StaticInfVeh_idVeh_get if _newclass:idVeh = _swig_property(_AAPI.StaticInfVeh_idVeh_get, _AAPI.StaticInfVeh_idVeh_set) __swig_setmethods__["type"] = _AAPI.StaticInfVeh_type_set __swig_getmethods__["type"] = _AAPI.StaticInfVeh_type_get if _newclass:type = _swig_property(_AAPI.StaticInfVeh_type_get, _AAPI.StaticInfVeh_type_set) __swig_setmethods__["length"] = _AAPI.StaticInfVeh_length_set __swig_getmethods__["length"] = _AAPI.StaticInfVeh_length_get if _newclass:length = _swig_property(_AAPI.StaticInfVeh_length_get, _AAPI.StaticInfVeh_length_set) __swig_setmethods__["width"] = _AAPI.StaticInfVeh_width_set __swig_getmethods__["width"] = _AAPI.StaticInfVeh_width_get if _newclass:width = _swig_property(_AAPI.StaticInfVeh_width_get, _AAPI.StaticInfVeh_width_set) __swig_setmethods__["maxDesiredSpeed"] = 
_AAPI.StaticInfVeh_maxDesiredSpeed_set __swig_getmethods__["maxDesiredSpeed"] = _AAPI.StaticInfVeh_maxDesiredSpeed_get if _newclass:maxDesiredSpeed = _swig_property(_AAPI.StaticInfVeh_maxDesiredSpeed_get, _AAPI.StaticInfVeh_maxDesiredSpeed_set) __swig_setmethods__["maxAcceleration"] = _AAPI.StaticInfVeh_maxAcceleration_set __swig_getmethods__["maxAcceleration"] = _AAPI.StaticInfVeh_maxAcceleration_get if _newclass:maxAcceleration = _swig_property(_AAPI.StaticInfVeh_maxAcceleration_get, _AAPI.StaticInfVeh_maxAcceleration_set) __swig_setmethods__["normalDeceleration"] = _AAPI.StaticInfVeh_normalDeceleration_set __swig_getmethods__["normalDeceleration"] = _AAPI.StaticInfVeh_normalDeceleration_get if _newclass:normalDeceleration = _swig_property(_AAPI.StaticInfVeh_normalDeceleration_get, _AAPI.StaticInfVeh_normalDeceleration_set) __swig_setmethods__["maxDeceleration"] = _AAPI.StaticInfVeh_maxDeceleration_set __swig_getmethods__["maxDeceleration"] = _AAPI.StaticInfVeh_maxDeceleration_get if _newclass:maxDeceleration = _swig_property(_AAPI.StaticInfVeh_maxDeceleration_get, _AAPI.StaticInfVeh_maxDeceleration_set) __swig_setmethods__["speedAcceptance"] = _AAPI.StaticInfVeh_speedAcceptance_set __swig_getmethods__["speedAcceptance"] = _AAPI.StaticInfVeh_speedAcceptance_get if _newclass:speedAcceptance = _swig_property(_AAPI.StaticInfVeh_speedAcceptance_get, _AAPI.StaticInfVeh_speedAcceptance_set) __swig_setmethods__["minDistanceVeh"] = _AAPI.StaticInfVeh_minDistanceVeh_set __swig_getmethods__["minDistanceVeh"] = _AAPI.StaticInfVeh_minDistanceVeh_get if _newclass:minDistanceVeh = _swig_property(_AAPI.StaticInfVeh_minDistanceVeh_get, _AAPI.StaticInfVeh_minDistanceVeh_set) __swig_setmethods__["minSafetyDistance"] = _AAPI.StaticInfVeh_minSafetyDistance_set __swig_getmethods__["minSafetyDistance"] = _AAPI.StaticInfVeh_minSafetyDistance_get if _newclass:minSafetyDistance = _swig_property(_AAPI.StaticInfVeh_minSafetyDistance_get, _AAPI.StaticInfVeh_minSafetyDistance_set) __swig_setmethods__["giveWayTime"] = _AAPI.StaticInfVeh_giveWayTime_set __swig_getmethods__["giveWayTime"] = _AAPI.StaticInfVeh_giveWayTime_get if _newclass:giveWayTime = _swig_property(_AAPI.StaticInfVeh_giveWayTime_get, _AAPI.StaticInfVeh_giveWayTime_set) __swig_setmethods__["guidanceAcceptance"] = _AAPI.StaticInfVeh_guidanceAcceptance_set __swig_getmethods__["guidanceAcceptance"] = _AAPI.StaticInfVeh_guidanceAcceptance_get if _newclass:guidanceAcceptance = _swig_property(_AAPI.StaticInfVeh_guidanceAcceptance_get, _AAPI.StaticInfVeh_guidanceAcceptance_set) __swig_setmethods__["enrouted"] = _AAPI.StaticInfVeh_enrouted_set __swig_getmethods__["enrouted"] = _AAPI.StaticInfVeh_enrouted_get if _newclass:enrouted = _swig_property(_AAPI.StaticInfVeh_enrouted_get, _AAPI.StaticInfVeh_enrouted_set) __swig_setmethods__["equipped"] = _AAPI.StaticInfVeh_equipped_set __swig_getmethods__["equipped"] = _AAPI.StaticInfVeh_equipped_get if _newclass:equipped = _swig_property(_AAPI.StaticInfVeh_equipped_get, _AAPI.StaticInfVeh_equipped_set) __swig_setmethods__["tracked"] = _AAPI.StaticInfVeh_tracked_set __swig_getmethods__["tracked"] = _AAPI.StaticInfVeh_tracked_get if _newclass:tracked = _swig_property(_AAPI.StaticInfVeh_tracked_get, _AAPI.StaticInfVeh_tracked_set) __swig_setmethods__["keepfastLane"] = _AAPI.StaticInfVeh_keepfastLane_set __swig_getmethods__["keepfastLane"] = _AAPI.StaticInfVeh_keepfastLane_get if _newclass:keepfastLane = _swig_property(_AAPI.StaticInfVeh_keepfastLane_get, _AAPI.StaticInfVeh_keepfastLane_set) 
__swig_setmethods__["headwayMin"] = _AAPI.StaticInfVeh_headwayMin_set __swig_getmethods__["headwayMin"] = _AAPI.StaticInfVeh_headwayMin_get if _newclass:headwayMin = _swig_property(_AAPI.StaticInfVeh_headwayMin_get, _AAPI.StaticInfVeh_headwayMin_set) __swig_setmethods__["sensitivityFactor"] = _AAPI.StaticInfVeh_sensitivityFactor_set __swig_getmethods__["sensitivityFactor"] = _AAPI.StaticInfVeh_sensitivityFactor_get if _newclass:sensitivityFactor = _swig_property(_AAPI.StaticInfVeh_sensitivityFactor_get, _AAPI.StaticInfVeh_sensitivityFactor_set) __swig_setmethods__["reactionTime"] = _AAPI.StaticInfVeh_reactionTime_set __swig_getmethods__["reactionTime"] = _AAPI.StaticInfVeh_reactionTime_get if _newclass:reactionTime = _swig_property(_AAPI.StaticInfVeh_reactionTime_get, _AAPI.StaticInfVeh_reactionTime_set) __swig_setmethods__["reactionTimeAtStop"] = _AAPI.StaticInfVeh_reactionTimeAtStop_set __swig_getmethods__["reactionTimeAtStop"] = _AAPI.StaticInfVeh_reactionTimeAtStop_get if _newclass:reactionTimeAtStop = _swig_property(_AAPI.StaticInfVeh_reactionTimeAtStop_get, _AAPI.StaticInfVeh_reactionTimeAtStop_set) __swig_setmethods__["reactionTimeAtTrafficLight"] = _AAPI.StaticInfVeh_reactionTimeAtTrafficLight_set __swig_getmethods__["reactionTimeAtTrafficLight"] = _AAPI.StaticInfVeh_reactionTimeAtTrafficLight_get if _newclass:reactionTimeAtTrafficLight = _swig_property(_AAPI.StaticInfVeh_reactionTimeAtTrafficLight_get, _AAPI.StaticInfVeh_reactionTimeAtTrafficLight_set) __swig_setmethods__["centroidOrigin"] = _AAPI.StaticInfVeh_centroidOrigin_set __swig_getmethods__["centroidOrigin"] = _AAPI.StaticInfVeh_centroidOrigin_get if _newclass:centroidOrigin = _swig_property(_AAPI.StaticInfVeh_centroidOrigin_get, _AAPI.StaticInfVeh_centroidOrigin_set) __swig_setmethods__["centroidDest"] = _AAPI.StaticInfVeh_centroidDest_set __swig_getmethods__["centroidDest"] = _AAPI.StaticInfVeh_centroidDest_get if _newclass:centroidDest = _swig_property(_AAPI.StaticInfVeh_centroidDest_get, _AAPI.StaticInfVeh_centroidDest_set) __swig_setmethods__["idsectionExit"] = _AAPI.StaticInfVeh_idsectionExit_set __swig_getmethods__["idsectionExit"] = _AAPI.StaticInfVeh_idsectionExit_get if _newclass:idsectionExit = _swig_property(_AAPI.StaticInfVeh_idsectionExit_get, _AAPI.StaticInfVeh_idsectionExit_set) __swig_setmethods__["idLine"] = _AAPI.StaticInfVeh_idLine_set __swig_getmethods__["idLine"] = _AAPI.StaticInfVeh_idLine_get if _newclass:idLine = _swig_property(_AAPI.StaticInfVeh_idLine_get, _AAPI.StaticInfVeh_idLine_set) __swig_setmethods__["internalInfo"] = _AAPI.StaticInfVeh_internalInfo_set __swig_getmethods__["internalInfo"] = _AAPI.StaticInfVeh_internalInfo_get if _newclass:internalInfo = _swig_property(_AAPI.StaticInfVeh_internalInfo_get, _AAPI.StaticInfVeh_internalInfo_set) def __init__(self, *args): this = _AAPI.new_StaticInfVeh(*args) try: self.this.append(this) except: self.this = this __swig_destroy__ = _AAPI.delete_StaticInfVeh __del__ = lambda self : None; StaticInfVeh_swigregister = _AAPI.StaticInfVeh_swigregister StaticInfVeh_swigregister(StaticInfVeh) class VehPos(_object): __swig_setmethods__ = {} __setattr__ = lambda self, name, value: _swig_setattr(self, VehPos, name, value) __swig_getmethods__ = {} __getattr__ = lambda self, name: _swig_getattr(self, VehPos, name) __repr__ = _swig_repr __swig_setmethods__["xPos"] = _AAPI.VehPos_xPos_set __swig_getmethods__["xPos"] = _AAPI.VehPos_xPos_get if _newclass:xPos = _swig_property(_AAPI.VehPos_xPos_get, _AAPI.VehPos_xPos_set) __swig_setmethods__["yPos"] = 
_AAPI.VehPos_yPos_set __swig_getmethods__["yPos"] = _AAPI.VehPos_yPos_get if _newclass:yPos = _swig_property(_AAPI.VehPos_yPos_get, _AAPI.VehPos_yPos_set) __swig_setmethods__["zPos"] = _AAPI.VehPos_zPos_set __swig_getmethods__["zPos"] = _AAPI.VehPos_zPos_get if _newclass:zPos = _swig_property(_AAPI.VehPos_zPos_get, _AAPI.VehPos_zPos_set) __swig_setmethods__["xPosBack"] = _AAPI.VehPos_xPosBack_set __swig_getmethods__["xPosBack"] = _AAPI.VehPos_xPosBack_get if _newclass:xPosBack = _swig_property(_AAPI.VehPos_xPosBack_get, _AAPI.VehPos_xPosBack_set) __swig_setmethods__["yPosBack"] = _AAPI.VehPos_yPosBack_set __swig_getmethods__["yPosBack"] = _AAPI.VehPos_yPosBack_get if _newclass:yPosBack = _swig_property(_AAPI.VehPos_yPosBack_get, _AAPI.VehPos_yPosBack_set) __swig_setmethods__["zPosBack"] = _AAPI.VehPos_zPosBack_set __swig_getmethods__["zPosBack"] = _AAPI.VehPos_zPosBack_get if _newclass:zPosBack = _swig_property(_AAPI.VehPos_zPosBack_get, _AAPI.VehPos_zPosBack_set) def __init__(self, *args): this = _AAPI.new_VehPos(*args) try: self.this.append(this) except: self.this = this __swig_destroy__ = _AAPI.delete_VehPos __del__ = lambda self : None; VehPos_swigregister = _AAPI.VehPos_swigregister VehPos_swigregister(VehPos) class InfVehPos(_object): __swig_setmethods__ = {} __setattr__ = lambda self, name, value: _swig_setattr(self, InfVehPos, name, value) __swig_getmethods__ = {} __getattr__ = lambda self, name: _swig_getattr(self, InfVehPos, name) __repr__ = _swig_repr __swig_setmethods__["report"] = _AAPI.InfVehPos_report_set __swig_getmethods__["report"] = _AAPI.InfVehPos_report_get if _newclass:report = _swig_property(_AAPI.InfVehPos_report_get, _AAPI.InfVehPos_report_set) __swig_setmethods__["idVeh"] = _AAPI.InfVehPos_idVeh_set __swig_getmethods__["idVeh"] = _AAPI.InfVehPos_idVeh_get if _newclass:idVeh = _swig_property(_AAPI.InfVehPos_idVeh_get, _AAPI.InfVehPos_idVeh_set) __swig_setmethods__["Npos"] = _AAPI.InfVehPos_Npos_set __swig_getmethods__["Npos"] = _AAPI.InfVehPos_Npos_get if _newclass:Npos = _swig_property(_AAPI.InfVehPos_Npos_get, _AAPI.InfVehPos_Npos_set) __swig_setmethods__["vehiclePos"] = _AAPI.InfVehPos_vehiclePos_set __swig_getmethods__["vehiclePos"] = _AAPI.InfVehPos_vehiclePos_get if _newclass:vehiclePos = _swig_property(_AAPI.InfVehPos_vehiclePos_get, _AAPI.InfVehPos_vehiclePos_set) def __init__(self, *args): this = _AAPI.new_InfVehPos(*args) try: self.this.append(this) except: self.this = this __swig_destroy__ = _AAPI.delete_InfVehPos __del__ = lambda self : None; InfVehPos_swigregister = _AAPI.InfVehPos_swigregister InfVehPos_swigregister(InfVehPos) class DynInfVeh(_object): __swig_setmethods__ = {} __setattr__ = lambda self, name, value: _swig_setattr(self, DynInfVeh, name, value) __swig_getmethods__ = {} __getattr__ = lambda self, name: _swig_getattr(self, DynInfVeh, name) __repr__ = _swig_repr __swig_setmethods__["xCurrentPos"] = _AAPI.DynInfVeh_xCurrentPos_set __swig_getmethods__["xCurrentPos"] = _AAPI.DynInfVeh_xCurrentPos_get if _newclass:xCurrentPos = _swig_property(_AAPI.DynInfVeh_xCurrentPos_get, _AAPI.DynInfVeh_xCurrentPos_set) __swig_setmethods__["yCurrentPos"] = _AAPI.DynInfVeh_yCurrentPos_set __swig_getmethods__["yCurrentPos"] = _AAPI.DynInfVeh_yCurrentPos_get if _newclass:yCurrentPos = _swig_property(_AAPI.DynInfVeh_yCurrentPos_get, _AAPI.DynInfVeh_yCurrentPos_set) __swig_setmethods__["xCurrentPosBack"] = _AAPI.DynInfVeh_xCurrentPosBack_set __swig_getmethods__["xCurrentPosBack"] = _AAPI.DynInfVeh_xCurrentPosBack_get if _newclass:xCurrentPosBack = 
_swig_property(_AAPI.DynInfVeh_xCurrentPosBack_get, _AAPI.DynInfVeh_xCurrentPosBack_set) __swig_setmethods__["yCurrentPosBack"] = _AAPI.DynInfVeh_yCurrentPosBack_set __swig_getmethods__["yCurrentPosBack"] = _AAPI.DynInfVeh_yCurrentPosBack_get if _newclass:yCurrentPosBack = _swig_property(_AAPI.DynInfVeh_yCurrentPosBack_get, _AAPI.DynInfVeh_yCurrentPosBack_set) __swig_setmethods__["currentSpeed"] = _AAPI.DynInfVeh_currentSpeed_set __swig_getmethods__["currentSpeed"] = _AAPI.DynInfVeh_currentSpeed_get if _newclass:currentSpeed = _swig_property(_AAPI.DynInfVeh_currentSpeed_get, _AAPI.DynInfVeh_currentSpeed_set) __swig_setmethods__["turning"] = _AAPI.DynInfVeh_turning_set __swig_getmethods__["turning"] = _AAPI.DynInfVeh_turning_get if _newclass:turning = _swig_property(_AAPI.DynInfVeh_turning_get, _AAPI.DynInfVeh_turning_set) def __init__(self, *args): this = _AAPI.new_DynInfVeh(*args) try: self.this.append(this) except: self.this = this __swig_destroy__ = _AAPI.delete_DynInfVeh __del__ = lambda self : None; DynInfVeh_swigregister = _AAPI.DynInfVeh_swigregister DynInfVeh_swigregister(DynInfVeh) class GraphicInfVeh(_object): __swig_setmethods__ = {} __setattr__ = lambda self, name, value: _swig_setattr(self, GraphicInfVeh, name, value) __swig_getmethods__ = {} __getattr__ = lambda self, name: _swig_getattr(self, GraphicInfVeh, name) __repr__ = _swig_repr __swig_setmethods__["report"] = _AAPI.GraphicInfVeh_report_set __swig_getmethods__["report"] = _AAPI.GraphicInfVeh_report_get if _newclass:report = _swig_property(_AAPI.GraphicInfVeh_report_get, _AAPI.GraphicInfVeh_report_set) __swig_setmethods__["idVeh"] = _AAPI.GraphicInfVeh_idVeh_set __swig_getmethods__["idVeh"] = _AAPI.GraphicInfVeh_idVeh_get if _newclass:idVeh = _swig_property(_AAPI.GraphicInfVeh_idVeh_get, _AAPI.GraphicInfVeh_idVeh_set) __swig_setmethods__["leftTurnSignal"] = _AAPI.GraphicInfVeh_leftTurnSignal_set __swig_getmethods__["leftTurnSignal"] = _AAPI.GraphicInfVeh_leftTurnSignal_get if _newclass:leftTurnSignal = _swig_property(_AAPI.GraphicInfVeh_leftTurnSignal_get, _AAPI.GraphicInfVeh_leftTurnSignal_set) __swig_setmethods__["rightTurnSignal"] = _AAPI.GraphicInfVeh_rightTurnSignal_set __swig_getmethods__["rightTurnSignal"] = _AAPI.GraphicInfVeh_rightTurnSignal_get if _newclass:rightTurnSignal = _swig_property(_AAPI.GraphicInfVeh_rightTurnSignal_get, _AAPI.GraphicInfVeh_rightTurnSignal_set) __swig_setmethods__["brakeLight"] = _AAPI.GraphicInfVeh_brakeLight_set __swig_getmethods__["brakeLight"] = _AAPI.GraphicInfVeh_brakeLight_get if _newclass:brakeLight = _swig_property(_AAPI.GraphicInfVeh_brakeLight_get, _AAPI.GraphicInfVeh_brakeLight_set) __swig_setmethods__["drivingBackwards"] = _AAPI.GraphicInfVeh_drivingBackwards_set __swig_getmethods__["drivingBackwards"] = _AAPI.GraphicInfVeh_drivingBackwards_get if _newclass:drivingBackwards = _swig_property(_AAPI.GraphicInfVeh_drivingBackwards_get, _AAPI.GraphicInfVeh_drivingBackwards_set) def __init__(self, *args): this = _AAPI.new_GraphicInfVeh(*args) try: self.this.append(this) except: self.this = this __swig_destroy__ = _AAPI.delete_GraphicInfVeh __del__ = lambda self : None; GraphicInfVeh_swigregister = _AAPI.GraphicInfVeh_swigregister GraphicInfVeh_swigregister(GraphicInfVeh) AKIRemoveVehicle = _AAPI.AKIRemoveVehicle AKIVehDisableGraphicalInf = _AAPI.AKIVehDisableGraphicalInf AKIVehStateGetNbVehiclesSection = _AAPI.AKIVehStateGetNbVehiclesSection AKIVehStateGetVehicleInfSection = _AAPI.AKIVehStateGetVehicleInfSection AKIVehGetVehicleStaticInfSection = 
_AAPI.AKIVehGetVehicleStaticInfSection AKIVehSetVehicleStaticInfSection = _AAPI.AKIVehSetVehicleStaticInfSection AKIVehGetVehicleGetPosSection = _AAPI.AKIVehGetVehicleGetPosSection AKIVehGetVehicleGetPosSectionWithStruct = _AAPI.AKIVehGetVehicleGetPosSectionWithStruct AKIVehGetVehicleGraphicInfSection = _AAPI.AKIVehGetVehicleGraphicInfSection AKIVehSetDrivingBackwards = _AAPI.AKIVehSetDrivingBackwards AKIVehStateGetNbVehiclesJunction = _AAPI.AKIVehStateGetNbVehiclesJunction AKIVehStateGetVehicleInfJunction = _AAPI.AKIVehStateGetVehicleInfJunction AKIVehGetVehicleStaticInfJunction = _AAPI.AKIVehGetVehicleStaticInfJunction AKIVehSetVehicleStaticInfJunction = _AAPI.AKIVehSetVehicleStaticInfJunction AKIVehGetVehicleGetPosJunction = _AAPI.AKIVehGetVehicleGetPosJunction AKIVehGetVehicleGetPosJunctionWithStruct = _AAPI.AKIVehGetVehicleGetPosJunctionWithStruct AKIVehGetVehicleGraphicInfJunction = _AAPI.AKIVehGetVehicleGraphicInfJunction AKIVehGetInf = _AAPI.AKIVehGetInf AKIVehGetStaticInf = _AAPI.AKIVehGetStaticInf AKIVehSetStaticInf = _AAPI.AKIVehSetStaticInf AKIVehGetNbVehTypes = _AAPI.AKIVehGetNbVehTypes AKIVehTypeGetIdVehTypeANG = _AAPI.AKIVehTypeGetIdVehTypeANG AKIVehGetMinLengthVehType = _AAPI.AKIVehGetMinLengthVehType AKIVehGetMaxLengthVehType = _AAPI.AKIVehGetMaxLengthVehType AKIVehGetVehTypeName = _AAPI.AKIVehGetVehTypeName AKIVehGetVehTypeInternalPosition = _AAPI.AKIVehGetVehTypeInternalPosition AKIVehStateGetNbSectionsVehiclePathJunction = _AAPI.AKIVehStateGetNbSectionsVehiclePathJunction AKIVehStateGetIdSectionVehiclePathJunction = _AAPI.AKIVehStateGetIdSectionVehiclePathJunction AKIVehStateGetNbSectionsVehiclePathSection = _AAPI.AKIVehStateGetNbSectionsVehiclePathSection AKIVehStateGetIdSectionVehiclePathSection = _AAPI.AKIVehStateGetIdSectionVehiclePathSection AKIVehTrackedGetNbSectionsVehiclePath = _AAPI.AKIVehTrackedGetNbSectionsVehiclePath AKIVehTrackedGetIdSectionVehiclePath = _AAPI.AKIVehTrackedGetIdSectionVehiclePath AKIRemoveVehicleJunction = _AAPI.AKIRemoveVehicleJunction AKIVehTrackedRemove = _AAPI.AKIVehTrackedRemove class structA2KDetector(_object): __swig_setmethods__ = {} __setattr__ = lambda self, name, value: _swig_setattr(self, structA2KDetector, name, value) __swig_getmethods__ = {} __getattr__ = lambda self, name: _swig_getattr(self, structA2KDetector, name) __repr__ = _swig_repr __swig_setmethods__["report"] = _AAPI.structA2KDetector_report_set __swig_getmethods__["report"] = _AAPI.structA2KDetector_report_get if _newclass:report = _swig_property(_AAPI.structA2KDetector_report_get, _AAPI.structA2KDetector_report_set) __swig_setmethods__["Id"] = _AAPI.structA2KDetector_Id_set __swig_getmethods__["Id"] = _AAPI.structA2KDetector_Id_get if _newclass:Id = _swig_property(_AAPI.structA2KDetector_Id_get, _AAPI.structA2KDetector_Id_set) __swig_setmethods__["IdSection"] = _AAPI.structA2KDetector_IdSection_set __swig_getmethods__["IdSection"] = _AAPI.structA2KDetector_IdSection_get if _newclass:IdSection = _swig_property(_AAPI.structA2KDetector_IdSection_get, _AAPI.structA2KDetector_IdSection_set) __swig_setmethods__["IdFirstLane"] = _AAPI.structA2KDetector_IdFirstLane_set __swig_getmethods__["IdFirstLane"] = _AAPI.structA2KDetector_IdFirstLane_get if _newclass:IdFirstLane = _swig_property(_AAPI.structA2KDetector_IdFirstLane_get, _AAPI.structA2KDetector_IdFirstLane_set) __swig_setmethods__["IdLastLane"] = _AAPI.structA2KDetector_IdLastLane_set __swig_getmethods__["IdLastLane"] = _AAPI.structA2KDetector_IdLastLane_get if _newclass:IdLastLane = 
_swig_property(_AAPI.structA2KDetector_IdLastLane_get, _AAPI.structA2KDetector_IdLastLane_set) __swig_setmethods__["DistinguishType"] = _AAPI.structA2KDetector_DistinguishType_set __swig_getmethods__["DistinguishType"] = _AAPI.structA2KDetector_DistinguishType_get if _newclass:DistinguishType = _swig_property(_AAPI.structA2KDetector_DistinguishType_get, _AAPI.structA2KDetector_DistinguishType_set) __swig_setmethods__["Capabilities"] = _AAPI.structA2KDetector_Capabilities_set __swig_getmethods__["Capabilities"] = _AAPI.structA2KDetector_Capabilities_get if _newclass:Capabilities = _swig_property(_AAPI.structA2KDetector_Capabilities_get, _AAPI.structA2KDetector_Capabilities_set) __swig_setmethods__["InitialPosition"] = _AAPI.structA2KDetector_InitialPosition_set __swig_getmethods__["InitialPosition"] = _AAPI.structA2KDetector_InitialPosition_get if _newclass:InitialPosition = _swig_property(_AAPI.structA2KDetector_InitialPosition_get, _AAPI.structA2KDetector_InitialPosition_set) __swig_setmethods__["FinalPosition"] = _AAPI.structA2KDetector_FinalPosition_set __swig_getmethods__["FinalPosition"] = _AAPI.structA2KDetector_FinalPosition_get if _newclass:FinalPosition = _swig_property(_AAPI.structA2KDetector_FinalPosition_get, _AAPI.structA2KDetector_FinalPosition_set) def __init__(self, *args): this = _AAPI.new_structA2KDetector(*args) try: self.this.append(this) except: self.this = this __swig_destroy__ = _AAPI.delete_structA2KDetector __del__ = lambda self : None; structA2KDetector_swigregister = _AAPI.structA2KDetector_swigregister structA2KDetector_swigregister(structA2KDetector) AKIVehEnableGraphicalInf = _AAPI.AKIVehEnableGraphicalInf class EquippedInfVeh(_object): __swig_setmethods__ = {} __setattr__ = lambda self, name, value: _swig_setattr(self, EquippedInfVeh, name, value) __swig_getmethods__ = {} __getattr__ = lambda self, name: _swig_getattr(self, EquippedInfVeh, name) __repr__ = _swig_repr __swig_setmethods__["report"] = _AAPI.EquippedInfVeh_report_set __swig_getmethods__["report"] = _AAPI.EquippedInfVeh_report_get if _newclass:report = _swig_property(_AAPI.EquippedInfVeh_report_get, _AAPI.EquippedInfVeh_report_set) __swig_setmethods__["timedetected"] = _AAPI.EquippedInfVeh_timedetected_set __swig_getmethods__["timedetected"] = _AAPI.EquippedInfVeh_timedetected_get if _newclass:timedetected = _swig_property(_AAPI.EquippedInfVeh_timedetected_get, _AAPI.EquippedInfVeh_timedetected_set) __swig_setmethods__["idVeh"] = _AAPI.EquippedInfVeh_idVeh_set __swig_getmethods__["idVeh"] = _AAPI.EquippedInfVeh_idVeh_get if _newclass:idVeh = _swig_property(_AAPI.EquippedInfVeh_idVeh_get, _AAPI.EquippedInfVeh_idVeh_set) __swig_setmethods__["vehType"] = _AAPI.EquippedInfVeh_vehType_set __swig_getmethods__["vehType"] = _AAPI.EquippedInfVeh_vehType_get if _newclass:vehType = _swig_property(_AAPI.EquippedInfVeh_vehType_get, _AAPI.EquippedInfVeh_vehType_set) __swig_setmethods__["speed"] = _AAPI.EquippedInfVeh_speed_set __swig_getmethods__["speed"] = _AAPI.EquippedInfVeh_speed_get if _newclass:speed = _swig_property(_AAPI.EquippedInfVeh_speed_get, _AAPI.EquippedInfVeh_speed_set) __swig_setmethods__["headway"] = _AAPI.EquippedInfVeh_headway_set __swig_getmethods__["headway"] = _AAPI.EquippedInfVeh_headway_get if _newclass:headway = _swig_property(_AAPI.EquippedInfVeh_headway_get, _AAPI.EquippedInfVeh_headway_set) __swig_setmethods__["idptline"] = _AAPI.EquippedInfVeh_idptline_set __swig_getmethods__["idptline"] = _AAPI.EquippedInfVeh_idptline_get if _newclass:idptline = 
_swig_property(_AAPI.EquippedInfVeh_idptline_get, _AAPI.EquippedInfVeh_idptline_set) def __init__(self, *args): this = _AAPI.new_EquippedInfVeh(*args) try: self.this.append(this) except: self.this = this __swig_destroy__ = _AAPI.delete_EquippedInfVeh __del__ = lambda self : None; EquippedInfVeh_swigregister = _AAPI.EquippedInfVeh_swigregister EquippedInfVeh_swigregister(EquippedInfVeh) AKIDetGetNumberDetectors = _AAPI.AKIDetGetNumberDetectors AKIDetGetIdDetector = _AAPI.AKIDetGetIdDetector AKIDetGetPropertiesDetector = _AAPI.AKIDetGetPropertiesDetector AKIDetGetPropertiesDetectorById = _AAPI.AKIDetGetPropertiesDetectorById AKIDetIsCountGather = _AAPI.AKIDetIsCountGather AKIDetIsPresenceGather = _AAPI.AKIDetIsPresenceGather AKIDetIsSpeedGather = _AAPI.AKIDetIsSpeedGather AKIDetIsOccupancyGather = _AAPI.AKIDetIsOccupancyGather AKIDetIsHeadwayGather = _AAPI.AKIDetIsHeadwayGather AKIDetIsDensityGather = _AAPI.AKIDetIsDensityGather AKIDetIsInfEquippedVehGather = _AAPI.AKIDetIsInfEquippedVehGather AKIDetGetIntervalDetection = _AAPI.AKIDetGetIntervalDetection AKIDetGetCycleInstantDetection = _AAPI.AKIDetGetCycleInstantDetection AKIDetGetNbMeasuresAvailableInstantDetection = _AAPI.AKIDetGetNbMeasuresAvailableInstantDetection AKIDetGetEndTimeMeasureAvailableInstantDetection = _AAPI.AKIDetGetEndTimeMeasureAvailableInstantDetection AKIDetGetSCOOTOccupancyCyclebyId = _AAPI.AKIDetGetSCOOTOccupancyCyclebyId AKIDetGetFinTimeOccupedCyclebyId = _AAPI.AKIDetGetFinTimeOccupedCyclebyId AKIDetGetIniTimeOccupedCyclebyId = _AAPI.AKIDetGetIniTimeOccupedCyclebyId AKIDetGetNbintervalsOccupedCyclebyId = _AAPI.AKIDetGetNbintervalsOccupedCyclebyId AKIDetGetCounterCyclebyId = _AAPI.AKIDetGetCounterCyclebyId AKIDetGetSpeedCyclebyId = _AAPI.AKIDetGetSpeedCyclebyId AKIDetGetTimeOccupedCyclebyId = _AAPI.AKIDetGetTimeOccupedCyclebyId AKIDetGetPresenceCyclebyId = _AAPI.AKIDetGetPresenceCyclebyId AKIDetGetHeadwayCyclebyId = _AAPI.AKIDetGetHeadwayCyclebyId AKIDetGetDensityCyclebyId = _AAPI.AKIDetGetDensityCyclebyId AKIDetGetNbVehsEquippedInDetectionCyclebyId = _AAPI.AKIDetGetNbVehsEquippedInDetectionCyclebyId AKIDetGetInfVehInDetectionStaticInfVehCyclebyId = _AAPI.AKIDetGetInfVehInDetectionStaticInfVehCyclebyId AKIDetGetInfVehInDetectionInfVehCyclebyId = _AAPI.AKIDetGetInfVehInDetectionInfVehCyclebyId AKIDetGetSCOOTOccupancyInstantDetectionbyId = _AAPI.AKIDetGetSCOOTOccupancyInstantDetectionbyId AKIDetGetIniTimeOccupedInstantDetectionbyId = _AAPI.AKIDetGetIniTimeOccupedInstantDetectionbyId AKIDetGetEndTimeOccupedInstantDetectionbyId = _AAPI.AKIDetGetEndTimeOccupedInstantDetectionbyId AKIDetGetNbintervalsOccupedInstantDetectionbyId = _AAPI.AKIDetGetNbintervalsOccupedInstantDetectionbyId AKIDetGetCounterInstantDetectionbyId = _AAPI.AKIDetGetCounterInstantDetectionbyId AKIDetGetSpeedInstantDetectionbyId = _AAPI.AKIDetGetSpeedInstantDetectionbyId AKIDetGetTimeOccupedInstantDetectionbyId = _AAPI.AKIDetGetTimeOccupedInstantDetectionbyId AKIDetGetPresenceInstantDetectionbyId = _AAPI.AKIDetGetPresenceInstantDetectionbyId AKIDetGetHeadwayInstantDetectionbyId = _AAPI.AKIDetGetHeadwayInstantDetectionbyId AKIDetGetDensityInstantDetectionbyId = _AAPI.AKIDetGetDensityInstantDetectionbyId AKIDetGetNbVehsEquippedInDetectionInstantDetectionbyId = _AAPI.AKIDetGetNbVehsEquippedInDetectionInstantDetectionbyId AKIDetGetInfVehInDetectionStaticInfVehInstantDetectionbyId = _AAPI.AKIDetGetInfVehInDetectionStaticInfVehInstantDetectionbyId AKIDetGetInfVehInDetectionInfVehInstantDetectionbyId = _AAPI.AKIDetGetInfVehInDetectionInfVehInstantDetectionbyId 
AKIDetGetCounterAggregatedbyId = _AAPI.AKIDetGetCounterAggregatedbyId AKIDetGetSpeedAggregatedbyId = _AAPI.AKIDetGetSpeedAggregatedbyId AKIDetGetTimeOccupedAggregatedbyId = _AAPI.AKIDetGetTimeOccupedAggregatedbyId AKIDetGetPresenceAggregatedbyId = _AAPI.AKIDetGetPresenceAggregatedbyId AKIDetGetDensityAggregatedbyId = _AAPI.AKIDetGetDensityAggregatedbyId AKIDetGetHeadwayAggregatedbyId = _AAPI.AKIDetGetHeadwayAggregatedbyId AKIDetGetNbVehsInDetectionAggregatedbyId = _AAPI.AKIDetGetNbVehsInDetectionAggregatedbyId AKIDetGetInfVehInDetectionAggregatedbyId = _AAPI.AKIDetGetInfVehInDetectionAggregatedbyId AKIDetectorEventsEnable = _AAPI.AKIDetectorEventsEnable AKIDetectorEventsDisable = _AAPI.AKIDetectorEventsDisable AKIDetectorEventsAddEvent = _AAPI.AKIDetectorEventsAddEvent AKIDetectorEventsClear = _AAPI.AKIDetectorEventsClear class A2KSectionInf(_object): __swig_setmethods__ = {} __setattr__ = lambda self, name, value: _swig_setattr(self, A2KSectionInf, name, value) __swig_getmethods__ = {} __getattr__ = lambda self, name: _swig_getattr(self, A2KSectionInf, name) __repr__ = _swig_repr __swig_setmethods__["report"] = _AAPI.A2KSectionInf_report_set __swig_getmethods__["report"] = _AAPI.A2KSectionInf_report_get if _newclass:report = _swig_property(_AAPI.A2KSectionInf_report_get, _AAPI.A2KSectionInf_report_set) __swig_setmethods__["id"] = _AAPI.A2KSectionInf_id_set __swig_getmethods__["id"] = _AAPI.A2KSectionInf_id_get if _newclass:id = _swig_property(_AAPI.A2KSectionInf_id_get, _AAPI.A2KSectionInf_id_set) __swig_setmethods__["angId"] = _AAPI.A2KSectionInf_angId_set __swig_getmethods__["angId"] = _AAPI.A2KSectionInf_angId_get if _newclass:angId = _swig_property(_AAPI.A2KSectionInf_angId_get, _AAPI.A2KSectionInf_angId_set) __swig_setmethods__["nbCentralLanes"] = _AAPI.A2KSectionInf_nbCentralLanes_set __swig_getmethods__["nbCentralLanes"] = _AAPI.A2KSectionInf_nbCentralLanes_get if _newclass:nbCentralLanes = _swig_property(_AAPI.A2KSectionInf_nbCentralLanes_get, _AAPI.A2KSectionInf_nbCentralLanes_set) __swig_setmethods__["nbSideLanes"] = _AAPI.A2KSectionInf_nbSideLanes_set __swig_getmethods__["nbSideLanes"] = _AAPI.A2KSectionInf_nbSideLanes_get if _newclass:nbSideLanes = _swig_property(_AAPI.A2KSectionInf_nbSideLanes_get, _AAPI.A2KSectionInf_nbSideLanes_set) __swig_setmethods__["speedLimit"] = _AAPI.A2KSectionInf_speedLimit_set __swig_getmethods__["speedLimit"] = _AAPI.A2KSectionInf_speedLimit_get if _newclass:speedLimit = _swig_property(_AAPI.A2KSectionInf_speedLimit_get, _AAPI.A2KSectionInf_speedLimit_set) __swig_setmethods__["visibilityDistance"] = _AAPI.A2KSectionInf_visibilityDistance_set __swig_getmethods__["visibilityDistance"] = _AAPI.A2KSectionInf_visibilityDistance_get if _newclass:visibilityDistance = _swig_property(_AAPI.A2KSectionInf_visibilityDistance_get, _AAPI.A2KSectionInf_visibilityDistance_set) __swig_setmethods__["yellowBoxSpeed"] = _AAPI.A2KSectionInf_yellowBoxSpeed_set __swig_getmethods__["yellowBoxSpeed"] = _AAPI.A2KSectionInf_yellowBoxSpeed_get if _newclass:yellowBoxSpeed = _swig_property(_AAPI.A2KSectionInf_yellowBoxSpeed_get, _AAPI.A2KSectionInf_yellowBoxSpeed_set) __swig_setmethods__["capacity"] = _AAPI.A2KSectionInf_capacity_set __swig_getmethods__["capacity"] = _AAPI.A2KSectionInf_capacity_get if _newclass:capacity = _swig_property(_AAPI.A2KSectionInf_capacity_get, _AAPI.A2KSectionInf_capacity_set) __swig_setmethods__["distance_zone1"] = _AAPI.A2KSectionInf_distance_zone1_set __swig_getmethods__["distance_zone1"] = _AAPI.A2KSectionInf_distance_zone1_get if 
_newclass:distance_zone1 = _swig_property(_AAPI.A2KSectionInf_distance_zone1_get, _AAPI.A2KSectionInf_distance_zone1_set)
    __swig_setmethods__["distance_zone2"] = _AAPI.A2KSectionInf_distance_zone2_set
    __swig_getmethods__["distance_zone2"] = _AAPI.A2KSectionInf_distance_zone2_get
    if _newclass:distance_zone2 = _swig_property(_AAPI.A2KSectionInf_distance_zone2_get, _AAPI.A2KSectionInf_distance_zone2_set)
    __swig_setmethods__["distance_OnRamp"] = _AAPI.A2KSectionInf_distance_OnRamp_set
    __swig_getmethods__["distance_OnRamp"] = _AAPI.A2KSectionInf_distance_OnRamp_get
    if _newclass:distance_OnRamp = _swig_property(_AAPI.A2KSectionInf_distance_OnRamp_get, _AAPI.A2KSectionInf_distance_OnRamp_set)
    __swig_setmethods__["cooperation_OnRamp"] = _AAPI.A2KSectionInf_cooperation_OnRamp_set
    __swig_getmethods__["cooperation_OnRamp"] = _AAPI.A2KSectionInf_cooperation_OnRamp_get
    if _newclass:cooperation_OnRamp = _swig_property(_AAPI.A2KSectionInf_cooperation_OnRamp_get, _AAPI.A2KSectionInf_cooperation_OnRamp_set)
    __swig_setmethods__["slope_percentage"] = _AAPI.A2KSectionInf_slope_percentage_set
    __swig_getmethods__["slope_percentage"] = _AAPI.A2KSectionInf_slope_percentage_get
    if _newclass:slope_percentage = _swig_property(_AAPI.A2KSectionInf_slope_percentage_get, _AAPI.A2KSectionInf_slope_percentage_set)
    __swig_setmethods__["length"] = _AAPI.A2KSectionInf_length_set
    __swig_getmethods__["length"] = _AAPI.A2KSectionInf_length_get
    if _newclass:length = _swig_property(_AAPI.A2KSectionInf_length_get, _AAPI.A2KSectionInf_length_set)
    __swig_setmethods__["userDefinedCost"] = _AAPI.A2KSectionInf_userDefinedCost_set
    __swig_getmethods__["userDefinedCost"] = _AAPI.A2KSectionInf_userDefinedCost_get
    if _newclass:userDefinedCost = _swig_property(_AAPI.A2KSectionInf_userDefinedCost_get, _AAPI.A2KSectionInf_userDefinedCost_set)
    __swig_setmethods__["maxGivewayTimeVariation"] = _AAPI.A2KSectionInf_maxGivewayTimeVariation_set
    __swig_getmethods__["maxGivewayTimeVariation"] = _AAPI.A2KSectionInf_maxGivewayTimeVariation_get
    if _newclass:maxGivewayTimeVariation = _swig_property(_AAPI.A2KSectionInf_maxGivewayTimeVariation_get, _AAPI.A2KSectionInf_maxGivewayTimeVariation_set)
    __swig_setmethods__["reactionTimeVariation"] = _AAPI.A2KSectionInf_reactionTimeVariation_set
    __swig_getmethods__["reactionTimeVariation"] = _AAPI.A2KSectionInf_reactionTimeVariation_get
    if _newclass:reactionTimeVariation = _swig_property(_AAPI.A2KSectionInf_reactionTimeVariation_get, _AAPI.A2KSectionInf_reactionTimeVariation_set)
    __swig_setmethods__["reactionTimeAtTrafficLightVariation"] = _AAPI.A2KSectionInf_reactionTimeAtTrafficLightVariation_set
    __swig_getmethods__["reactionTimeAtTrafficLightVariation"] = _AAPI.A2KSectionInf_reactionTimeAtTrafficLightVariation_get
    if _newclass:reactionTimeAtTrafficLightVariation = _swig_property(_AAPI.A2KSectionInf_reactionTimeAtTrafficLightVariation_get, _AAPI.A2KSectionInf_reactionTimeAtTrafficLightVariation_set)
    __swig_setmethods__["reactionTimeAtStopVariation"] = _AAPI.A2KSectionInf_reactionTimeAtStopVariation_set
    __swig_getmethods__["reactionTimeAtStopVariation"] = _AAPI.A2KSectionInf_reactionTimeAtStopVariation_get
    if _newclass:reactionTimeAtStopVariation = _swig_property(_AAPI.A2KSectionInf_reactionTimeAtStopVariation_get, _AAPI.A2KSectionInf_reactionTimeAtStopVariation_set)
    __swig_setmethods__["imprudentLaneChangingFactor"] = _AAPI.A2KSectionInf_imprudentLaneChangingFactor_set
    __swig_getmethods__["imprudentLaneChangingFactor"] = _AAPI.A2KSectionInf_imprudentLaneChangingFactor_get
    if _newclass:imprudentLaneChangingFactor = _swig_property(_AAPI.A2KSectionInf_imprudentLaneChangingFactor_get, _AAPI.A2KSectionInf_imprudentLaneChangingFactor_set)
    __swig_setmethods__["sensitivity2ImprudentLaneChangingFactor"] = _AAPI.A2KSectionInf_sensitivity2ImprudentLaneChangingFactor_set
    __swig_getmethods__["sensitivity2ImprudentLaneChangingFactor"] = _AAPI.A2KSectionInf_sensitivity2ImprudentLaneChangingFactor_get
    if _newclass:sensitivity2ImprudentLaneChangingFactor = _swig_property(_AAPI.A2KSectionInf_sensitivity2ImprudentLaneChangingFactor_get, _AAPI.A2KSectionInf_sensitivity2ImprudentLaneChangingFactor_set)
    __swig_setmethods__["nbTurnings"] = _AAPI.A2KSectionInf_nbTurnings_set
    __swig_getmethods__["nbTurnings"] = _AAPI.A2KSectionInf_nbTurnings_get
    if _newclass:nbTurnings = _swig_property(_AAPI.A2KSectionInf_nbTurnings_get, _AAPI.A2KSectionInf_nbTurnings_set)
    def __init__(self, *args):
        this = _AAPI.new_A2KSectionInf(*args)
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _AAPI.delete_A2KSectionInf
    __del__ = lambda self : None;
A2KSectionInf_swigregister = _AAPI.A2KSectionInf_swigregister
A2KSectionInf_swigregister(A2KSectionInf)
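# Editorial note (not generated by SWIG): A2KSectionInf mirrors the read-only
# section description struct of the Aimsun API (geometry, capacity, reaction
# time variations, etc. for one road section). It is typically obtained via
# AKIInfNetGetSectionANGInf rather than constructed by hand.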
class A2KSectionBehaviourParam(_object):
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, A2KSectionBehaviourParam, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, A2KSectionBehaviourParam, name)
    __repr__ = _swig_repr
    __swig_setmethods__["speedLimit"] = _AAPI.A2KSectionBehaviourParam_speedLimit_set
    __swig_getmethods__["speedLimit"] = _AAPI.A2KSectionBehaviourParam_speedLimit_get
    if _newclass:speedLimit = _swig_property(_AAPI.A2KSectionBehaviourParam_speedLimit_get, _AAPI.A2KSectionBehaviourParam_speedLimit_set)
    __swig_setmethods__["visibilityDistance"] = _AAPI.A2KSectionBehaviourParam_visibilityDistance_set
    __swig_getmethods__["visibilityDistance"] = _AAPI.A2KSectionBehaviourParam_visibilityDistance_get
    if _newclass:visibilityDistance = _swig_property(_AAPI.A2KSectionBehaviourParam_visibilityDistance_get, _AAPI.A2KSectionBehaviourParam_visibilityDistance_set)
    __swig_setmethods__["yellowBoxSpeed"] = _AAPI.A2KSectionBehaviourParam_yellowBoxSpeed_set
    __swig_getmethods__["yellowBoxSpeed"] = _AAPI.A2KSectionBehaviourParam_yellowBoxSpeed_get
    if _newclass:yellowBoxSpeed = _swig_property(_AAPI.A2KSectionBehaviourParam_yellowBoxSpeed_get, _AAPI.A2KSectionBehaviourParam_yellowBoxSpeed_set)
    __swig_setmethods__["capacity"] = _AAPI.A2KSectionBehaviourParam_capacity_set
    __swig_getmethods__["capacity"] = _AAPI.A2KSectionBehaviourParam_capacity_get
    if _newclass:capacity = _swig_property(_AAPI.A2KSectionBehaviourParam_capacity_get, _AAPI.A2KSectionBehaviourParam_capacity_set)
    __swig_setmethods__["distance_zone1"] = _AAPI.A2KSectionBehaviourParam_distance_zone1_set
    __swig_getmethods__["distance_zone1"] = _AAPI.A2KSectionBehaviourParam_distance_zone1_get
    if _newclass:distance_zone1 = _swig_property(_AAPI.A2KSectionBehaviourParam_distance_zone1_get, _AAPI.A2KSectionBehaviourParam_distance_zone1_set)
    __swig_setmethods__["distance_zone2"] = _AAPI.A2KSectionBehaviourParam_distance_zone2_set
    __swig_getmethods__["distance_zone2"] = _AAPI.A2KSectionBehaviourParam_distance_zone2_get
    if _newclass:distance_zone2 = _swig_property(_AAPI.A2KSectionBehaviourParam_distance_zone2_get, _AAPI.A2KSectionBehaviourParam_distance_zone2_set)
    __swig_setmethods__["distance_OnRamp"] = _AAPI.A2KSectionBehaviourParam_distance_OnRamp_set
    __swig_getmethods__["distance_OnRamp"] = _AAPI.A2KSectionBehaviourParam_distance_OnRamp_get
    if _newclass:distance_OnRamp = _swig_property(_AAPI.A2KSectionBehaviourParam_distance_OnRamp_get, _AAPI.A2KSectionBehaviourParam_distance_OnRamp_set)
    __swig_setmethods__["cooperation_OnRamp"] = _AAPI.A2KSectionBehaviourParam_cooperation_OnRamp_set
    __swig_getmethods__["cooperation_OnRamp"] = _AAPI.A2KSectionBehaviourParam_cooperation_OnRamp_get
    if _newclass:cooperation_OnRamp = _swig_property(_AAPI.A2KSectionBehaviourParam_cooperation_OnRamp_get, _AAPI.A2KSectionBehaviourParam_cooperation_OnRamp_set)
    __swig_setmethods__["userDefinedCost"] = _AAPI.A2KSectionBehaviourParam_userDefinedCost_set
    __swig_getmethods__["userDefinedCost"] = _AAPI.A2KSectionBehaviourParam_userDefinedCost_get
    if _newclass:userDefinedCost = _swig_property(_AAPI.A2KSectionBehaviourParam_userDefinedCost_get, _AAPI.A2KSectionBehaviourParam_userDefinedCost_set)
    __swig_setmethods__["maxGivewayTimeVariation"] = _AAPI.A2KSectionBehaviourParam_maxGivewayTimeVariation_set
    __swig_getmethods__["maxGivewayTimeVariation"] = _AAPI.A2KSectionBehaviourParam_maxGivewayTimeVariation_get
    if _newclass:maxGivewayTimeVariation = _swig_property(_AAPI.A2KSectionBehaviourParam_maxGivewayTimeVariation_get, _AAPI.A2KSectionBehaviourParam_maxGivewayTimeVariation_set)
    __swig_setmethods__["reactionTimeVariation"] = _AAPI.A2KSectionBehaviourParam_reactionTimeVariation_set
    __swig_getmethods__["reactionTimeVariation"] = _AAPI.A2KSectionBehaviourParam_reactionTimeVariation_get
    if _newclass:reactionTimeVariation = _swig_property(_AAPI.A2KSectionBehaviourParam_reactionTimeVariation_get, _AAPI.A2KSectionBehaviourParam_reactionTimeVariation_set)
    __swig_setmethods__["reactionTimeAtTrafficLightVariation"] = _AAPI.A2KSectionBehaviourParam_reactionTimeAtTrafficLightVariation_set
    __swig_getmethods__["reactionTimeAtTrafficLightVariation"] = _AAPI.A2KSectionBehaviourParam_reactionTimeAtTrafficLightVariation_get
    if _newclass:reactionTimeAtTrafficLightVariation = _swig_property(_AAPI.A2KSectionBehaviourParam_reactionTimeAtTrafficLightVariation_get, _AAPI.A2KSectionBehaviourParam_reactionTimeAtTrafficLightVariation_set)
    __swig_setmethods__["reactionTimeAtStopVariation"] = _AAPI.A2KSectionBehaviourParam_reactionTimeAtStopVariation_set
    __swig_getmethods__["reactionTimeAtStopVariation"] = _AAPI.A2KSectionBehaviourParam_reactionTimeAtStopVariation_get
    if _newclass:reactionTimeAtStopVariation = _swig_property(_AAPI.A2KSectionBehaviourParam_reactionTimeAtStopVariation_get, _AAPI.A2KSectionBehaviourParam_reactionTimeAtStopVariation_set)
    def __init__(self, *args):
        this = _AAPI.new_A2KSectionBehaviourParam(*args)
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _AAPI.delete_A2KSectionBehaviourParam
    __del__ = lambda self : None;
A2KSectionBehaviourParam_swigregister = _AAPI.A2KSectionBehaviourParam_swigregister
A2KSectionBehaviourParam_swigregister(A2KSectionBehaviourParam)
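# Illustrative sketch (editorial addition, never called at import time):
# A2KSectionBehaviourParam is the mutable twin of A2KSectionInf and is used in
# a read-modify-write pattern. The boolean third argument and the exact
# signatures of AKIInfNetGetSectionBehaviouralParam /
# AKIInfNetSetSectionBehaviouralParam are assumptions; verify them against the
# Aimsun API manual before use.
def _example_set_speed_limit(section_id, new_limit_kmh):
    params = A2KSectionBehaviourParam()
    # assumed: returns 0 on success and fills 'params' in place
    if AKIInfNetGetSectionBehaviouralParam(section_id, params, True) == 0:
        params.speedLimit = new_limit_kmh
        AKIInfNetSetSectionBehaviouralParam(section_id, params, True)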
class A2KCentroidInf(_object):
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, A2KCentroidInf, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, A2KCentroidInf, name)
    __repr__ = _swig_repr
    __swig_setmethods__["report"] = _AAPI.A2KCentroidInf_report_set
    __swig_getmethods__["report"] = _AAPI.A2KCentroidInf_report_get
    if _newclass:report = _swig_property(_AAPI.A2KCentroidInf_report_get, _AAPI.A2KCentroidInf_report_set)
    __swig_setmethods__["id"] = _AAPI.A2KCentroidInf_id_set
    __swig_getmethods__["id"] = _AAPI.A2KCentroidInf_id_get
    if _newclass:id = _swig_property(_AAPI.A2KCentroidInf_id_get, _AAPI.A2KCentroidInf_id_set)
    __swig_setmethods__["AsDestConsider_percentage"] = _AAPI.A2KCentroidInf_AsDestConsider_percentage_set
    __swig_getmethods__["AsDestConsider_percentage"] = _AAPI.A2KCentroidInf_AsDestConsider_percentage_get
    if _newclass:AsDestConsider_percentage = _swig_property(_AAPI.A2KCentroidInf_AsDestConsider_percentage_get, _AAPI.A2KCentroidInf_AsDestConsider_percentage_set)
    __swig_setmethods__["AsOrigConsider_percentage"] = _AAPI.A2KCentroidInf_AsOrigConsider_percentage_set
    __swig_getmethods__["AsOrigConsider_percentage"] = _AAPI.A2KCentroidInf_AsOrigConsider_percentage_get
    if _newclass:AsOrigConsider_percentage = _swig_property(_AAPI.A2KCentroidInf_AsOrigConsider_percentage_get, _AAPI.A2KCentroidInf_AsOrigConsider_percentage_set)
    __swig_setmethods__["IsOrigin"] = _AAPI.A2KCentroidInf_IsOrigin_set
    __swig_getmethods__["IsOrigin"] = _AAPI.A2KCentroidInf_IsOrigin_get
    if _newclass:IsOrigin = _swig_property(_AAPI.A2KCentroidInf_IsOrigin_get, _AAPI.A2KCentroidInf_IsOrigin_set)
    __swig_setmethods__["IsDestination"] = _AAPI.A2KCentroidInf_IsDestination_set
    __swig_getmethods__["IsDestination"] = _AAPI.A2KCentroidInf_IsDestination_get
    if _newclass:IsDestination = _swig_property(_AAPI.A2KCentroidInf_IsDestination_get, _AAPI.A2KCentroidInf_IsDestination_set)
    __swig_setmethods__["NumConnecTo"] = _AAPI.A2KCentroidInf_NumConnecTo_set
    __swig_getmethods__["NumConnecTo"] = _AAPI.A2KCentroidInf_NumConnecTo_get
    if _newclass:NumConnecTo = _swig_property(_AAPI.A2KCentroidInf_NumConnecTo_get, _AAPI.A2KCentroidInf_NumConnecTo_set)
    __swig_setmethods__["NumConnecFrom"] = _AAPI.A2KCentroidInf_NumConnecFrom_set
    __swig_getmethods__["NumConnecFrom"] = _AAPI.A2KCentroidInf_NumConnecFrom_get
    if _newclass:NumConnecFrom = _swig_property(_AAPI.A2KCentroidInf_NumConnecFrom_get, _AAPI.A2KCentroidInf_NumConnecFrom_set)
    def __init__(self, *args):
        this = _AAPI.new_A2KCentroidInf(*args)
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _AAPI.delete_A2KCentroidInf
    __del__ = lambda self : None;
A2KCentroidInf_swigregister = _AAPI.A2KCentroidInf_swigregister
A2KCentroidInf_swigregister(A2KCentroidInf)
AKIInfNetGetUnits = _AAPI.AKIInfNetGetUnits
AKIInfNetGetWorldCoordinates = _AAPI.AKIInfNetGetWorldCoordinates
AKIInfNetNbSectionsANG = _AAPI.AKIInfNetNbSectionsANG
AKIInfNetGetSectionANGId = _AAPI.AKIInfNetGetSectionANGId
AKIInfNetGetSectionANGInf = _AAPI.AKIInfNetGetSectionANGInf
AKIInfNetGetIdSectionANGDestinationofTurning = _AAPI.AKIInfNetGetIdSectionANGDestinationofTurning
AKIInfNetGetDestinationFromLaneofTurning = _AAPI.AKIInfNetGetDestinationFromLaneofTurning
AKIInfNetGetDestinationToLaneofTurning = _AAPI.AKIInfNetGetDestinationToLaneofTurning
AKIInfNetGetOriginFromLaneofTurning = _AAPI.AKIInfNetGetOriginFromLaneofTurning
AKIInfNetGetOriginToLaneofTurning = _AAPI.AKIInfNetGetOriginToLaneofTurning
AKIInfNetGetTurningId = _AAPI.AKIInfNetGetTurningId
AKIInfNetGetTurningDestinationFromLane = _AAPI.AKIInfNetGetTurningDestinationFromLane
AKIInfNetGetTurningDestinationToLane = _AAPI.AKIInfNetGetTurningDestinationToLane
AKIInfNetGetTurningOriginFromLane = _AAPI.AKIInfNetGetTurningOriginFromLane
AKIInfNetGetTurningOriginToLane = _AAPI.AKIInfNetGetTurningOriginToLane
AKIInfNetSetSectionBehaviouralParam = _AAPI.AKIInfNetSetSectionBehaviouralParam
AKIInfNetGetSectionBehaviouralParam = _AAPI.AKIInfNetGetSectionBehaviouralParam
AKISetSectionCapacity = _AAPI.AKISetSectionCapacity
AKISetSectionUserDefinedCost = _AAPI.AKISetSectionUserDefinedCost
AKISetSectionUserDefinedCost2 = _AAPI.AKISetSectionUserDefinedCost2
AKISetSectionUserDefinedCost3 = _AAPI.AKISetSectionUserDefinedCost3
AKIGetSectionCapacity = _AAPI.AKIGetSectionCapacity
AKIGetSectionUserDefinedCost = _AAPI.AKIGetSectionUserDefinedCost
AKIGetSectionUserDefinedCost2 = _AAPI.AKIGetSectionUserDefinedCost2
AKIGetSectionUserDefinedCost3 = _AAPI.AKIGetSectionUserDefinedCost3
AKIInfNetNbJunctions = _AAPI.AKIInfNetNbJunctions
AKIInfNetGetJunctionId = _AAPI.AKIInfNetGetJunctionId
AKIInfNetNbCentroids = _AAPI.AKIInfNetNbCentroids
AKIInfNetGetCentroidId = _AAPI.AKIInfNetGetCentroidId
AKIInfNetGetCentroidInf = _AAPI.AKIInfNetGetCentroidInf
AKIInfNetGetIdSectionofOriginCentroidConnector = _AAPI.AKIInfNetGetIdSectionofOriginCentroidConnector
AKIInfNetGetIdSectionofDestinationCentroidConnector = _AAPI.AKIInfNetGetIdSectionofDestinationCentroidConnector
AKIInfNetGetIdSectionANGofOriginCentroidConnector = _AAPI.AKIInfNetGetIdSectionANGofOriginCentroidConnector
AKIInfNetGetIdSectionANGofDestinationCentroidConnector = _AAPI.AKIInfNetGetIdSectionANGofDestinationCentroidConnector
AKIInfNetGetNetworkPathA = _AAPI.AKIInfNetGetNetworkPathA
AKIInfNetGetNetworkNameA = _AAPI.AKIInfNetGetNetworkNameA
AKIInfNetGetTrafficDemandNameA = _AAPI.AKIInfNetGetTrafficDemandNameA
AKIInfNetGetTrafficDemandType = _AAPI.AKIInfNetGetTrafficDemandType
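# Illustrative sketch (editorial addition, never called at import time):
# walking the network topology with the AKIInfNet* helpers re-exported above.
# The position/id calling convention is an assumption based on common AAPI
# usage; check the Aimsun scripting manual before relying on it.
def _example_list_sections():
    for pos in range(AKIInfNetNbSectionsANG()):
        section_id = AKIInfNetGetSectionANGId(pos)
        info = AKIInfNetGetSectionANGInf(section_id)
        if info.report == 0:  # 0 appears to signal a successful query
            print("section %d: length %.1f, %d turnings" % (section_id, info.length, info.nbTurnings))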
class StructAkiEstadSystem(_object):
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, StructAkiEstadSystem, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, StructAkiEstadSystem, name)
    __repr__ = _swig_repr
    __swig_setmethods__["report"] = _AAPI.StructAkiEstadSystem_report_set
    __swig_getmethods__["report"] = _AAPI.StructAkiEstadSystem_report_get
    if _newclass:report = _swig_property(_AAPI.StructAkiEstadSystem_report_get, _AAPI.StructAkiEstadSystem_report_set)
    __swig_setmethods__["Flow"] = _AAPI.StructAkiEstadSystem_Flow_set
    __swig_getmethods__["Flow"] = _AAPI.StructAkiEstadSystem_Flow_get
    if _newclass:Flow = _swig_property(_AAPI.StructAkiEstadSystem_Flow_get, _AAPI.StructAkiEstadSystem_Flow_set)
    __swig_setmethods__["TTa"] = _AAPI.StructAkiEstadSystem_TTa_set
    __swig_getmethods__["TTa"] = _AAPI.StructAkiEstadSystem_TTa_get
    if _newclass:TTa = _swig_property(_AAPI.StructAkiEstadSystem_TTa_get, _AAPI.StructAkiEstadSystem_TTa_set)
    __swig_setmethods__["TTd"] = _AAPI.StructAkiEstadSystem_TTd_set
    __swig_getmethods__["TTd"] = _AAPI.StructAkiEstadSystem_TTd_get
    if _newclass:TTd = _swig_property(_AAPI.StructAkiEstadSystem_TTd_get, _AAPI.StructAkiEstadSystem_TTd_set)
    __swig_setmethods__["DTa"] = _AAPI.StructAkiEstadSystem_DTa_set
    __swig_getmethods__["DTa"] = _AAPI.StructAkiEstadSystem_DTa_get
    if _newclass:DTa = _swig_property(_AAPI.StructAkiEstadSystem_DTa_get, _AAPI.StructAkiEstadSystem_DTa_set)
    __swig_setmethods__["DTd"] = _AAPI.StructAkiEstadSystem_DTd_set
    __swig_getmethods__["DTd"] = _AAPI.StructAkiEstadSystem_DTd_get
    if _newclass:DTd = _swig_property(_AAPI.StructAkiEstadSystem_DTd_get, _AAPI.StructAkiEstadSystem_DTd_set)
    __swig_setmethods__["Sa"] = _AAPI.StructAkiEstadSystem_Sa_set
    __swig_getmethods__["Sa"] = _AAPI.StructAkiEstadSystem_Sa_get
    if _newclass:Sa = _swig_property(_AAPI.StructAkiEstadSystem_Sa_get, _AAPI.StructAkiEstadSystem_Sa_set)
    __swig_setmethods__["Sd"] = _AAPI.StructAkiEstadSystem_Sd_set
    __swig_getmethods__["Sd"] = _AAPI.StructAkiEstadSystem_Sd_get
    if _newclass:Sd = _swig_property(_AAPI.StructAkiEstadSystem_Sd_get, _AAPI.StructAkiEstadSystem_Sd_set)
    __swig_setmethods__["SHa"] = _AAPI.StructAkiEstadSystem_SHa_set
    __swig_getmethods__["SHa"] = _AAPI.StructAkiEstadSystem_SHa_get
    if _newclass:SHa = _swig_property(_AAPI.StructAkiEstadSystem_SHa_get, _AAPI.StructAkiEstadSystem_SHa_set)
    __swig_setmethods__["SHd"] = _AAPI.StructAkiEstadSystem_SHd_set
    __swig_getmethods__["SHd"] = _AAPI.StructAkiEstadSystem_SHd_get
    if _newclass:SHd = _swig_property(_AAPI.StructAkiEstadSystem_SHd_get, _AAPI.StructAkiEstadSystem_SHd_set)
    __swig_setmethods__["Density"] = _AAPI.StructAkiEstadSystem_Density_set
    __swig_getmethods__["Density"] = _AAPI.StructAkiEstadSystem_Density_get
    if _newclass:Density = _swig_property(_AAPI.StructAkiEstadSystem_Density_get, _AAPI.StructAkiEstadSystem_Density_set)
    __swig_setmethods__["STa"] = _AAPI.StructAkiEstadSystem_STa_set
    __swig_getmethods__["STa"] = _AAPI.StructAkiEstadSystem_STa_get
    if _newclass:STa = _swig_property(_AAPI.StructAkiEstadSystem_STa_get, _AAPI.StructAkiEstadSystem_STa_set)
    __swig_setmethods__["STd"] = _AAPI.StructAkiEstadSystem_STd_set
    __swig_getmethods__["STd"] = _AAPI.StructAkiEstadSystem_STd_get
    if _newclass:STd = _swig_property(_AAPI.StructAkiEstadSystem_STd_get, _AAPI.StructAkiEstadSystem_STd_set)
    __swig_setmethods__["NumStops"] = _AAPI.StructAkiEstadSystem_NumStops_set
    __swig_getmethods__["NumStops"] = _AAPI.StructAkiEstadSystem_NumStops_get
    if _newclass:NumStops = _swig_property(_AAPI.StructAkiEstadSystem_NumStops_get, _AAPI.StructAkiEstadSystem_NumStops_set)
    __swig_setmethods__["LongQueueAvg"] = _AAPI.StructAkiEstadSystem_LongQueueAvg_set
    __swig_getmethods__["LongQueueAvg"] = _AAPI.StructAkiEstadSystem_LongQueueAvg_get
    if _newclass:LongQueueAvg = _swig_property(_AAPI.StructAkiEstadSystem_LongQueueAvg_get, _AAPI.StructAkiEstadSystem_LongQueueAvg_set)
    __swig_setmethods__["LongQueueMax"] = _AAPI.StructAkiEstadSystem_LongQueueMax_set
    __swig_getmethods__["LongQueueMax"] = _AAPI.StructAkiEstadSystem_LongQueueMax_get
    if _newclass:LongQueueMax = _swig_property(_AAPI.StructAkiEstadSystem_LongQueueMax_get, _AAPI.StructAkiEstadSystem_LongQueueMax_set)
    __swig_setmethods__["TotalTravel"] = _AAPI.StructAkiEstadSystem_TotalTravel_set
    __swig_getmethods__["TotalTravel"] = _AAPI.StructAkiEstadSystem_TotalTravel_get
    if _newclass:TotalTravel = _swig_property(_AAPI.StructAkiEstadSystem_TotalTravel_get, _AAPI.StructAkiEstadSystem_TotalTravel_set)
    __swig_setmethods__["TotalTravelTime"] = _AAPI.StructAkiEstadSystem_TotalTravelTime_set
    __swig_getmethods__["TotalTravelTime"] = _AAPI.StructAkiEstadSystem_TotalTravelTime_get
    if _newclass:TotalTravelTime = _swig_property(_AAPI.StructAkiEstadSystem_TotalTravelTime_get, _AAPI.StructAkiEstadSystem_TotalTravelTime_set)
    __swig_setmethods__["virtualQueueAvg"] = _AAPI.StructAkiEstadSystem_virtualQueueAvg_set
    __swig_getmethods__["virtualQueueAvg"] = _AAPI.StructAkiEstadSystem_virtualQueueAvg_get
    if _newclass:virtualQueueAvg = _swig_property(_AAPI.StructAkiEstadSystem_virtualQueueAvg_get, _AAPI.StructAkiEstadSystem_virtualQueueAvg_set)
    __swig_setmethods__["virtualQueueMax"] = _AAPI.StructAkiEstadSystem_virtualQueueMax_set
    __swig_getmethods__["virtualQueueMax"] = _AAPI.StructAkiEstadSystem_virtualQueueMax_get
    if _newclass:virtualQueueMax = _swig_property(_AAPI.StructAkiEstadSystem_virtualQueueMax_get, _AAPI.StructAkiEstadSystem_virtualQueueMax_set)
    def __init__(self, *args):
        this = _AAPI.new_StructAkiEstadSystem(*args)
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _AAPI.delete_StructAkiEstadSystem
    __del__ = lambda self : None;
StructAkiEstadSystem_swigregister = _AAPI.StructAkiEstadSystem_swigregister
StructAkiEstadSystem_swigregister(StructAkiEstadSystem)
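# Editorial note (not generated by SWIG): the StructAkiEstad* family below
# carries interval statistics at different aggregation levels (system, section,
# lane, turning, OD pair, stream). By the usual Aimsun naming convention, which
# is worth verifying in the API manual, the "a" suffix is the mean and "d" the
# deviation: TTa/TTd travel time, DTa/DTd delay time, Sa/Sd speed, SHa/SHd
# harmonic speed, STa/STd stop time. A non-zero 'report' field signals that the
# corresponding query failed.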
class StructAkiEstadSection(_object):
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, StructAkiEstadSection, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, StructAkiEstadSection, name)
    __repr__ = _swig_repr
    __swig_setmethods__["report"] = _AAPI.StructAkiEstadSection_report_set
    __swig_getmethods__["report"] = _AAPI.StructAkiEstadSection_report_get
    if _newclass:report = _swig_property(_AAPI.StructAkiEstadSection_report_get, _AAPI.StructAkiEstadSection_report_set)
    __swig_setmethods__["Id"] = _AAPI.StructAkiEstadSection_Id_set
    __swig_getmethods__["Id"] = _AAPI.StructAkiEstadSection_Id_get
    if _newclass:Id = _swig_property(_AAPI.StructAkiEstadSection_Id_get, _AAPI.StructAkiEstadSection_Id_set)
    __swig_setmethods__["Flow"] = _AAPI.StructAkiEstadSection_Flow_set
    __swig_getmethods__["Flow"] = _AAPI.StructAkiEstadSection_Flow_get
    if _newclass:Flow = _swig_property(_AAPI.StructAkiEstadSection_Flow_get, _AAPI.StructAkiEstadSection_Flow_set)
    __swig_setmethods__["TTa"] = _AAPI.StructAkiEstadSection_TTa_set
    __swig_getmethods__["TTa"] = _AAPI.StructAkiEstadSection_TTa_get
    if _newclass:TTa = _swig_property(_AAPI.StructAkiEstadSection_TTa_get, _AAPI.StructAkiEstadSection_TTa_set)
    __swig_setmethods__["TTd"] = _AAPI.StructAkiEstadSection_TTd_set
    __swig_getmethods__["TTd"] = _AAPI.StructAkiEstadSection_TTd_get
    if _newclass:TTd = _swig_property(_AAPI.StructAkiEstadSection_TTd_get, _AAPI.StructAkiEstadSection_TTd_set)
    __swig_setmethods__["DTa"] = _AAPI.StructAkiEstadSection_DTa_set
    __swig_getmethods__["DTa"] = _AAPI.StructAkiEstadSection_DTa_get
    if _newclass:DTa = _swig_property(_AAPI.StructAkiEstadSection_DTa_get, _AAPI.StructAkiEstadSection_DTa_set)
    __swig_setmethods__["DTd"] = _AAPI.StructAkiEstadSection_DTd_set
    __swig_getmethods__["DTd"] = _AAPI.StructAkiEstadSection_DTd_get
    if _newclass:DTd = _swig_property(_AAPI.StructAkiEstadSection_DTd_get, _AAPI.StructAkiEstadSection_DTd_set)
    __swig_setmethods__["Sa"] = _AAPI.StructAkiEstadSection_Sa_set
    __swig_getmethods__["Sa"] = _AAPI.StructAkiEstadSection_Sa_get
    if _newclass:Sa = _swig_property(_AAPI.StructAkiEstadSection_Sa_get, _AAPI.StructAkiEstadSection_Sa_set)
    __swig_setmethods__["Sd"] = _AAPI.StructAkiEstadSection_Sd_set
    __swig_getmethods__["Sd"] = _AAPI.StructAkiEstadSection_Sd_get
    if _newclass:Sd = _swig_property(_AAPI.StructAkiEstadSection_Sd_get, _AAPI.StructAkiEstadSection_Sd_set)
    __swig_setmethods__["SHa"] = _AAPI.StructAkiEstadSection_SHa_set
    __swig_getmethods__["SHa"] = _AAPI.StructAkiEstadSection_SHa_get
    if _newclass:SHa = _swig_property(_AAPI.StructAkiEstadSection_SHa_get, _AAPI.StructAkiEstadSection_SHa_set)
    __swig_setmethods__["SHd"] = _AAPI.StructAkiEstadSection_SHd_set
    __swig_getmethods__["SHd"] = _AAPI.StructAkiEstadSection_SHd_get
    if _newclass:SHd = _swig_property(_AAPI.StructAkiEstadSection_SHd_get, _AAPI.StructAkiEstadSection_SHd_set)
    __swig_setmethods__["Density"] = _AAPI.StructAkiEstadSection_Density_set
    __swig_getmethods__["Density"] = _AAPI.StructAkiEstadSection_Density_get
    if _newclass:Density = _swig_property(_AAPI.StructAkiEstadSection_Density_get, _AAPI.StructAkiEstadSection_Density_set)
    __swig_setmethods__["STa"] = _AAPI.StructAkiEstadSection_STa_set
    __swig_getmethods__["STa"] = _AAPI.StructAkiEstadSection_STa_get
    if _newclass:STa = _swig_property(_AAPI.StructAkiEstadSection_STa_get, _AAPI.StructAkiEstadSection_STa_set)
    __swig_setmethods__["STd"] = _AAPI.StructAkiEstadSection_STd_set
    __swig_getmethods__["STd"] = _AAPI.StructAkiEstadSection_STd_get
    if _newclass:STd = _swig_property(_AAPI.StructAkiEstadSection_STd_get, _AAPI.StructAkiEstadSection_STd_set)
    __swig_setmethods__["NumStops"] = _AAPI.StructAkiEstadSection_NumStops_set
    __swig_getmethods__["NumStops"] = _AAPI.StructAkiEstadSection_NumStops_get
    if _newclass:NumStops = _swig_property(_AAPI.StructAkiEstadSection_NumStops_get, _AAPI.StructAkiEstadSection_NumStops_set)
    __swig_setmethods__["LongQueueAvg"] = _AAPI.StructAkiEstadSection_LongQueueAvg_set
    __swig_getmethods__["LongQueueAvg"] = _AAPI.StructAkiEstadSection_LongQueueAvg_get
    if _newclass:LongQueueAvg = _swig_property(_AAPI.StructAkiEstadSection_LongQueueAvg_get, _AAPI.StructAkiEstadSection_LongQueueAvg_set)
    __swig_setmethods__["LongQueueMax"] = _AAPI.StructAkiEstadSection_LongQueueMax_set
    __swig_getmethods__["LongQueueMax"] = _AAPI.StructAkiEstadSection_LongQueueMax_get
    if _newclass:LongQueueMax = _swig_property(_AAPI.StructAkiEstadSection_LongQueueMax_get, _AAPI.StructAkiEstadSection_LongQueueMax_set)
    __swig_setmethods__["TotalTravel"] = _AAPI.StructAkiEstadSection_TotalTravel_set
    __swig_getmethods__["TotalTravel"] = _AAPI.StructAkiEstadSection_TotalTravel_get
    if _newclass:TotalTravel = _swig_property(_AAPI.StructAkiEstadSection_TotalTravel_get, _AAPI.StructAkiEstadSection_TotalTravel_set)
    __swig_setmethods__["TotalTravelTime"] = _AAPI.StructAkiEstadSection_TotalTravelTime_set
    __swig_getmethods__["TotalTravelTime"] = _AAPI.StructAkiEstadSection_TotalTravelTime_get
    if _newclass:TotalTravelTime = _swig_property(_AAPI.StructAkiEstadSection_TotalTravelTime_get, _AAPI.StructAkiEstadSection_TotalTravelTime_set)
    __swig_setmethods__["virtualQueueAvg"] = _AAPI.StructAkiEstadSection_virtualQueueAvg_set
    __swig_getmethods__["virtualQueueAvg"] = _AAPI.StructAkiEstadSection_virtualQueueAvg_get
    if _newclass:virtualQueueAvg = _swig_property(_AAPI.StructAkiEstadSection_virtualQueueAvg_get, _AAPI.StructAkiEstadSection_virtualQueueAvg_set)
    __swig_setmethods__["virtualQueueMax"] = _AAPI.StructAkiEstadSection_virtualQueueMax_set
    __swig_getmethods__["virtualQueueMax"] = _AAPI.StructAkiEstadSection_virtualQueueMax_get
    if _newclass:virtualQueueMax = _swig_property(_AAPI.StructAkiEstadSection_virtualQueueMax_get, _AAPI.StructAkiEstadSection_virtualQueueMax_set)
    def __init__(self, *args):
        this = _AAPI.new_StructAkiEstadSection(*args)
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _AAPI.delete_StructAkiEstadSection
    __del__ = lambda self : None;
StructAkiEstadSection_swigregister = _AAPI.StructAkiEstadSection_swigregister
StructAkiEstadSection_swigregister(StructAkiEstadSection)
class StructAkiEstadSectionLane(_object):
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, StructAkiEstadSectionLane, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, StructAkiEstadSectionLane, name)
    __repr__ = _swig_repr
    __swig_setmethods__["report"] = _AAPI.StructAkiEstadSectionLane_report_set
    __swig_getmethods__["report"] = _AAPI.StructAkiEstadSectionLane_report_get
    if _newclass:report = _swig_property(_AAPI.StructAkiEstadSectionLane_report_get, _AAPI.StructAkiEstadSectionLane_report_set)
    __swig_setmethods__["IdSection"] = _AAPI.StructAkiEstadSectionLane_IdSection_set
    __swig_getmethods__["IdSection"] = _AAPI.StructAkiEstadSectionLane_IdSection_get
    if _newclass:IdSection = _swig_property(_AAPI.StructAkiEstadSectionLane_IdSection_get, _AAPI.StructAkiEstadSectionLane_IdSection_set)
    __swig_setmethods__["Flow"] = _AAPI.StructAkiEstadSectionLane_Flow_set
    __swig_getmethods__["Flow"] = _AAPI.StructAkiEstadSectionLane_Flow_get
    if _newclass:Flow = _swig_property(_AAPI.StructAkiEstadSectionLane_Flow_get, _AAPI.StructAkiEstadSectionLane_Flow_set)
    __swig_setmethods__["TTa"] = _AAPI.StructAkiEstadSectionLane_TTa_set
    __swig_getmethods__["TTa"] = _AAPI.StructAkiEstadSectionLane_TTa_get
    if _newclass:TTa = _swig_property(_AAPI.StructAkiEstadSectionLane_TTa_get, _AAPI.StructAkiEstadSectionLane_TTa_set)
    __swig_setmethods__["TTd"] = _AAPI.StructAkiEstadSectionLane_TTd_set
    __swig_getmethods__["TTd"] = _AAPI.StructAkiEstadSectionLane_TTd_get
    if _newclass:TTd = _swig_property(_AAPI.StructAkiEstadSectionLane_TTd_get, _AAPI.StructAkiEstadSectionLane_TTd_set)
    __swig_setmethods__["DTa"] = _AAPI.StructAkiEstadSectionLane_DTa_set
    __swig_getmethods__["DTa"] = _AAPI.StructAkiEstadSectionLane_DTa_get
    if _newclass:DTa = _swig_property(_AAPI.StructAkiEstadSectionLane_DTa_get, _AAPI.StructAkiEstadSectionLane_DTa_set)
    __swig_setmethods__["DTd"] = _AAPI.StructAkiEstadSectionLane_DTd_set
    __swig_getmethods__["DTd"] = _AAPI.StructAkiEstadSectionLane_DTd_get
    if _newclass:DTd = _swig_property(_AAPI.StructAkiEstadSectionLane_DTd_get, _AAPI.StructAkiEstadSectionLane_DTd_set)
    __swig_setmethods__["Sa"] = _AAPI.StructAkiEstadSectionLane_Sa_set
    __swig_getmethods__["Sa"] = _AAPI.StructAkiEstadSectionLane_Sa_get
    if _newclass:Sa = _swig_property(_AAPI.StructAkiEstadSectionLane_Sa_get, _AAPI.StructAkiEstadSectionLane_Sa_set)
    __swig_setmethods__["Sd"] = _AAPI.StructAkiEstadSectionLane_Sd_set
    __swig_getmethods__["Sd"] = _AAPI.StructAkiEstadSectionLane_Sd_get
    if _newclass:Sd = _swig_property(_AAPI.StructAkiEstadSectionLane_Sd_get, _AAPI.StructAkiEstadSectionLane_Sd_set)
    __swig_setmethods__["SHa"] = _AAPI.StructAkiEstadSectionLane_SHa_set
    __swig_getmethods__["SHa"] = _AAPI.StructAkiEstadSectionLane_SHa_get
    if _newclass:SHa = _swig_property(_AAPI.StructAkiEstadSectionLane_SHa_get, _AAPI.StructAkiEstadSectionLane_SHa_set)
    __swig_setmethods__["SHd"] = _AAPI.StructAkiEstadSectionLane_SHd_set
    __swig_getmethods__["SHd"] = _AAPI.StructAkiEstadSectionLane_SHd_get
    if _newclass:SHd = _swig_property(_AAPI.StructAkiEstadSectionLane_SHd_get, _AAPI.StructAkiEstadSectionLane_SHd_set)
    __swig_setmethods__["Density"] = _AAPI.StructAkiEstadSectionLane_Density_set
    __swig_getmethods__["Density"] = _AAPI.StructAkiEstadSectionLane_Density_get
    if _newclass:Density = _swig_property(_AAPI.StructAkiEstadSectionLane_Density_get, _AAPI.StructAkiEstadSectionLane_Density_set)
    __swig_setmethods__["LongQueueAvg"] = _AAPI.StructAkiEstadSectionLane_LongQueueAvg_set
    __swig_getmethods__["LongQueueAvg"] = _AAPI.StructAkiEstadSectionLane_LongQueueAvg_get
    if _newclass:LongQueueAvg = _swig_property(_AAPI.StructAkiEstadSectionLane_LongQueueAvg_get, _AAPI.StructAkiEstadSectionLane_LongQueueAvg_set)
    __swig_setmethods__["LongQueueMax"] = _AAPI.StructAkiEstadSectionLane_LongQueueMax_set
    __swig_getmethods__["LongQueueMax"] = _AAPI.StructAkiEstadSectionLane_LongQueueMax_get
    if _newclass:LongQueueMax = _swig_property(_AAPI.StructAkiEstadSectionLane_LongQueueMax_get, _AAPI.StructAkiEstadSectionLane_LongQueueMax_set)
    def __init__(self, *args):
        this = _AAPI.new_StructAkiEstadSectionLane(*args)
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _AAPI.delete_StructAkiEstadSectionLane
    __del__ = lambda self : None;
StructAkiEstadSectionLane_swigregister = _AAPI.StructAkiEstadSectionLane_swigregister
StructAkiEstadSectionLane_swigregister(StructAkiEstadSectionLane)
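# Editorial note (not generated by SWIG): StructAkiEstadSection aggregates over
# a whole section, while StructAkiEstadSectionLane holds the same mean/deviation
# measures and queue lengths for a single lane of a section (IdSection); the
# lane record omits the per-section totals (NumStops, STa/STd, TotalTravel,
# TotalTravelTime, virtual queues) that only exist at section level.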
class StructAkiEstadTurning(_object):
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, StructAkiEstadTurning, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, StructAkiEstadTurning, name)
    __repr__ = _swig_repr
    __swig_setmethods__["report"] = _AAPI.StructAkiEstadTurning_report_set
    __swig_getmethods__["report"] = _AAPI.StructAkiEstadTurning_report_get
    if _newclass:report = _swig_property(_AAPI.StructAkiEstadTurning_report_get, _AAPI.StructAkiEstadTurning_report_set)
    __swig_setmethods__["IdSectionFrom"] = _AAPI.StructAkiEstadTurning_IdSectionFrom_set
    __swig_getmethods__["IdSectionFrom"] = _AAPI.StructAkiEstadTurning_IdSectionFrom_get
    if _newclass:IdSectionFrom = _swig_property(_AAPI.StructAkiEstadTurning_IdSectionFrom_get, _AAPI.StructAkiEstadTurning_IdSectionFrom_set)
    __swig_setmethods__["IdSectionTo"] = _AAPI.StructAkiEstadTurning_IdSectionTo_set
    __swig_getmethods__["IdSectionTo"] = _AAPI.StructAkiEstadTurning_IdSectionTo_get
    if _newclass:IdSectionTo = _swig_property(_AAPI.StructAkiEstadTurning_IdSectionTo_get, _AAPI.StructAkiEstadTurning_IdSectionTo_set)
    __swig_setmethods__["Flow"] = _AAPI.StructAkiEstadTurning_Flow_set
    __swig_getmethods__["Flow"] = _AAPI.StructAkiEstadTurning_Flow_get
    if _newclass:Flow = _swig_property(_AAPI.StructAkiEstadTurning_Flow_get, _AAPI.StructAkiEstadTurning_Flow_set)
    __swig_setmethods__["TTa"] = _AAPI.StructAkiEstadTurning_TTa_set
    __swig_getmethods__["TTa"] = _AAPI.StructAkiEstadTurning_TTa_get
    if _newclass:TTa = _swig_property(_AAPI.StructAkiEstadTurning_TTa_get, _AAPI.StructAkiEstadTurning_TTa_set)
    __swig_setmethods__["TTd"] = _AAPI.StructAkiEstadTurning_TTd_set
    __swig_getmethods__["TTd"] = _AAPI.StructAkiEstadTurning_TTd_get
    if _newclass:TTd = _swig_property(_AAPI.StructAkiEstadTurning_TTd_get, _AAPI.StructAkiEstadTurning_TTd_set)
    __swig_setmethods__["DTa"] = _AAPI.StructAkiEstadTurning_DTa_set
    __swig_getmethods__["DTa"] = _AAPI.StructAkiEstadTurning_DTa_get
    if _newclass:DTa = _swig_property(_AAPI.StructAkiEstadTurning_DTa_get, _AAPI.StructAkiEstadTurning_DTa_set)
    __swig_setmethods__["DTd"] = _AAPI.StructAkiEstadTurning_DTd_set
    __swig_getmethods__["DTd"] = _AAPI.StructAkiEstadTurning_DTd_get
    if _newclass:DTd = _swig_property(_AAPI.StructAkiEstadTurning_DTd_get, _AAPI.StructAkiEstadTurning_DTd_set)
    __swig_setmethods__["Sa"] = _AAPI.StructAkiEstadTurning_Sa_set
    __swig_getmethods__["Sa"] = _AAPI.StructAkiEstadTurning_Sa_get
    if _newclass:Sa = _swig_property(_AAPI.StructAkiEstadTurning_Sa_get, _AAPI.StructAkiEstadTurning_Sa_set)
    __swig_setmethods__["Sd"] = _AAPI.StructAkiEstadTurning_Sd_set
    __swig_getmethods__["Sd"] = _AAPI.StructAkiEstadTurning_Sd_get
    if _newclass:Sd = _swig_property(_AAPI.StructAkiEstadTurning_Sd_get, _AAPI.StructAkiEstadTurning_Sd_set)
    __swig_setmethods__["SHa"] = _AAPI.StructAkiEstadTurning_SHa_set
    __swig_getmethods__["SHa"] = _AAPI.StructAkiEstadTurning_SHa_get
    if _newclass:SHa = _swig_property(_AAPI.StructAkiEstadTurning_SHa_get, _AAPI.StructAkiEstadTurning_SHa_set)
    __swig_setmethods__["SHd"] = _AAPI.StructAkiEstadTurning_SHd_set
    __swig_getmethods__["SHd"] = _AAPI.StructAkiEstadTurning_SHd_get
    if _newclass:SHd = _swig_property(_AAPI.StructAkiEstadTurning_SHd_get, _AAPI.StructAkiEstadTurning_SHd_set)
    __swig_setmethods__["STa"] = _AAPI.StructAkiEstadTurning_STa_set
    __swig_getmethods__["STa"] = _AAPI.StructAkiEstadTurning_STa_get
    if _newclass:STa = _swig_property(_AAPI.StructAkiEstadTurning_STa_get, _AAPI.StructAkiEstadTurning_STa_set)
    __swig_setmethods__["STd"] = _AAPI.StructAkiEstadTurning_STd_set
    __swig_getmethods__["STd"] = _AAPI.StructAkiEstadTurning_STd_get
    if _newclass:STd = _swig_property(_AAPI.StructAkiEstadTurning_STd_get, _AAPI.StructAkiEstadTurning_STd_set)
    __swig_setmethods__["NumStops"] = _AAPI.StructAkiEstadTurning_NumStops_set
    __swig_getmethods__["NumStops"] = _AAPI.StructAkiEstadTurning_NumStops_get
    if _newclass:NumStops = _swig_property(_AAPI.StructAkiEstadTurning_NumStops_get, _AAPI.StructAkiEstadTurning_NumStops_set)
    __swig_setmethods__["LongQueueAvg"] = _AAPI.StructAkiEstadTurning_LongQueueAvg_set
    __swig_getmethods__["LongQueueAvg"] = _AAPI.StructAkiEstadTurning_LongQueueAvg_get
    if _newclass:LongQueueAvg = _swig_property(_AAPI.StructAkiEstadTurning_LongQueueAvg_get, _AAPI.StructAkiEstadTurning_LongQueueAvg_set)
    __swig_setmethods__["LongQueueMax"] = _AAPI.StructAkiEstadTurning_LongQueueMax_set
    __swig_getmethods__["LongQueueMax"] = _AAPI.StructAkiEstadTurning_LongQueueMax_get
    if _newclass:LongQueueMax = _swig_property(_AAPI.StructAkiEstadTurning_LongQueueMax_get, _AAPI.StructAkiEstadTurning_LongQueueMax_set)
    __swig_setmethods__["TotalTravel"] = _AAPI.StructAkiEstadTurning_TotalTravel_set
    __swig_getmethods__["TotalTravel"] = _AAPI.StructAkiEstadTurning_TotalTravel_get
    if _newclass:TotalTravel = _swig_property(_AAPI.StructAkiEstadTurning_TotalTravel_get, _AAPI.StructAkiEstadTurning_TotalTravel_set)
    __swig_setmethods__["TotalTravelTime"] = _AAPI.StructAkiEstadTurning_TotalTravelTime_set
    __swig_getmethods__["TotalTravelTime"] = _AAPI.StructAkiEstadTurning_TotalTravelTime_get
    if _newclass:TotalTravelTime = _swig_property(_AAPI.StructAkiEstadTurning_TotalTravelTime_get, _AAPI.StructAkiEstadTurning_TotalTravelTime_set)
    def __init__(self, *args):
        this = _AAPI.new_StructAkiEstadTurning(*args)
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _AAPI.delete_StructAkiEstadTurning
    __del__ = lambda self : None;
StructAkiEstadTurning_swigregister = _AAPI.StructAkiEstadTurning_swigregister
StructAkiEstadTurning_swigregister(StructAkiEstadTurning)
class StructAkiEstadODPair(_object):
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, StructAkiEstadODPair, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, StructAkiEstadODPair, name)
    __repr__ = _swig_repr
    __swig_setmethods__["report"] = _AAPI.StructAkiEstadODPair_report_set
    __swig_getmethods__["report"] = _AAPI.StructAkiEstadODPair_report_get
    if _newclass:report = _swig_property(_AAPI.StructAkiEstadODPair_report_get, _AAPI.StructAkiEstadODPair_report_set)
    __swig_setmethods__["IdOrigin"] = _AAPI.StructAkiEstadODPair_IdOrigin_set
    __swig_getmethods__["IdOrigin"] = _AAPI.StructAkiEstadODPair_IdOrigin_get
    if _newclass:IdOrigin = _swig_property(_AAPI.StructAkiEstadODPair_IdOrigin_get, _AAPI.StructAkiEstadODPair_IdOrigin_set)
    __swig_setmethods__["IdDest"] = _AAPI.StructAkiEstadODPair_IdDest_set
    __swig_getmethods__["IdDest"] = _AAPI.StructAkiEstadODPair_IdDest_get
    if _newclass:IdDest = _swig_property(_AAPI.StructAkiEstadODPair_IdDest_get, _AAPI.StructAkiEstadODPair_IdDest_set)
    __swig_setmethods__["Flow"] = _AAPI.StructAkiEstadODPair_Flow_set
    __swig_getmethods__["Flow"] = _AAPI.StructAkiEstadODPair_Flow_get
    if _newclass:Flow = _swig_property(_AAPI.StructAkiEstadODPair_Flow_get, _AAPI.StructAkiEstadODPair_Flow_set)
    __swig_setmethods__["TTa"] = _AAPI.StructAkiEstadODPair_TTa_set
    __swig_getmethods__["TTa"] = _AAPI.StructAkiEstadODPair_TTa_get
    if _newclass:TTa = _swig_property(_AAPI.StructAkiEstadODPair_TTa_get, _AAPI.StructAkiEstadODPair_TTa_set)
    __swig_setmethods__["TTd"] = _AAPI.StructAkiEstadODPair_TTd_set
    __swig_getmethods__["TTd"] = _AAPI.StructAkiEstadODPair_TTd_get
    if _newclass:TTd = _swig_property(_AAPI.StructAkiEstadODPair_TTd_get, _AAPI.StructAkiEstadODPair_TTd_set)
    __swig_setmethods__["DTa"] = _AAPI.StructAkiEstadODPair_DTa_set
    __swig_getmethods__["DTa"] = _AAPI.StructAkiEstadODPair_DTa_get
    if _newclass:DTa = _swig_property(_AAPI.StructAkiEstadODPair_DTa_get, _AAPI.StructAkiEstadODPair_DTa_set)
    __swig_setmethods__["DTd"] = _AAPI.StructAkiEstadODPair_DTd_set
    __swig_getmethods__["DTd"] = _AAPI.StructAkiEstadODPair_DTd_get
    if _newclass:DTd = _swig_property(_AAPI.StructAkiEstadODPair_DTd_get, _AAPI.StructAkiEstadODPair_DTd_set)
    __swig_setmethods__["Sa"] = _AAPI.StructAkiEstadODPair_Sa_set
    __swig_getmethods__["Sa"] = _AAPI.StructAkiEstadODPair_Sa_get
    if _newclass:Sa = _swig_property(_AAPI.StructAkiEstadODPair_Sa_get, _AAPI.StructAkiEstadODPair_Sa_set)
    __swig_setmethods__["Sd"] = _AAPI.StructAkiEstadODPair_Sd_set
    __swig_getmethods__["Sd"] = _AAPI.StructAkiEstadODPair_Sd_get
    if _newclass:Sd = _swig_property(_AAPI.StructAkiEstadODPair_Sd_get, _AAPI.StructAkiEstadODPair_Sd_set)
    __swig_setmethods__["SHa"] = _AAPI.StructAkiEstadODPair_SHa_set
    __swig_getmethods__["SHa"] = _AAPI.StructAkiEstadODPair_SHa_get
    if _newclass:SHa = _swig_property(_AAPI.StructAkiEstadODPair_SHa_get, _AAPI.StructAkiEstadODPair_SHa_set)
    __swig_setmethods__["SHd"] = _AAPI.StructAkiEstadODPair_SHd_set
    __swig_getmethods__["SHd"] = _AAPI.StructAkiEstadODPair_SHd_get
    if _newclass:SHd = _swig_property(_AAPI.StructAkiEstadODPair_SHd_get, _AAPI.StructAkiEstadODPair_SHd_set)
    __swig_setmethods__["STa"] = _AAPI.StructAkiEstadODPair_STa_set
    __swig_getmethods__["STa"] = _AAPI.StructAkiEstadODPair_STa_get
    if _newclass:STa = _swig_property(_AAPI.StructAkiEstadODPair_STa_get, _AAPI.StructAkiEstadODPair_STa_set)
    __swig_setmethods__["STd"] = _AAPI.StructAkiEstadODPair_STd_set
    __swig_getmethods__["STd"] = _AAPI.StructAkiEstadODPair_STd_get
    if _newclass:STd = _swig_property(_AAPI.StructAkiEstadODPair_STd_get, _AAPI.StructAkiEstadODPair_STd_set)
    __swig_setmethods__["NumStops"] = _AAPI.StructAkiEstadODPair_NumStops_set
    __swig_getmethods__["NumStops"] = _AAPI.StructAkiEstadODPair_NumStops_get
    if _newclass:NumStops = _swig_property(_AAPI.StructAkiEstadODPair_NumStops_get, _AAPI.StructAkiEstadODPair_NumStops_set)
    __swig_setmethods__["virtualQueueAvg"] = _AAPI.StructAkiEstadODPair_virtualQueueAvg_set
    __swig_getmethods__["virtualQueueAvg"] = _AAPI.StructAkiEstadODPair_virtualQueueAvg_get
    if _newclass:virtualQueueAvg = _swig_property(_AAPI.StructAkiEstadODPair_virtualQueueAvg_get, _AAPI.StructAkiEstadODPair_virtualQueueAvg_set)
    __swig_setmethods__["virtualQueueMax"] = _AAPI.StructAkiEstadODPair_virtualQueueMax_set
    __swig_getmethods__["virtualQueueMax"] = _AAPI.StructAkiEstadODPair_virtualQueueMax_get
    if _newclass:virtualQueueMax = _swig_property(_AAPI.StructAkiEstadODPair_virtualQueueMax_get, _AAPI.StructAkiEstadODPair_virtualQueueMax_set)
    __swig_setmethods__["TotalTravel"] = _AAPI.StructAkiEstadODPair_TotalTravel_set
    __swig_getmethods__["TotalTravel"] = _AAPI.StructAkiEstadODPair_TotalTravel_get
    if _newclass:TotalTravel = _swig_property(_AAPI.StructAkiEstadODPair_TotalTravel_get, _AAPI.StructAkiEstadODPair_TotalTravel_set)
    __swig_setmethods__["TotalTravelTime"] = _AAPI.StructAkiEstadODPair_TotalTravelTime_set
    __swig_getmethods__["TotalTravelTime"] = _AAPI.StructAkiEstadODPair_TotalTravelTime_get
    if _newclass:TotalTravelTime = _swig_property(_AAPI.StructAkiEstadODPair_TotalTravelTime_get, _AAPI.StructAkiEstadODPair_TotalTravelTime_set)
    __swig_setmethods__["vehLost"] = _AAPI.StructAkiEstadODPair_vehLost_set
    __swig_getmethods__["vehLost"] = _AAPI.StructAkiEstadODPair_vehLost_get
    if _newclass:vehLost = _swig_property(_AAPI.StructAkiEstadODPair_vehLost_get, _AAPI.StructAkiEstadODPair_vehLost_set)
    def __init__(self, *args):
        this = _AAPI.new_StructAkiEstadODPair(*args)
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _AAPI.delete_StructAkiEstadODPair
    __del__ = lambda self : None;
StructAkiEstadODPair_swigregister = _AAPI.StructAkiEstadODPair_swigregister
StructAkiEstadODPair_swigregister(StructAkiEstadODPair)
class StructAkiEstadStream(_object):
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, StructAkiEstadStream, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, StructAkiEstadStream, name)
    __repr__ = _swig_repr
    __swig_setmethods__["report"] = _AAPI.StructAkiEstadStream_report_set
    __swig_getmethods__["report"] = _AAPI.StructAkiEstadStream_report_get
    if _newclass:report = _swig_property(_AAPI.StructAkiEstadStream_report_get, _AAPI.StructAkiEstadStream_report_set)
    __swig_setmethods__["Id"] = _AAPI.StructAkiEstadStream_Id_set
    __swig_getmethods__["Id"] = _AAPI.StructAkiEstadStream_Id_get
    if _newclass:Id = _swig_property(_AAPI.StructAkiEstadStream_Id_get, _AAPI.StructAkiEstadStream_Id_set)
    __swig_setmethods__["Flow"] = _AAPI.StructAkiEstadStream_Flow_set
    __swig_getmethods__["Flow"] = _AAPI.StructAkiEstadStream_Flow_get
    if _newclass:Flow = _swig_property(_AAPI.StructAkiEstadStream_Flow_get, _AAPI.StructAkiEstadStream_Flow_set)
    __swig_setmethods__["TTa"] = _AAPI.StructAkiEstadStream_TTa_set
    __swig_getmethods__["TTa"] = _AAPI.StructAkiEstadStream_TTa_get
    if _newclass:TTa = _swig_property(_AAPI.StructAkiEstadStream_TTa_get, _AAPI.StructAkiEstadStream_TTa_set)
    __swig_setmethods__["TTd"] = _AAPI.StructAkiEstadStream_TTd_set
    __swig_getmethods__["TTd"] = _AAPI.StructAkiEstadStream_TTd_get
    if _newclass:TTd = _swig_property(_AAPI.StructAkiEstadStream_TTd_get, _AAPI.StructAkiEstadStream_TTd_set)
    __swig_setmethods__["DTa"] = _AAPI.StructAkiEstadStream_DTa_set
    __swig_getmethods__["DTa"] = _AAPI.StructAkiEstadStream_DTa_get
    if _newclass:DTa = _swig_property(_AAPI.StructAkiEstadStream_DTa_get, _AAPI.StructAkiEstadStream_DTa_set)
    __swig_setmethods__["DTd"] = _AAPI.StructAkiEstadStream_DTd_set
    __swig_getmethods__["DTd"] = _AAPI.StructAkiEstadStream_DTd_get
    if _newclass:DTd = _swig_property(_AAPI.StructAkiEstadStream_DTd_get, _AAPI.StructAkiEstadStream_DTd_set)
    __swig_setmethods__["Sa"] = _AAPI.StructAkiEstadStream_Sa_set
    __swig_getmethods__["Sa"] = _AAPI.StructAkiEstadStream_Sa_get
    if _newclass:Sa = _swig_property(_AAPI.StructAkiEstadStream_Sa_get, _AAPI.StructAkiEstadStream_Sa_set)
    __swig_setmethods__["Sd"] = _AAPI.StructAkiEstadStream_Sd_set
    __swig_getmethods__["Sd"] = _AAPI.StructAkiEstadStream_Sd_get
    if _newclass:Sd = _swig_property(_AAPI.StructAkiEstadStream_Sd_get, _AAPI.StructAkiEstadStream_Sd_set)
    __swig_setmethods__["SHa"] = _AAPI.StructAkiEstadStream_SHa_set
    __swig_getmethods__["SHa"] = _AAPI.StructAkiEstadStream_SHa_get
    if _newclass:SHa = _swig_property(_AAPI.StructAkiEstadStream_SHa_get, _AAPI.StructAkiEstadStream_SHa_set)
    __swig_setmethods__["SHd"] = _AAPI.StructAkiEstadStream_SHd_set
    __swig_getmethods__["SHd"] = _AAPI.StructAkiEstadStream_SHd_get
    if _newclass:SHd = _swig_property(_AAPI.StructAkiEstadStream_SHd_get, _AAPI.StructAkiEstadStream_SHd_set)
    __swig_setmethods__["STa"] = _AAPI.StructAkiEstadStream_STa_set
    __swig_getmethods__["STa"] = _AAPI.StructAkiEstadStream_STa_get
    if _newclass:STa = _swig_property(_AAPI.StructAkiEstadStream_STa_get, _AAPI.StructAkiEstadStream_STa_set)
    __swig_setmethods__["STd"] = _AAPI.StructAkiEstadStream_STd_set
    __swig_getmethods__["STd"] = _AAPI.StructAkiEstadStream_STd_get
    if _newclass:STd = _swig_property(_AAPI.StructAkiEstadStream_STd_get, _AAPI.StructAkiEstadStream_STd_set)
    __swig_setmethods__["NumStops"] = _AAPI.StructAkiEstadStream_NumStops_set
    __swig_getmethods__["NumStops"] = _AAPI.StructAkiEstadStream_NumStops_get
    if _newclass:NumStops = _swig_property(_AAPI.StructAkiEstadStream_NumStops_get, _AAPI.StructAkiEstadStream_NumStops_set)
    __swig_setmethods__["virtualQueueAvg"] = _AAPI.StructAkiEstadStream_virtualQueueAvg_set
    __swig_getmethods__["virtualQueueAvg"] = _AAPI.StructAkiEstadStream_virtualQueueAvg_get
    if _newclass:virtualQueueAvg = _swig_property(_AAPI.StructAkiEstadStream_virtualQueueAvg_get, _AAPI.StructAkiEstadStream_virtualQueueAvg_set)
    __swig_setmethods__["virtualQueueMax"] = _AAPI.StructAkiEstadStream_virtualQueueMax_set
    __swig_getmethods__["virtualQueueMax"] = _AAPI.StructAkiEstadStream_virtualQueueMax_get
    if _newclass:virtualQueueMax = _swig_property(_AAPI.StructAkiEstadStream_virtualQueueMax_get, _AAPI.StructAkiEstadStream_virtualQueueMax_set)
    __swig_setmethods__["TotalTravel"] = _AAPI.StructAkiEstadStream_TotalTravel_set
    __swig_getmethods__["TotalTravel"] = _AAPI.StructAkiEstadStream_TotalTravel_get
    if _newclass:TotalTravel = _swig_property(_AAPI.StructAkiEstadStream_TotalTravel_get, _AAPI.StructAkiEstadStream_TotalTravel_set)
    __swig_setmethods__["TotalTravelTime"] = _AAPI.StructAkiEstadStream_TotalTravelTime_set
    __swig_getmethods__["TotalTravelTime"] = _AAPI.StructAkiEstadStream_TotalTravelTime_get
    if _newclass:TotalTravelTime = _swig_property(_AAPI.StructAkiEstadStream_TotalTravelTime_get, _AAPI.StructAkiEstadStream_TotalTravelTime_set)
    def __init__(self, *args):
        this = _AAPI.new_StructAkiEstadStream(*args)
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _AAPI.delete_StructAkiEstadStream
    __del__ = lambda self : None;
StructAkiEstadStream_swigregister = _AAPI.StructAkiEstadStream_swigregister
StructAkiEstadStream_swigregister(StructAkiEstadStream)
AKIEstGetIntervalStatistics = _AAPI.AKIEstGetIntervalStatistics
AKIIsGatheringStatistics = _AAPI.AKIIsGatheringStatistics
AKIEstIsNewStatisticsAvailable = _AAPI.AKIEstIsNewStatisticsAvailable
AKIEstGetGlobalStatisticsSection = _AAPI.AKIEstGetGlobalStatisticsSection
AKIEstGetParcialStatisticsSection = _AAPI.AKIEstGetParcialStatisticsSection
AKIEstGetGlobalStatisticsSectionLane = _AAPI.AKIEstGetGlobalStatisticsSectionLane
AKIEstGetParcialStatisticsSectionLane = _AAPI.AKIEstGetParcialStatisticsSectionLane
AKIEstGetGlobalStatisticsTurning = _AAPI.AKIEstGetGlobalStatisticsTurning
AKIEstGetParcialStatisticsTurning = _AAPI.AKIEstGetParcialStatisticsTurning
AKIEstGetGlobalStatisticsLink = _AAPI.AKIEstGetGlobalStatisticsLink
AKIEstGetParcialStatisticsLink = _AAPI.AKIEstGetParcialStatisticsLink
AKIEstGetGlobalStatisticsSystem = _AAPI.AKIEstGetGlobalStatisticsSystem
AKIEstGetParcialStatisticsSystem = _AAPI.AKIEstGetParcialStatisticsSystem
AKIEstGetGlobalStatisticsODPair = _AAPI.AKIEstGetGlobalStatisticsODPair
AKIEstGetParcialStatisticsODPair = _AAPI.AKIEstGetParcialStatisticsODPair
AKIEstGetParcialStatisticsStream = _AAPI.AKIEstGetParcialStatisticsStream
AKIEstGetGlobalStatisticsStream = _AAPI.AKIEstGetGlobalStatisticsStream
AKIEstGetInstantVirtualQueueSection = _AAPI.AKIEstGetInstantVirtualQueueSection
AKIEstGetGlobalStatisticsSectionFuelCons = _AAPI.AKIEstGetGlobalStatisticsSectionFuelCons
AKIEstGetParcialStatisticsSectionFuelCons = _AAPI.AKIEstGetParcialStatisticsSectionFuelCons
AKIEstGetGlobalStatisticsTurningFuelCons = _AAPI.AKIEstGetGlobalStatisticsTurningFuelCons
AKIEstGetParcialStatisticsTurningFuelCons = _AAPI.AKIEstGetParcialStatisticsTurningFuelCons
AKIEstGetGlobalStatisticsLinkFuelCons = _AAPI.AKIEstGetGlobalStatisticsLinkFuelCons
AKIEstGetParcialStatisticsLinkFuelCons = _AAPI.AKIEstGetParcialStatisticsLinkFuelCons
AKIEstGetGlobalStatisticsSystemFuelCons = _AAPI.AKIEstGetGlobalStatisticsSystemFuelCons
AKIEstGetParcialStatisticsSystemFuelCons = _AAPI.AKIEstGetParcialStatisticsSystemFuelCons
AKIEstGetGlobalStatisticsODPairFuelCons = _AAPI.AKIEstGetGlobalStatisticsODPairFuelCons
AKIEstGetParcialStatisticsODPairFuelCons = _AAPI.AKIEstGetParcialStatisticsODPairFuelCons
AKIEstGetParcialStatisticsStreamFuelCons = _AAPI.AKIEstGetParcialStatisticsStreamFuelCons
AKIEstGetGlobalStatisticsStreamFuelCons = _AAPI.AKIEstGetGlobalStatisticsStreamFuelCons
AKIEstGetNbPollutants = _AAPI.AKIEstGetNbPollutants
AKIEstGetPollutantNameA = _AAPI.AKIEstGetPollutantNameA
AKIEstGetGlobalStatisticsSectionPollution = _AAPI.AKIEstGetGlobalStatisticsSectionPollution
AKIEstGetParcialStatisticsSectionPollution = _AAPI.AKIEstGetParcialStatisticsSectionPollution
AKIEstGetGlobalStatisticsTurningPollution = _AAPI.AKIEstGetGlobalStatisticsTurningPollution
AKIEstGetParcialStatisticsTurningPollution = _AAPI.AKIEstGetParcialStatisticsTurningPollution
AKIEstGetGlobalStatisticsLinkPollution = _AAPI.AKIEstGetGlobalStatisticsLinkPollution
AKIEstGetParcialStatisticsLinkPollution = _AAPI.AKIEstGetParcialStatisticsLinkPollution
AKIEstGetGlobalStatisticsSystemPollution = _AAPI.AKIEstGetGlobalStatisticsSystemPollution
AKIEstGetParcialStatisticsSystemPollution = _AAPI.AKIEstGetParcialStatisticsSystemPollution
AKIEstGetGlobalStatisticsODPairPollution = _AAPI.AKIEstGetGlobalStatisticsODPairPollution
AKIEstGetParcialStatisticsODPairPollution = _AAPI.AKIEstGetParcialStatisticsODPairPollution
AKIEstGetParcialStatisticsStreamPollution = _AAPI.AKIEstGetParcialStatisticsStreamPollution
AKIEstGetGlobalStatisticsStreamPollution = _AAPI.AKIEstGetGlobalStatisticsStreamPollution
AKIEnterVehTrafficFlow = _AAPI.AKIEnterVehTrafficFlow
AKIEnterVehTrafficOD = _AAPI.AKIEnterVehTrafficOD
AKIPutVehTrafficFlow = _AAPI.AKIPutVehTrafficFlow
AKIPutVehTrafficOD = _AAPI.AKIPutVehTrafficOD
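# Illustrative sketch (editorial addition, never called at import time):
# polling interval statistics from an AAPI callback such as AAPIManage. The
# argument shape of AKIEstGetParcialStatisticsSection (section id, statistics
# time, vehicle type position with 0 = all types) is an assumption to verify
# against the Aimsun API manual.
def _example_poll_section_stats(section_id, timeSta):
    if AKIIsGatheringStatistics() and AKIEstIsNewStatisticsAvailable():
        stats = AKIEstGetParcialStatisticsSection(section_id, timeSta, 0)
        if stats.report == 0:
            print("flow %d veh/h, mean speed %.1f" % (stats.Flow, stats.Sa))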
class InfArrival(_object):
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, InfArrival, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, InfArrival, name)
    __repr__ = _swig_repr
    __swig_setmethods__["report"] = _AAPI.InfArrival_report_set
    __swig_getmethods__["report"] = _AAPI.InfArrival_report_get
    if _newclass:report = _swig_property(_AAPI.InfArrival_report_get, _AAPI.InfArrival_report_set)
    __swig_setmethods__["idVeh"] = _AAPI.InfArrival_idVeh_set
    __swig_getmethods__["idVeh"] = _AAPI.InfArrival_idVeh_get
    if _newclass:idVeh = _swig_property(_AAPI.InfArrival_idVeh_get, _AAPI.InfArrival_idVeh_set)
    __swig_setmethods__["inVirtualQueue"] = _AAPI.InfArrival_inVirtualQueue_set
    __swig_getmethods__["inVirtualQueue"] = _AAPI.InfArrival_inVirtualQueue_get
    if _newclass:inVirtualQueue = _swig_property(_AAPI.InfArrival_inVirtualQueue_get, _AAPI.InfArrival_inVirtualQueue_set)
    __swig_setmethods__["entranceSection"] = _AAPI.InfArrival_entranceSection_set
    __swig_getmethods__["entranceSection"] = _AAPI.InfArrival_entranceSection_get
    if _newclass:entranceSection = _swig_property(_AAPI.InfArrival_entranceSection_get, _AAPI.InfArrival_entranceSection_set)
    def __init__(self, *args):
        this = _AAPI.new_InfArrival(*args)
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _AAPI.delete_InfArrival
    __del__ = lambda self : None;
InfArrival_swigregister = _AAPI.InfArrival_swigregister
InfArrival_swigregister(InfArrival)
AKIGenerateArrivalTrafficFlow = _AAPI.AKIGenerateArrivalTrafficFlow
AKIGenerateArrivalTrafficOD = _AAPI.AKIGenerateArrivalTrafficOD
AKIVehTrackedModifySpeed = _AAPI.AKIVehTrackedModifySpeed
AKIVehTrackedModifyLane = _AAPI.AKIVehTrackedModifyLane
AKIVehTrackedModifyNextSection = _AAPI.AKIVehTrackedModifyNextSection
AKIVehTrackedModifyNextSections = _AAPI.AKIVehTrackedModifyNextSections
AKIVehTrackedModifyNextTargetLaneInNextSection = _AAPI.AKIVehTrackedModifyNextTargetLaneInNextSection
AKIVehTrackedDelete = _AAPI.AKIVehTrackedDelete
AKIVehSetAsTracked = _AAPI.AKIVehSetAsTracked
AKIVehSetAsTrackedbyPointer = _AAPI.AKIVehSetAsTrackedbyPointer
AKIVehSetAsNoTracked = _AAPI.AKIVehSetAsNoTracked
AKIVehTrackedGetStaticInf = _AAPI.AKIVehTrackedGetStaticInf
AKIVehTrackedGetInf = _AAPI.AKIVehTrackedGetInf
AKIVehTrackedSetStaticInf = _AAPI.AKIVehTrackedSetStaticInf
AKIVehTrackedGetPos = _AAPI.AKIVehTrackedGetPos
AKIVehTrackedGetPosWithStruct = _AAPI.AKIVehTrackedGetPosWithStruct
AKIVehSetVehicleTrackedDynInf = _AAPI.AKIVehSetVehicleTrackedDynInf
AKIVehTrackedGetGraphicInf = _AAPI.AKIVehTrackedGetGraphicInf
AKIGetTimeSta = _AAPI.AKIGetTimeSta
AKIGetCurrentSimulationTime = _AAPI.AKIGetCurrentSimulationTime
AKIGetIniSimTime = _AAPI.AKIGetIniSimTime
AKIGetEndSimTime = _AAPI.AKIGetEndSimTime
AKIGetDurationTransTime = _AAPI.AKIGetDurationTransTime
AKIGetSimulationStepTime = _AAPI.AKIGetSimulationStepTime
AKISetEndSimTime = _AAPI.AKISetEndSimTime
AKIGetRandomNumber = _AAPI.AKIGetRandomNumber
AKIGenerateIncident = _AAPI.AKIGenerateIncident
AKIRemoveIncident = _AAPI.AKIRemoveIncident
AKIRemoveAllIncidentsInSection = _AAPI.AKIRemoveAllIncidentsInSection
AKIResetAllIncidents = _AAPI.AKIResetAllIncidents
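# Illustrative sketch (editorial addition, never called at import time): the
# tracked-vehicle workflow. A vehicle must be flagged as tracked before the
# AKIVehTracked* setters apply to it; the km/h unit for
# AKIVehTrackedModifySpeed is an assumption to confirm in the manual. Call
# AKIVehSetAsNoTracked(id_veh) when per-vehicle control is no longer needed.
def _example_slow_down_vehicle(id_veh):
    AKIVehSetAsTracked(id_veh)              # enable per-vehicle control
    AKIVehTrackedModifySpeed(id_veh, 30.0)  # new desired speed, assumed km/h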
class InfPTVeh(_object):
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, InfPTVeh, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, InfPTVeh, name)
    __repr__ = _swig_repr
    __swig_setmethods__["report"] = _AAPI.InfPTVeh_report_set
    __swig_getmethods__["report"] = _AAPI.InfPTVeh_report_get
    if _newclass:report = _swig_property(_AAPI.InfPTVeh_report_get, _AAPI.InfPTVeh_report_set)
    __swig_setmethods__["idVeh"] = _AAPI.InfPTVeh_idVeh_set
    __swig_getmethods__["idVeh"] = _AAPI.InfPTVeh_idVeh_get
    if _newclass:idVeh = _swig_property(_AAPI.InfPTVeh_idVeh_get, _AAPI.InfPTVeh_idVeh_set)
    __swig_setmethods__["type"] = _AAPI.InfPTVeh_type_set
    __swig_getmethods__["type"] = _AAPI.InfPTVeh_type_get
    if _newclass:type = _swig_property(_AAPI.InfPTVeh_type_get, _AAPI.InfPTVeh_type_set)
    __swig_setmethods__["idSection"] = _AAPI.InfPTVeh_idSection_set
    __swig_getmethods__["idSection"] = _AAPI.InfPTVeh_idSection_get
    if _newclass:idSection = _swig_property(_AAPI.InfPTVeh_idSection_get, _AAPI.InfPTVeh_idSection_set)
    __swig_setmethods__["segment"] = _AAPI.InfPTVeh_segment_set
    __swig_getmethods__["segment"] = _AAPI.InfPTVeh_segment_get
    if _newclass:segment = _swig_property(_AAPI.InfPTVeh_segment_get, _AAPI.InfPTVeh_segment_set)
    __swig_setmethods__["numberLane"] = _AAPI.InfPTVeh_numberLane_set
    __swig_getmethods__["numberLane"] = _AAPI.InfPTVeh_numberLane_get
    if _newclass:numberLane = _swig_property(_AAPI.InfPTVeh_numberLane_get, _AAPI.InfPTVeh_numberLane_set)
    __swig_setmethods__["idJunction"] = _AAPI.InfPTVeh_idJunction_set
    __swig_getmethods__["idJunction"] = _AAPI.InfPTVeh_idJunction_get
    if _newclass:idJunction = _swig_property(_AAPI.InfPTVeh_idJunction_get, _AAPI.InfPTVeh_idJunction_set)
    __swig_setmethods__["idSectionFrom"] = _AAPI.InfPTVeh_idSectionFrom_set
    __swig_getmethods__["idSectionFrom"] = _AAPI.InfPTVeh_idSectionFrom_get
    if _newclass:idSectionFrom = _swig_property(_AAPI.InfPTVeh_idSectionFrom_get, _AAPI.InfPTVeh_idSectionFrom_set)
    __swig_setmethods__["idLaneFrom"] = _AAPI.InfPTVeh_idLaneFrom_set
    __swig_getmethods__["idLaneFrom"] = _AAPI.InfPTVeh_idLaneFrom_get
    if _newclass:idLaneFrom = _swig_property(_AAPI.InfPTVeh_idLaneFrom_get, _AAPI.InfPTVeh_idLaneFrom_set)
    __swig_setmethods__["idSectionTo"] = _AAPI.InfPTVeh_idSectionTo_set
    __swig_getmethods__["idSectionTo"] = _AAPI.InfPTVeh_idSectionTo_get
    if _newclass:idSectionTo = _swig_property(_AAPI.InfPTVeh_idSectionTo_get, _AAPI.InfPTVeh_idSectionTo_set)
    __swig_setmethods__["idLaneTo"] = _AAPI.InfPTVeh_idLaneTo_set
    __swig_getmethods__["idLaneTo"] = _AAPI.InfPTVeh_idLaneTo_get
    if _newclass:idLaneTo = _swig_property(_AAPI.InfPTVeh_idLaneTo_get, _AAPI.InfPTVeh_idLaneTo_set)
    __swig_setmethods__["CurrentPos"] = _AAPI.InfPTVeh_CurrentPos_set
    __swig_getmethods__["CurrentPos"] = _AAPI.InfPTVeh_CurrentPos_get
    if _newclass:CurrentPos = _swig_property(_AAPI.InfPTVeh_CurrentPos_get, _AAPI.InfPTVeh_CurrentPos_set)
    __swig_setmethods__["distance2End"] = _AAPI.InfPTVeh_distance2End_set
    __swig_getmethods__["distance2End"] = _AAPI.InfPTVeh_distance2End_get
    if _newclass:distance2End = _swig_property(_AAPI.InfPTVeh_distance2End_get, _AAPI.InfPTVeh_distance2End_set)
    __swig_setmethods__["xCurrentPos"] = _AAPI.InfPTVeh_xCurrentPos_set
    __swig_getmethods__["xCurrentPos"] = _AAPI.InfPTVeh_xCurrentPos_get
    if _newclass:xCurrentPos = _swig_property(_AAPI.InfPTVeh_xCurrentPos_get, _AAPI.InfPTVeh_xCurrentPos_set)
    __swig_setmethods__["yCurrentPos"] = _AAPI.InfPTVeh_yCurrentPos_set
    __swig_getmethods__["yCurrentPos"] = _AAPI.InfPTVeh_yCurrentPos_get
    if _newclass:yCurrentPos = _swig_property(_AAPI.InfPTVeh_yCurrentPos_get, _AAPI.InfPTVeh_yCurrentPos_set)
    __swig_setmethods__["zCurrentPos"] = _AAPI.InfPTVeh_zCurrentPos_set
    __swig_getmethods__["zCurrentPos"] = _AAPI.InfPTVeh_zCurrentPos_get
    if _newclass:zCurrentPos = _swig_property(_AAPI.InfPTVeh_zCurrentPos_get, _AAPI.InfPTVeh_zCurrentPos_set)
    __swig_setmethods__["xCurrentPosBack"] = _AAPI.InfPTVeh_xCurrentPosBack_set
    __swig_getmethods__["xCurrentPosBack"] = _AAPI.InfPTVeh_xCurrentPosBack_get
    if _newclass:xCurrentPosBack = _swig_property(_AAPI.InfPTVeh_xCurrentPosBack_get, _AAPI.InfPTVeh_xCurrentPosBack_set)
    __swig_setmethods__["yCurrentPosBack"] = _AAPI.InfPTVeh_yCurrentPosBack_set
    __swig_getmethods__["yCurrentPosBack"] = _AAPI.InfPTVeh_yCurrentPosBack_get
    if _newclass:yCurrentPosBack = _swig_property(_AAPI.InfPTVeh_yCurrentPosBack_get, _AAPI.InfPTVeh_yCurrentPosBack_set)
    __swig_setmethods__["zCurrentPosBack"] = _AAPI.InfPTVeh_zCurrentPosBack_set
    __swig_getmethods__["zCurrentPosBack"] = _AAPI.InfPTVeh_zCurrentPosBack_get
    if _newclass:zCurrentPosBack = _swig_property(_AAPI.InfPTVeh_zCurrentPosBack_get, _AAPI.InfPTVeh_zCurrentPosBack_set)
    __swig_setmethods__["CurrentSpeed"] = _AAPI.InfPTVeh_CurrentSpeed_set
    __swig_getmethods__["CurrentSpeed"] = _AAPI.InfPTVeh_CurrentSpeed_get
    if _newclass:CurrentSpeed = _swig_property(_AAPI.InfPTVeh_CurrentSpeed_get, _AAPI.InfPTVeh_CurrentSpeed_set)
    __swig_setmethods__["PreviousSpeed"] = _AAPI.InfPTVeh_PreviousSpeed_set
    __swig_getmethods__["PreviousSpeed"] = _AAPI.InfPTVeh_PreviousSpeed_get
    if _newclass:PreviousSpeed = _swig_property(_AAPI.InfPTVeh_PreviousSpeed_get, _AAPI.InfPTVeh_PreviousSpeed_set)
    __swig_setmethods__["TotalDistance"] = _AAPI.InfPTVeh_TotalDistance_set
    __swig_getmethods__["TotalDistance"] = _AAPI.InfPTVeh_TotalDistance_get
    if _newclass:TotalDistance = _swig_property(_AAPI.InfPTVeh_TotalDistance_get, _AAPI.InfPTVeh_TotalDistance_set)
    __swig_setmethods__["SystemEntranceT"] = _AAPI.InfPTVeh_SystemEntranceT_set
    __swig_getmethods__["SystemEntranceT"] = _AAPI.InfPTVeh_SystemEntranceT_get
    if _newclass:SystemEntranceT = _swig_property(_AAPI.InfPTVeh_SystemEntranceT_get, _AAPI.InfPTVeh_SystemEntranceT_set)
    __swig_setmethods__["SectionEntranceT"] = _AAPI.InfPTVeh_SectionEntranceT_set
    __swig_getmethods__["SectionEntranceT"] = _AAPI.InfPTVeh_SectionEntranceT_get
    if _newclass:SectionEntranceT = _swig_property(_AAPI.InfPTVeh_SectionEntranceT_get, _AAPI.InfPTVeh_SectionEntranceT_set)
    __swig_setmethods__["CurrentStopTime"] = _AAPI.InfPTVeh_CurrentStopTime_set
    __swig_getmethods__["CurrentStopTime"] = _AAPI.InfPTVeh_CurrentStopTime_get
    if _newclass:CurrentStopTime = _swig_property(_AAPI.InfPTVeh_CurrentStopTime_get, _AAPI.InfPTVeh_CurrentStopTime_set)
    __swig_setmethods__["nbStopsDone"] = _AAPI.InfPTVeh_nbStopsDone_set
    __swig_getmethods__["nbStopsDone"] = _AAPI.InfPTVeh_nbStopsDone_get
    if _newclass:nbStopsDone = _swig_property(_AAPI.InfPTVeh_nbStopsDone_get, _AAPI.InfPTVeh_nbStopsDone_set)
    __swig_setmethods__["nextStopId"] = _AAPI.InfPTVeh_nextStopId_set
    __swig_getmethods__["nextStopId"] = _AAPI.InfPTVeh_nextStopId_get
    if _newclass:nextStopId = _swig_property(_AAPI.InfPTVeh_nextStopId_get, _AAPI.InfPTVeh_nextStopId_set)
    __swig_setmethods__["distanceNextStop"] = _AAPI.InfPTVeh_distanceNextStop_set
    __swig_getmethods__["distanceNextStop"] = _AAPI.InfPTVeh_distanceNextStop_get
    if _newclass:distanceNextStop = _swig_property(_AAPI.InfPTVeh_distanceNextStop_get, _AAPI.InfPTVeh_distanceNextStop_set)
    __swig_setmethods__["nextServiceTime"] = _AAPI.InfPTVeh_nextServiceTime_set
    __swig_getmethods__["nextServiceTime"] = _AAPI.InfPTVeh_nextServiceTime_get
    if _newclass:nextServiceTime = _swig_property(_AAPI.InfPTVeh_nextServiceTime_get, _AAPI.InfPTVeh_nextServiceTime_set)
    __swig_setmethods__["currentStoppedTimeInBusStop"] = _AAPI.InfPTVeh_currentStoppedTimeInBusStop_set
    __swig_getmethods__["currentStoppedTimeInBusStop"] = _AAPI.InfPTVeh_currentStoppedTimeInBusStop_get
    if _newclass:currentStoppedTimeInBusStop = _swig_property(_AAPI.InfPTVeh_currentStoppedTimeInBusStop_get, _AAPI.InfPTVeh_currentStoppedTimeInBusStop_set)
    def __init__(self, *args):
        this = _AAPI.new_InfPTVeh(*args)
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _AAPI.delete_InfPTVeh
    __del__ = lambda self : None;
InfPTVeh_swigregister = _AAPI.InfPTVeh_swigregister
InfPTVeh_swigregister(InfPTVeh)
class StaticInfPTVeh(_object):
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, StaticInfPTVeh, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, StaticInfPTVeh, name)
    __repr__ = _swig_repr
    __swig_setmethods__["report"] = _AAPI.StaticInfPTVeh_report_set
    __swig_getmethods__["report"] = _AAPI.StaticInfPTVeh_report_get
    if _newclass:report = _swig_property(_AAPI.StaticInfPTVeh_report_get, _AAPI.StaticInfPTVeh_report_set)
    __swig_setmethods__["idVeh"] = _AAPI.StaticInfPTVeh_idVeh_set
    __swig_getmethods__["idVeh"] = _AAPI.StaticInfPTVeh_idVeh_get
    if _newclass:idVeh = _swig_property(_AAPI.StaticInfPTVeh_idVeh_get, _AAPI.StaticInfPTVeh_idVeh_set)
    __swig_setmethods__["type"] = _AAPI.StaticInfPTVeh_type_set
    __swig_getmethods__["type"] = _AAPI.StaticInfPTVeh_type_get
    if _newclass:type = _swig_property(_AAPI.StaticInfPTVeh_type_get, _AAPI.StaticInfPTVeh_type_set)
    __swig_setmethods__["length"] = _AAPI.StaticInfPTVeh_length_set
    __swig_getmethods__["length"] = _AAPI.StaticInfPTVeh_length_get
    if _newclass:length = _swig_property(_AAPI.StaticInfPTVeh_length_get, _AAPI.StaticInfPTVeh_length_set)
    __swig_setmethods__["width"] = _AAPI.StaticInfPTVeh_width_set
    __swig_getmethods__["width"] = _AAPI.StaticInfPTVeh_width_get
    if _newclass:width = _swig_property(_AAPI.StaticInfPTVeh_width_get, _AAPI.StaticInfPTVeh_width_set)
    __swig_setmethods__["maxDesiredSpeed"] = _AAPI.StaticInfPTVeh_maxDesiredSpeed_set
    __swig_getmethods__["maxDesiredSpeed"] = _AAPI.StaticInfPTVeh_maxDesiredSpeed_get
    if _newclass:maxDesiredSpeed = _swig_property(_AAPI.StaticInfPTVeh_maxDesiredSpeed_get, _AAPI.StaticInfPTVeh_maxDesiredSpeed_set)
    __swig_setmethods__["maxAcceleration"] = _AAPI.StaticInfPTVeh_maxAcceleration_set
    __swig_getmethods__["maxAcceleration"] = _AAPI.StaticInfPTVeh_maxAcceleration_get
    if _newclass:maxAcceleration = _swig_property(_AAPI.StaticInfPTVeh_maxAcceleration_get, _AAPI.StaticInfPTVeh_maxAcceleration_set)
    __swig_setmethods__["normalDeceleration"] = _AAPI.StaticInfPTVeh_normalDeceleration_set
    __swig_getmethods__["normalDeceleration"] = _AAPI.StaticInfPTVeh_normalDeceleration_get
    if _newclass:normalDeceleration = _swig_property(_AAPI.StaticInfPTVeh_normalDeceleration_get, _AAPI.StaticInfPTVeh_normalDeceleration_set)
    __swig_setmethods__["maxDeceleration"] = _AAPI.StaticInfPTVeh_maxDeceleration_set
    __swig_getmethods__["maxDeceleration"] = _AAPI.StaticInfPTVeh_maxDeceleration_get
    if _newclass:maxDeceleration = _swig_property(_AAPI.StaticInfPTVeh_maxDeceleration_get, _AAPI.StaticInfPTVeh_maxDeceleration_set)
    __swig_setmethods__["speedAcceptance"] = _AAPI.StaticInfPTVeh_speedAcceptance_set
    __swig_getmethods__["speedAcceptance"] = _AAPI.StaticInfPTVeh_speedAcceptance_get
    if _newclass:speedAcceptance = _swig_property(_AAPI.StaticInfPTVeh_speedAcceptance_get, _AAPI.StaticInfPTVeh_speedAcceptance_set)
    __swig_setmethods__["minDistanceVeh"] = _AAPI.StaticInfPTVeh_minDistanceVeh_set
    __swig_getmethods__["minDistanceVeh"] = _AAPI.StaticInfPTVeh_minDistanceVeh_get
    if _newclass:minDistanceVeh = _swig_property(_AAPI.StaticInfPTVeh_minDistanceVeh_get, _AAPI.StaticInfPTVeh_minDistanceVeh_set)
    __swig_setmethods__["giveWayTime"] = _AAPI.StaticInfPTVeh_giveWayTime_set
    __swig_getmethods__["giveWayTime"] = _AAPI.StaticInfPTVeh_giveWayTime_get
    if _newclass:giveWayTime = _swig_property(_AAPI.StaticInfPTVeh_giveWayTime_get, _AAPI.StaticInfPTVeh_giveWayTime_set)
    __swig_setmethods__["guidanceAcceptance"] = _AAPI.StaticInfPTVeh_guidanceAcceptance_set
    __swig_getmethods__["guidanceAcceptance"] = _AAPI.StaticInfPTVeh_guidanceAcceptance_get
    if _newclass:guidanceAcceptance = _swig_property(_AAPI.StaticInfPTVeh_guidanceAcceptance_get, _AAPI.StaticInfPTVeh_guidanceAcceptance_set)
    __swig_setmethods__["enrouted"] = _AAPI.StaticInfPTVeh_enrouted_set
    __swig_getmethods__["enrouted"] = _AAPI.StaticInfPTVeh_enrouted_get
    if _newclass:enrouted = _swig_property(_AAPI.StaticInfPTVeh_enrouted_get, _AAPI.StaticInfPTVeh_enrouted_set)
    __swig_setmethods__["equipped"] = _AAPI.StaticInfPTVeh_equipped_set
    __swig_getmethods__["equipped"] = _AAPI.StaticInfPTVeh_equipped_get
    if _newclass:equipped = _swig_property(_AAPI.StaticInfPTVeh_equipped_get, _AAPI.StaticInfPTVeh_equipped_set)
    __swig_setmethods__["tracked"] = _AAPI.StaticInfPTVeh_tracked_set
    __swig_getmethods__["tracked"] = _AAPI.StaticInfPTVeh_tracked_get
    if _newclass:tracked = _swig_property(_AAPI.StaticInfPTVeh_tracked_get, _AAPI.StaticInfPTVeh_tracked_set)
    __swig_setmethods__["keepfastLane"] = _AAPI.StaticInfPTVeh_keepfastLane_set
    __swig_getmethods__["keepfastLane"] = _AAPI.StaticInfPTVeh_keepfastLane_get
    if _newclass:keepfastLane = _swig_property(_AAPI.StaticInfPTVeh_keepfastLane_get, _AAPI.StaticInfPTVeh_keepfastLane_set)
    __swig_setmethods__["headwayMin"] = _AAPI.StaticInfPTVeh_headwayMin_set
    __swig_getmethods__["headwayMin"] = _AAPI.StaticInfPTVeh_headwayMin_get
    if _newclass:headwayMin = _swig_property(_AAPI.StaticInfPTVeh_headwayMin_get, _AAPI.StaticInfPTVeh_headwayMin_set)
    __swig_setmethods__["sensitivityFactor"] = _AAPI.StaticInfPTVeh_sensitivityFactor_set
    __swig_getmethods__["sensitivityFactor"] = _AAPI.StaticInfPTVeh_sensitivityFactor_get
    if _newclass:sensitivityFactor = _swig_property(_AAPI.StaticInfPTVeh_sensitivityFactor_get, _AAPI.StaticInfPTVeh_sensitivityFactor_set)
    __swig_setmethods__["reactionTime"] = _AAPI.StaticInfPTVeh_reactionTime_set
    __swig_getmethods__["reactionTime"] = _AAPI.StaticInfPTVeh_reactionTime_get
    if _newclass:reactionTime = _swig_property(_AAPI.StaticInfPTVeh_reactionTime_get, _AAPI.StaticInfPTVeh_reactionTime_set)
    __swig_setmethods__["reactionTimeAtStop"] = _AAPI.StaticInfPTVeh_reactionTimeAtStop_set
    __swig_getmethods__["reactionTimeAtStop"] = _AAPI.StaticInfPTVeh_reactionTimeAtStop_get
    if _newclass:reactionTimeAtStop = _swig_property(_AAPI.StaticInfPTVeh_reactionTimeAtStop_get, _AAPI.StaticInfPTVeh_reactionTimeAtStop_set)
    __swig_setmethods__["maxCapacity"] = _AAPI.StaticInfPTVeh_maxCapacity_set
    __swig_getmethods__["maxCapacity"] = _AAPI.StaticInfPTVeh_maxCapacity_get
    if _newclass:maxCapacity = _swig_property(_AAPI.StaticInfPTVeh_maxCapacity_get, _AAPI.StaticInfPTVeh_maxCapacity_set)
    __swig_setmethods__["currentLoad"] = _AAPI.StaticInfPTVeh_currentLoad_set
    __swig_getmethods__["currentLoad"] = _AAPI.StaticInfPTVeh_currentLoad_get
    if _newclass:currentLoad = _swig_property(_AAPI.StaticInfPTVeh_currentLoad_get, _AAPI.StaticInfPTVeh_currentLoad_set)
    __swig_setmethods__["idLine"] = _AAPI.StaticInfPTVeh_idLine_set
    __swig_getmethods__["idLine"] = _AAPI.StaticInfPTVeh_idLine_get
    if _newclass:idLine = _swig_property(_AAPI.StaticInfPTVeh_idLine_get, _AAPI.StaticInfPTVeh_idLine_set)
    def __init__(self, *args):
        this = _AAPI.new_StaticInfPTVeh(*args)
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _AAPI.delete_StaticInfPTVeh
    __del__ = lambda self : None;
StaticInfPTVeh_swigregister = _AAPI.StaticInfPTVeh_swigregister
StaticInfPTVeh_swigregister(StaticInfPTVeh)
_swig_property(_AAPI.StaticInfPTVeh_enrouted_get, _AAPI.StaticInfPTVeh_enrouted_set) __swig_setmethods__["equipped"] = _AAPI.StaticInfPTVeh_equipped_set __swig_getmethods__["equipped"] = _AAPI.StaticInfPTVeh_equipped_get if _newclass:equipped = _swig_property(_AAPI.StaticInfPTVeh_equipped_get, _AAPI.StaticInfPTVeh_equipped_set) __swig_setmethods__["tracked"] = _AAPI.StaticInfPTVeh_tracked_set __swig_getmethods__["tracked"] = _AAPI.StaticInfPTVeh_tracked_get if _newclass:tracked = _swig_property(_AAPI.StaticInfPTVeh_tracked_get, _AAPI.StaticInfPTVeh_tracked_set) __swig_setmethods__["keepfastLane"] = _AAPI.StaticInfPTVeh_keepfastLane_set __swig_getmethods__["keepfastLane"] = _AAPI.StaticInfPTVeh_keepfastLane_get if _newclass:keepfastLane = _swig_property(_AAPI.StaticInfPTVeh_keepfastLane_get, _AAPI.StaticInfPTVeh_keepfastLane_set) __swig_setmethods__["headwayMin"] = _AAPI.StaticInfPTVeh_headwayMin_set __swig_getmethods__["headwayMin"] = _AAPI.StaticInfPTVeh_headwayMin_get if _newclass:headwayMin = _swig_property(_AAPI.StaticInfPTVeh_headwayMin_get, _AAPI.StaticInfPTVeh_headwayMin_set) __swig_setmethods__["sensitivityFactor"] = _AAPI.StaticInfPTVeh_sensitivityFactor_set __swig_getmethods__["sensitivityFactor"] = _AAPI.StaticInfPTVeh_sensitivityFactor_get if _newclass:sensitivityFactor = _swig_property(_AAPI.StaticInfPTVeh_sensitivityFactor_get, _AAPI.StaticInfPTVeh_sensitivityFactor_set) __swig_setmethods__["reactionTime"] = _AAPI.StaticInfPTVeh_reactionTime_set __swig_getmethods__["reactionTime"] = _AAPI.StaticInfPTVeh_reactionTime_get if _newclass:reactionTime = _swig_property(_AAPI.StaticInfPTVeh_reactionTime_get, _AAPI.StaticInfPTVeh_reactionTime_set) __swig_setmethods__["reactionTimeAtStop"] = _AAPI.StaticInfPTVeh_reactionTimeAtStop_set __swig_getmethods__["reactionTimeAtStop"] = _AAPI.StaticInfPTVeh_reactionTimeAtStop_get if _newclass:reactionTimeAtStop = _swig_property(_AAPI.StaticInfPTVeh_reactionTimeAtStop_get, _AAPI.StaticInfPTVeh_reactionTimeAtStop_set) __swig_setmethods__["maxCapacity"] = _AAPI.StaticInfPTVeh_maxCapacity_set __swig_getmethods__["maxCapacity"] = _AAPI.StaticInfPTVeh_maxCapacity_get if _newclass:maxCapacity = _swig_property(_AAPI.StaticInfPTVeh_maxCapacity_get, _AAPI.StaticInfPTVeh_maxCapacity_set) __swig_setmethods__["currentLoad"] = _AAPI.StaticInfPTVeh_currentLoad_set __swig_getmethods__["currentLoad"] = _AAPI.StaticInfPTVeh_currentLoad_get if _newclass:currentLoad = _swig_property(_AAPI.StaticInfPTVeh_currentLoad_get, _AAPI.StaticInfPTVeh_currentLoad_set) __swig_setmethods__["idLine"] = _AAPI.StaticInfPTVeh_idLine_set __swig_getmethods__["idLine"] = _AAPI.StaticInfPTVeh_idLine_get if _newclass:idLine = _swig_property(_AAPI.StaticInfPTVeh_idLine_get, _AAPI.StaticInfPTVeh_idLine_set) def __init__(self, *args): this = _AAPI.new_StaticInfPTVeh(*args) try: self.this.append(this) except: self.this = this __swig_destroy__ = _AAPI.delete_StaticInfPTVeh __del__ = lambda self : None; StaticInfPTVeh_swigregister = _AAPI.StaticInfPTVeh_swigregister StaticInfPTVeh_swigregister(StaticInfPTVeh) AKIPTGetNumberLines = _AAPI.AKIPTGetNumberLines AKIPTGetIdLine = _AAPI.AKIPTGetIdLine AKIPTGetNumberSectionsInLine = _AAPI.AKIPTGetNumberSectionsInLine AKIPTGetIdSectionInLine = _AAPI.AKIPTGetIdSectionInLine AKIPTGetNumberStopsInLine = _AAPI.AKIPTGetNumberStopsInLine AKIPTGetIdStopsInLine = _AAPI.AKIPTGetIdStopsInLine AKIPTEnterVeh = _AAPI.AKIPTEnterVeh AKIPTVehModifyStopTime = _AAPI.AKIPTVehModifyStopTime AKIPTVehGetStaticInf = _AAPI.AKIPTVehGetStaticInf AKIPTVehGetInf = 
_AAPI.AKIPTVehGetInf AKIPTGetServiceTimeStopsInLine = _AAPI.AKIPTGetServiceTimeStopsInLine AKIPTVehSetCurrentLoad = _AAPI.AKIPTVehSetCurrentLoad AKIPrintString = _AAPI.AKIPrintString AKIPrintAsUNICODEString = _AAPI.AKIPrintAsUNICODEString AKIConvertToAsciiString = _AAPI.AKIConvertToAsciiString AKIConvertFromAsciiString = _AAPI.AKIConvertFromAsciiString AKIDeleteUNICODEString = _AAPI.AKIDeleteUNICODEString AKIODDemandGetNumSlicesOD = _AAPI.AKIODDemandGetNumSlicesOD AKIODDemandGetIniTimeSlice = _AAPI.AKIODDemandGetIniTimeSlice AKIODDemandGetEndTimeSlice = _AAPI.AKIODDemandGetEndTimeSlice AKIODDemandGetDemandODPair = _AAPI.AKIODDemandGetDemandODPair AKIODDemandSetDemandODPair = _AAPI.AKIODDemandSetDemandODPair AKIStateDemandGetNumSlices = _AAPI.AKIStateDemandGetNumSlices AKIStateDemandGetIniTimeSlice = _AAPI.AKIStateDemandGetIniTimeSlice AKIStateDemandGetEndTimeSlice = _AAPI.AKIStateDemandGetEndTimeSlice AKIStateDemandGetDemandSection = _AAPI.AKIStateDemandGetDemandSection AKIStateDemandSetDemandSection = _AAPI.AKIStateDemandSetDemandSection AKIPastCostAreCostsPerVehicleType = _AAPI.AKIPastCostAreCostsPerVehicleType AKIPastCostGetIniTimeReaded = _AAPI.AKIPastCostGetIniTimeReaded AKIPastCostGetIntervalReaded = _AAPI.AKIPastCostGetIntervalReaded AKIPastCostGetNbIntervalsReaded = _AAPI.AKIPastCostGetNbIntervalsReaded AKIPastCostSetPastCost = _AAPI.AKIPastCostSetPastCost AKIPastCostGetPastCost = _AAPI.AKIPastCostGetPastCost AKIPastCostGetPastOutputCost = _AAPI.AKIPastCostGetPastOutputCost AKIFleetGetNbFleet = _AAPI.AKIFleetGetNbFleet AKIFleetGetFleetId = _AAPI.AKIFleetGetFleetId AKIFleetGetNbFleetStops = _AAPI.AKIFleetGetNbFleetStops AKIFleetGetFleetStopId = _AAPI.AKIFleetGetFleetStopId AKIFleetGetNbFleetVehicles = _AAPI.AKIFleetGetNbFleetVehicles AKIFleetGetFleetVehicleName = _AAPI.AKIFleetGetFleetVehicleName AKIFleetAddFleet = _AAPI.AKIFleetAddFleet AKIFleetAddFleetStop = _AAPI.AKIFleetAddFleetStop AKIFleetModifyFleetStopTimeWindow = _AAPI.AKIFleetModifyFleetStopTimeWindow AKIFleetAddFleetAssignment = _AAPI.AKIFleetAddFleetAssignment AKIFleetAddFleetAssignmentRoute = _AAPI.AKIFleetAddFleetAssignmentRoute AKIFleetAddFleetAssignmentStop = _AAPI.AKIFleetAddFleetAssignmentStop AKIFleetIsFleetVehicleGenerated = _AAPI.AKIFleetIsFleetVehicleGenerated AKIFleetGetCurrentSectionIdFleetVehicle = _AAPI.AKIFleetGetCurrentSectionIdFleetVehicle AKIFleetModifyRouteStopsFleetVehicle = _AAPI.AKIFleetModifyRouteStopsFleetVehicle AKIFleetGetNbStopsFleetVeh = _AAPI.AKIFleetGetNbStopsFleetVeh AKIFleetChangeStopTimeFleetVeh = _AAPI.AKIFleetChangeStopTimeFleetVeh AKIFleetGetNbStopsDoneFleetVeh = _AAPI.AKIFleetGetNbStopsDoneFleetVeh AKIFleetIsDoingStopFleetVeh = _AAPI.AKIFleetIsDoingStopFleetVeh AKIFleetGetStopTimeFleetVeh = _AAPI.AKIFleetGetStopTimeFleetVeh class PathInfVeh(_object): __swig_setmethods__ = {} __setattr__ = lambda self, name, value: _swig_setattr(self, PathInfVeh, name, value) __swig_getmethods__ = {} __getattr__ = lambda self, name: _swig_getattr(self, PathInfVeh, name) __repr__ = _swig_repr __swig_setmethods__["report"] = _AAPI.PathInfVeh_report_set __swig_getmethods__["report"] = _AAPI.PathInfVeh_report_get if _newclass:report = _swig_property(_AAPI.PathInfVeh_report_get, _AAPI.PathInfVeh_report_set) __swig_setmethods__["idVeh"] = _AAPI.PathInfVeh_idVeh_set __swig_getmethods__["idVeh"] = _AAPI.PathInfVeh_idVeh_get if _newclass:idVeh = _swig_property(_AAPI.PathInfVeh_idVeh_get, _AAPI.PathInfVeh_idVeh_set) __swig_setmethods__["type"] = _AAPI.PathInfVeh_type_set __swig_getmethods__["type"] = 
_AAPI.PathInfVeh_type_get if _newclass:type = _swig_property(_AAPI.PathInfVeh_type_get, _AAPI.PathInfVeh_type_set) __swig_setmethods__["entranceSectionId"] = _AAPI.PathInfVeh_entranceSectionId_set __swig_getmethods__["entranceSectionId"] = _AAPI.PathInfVeh_entranceSectionId_get if _newclass:entranceSectionId = _swig_property(_AAPI.PathInfVeh_entranceSectionId_get, _AAPI.PathInfVeh_entranceSectionId_set) __swig_setmethods__["numSectionsInPath"] = _AAPI.PathInfVeh_numSectionsInPath_set __swig_getmethods__["numSectionsInPath"] = _AAPI.PathInfVeh_numSectionsInPath_get if _newclass:numSectionsInPath = _swig_property(_AAPI.PathInfVeh_numSectionsInPath_get, _AAPI.PathInfVeh_numSectionsInPath_set) __swig_setmethods__["totalDistance"] = _AAPI.PathInfVeh_totalDistance_set __swig_getmethods__["totalDistance"] = _AAPI.PathInfVeh_totalDistance_get if _newclass:totalDistance = _swig_property(_AAPI.PathInfVeh_totalDistance_get, _AAPI.PathInfVeh_totalDistance_set) def __init__(self, *args): this = _AAPI.new_PathInfVeh(*args) try: self.this.append(this) except: self.this = this __swig_destroy__ = _AAPI.delete_PathInfVeh __del__ = lambda self : None; PathInfVeh_swigregister = _AAPI.PathInfVeh_swigregister PathInfVeh_swigregister(PathInfVeh) AKIVehInfPath = _AAPI.AKIVehInfPath AKIVehInfPathSection = _AAPI.AKIVehInfPathSection AKIVehInfPathGetNextSection = _AAPI.AKIVehInfPathGetNextSection AKIVehInfPathGetNextSectionInSection = _AAPI.AKIVehInfPathGetNextSectionInSection RED = _AAPI.RED GREEN = _AAPI.GREEN YELLOW = _AAPI.YELLOW NONE = _AAPI.NONE METVERDE = _AAPI.METVERDE METFLUJO = _AAPI.METFLUJO METDELAY = _AAPI.METDELAY METFLUJOALINEA = _AAPI.METFLUJOALINEA METVERDEPERLANE = _AAPI.METVERDEPERLANE ECIGetNumberJunctions = _AAPI.ECIGetNumberJunctions ECIGetJunctionId = _AAPI.ECIGetJunctionId ECIGetJunctionName = _AAPI.ECIGetJunctionName ECIGetNumberSignalGroups = _AAPI.ECIGetNumberSignalGroups ECIGetLogicalNameofSignalGroup = _AAPI.ECIGetLogicalNameofSignalGroup ECIGetJunctionIdFromExternalId = _AAPI.ECIGetJunctionIdFromExternalId ECIGetNumberTurningsofSignalGroup = _AAPI.ECIGetNumberTurningsofSignalGroup ECIGetFromToofTurningofSignalGroup = _AAPI.ECIGetFromToofTurningofSignalGroup ECIGetCurrentPhase = _AAPI.ECIGetCurrentPhase ECIGetNumberPhases = _AAPI.ECIGetNumberPhases ECIGetStartingTimePhase = _AAPI.ECIGetStartingTimePhase ECIGetDurationsPhase = _AAPI.ECIGetDurationsPhase ECIGetYellowTimePhase = _AAPI.ECIGetYellowTimePhase ECISetYellowTimePhase = _AAPI.ECISetYellowTimePhase ECIIsAnInterPhase = _AAPI.ECIIsAnInterPhase ECIDisableBusPreemptionNode = _AAPI.ECIDisableBusPreemptionNode ECIEnableBusPreemptionNode = _AAPI.ECIEnableBusPreemptionNode ECIIsBusPreemptionNodeEnabled = _AAPI.ECIIsBusPreemptionNodeEnabled ECIGetNbSignalGroupsPhaseofJunction = _AAPI.ECIGetNbSignalGroupsPhaseofJunction ECIGetSignalGroupPhaseofJunction = _AAPI.ECIGetSignalGroupPhaseofJunction ECIGetControlType = _AAPI.ECIGetControlType ECIGetYellowTime = _AAPI.ECIGetYellowTime ECIGetOffset = _AAPI.ECIGetOffset ECIDisableEvents = _AAPI.ECIDisableEvents ECIEnableEvents = _AAPI.ECIEnableEvents ECIEnableEventsActivatingPhase = _AAPI.ECIEnableEventsActivatingPhase ECIChangeDirectPhaseWithInterphaseTransition = _AAPI.ECIChangeDirectPhaseWithInterphaseTransition ECIChangeTimingPhase = _AAPI.ECIChangeTimingPhase ECIChangeSignalGroupState = _AAPI.ECIChangeSignalGroupState ECIChangeSignalGroupStateToYellow = _AAPI.ECIChangeSignalGroupStateToYellow ECIGetCurrentStateofSignalGroup = _AAPI.ECIGetCurrentStateofSignalGroup ECIChangeSignalGroupStatebyName = 
_AAPI.ECIChangeSignalGroupStatebyName ECIGetCurrentStateofSignalGroupbyName = _AAPI.ECIGetCurrentStateofSignalGroupbyName ECIGetActuatedParamsPassageTime = _AAPI.ECIGetActuatedParamsPassageTime ECISetActuatedParamsPassageTime = _AAPI.ECISetActuatedParamsPassageTime ECIGetActuatedParamsMinimumGreen = _AAPI.ECIGetActuatedParamsMinimumGreen ECISetActuatedParamsMinimumGreen = _AAPI.ECISetActuatedParamsMinimumGreen ECIGetActuatedParamsMaxGreen = _AAPI.ECIGetActuatedParamsMaxGreen ECISetActuatedParamsMaxGreen = _AAPI.ECISetActuatedParamsMaxGreen ECIGetActuatedParamsForceOFFPermissivePeriod = _AAPI.ECIGetActuatedParamsForceOFFPermissivePeriod ECISetActuatedParamsForceOFFPermissivePeriod = _AAPI.ECISetActuatedParamsForceOFFPermissivePeriod ECIGetNumberMeterings = _AAPI.ECIGetNumberMeterings ECIGetMeteringIdSection = _AAPI.ECIGetMeteringIdSection ECIGetMeteringName = _AAPI.ECIGetMeteringName ECIGetMeteringId = _AAPI.ECIGetMeteringId ECIGetMeteringIdByPos = _AAPI.ECIGetMeteringIdByPos ECIGetTypeMetering = _AAPI.ECIGetTypeMetering ECIGetParametersGreenMetering = _AAPI.ECIGetParametersGreenMetering ECIChangeParametersGreenMetering = _AAPI.ECIChangeParametersGreenMetering ECIGetParametersFlowMetering = _AAPI.ECIGetParametersFlowMetering ECIChangeParametersFlowMetering = _AAPI.ECIChangeParametersFlowMetering ECIGetParametersDelayMetering = _AAPI.ECIGetParametersDelayMetering ECIChangeParametersDelayMetering = _AAPI.ECIChangeParametersDelayMetering ECIDisableEventsMetering = _AAPI.ECIDisableEventsMetering ECIEnableEventsMetering = _AAPI.ECIEnableEventsMetering ECIGetCurrentStateofMetering = _AAPI.ECIGetCurrentStateofMetering ECIGetMeteringIdByPosition = _AAPI.ECIGetMeteringIdByPosition ECIGetMeteringNameById = _AAPI.ECIGetMeteringNameById ECIGetTypeMeteringById = _AAPI.ECIGetTypeMeteringById ECIGetParametersGreenMeteringById = _AAPI.ECIGetParametersGreenMeteringById ECIChangeParametersGreenMeteringById = _AAPI.ECIChangeParametersGreenMeteringById ECIGetParametersFlowMeteringById = _AAPI.ECIGetParametersFlowMeteringById ECIChangeParametersFlowMeteringById = _AAPI.ECIChangeParametersFlowMeteringById ECIGetParametersFlowAlineaMeteringById = _AAPI.ECIGetParametersFlowAlineaMeteringById ECIChangeParametersFlowAlineaMeteringById = _AAPI.ECIChangeParametersFlowAlineaMeteringById ECIGetParametersDelayMeteringById = _AAPI.ECIGetParametersDelayMeteringById ECIChangeParametersDelayMeteringById = _AAPI.ECIChangeParametersDelayMeteringById ECIDisableEventsMeteringById = _AAPI.ECIDisableEventsMeteringById ECIEnableEventsMeteringById = _AAPI.ECIEnableEventsMeteringById ECIGetCurrentStateofMeteringById = _AAPI.ECIGetCurrentStateofMeteringById ECIGetNumberofControls = _AAPI.ECIGetNumberofControls ECIGetNameofControl = _AAPI.ECIGetNameofControl ECIGetIniTimeofControl = _AAPI.ECIGetIniTimeofControl ECIGetOffsetofControl = _AAPI.ECIGetOffsetofControl ECIGetNameCurrentControl = _AAPI.ECIGetNameCurrentControl ECIGetNumberCurrentControl = _AAPI.ECIGetNumberCurrentControl ECIRemoveControl = _AAPI.ECIRemoveControl ECIGetNumberSem = _AAPI.ECIGetNumberSem ECIGetPositionSem = _AAPI.ECIGetPositionSem ECIGetStateSem = _AAPI.ECIGetStateSem ECIChangeParametersDelayMeteringVehType = _AAPI.ECIChangeParametersDelayMeteringVehType ECIChangeParametersDelayMeteringVehTypeById = _AAPI.ECIChangeParametersDelayMeteringVehTypeById INTEGER_TYPE = _AAPI.INTEGER_TYPE DOUBLE_TYPE = _AAPI.DOUBLE_TYPE STRING_TYPE = _AAPI.STRING_TYPE EXTERNAL_TEMPORAL = _AAPI.EXTERNAL_TEMPORAL INTERNAL = _AAPI.INTERNAL EXTERNAL = _AAPI.EXTERNAL ANGConnGetReplicationId 
= _AAPI.ANGConnGetReplicationId ANGConnGetExperimentId = _AAPI.ANGConnGetExperimentId ANGConnGetScenarioId = _AAPI.ANGConnGetScenarioId ANGConnGetObjectId = _AAPI.ANGConnGetObjectId ANGConnGetObjectNameA = _AAPI.ANGConnGetObjectNameA ANGConnHideObject = _AAPI.ANGConnHideObject ANGConnIsObjectHidden = _AAPI.ANGConnIsObjectHidden ANGConnGetScenarioTime = _AAPI.ANGConnGetScenarioTime ANGConnGetAttribute = _AAPI.ANGConnGetAttribute ANGConnSetAttributeValueStringA = _AAPI.ANGConnSetAttributeValueStringA ANGConnGetAttributeValueStringA = _AAPI.ANGConnGetAttributeValueStringA ANGConnSetAttributeValueBool = _AAPI.ANGConnSetAttributeValueBool ANGConnGetAttributeValueBool = _AAPI.ANGConnGetAttributeValueBool ANGConnSetAttributeValueInt = _AAPI.ANGConnSetAttributeValueInt ANGConnGetAttributeValueInt = _AAPI.ANGConnGetAttributeValueInt ANGConnSetAttributeValueDouble = _AAPI.ANGConnSetAttributeValueDouble ANGConnGetAttributeValueDouble = _AAPI.ANGConnGetAttributeValueDouble ANGConnActivatePolicy = _AAPI.ANGConnActivatePolicy ANGConnDeactivatePolicy = _AAPI.ANGConnDeactivatePolicy ANGConnIsPolicyActive = _AAPI.ANGConnIsPolicyActive ANGConnGetTimeSeriesSize = _AAPI.ANGConnGetTimeSeriesSize ANGConnGetTimeSeriesValue = _AAPI.ANGConnGetTimeSeriesValue ANGConnSetText = _AAPI.ANGConnSetText ANGConnMarkActivatePolicy = _AAPI.ANGConnMarkActivatePolicy ANGConnInitPolicies = _AAPI.ANGConnInitPolicies ANGConnVehGetGKSimVehicleId = _AAPI.ANGConnVehGetGKSimVehicleId ANGConnEnableVehiclesInBatch = _AAPI.ANGConnEnableVehiclesInBatch ANGGetSimulationOrder = _AAPI.ANGGetSimulationOrder ANGSetSimulationOrder = _AAPI.ANGSetSimulationOrder AKIConvertLatitudeLongitudeToXY = _AAPI.AKIConvertLatitudeLongitudeToXY AKIConvertXYToLatitudeLongitude = _AAPI.AKIConvertXYToLatitudeLongitude ECIChangeDirectPhase = _AAPI.ECIChangeDirectPhase ECIChangeStateMetering = _AAPI.ECIChangeStateMetering ECIChangeStateMeteringById = _AAPI.ECIChangeStateMeteringById ANGConnCreateAttribute = _AAPI.ANGConnCreateAttribute
jprk/sirid
aapi/AAPI.py
Python
gpl-3.0
131,251
0.012107
import os.path

from django.conf import settings
from django.test.utils import override_settings

import mock

from celery.result import AsyncResult

from olympia import amo
from olympia.amo.tests import TestCase, addon_factory, version_factory
from olympia.devhub import tasks, utils
from olympia.files.models import FileUpload


class TestValidatorBase(TestCase):

    def setUp(self):
        # Create File objects for version 1.0 and 1.1.
        self.addon = addon_factory(
            guid='test-desktop@nowhere', slug='test-amo-addon',
            version_kw={'version': '1.0'})
        self.version = self.addon.current_version
        self.file = self.version.files.get()

        self.version_1_1 = version_factory(addon=self.addon, version='1.1')
        self.file_1_1 = self.version_1_1.files.get()

        # Creating the files and versions above resets this.
        self.addon.update(status=amo.STATUS_PUBLIC)

        # Create a FileUpload object for an XPI containing version 1.1.
        path = os.path.join(settings.ROOT,
                            'src/olympia/devhub/tests/addons/desktop.xpi')
        self.file_upload = FileUpload.objects.create(path=path)
        self.xpi_version = '1.1'

        # Patch validation tasks that we expect the validator to call.
        self.patchers = []
        self.save_file = self.patch(
            'olympia.devhub.tasks.handle_file_validation_result').subtask
        self.save_upload = self.patch(
            'olympia.devhub.tasks.handle_upload_validation_result').subtask

        self.validate_file = self.patch(
            'olympia.devhub.tasks.validate_file').subtask
        self.validate_upload = self.patch(
            'olympia.devhub.tasks.validate_file_path').subtask

    def patch(self, thing):
        """Patch the given "thing", and revert the patch on test teardown."""
        patcher = mock.patch(thing)
        self.addCleanup(patcher.stop)
        return patcher.start()

    def check_upload(self, file_upload, listed=True):
        """Check that the given new file upload is validated properly."""
        # Run validator.
        utils.Validator(file_upload, listed=listed)

        # We shouldn't be attempting to validate an existing file.
        assert not self.validate_file.called

        # Make sure we run the correct validation task for the upload.
        self.validate_upload.assert_called_once_with(
            [file_upload.path],
            {'hash_': file_upload.hash, 'listed': listed,
             'is_webextension': False})

        # Make sure we run the correct save validation task, with a
        # fallback error handler.
        channel = (amo.RELEASE_CHANNEL_LISTED if listed else
                   amo.RELEASE_CHANNEL_UNLISTED)
        self.save_upload.assert_has_calls([
            mock.call([mock.ANY, file_upload.pk, channel, False],
                      immutable=True),
            mock.call([file_upload.pk, channel, False],
                      link_error=mock.ANY)])

    def check_file(self, file_):
        """Check that the given file is validated properly."""
        # Run validator.
        utils.Validator(file_)

        # We shouldn't be attempting to validate a bare upload.
        assert not self.validate_upload.called

        # Make sure we run the correct validation task.
        self.validate_file.assert_called_once_with(
            [file_.pk],
            {'hash_': file_.original_hash, 'is_webextension': False})

        # Make sure we run the correct save validation task, with a
        # fallback error handler.
        self.save_file.assert_has_calls([
            mock.call([mock.ANY, file_.pk, file_.version.channel, False],
                      immutable=True),
            mock.call([file_.pk, file_.version.channel, False],
                      link_error=mock.ANY)])


class TestValidatorListed(TestValidatorBase):

    @mock.patch('olympia.devhub.utils.chain')
    def test_run_once_per_file(self, chain):
        """Tests that only a single validation task is run for a given
        file."""
        task = mock.Mock()
        chain.return_value = task
        task.delay.return_value = mock.Mock(task_id='42')

        assert isinstance(tasks.validate(self.file), mock.Mock)
        assert task.delay.call_count == 1

        assert isinstance(tasks.validate(self.file), AsyncResult)
        assert task.delay.call_count == 1

        assert isinstance(tasks.validate(self.file_1_1), mock.Mock)
        assert task.delay.call_count == 2

    @mock.patch('olympia.devhub.utils.chain')
    def test_run_once_file_upload(self, chain):
        """Tests that only a single validation task is run for a given
        file upload."""
        task = mock.Mock()
        chain.return_value = task
        task.delay.return_value = mock.Mock(task_id='42')

        assert isinstance(
            tasks.validate(self.file_upload, listed=True), mock.Mock)
        assert task.delay.call_count == 1

        assert isinstance(
            tasks.validate(self.file_upload, listed=True), AsyncResult)
        assert task.delay.call_count == 1

    def test_cache_key(self):
        """Tests that the correct cache key is generated for a given object."""
        assert (utils.Validator(self.file).cache_key ==
                'validation-task:files.File:{0}:None'.format(self.file.pk))

        assert (utils.Validator(self.file_upload, listed=False).cache_key ==
                'validation-task:files.FileUpload:{0}:False'.format(
                    self.file_upload.pk))

    @mock.patch('olympia.devhub.utils.parse_addon')
    def test_search_plugin(self, parse_addon):
        """Test that search plugins are handled correctly."""
        parse_addon.return_value = {
            'guid': None,
            'version': '20140103',
            'is_webextension': False,
        }

        addon = addon_factory(type=amo.ADDON_SEARCH,
                              version_kw={'version': '20140101'})

        assert addon.guid is None
        self.check_upload(self.file_upload)

        self.validate_upload.reset_mock()
        self.save_file.reset_mock()

        version = version_factory(addon=addon, version='20140102')
        self.check_file(version.files.get())


class TestLimitValidationResults(TestCase):
    """Test that higher priority messages are truncated last."""

    def make_validation(self, types):
        """Take a list of error types and make a validation results dict."""
        validation = {
            'messages': [],
            'errors': 0,
            'warnings': 0,
            'notices': 0,
        }
        severities = ['low', 'medium', 'high']
        for type_ in types:
            if type_ in severities:
                type_ = 'warning'
            validation[type_ + 's'] += 1
            validation['messages'].append({'type': type_})
        return validation

    @override_settings(VALIDATOR_MESSAGE_LIMIT=2)
    def test_errors_are_first(self):
        validation = self.make_validation(
            ['error', 'warning', 'notice', 'error'])
        utils.limit_validation_results(validation)
        limited = validation['messages']
        assert len(limited) == 3
        assert '2 messages were truncated' in limited[0]['message']
        assert limited[1]['type'] == 'error'
        assert limited[2]['type'] == 'error'


class TestFixAddonsLinterOutput(TestCase):

    def test_fix_output(self):
        original_output = {
            'count': 4,
            'summary': {
                'errors': 0,
                'notices': 0,
                'warnings': 4
            },
            'metadata': {
                'manifestVersion': 2,
                'name': 'My Dogs New Tab',
                'type': 1,
                'version': '2.13.15',
                'architecture': 'extension',
                'emptyFiles': [],
                'jsLibs': {
                    'lib/vendor/jquery.js': 'jquery.2.1.4.jquery.js'
                }
            },
            'errors': [],
            'notices': [],
            'warnings': [
                {
                    '_type': 'warning',
                    'code': 'MANIFEST_PERMISSIONS',
                    'message': '/permissions: Unknown permissions ...',
                    'description': 'See https://mzl.la/1R1n1t0 ...',
                    'file': 'manifest.json'
                },
                {
                    '_type': 'warning',
                    'code': 'MANIFEST_PERMISSIONS',
                    'message': '/permissions: Unknown permissions ...',
                    'description': 'See https://mzl.la/1R1n1t0 ....',
                    'file': 'manifest.json'
                },
                {
                    '_type': 'warning',
                    'code': 'MANIFEST_CSP',
                    'message': '\'content_security_policy\' is ...',
                    'description': 'A custom content_security_policy ...'
                },
                {
                    '_type': 'warning',
                    'code': 'NO_DOCUMENT_WRITE',
                    'message': 'Use of document.write strongly discouraged.',
                    'description': 'document.write will fail in...',
                    'column': 13,
                    'file': 'lib/vendor/knockout.js',
                    'line': 5449
                }
            ]
        }

        fixed = utils.fix_addons_linter_output(original_output)

        assert fixed['success']
        assert fixed['warnings'] == 4
        assert 'uid' in fixed['messages'][0]
        assert 'id' in fixed['messages'][0]
        assert 'type' in fixed['messages'][0]
        assert fixed['messages'][0]['tier'] == 1
        assert fixed['compatibility_summary'] == {
            'warnings': 0,
            'errors': 0,
            'notices': 0,
        }
        assert fixed['ending_tier'] == 5
        assert fixed['metadata']['is_webextension'] is True
        assert fixed['metadata']['processed_by_addons_linter'] is True
        assert fixed['metadata']['listed'] is True
        assert fixed['metadata']['identified_files'] == {
            'lib/vendor/jquery.js': {'path': 'jquery.2.1.4.jquery.js'}
        }
        # Make sure original metadata was preserved.
        for key, value in original_output['metadata'].items():
            assert fixed['metadata'][key] == value
lavish205/olympia
src/olympia/devhub/tests/test_utils.py
Python
bsd-3-clause
10,323
0
import os
import logging
from flask import Flask
from flask_seasurf import SeaSurf
from flask_mail import Mail
from werkzeug.middleware.proxy_fix import ProxyFix
from flask_session import Session

from .lib import utils


def create_app(config=None):
    from . import models, routes, services
    from .assets import assets

    app = Flask(__name__)

    # Read log level from environment variable
    log_level_name = os.environ.get('PDNS_ADMIN_LOG_LEVEL', 'WARNING')
    log_level = logging.getLevelName(log_level_name.upper())
    # Setting logger
    logging.basicConfig(
        level=log_level,
        format=
        "[%(asctime)s] [%(filename)s:%(lineno)d] %(levelname)s - %(message)s")

    # If we use Docker + Gunicorn, adjust the
    # log handler
    if "GUNICORN_LOGLEVEL" in os.environ:
        gunicorn_logger = logging.getLogger("gunicorn.error")
        app.logger.handlers = gunicorn_logger.handlers
        app.logger.setLevel(gunicorn_logger.level)

    # Proxy
    app.wsgi_app = ProxyFix(app.wsgi_app)

    # CSRF protection
    csrf = SeaSurf(app)
    csrf.exempt(routes.index.dyndns_checkip)
    csrf.exempt(routes.index.dyndns_update)
    csrf.exempt(routes.index.saml_authorized)
    csrf.exempt(routes.api.api_login_create_zone)
    csrf.exempt(routes.api.api_login_delete_zone)
    csrf.exempt(routes.api.api_generate_apikey)
    csrf.exempt(routes.api.api_delete_apikey)
    csrf.exempt(routes.api.api_update_apikey)
    csrf.exempt(routes.api.api_zone_subpath_forward)
    csrf.exempt(routes.api.api_zone_forward)
    csrf.exempt(routes.api.api_create_zone)
    csrf.exempt(routes.api.api_create_account)
    csrf.exempt(routes.api.api_delete_account)
    csrf.exempt(routes.api.api_update_account)
    csrf.exempt(routes.api.api_create_user)
    csrf.exempt(routes.api.api_delete_user)
    csrf.exempt(routes.api.api_update_user)
    csrf.exempt(routes.api.api_list_account_users)
    csrf.exempt(routes.api.api_add_account_user)
    csrf.exempt(routes.api.api_remove_account_user)
    csrf.exempt(routes.api.api_zone_cryptokeys)
    csrf.exempt(routes.api.api_zone_cryptokey)

    # Load config from env variables if using docker
    if os.path.exists(os.path.join(app.root_path, 'docker_config.py')):
        app.config.from_object('powerdnsadmin.docker_config')
    else:
        # Load default configuration
        app.config.from_object('powerdnsadmin.default_config')

    # Load config file from FLASK_CONF env variable
    if 'FLASK_CONF' in os.environ:
        app.config.from_envvar('FLASK_CONF')

    # Load app specified configuration
    if config is not None:
        if isinstance(config, dict):
            app.config.update(config)
        elif config.endswith('.py'):
            app.config.from_pyfile(config)

    # HSTS
    if app.config.get('HSTS_ENABLED'):
        from flask_sslify import SSLify
        _sslify = SSLify(app)  # lgtm [py/unused-local-variable]

    # Load Flask-Session
    if app.config.get('FILESYSTEM_SESSIONS_ENABLED'):
        app.config['SESSION_TYPE'] = 'filesystem'
        sess = Session()
        sess.init_app(app)

    # SMTP
    app.mail = Mail(app)

    # Load app's components
    assets.init_app(app)
    models.init_app(app)
    routes.init_app(app)
    services.init_app(app)

    # Register filters
    app.jinja_env.filters['display_record_name'] = utils.display_record_name
    app.jinja_env.filters['display_master_name'] = utils.display_master_name
    app.jinja_env.filters['display_second_to_time'] = utils.display_time
    app.jinja_env.filters[
        'email_to_gravatar_url'] = utils.email_to_gravatar_url
    app.jinja_env.filters[
        'display_setting_state'] = utils.display_setting_state
    app.jinja_env.filters['pretty_domain_name'] = utils.pretty_domain_name

    # Register context processors
    from .models.setting import Setting

    @app.context_processor
    def inject_sitename():
        setting = Setting().get('site_name')
        return dict(SITE_NAME=setting)

    @app.context_processor
    def inject_setting():
        setting = Setting()
        return dict(SETTING=setting)

    @app.context_processor
    def inject_mode():
        setting = app.config.get('OFFLINE_MODE', False)
        return dict(OFFLINE_MODE=setting)

    return app
ngoduykhanh/PowerDNS-Admin
powerdnsadmin/__init__.py
Python
mit
4,275
0.000702
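A minimal sketch of driving the application factory above. The config key used here is one that create_app itself reads (FILESYSTEM_SESSIONS_ENABLED), but the host and port values are arbitrary illustration, not project defaults:

from powerdnsadmin import create_app

# create_app accepts a dict (merged via app.config.update) or a path to a
# .py file (loaded via app.config.from_pyfile), per the branches above.
app = create_app(config={'FILESYSTEM_SESSIONS_ENABLED': True})

if __name__ == '__main__':
    app.run(host='127.0.0.1', port=9191)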
class OnboardingTutorial:
    """Constructs the onboarding message and stores the state of which tasks were completed."""

    WELCOME_BLOCK = {
        "type": "section",
        "text": {
            "type": "mrkdwn",
            "text": (
                "Welcome to Slack! :wave: We're so glad you're here. :blush:\n\n"
                "*Get started by completing the steps below:*"
            ),
        },
    }
    DIVIDER_BLOCK = {"type": "divider"}

    def __init__(self, channel):
        self.channel = channel
        self.username = "pythonboardingbot"
        self.icon_emoji = ":robot_face:"
        self.timestamp = ""
        self.reaction_task_completed = False
        self.pin_task_completed = False

    def get_message_payload(self):
        return {
            "ts": self.timestamp,
            "channel": self.channel,
            "username": self.username,
            "icon_emoji": self.icon_emoji,
            "blocks": [
                self.WELCOME_BLOCK,
                self.DIVIDER_BLOCK,
                *self._get_reaction_block(),
                self.DIVIDER_BLOCK,
                *self._get_pin_block(),
            ],
        }

    def _get_reaction_block(self):
        task_checkmark = self._get_checkmark(self.reaction_task_completed)
        text = (
            f"{task_checkmark} *Add an emoji reaction to this message* :thinking_face:\n"
            "You can quickly respond to any message on Slack with an emoji reaction. "
            "Reactions can be used for any purpose: voting, checking off to-do items, showing excitement."
        )
        information = (
            ":information_source: *<https://get.slack.help/hc/en-us/articles/206870317-Emoji-reactions|"
            "Learn How to Use Emoji Reactions>*"
        )
        return self._get_task_block(text, information)

    def _get_pin_block(self):
        task_checkmark = self._get_checkmark(self.pin_task_completed)
        text = (
            f"{task_checkmark} *Pin this message* :round_pushpin:\n"
            "Important messages and files can be pinned to the details pane in any channel or"
            " direct message, including group messages, for easy reference."
        )
        information = (
            ":information_source: *<https://get.slack.help/hc/en-us/articles/205239997-Pinning-messages-and-files"
            "|Learn How to Pin a Message>*"
        )
        return self._get_task_block(text, information)

    @staticmethod
    def _get_checkmark(task_completed: bool) -> str:
        if task_completed:
            return ":white_check_mark:"
        return ":white_large_square:"

    @staticmethod
    def _get_task_block(text, information):
        return [
            {"type": "section", "text": {"type": "mrkdwn", "text": text}},
            {"type": "context", "elements": [{"type": "mrkdwn", "text": information}]},
        ]
slackapi/python-slackclient
tutorial/PythOnBoardingBot/onboarding_tutorial.py
Python
mit
2,881
0.003124
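A small usage sketch for the class above. The slack import, token handling, and channel name are assumptions drawn from the python-slackclient library this tutorial accompanies, not from this file:

import os
import slack  # assumed: the python-slackclient package this tutorial ships with

client = slack.WebClient(token=os.environ["SLACK_BOT_TOKEN"])
tutorial = OnboardingTutorial(channel="#general")

# Post the onboarding message and keep its timestamp so later reaction
# and pin events can mark tasks complete and re-render the same message.
response = client.chat_postMessage(**tutorial.get_message_payload())
tutorial.timestamp = response["ts"]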
# -*- coding: utf-8 -*-

# Copyright (c) 2015 Dipl.Tzt. Enno Deimel <ennodotvetatgmxdotnet>
#
# This file is part of gnuvet, published under the GNU General Public License
# version 3 or later (GPLv3+ in short). See the file LICENSE for information.

# Initially created: Fri Apr 23 00:02:26 2010 by: PyQt4 UI code generator 4.4.2

from PyQt4.QtGui import (QApplication, QCheckBox, QDialogButtonBox)


def tl(txt=''):
    return QApplication.translate("Options", txt, None, 1)


class Ui_Options(object):
    def setupUi(self, Options):
        Options.resize(245, 175)
        Options.setMinimumSize(245, 175)
        self.buttonBox = QDialogButtonBox(Options)
        self.buttonBox.setGeometry(10, 120, 221, 32)
        self.buttonBox.setStandardButtons(
            QDialogButtonBox.Cancel | QDialogButtonBox.Ok)
        self.buttonBox.setCenterButtons(True)
        self.autoConsCb = QCheckBox(Options)
        self.autoConsCb.setGeometry(60, 20, 107, 23)
        self.autoHistCb = QCheckBox(Options)
        self.autoHistCb.setGeometry(60, 50, 104, 23)
        self.lSympCb = QCheckBox(Options)
        self.lSympCb.setGeometry(60, 80, 122, 23)
        self.retranslateUi(Options)

    def retranslateUi(self, Options):
        Options.setWindowTitle(tl("GnuVet: Set Options"))
        self.autoConsCb.setToolTip(tl("To automatically book consultation"))
        self.autoConsCb.setText(tl("Auto-Consult"))
        self.autoHistCb.setToolTip(tl("To automatically open History Window."))
        self.autoHistCb.setText(tl("Auto-History"))
        self.lSympCb.setToolTip(tl("To use the Lead Symptom feature."))
        self.lSympCb.setText(tl("Lead Symptom"))


if __name__ == '__main__':
    from PyQt4.QtGui import QMainWindow, QShortcut
    a = QApplication([])
    b = Ui_Options()
    w = QMainWindow()
    b.setupUi(w)
    QShortcut('Ctrl+W', w, quit)
    w.show()
    exit(a.exec_())
gnuvet/gnuvet
options_ui.py
Python
gpl-3.0
1,879
0.00958
"""An example of a python script which can be executed by the task queue """ import sys def execute(): """Simply write the python executable """ sys.stdout.write(sys.executable) if __name__ == '__main__': execute()
quantmind/pulsar-queue
tests/example/executable.py
Python
bsd-3-clause
235
0
#!/usr/bin/env python
"""Parse GCC-XML output files and produce a list of class names."""

# Import system modules.
import multiprocessing
import xml.dom.minidom
import sys
import os

# Import application modules.
import mpipe
import util

# Configure and parse the command line.
NAME = os.path.basename(sys.argv[0])
ARGS = [('out_file', 'output file'),
        ('xml_dir', 'directory with XML files'),]
ARGS = util.parse_cmd(NAME, ARGS)

# Create a list of input files.
fnames = list()
for entry in os.listdir(ARGS['xml_dir']):
    fname = os.path.join(ARGS['xml_dir'], entry)
    if not os.path.isfile(fname):
        continue
    fnames.append(fname)

num_cpus = multiprocessing.cpu_count()
print('Parsing %d files on %d CPUs'%(len(fnames), num_cpus,))

# Parse files in a pipeline.
def parseFile(fname):
    """Parse the XML file looking for fully demangled class names,
    and communicate the result."""
    names = list()
    doc = xml.dom.minidom.parse(fname)
    classes = doc.getElementsByTagName('Class')
    for entry in classes:
        name = entry.getAttribute('demangled')
        NSPACE = 'Wm5::'
        if name[:len(NSPACE)] != NSPACE:
            continue
        names.append(name)
    return names

pipe = mpipe.Pipeline(mpipe.UnorderedStage(parseFile, num_cpus))
for fname in fnames:
    pipe.put(fname)
pipe.put(None)

# Report on progress in realtime.
total_names = dict()
done_count = 0
for result in pipe.results():
    for name in result:
        total_names[name] = None
    done_count += 1
    percent = float(done_count) / len(fnames) * 100
    sys.stdout.write('\r' + '%d of %d (%.1f%%)'%(done_count, len(fnames), percent))
    sys.stdout.flush()

# End on a newline.
print()

print('Writing file %s'%ARGS['out_file'])
fout = open(ARGS['out_file'], 'w')
for key in sorted(total_names):
    fout.write('%s\n'%key)
fout.close()

# The end.
vmlaker/pythonwildmagic
tool/parse-xml.py
Python
mit
1,870
0.004278
# -*- coding: utf-8 -*-
#
# ===================================================================
# The contents of this file are dedicated to the public domain. To
# the extent that dedication to the public domain is not available,
# everyone is granted a worldwide, perpetual, royalty-free,
# non-exclusive license to exercise all rights associated with the
# contents of this file for any purpose whatsoever.
# No rights are reserved.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ===================================================================

"""Miscellaneous modules

Contains useful modules that don't belong into any of the
other Cryptodome.* subpackages.

=========================  =============================================
Module                     Description
=========================  =============================================
`Cryptodome.Util.number`   Number-theoretic functions (primality testing, etc.)
`Cryptodome.Util.Counter`  Fast counter functions for CTR cipher modes.
`Cryptodome.Util.RFC1751`  Converts between 128-bit keys and human-readable
                           strings of words.
`Cryptodome.Util.asn1`     Minimal support for ASN.1 DER encoding
`Cryptodome.Util.Padding`  Set of functions for adding and removing padding.
=========================  =============================================

:undocumented: _galois, _number_new, cpuid, py3compat, _raw_api
"""

__all__ = ['RFC1751', 'number', 'strxor', 'asn1', 'Counter', 'Padding']
hclivess/Stallion
nuitka/Cryptodome/Util/__init__.py
Python
gpl-3.0
1,951
0.00205
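As a quick illustration of one module from the table above, a sketch using Cryptodome.Util.Padding; the 16-byte block size is an arbitrary example value (it happens to be the AES block size):

from Cryptodome.Util.Padding import pad, unpad

BLOCK_SIZE = 16  # arbitrary example; AES uses 16-byte blocks

padded = pad(b"hello", BLOCK_SIZE)            # PKCS#7 padding by default
assert len(padded) % BLOCK_SIZE == 0
assert unpad(padded, BLOCK_SIZE) == b"hello"  # round-trips to the original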
''' A taylor series visualization graph.

This example demonstrates Bokeh's ability to reflect user-inputted
expressions on a chart.

'''
import numpy as np
import sympy as sy

from bokeh.core.properties import value
from bokeh.io import curdoc
from bokeh.layouts import column
from bokeh.models import (ColumnDataSource, Legend, LegendItem, PreText,
                          Slider, TextInput)
from bokeh.plotting import figure

xs = sy.Symbol('x')
expr = sy.exp(-xs)*sy.sin(xs)

def taylor(fx, xs, order, x_range=(0, 1), n=200):
    x0, x1 = x_range
    x = np.linspace(float(x0), float(x1), n)

    fy = sy.lambdify(xs, fx, modules=['numpy'])(x)
    tx = fx.series(xs, n=order).removeO()

    if tx.is_Number:
        ty = np.zeros_like(x)
        ty.fill(float(tx))
    else:
        ty = sy.lambdify(xs, tx, modules=['numpy'])(x)

    return x, fy, ty

source = ColumnDataSource(data=dict(x=[], fy=[], ty=[]))

p = figure(x_range=(-7, 7), y_range=(-100, 200), width=800, height=400)
line_f = p.line(x="x", y="fy", line_color="navy", line_width=2, source=source)
line_t = p.line(x="x", y="ty", line_color="firebrick", line_width=2, source=source)
p.background_fill_color = "lightgrey"

legend = Legend(location="top_right")
legend.items = [
    LegendItem(label=value(f"{expr}"), renderers=[line_f]),
    LegendItem(label=value(f"taylor({expr})"), renderers=[line_t]),
]
p.add_layout(legend)

def update():
    try:
        expr = sy.sympify(text.value, dict(x=xs))
    except Exception as exception:
        errbox.text = str(exception)
    else:
        errbox.text = ""
        x, fy, ty = taylor(expr, xs, slider.value, (-2*sy.pi, 2*sy.pi), 200)

        p.title.text = "Taylor (n=%d) expansion comparison for: %s" % (slider.value, expr)
        legend.items[0].label = value(f"{expr}")
        legend.items[1].label = value(f"taylor({expr})")
        source.data = dict(x=x, fy=fy, ty=ty)

slider = Slider(start=1, end=20, value=1, step=1, title="Order")
slider.on_change('value', lambda attr, old, new: update())

text = TextInput(value=str(expr), title="Expression:")
text.on_change('value', lambda attr, old, new: update())

errbox = PreText()

update()

inputs = column(text, slider, errbox, width=400)

curdoc().add_root(column(inputs, p))
bokeh/bokeh
examples/app/taylor.py
Python
bsd-3-clause
2,238
0.003128
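The taylor() helper above is self-contained and can be exercised outside the Bokeh app; a small sketch, with arbitrary expression, order, and range values:

import numpy as np
import sympy as sy

xs = sy.Symbol('x')
x, fy, ty = taylor(sy.cos(xs), xs, order=6, x_range=(-np.pi, np.pi), n=50)

# Largest deviation of the 6th-order expansion from cos(x) on [-pi, pi].
print(np.abs(fy - ty).max())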
# coding=utf-8
from typing import List

import networkx as nx
import pyisemail
from fuzzywuzzy import fuzz
from recordclass import recordclass
import pandas as pd

import saapy.util as su
from .lexeme import cleanup_proper_name


def connect_actors(actor_frame, connectivity_sets, connectivity_column):
    """
    :param actor_frame:
    :param connectivity_sets:
    :param connectivity_column:
    :return:

    Examples:

    same_actors = {
        'ccason': [3, 14, 15], 'clipka': [4, 5, 13],
        'wfpokorny': [11, 17], 'anshuarya': [0],
        'bentsm': [1], 'cbarton': [2], 'dbodor': [6],
        'jlecher': [7], 'jgrimbert': [8], 'nalvarez': [9],
        'selvik': [10], 'wverhelst': [12], 'gryken': [16],
        'github': [18]}
    actor_frame = connect_actors(actor_frame, same_actors, 'actor_id')
    """
    connectivity = {}
    for actor_id, connectivity_set in connectivity_sets.items():
        for actor in connectivity_set:
            connectivity[actor] = actor_id
    actor_frame[connectivity_column] = su.categorize(pd.Series(connectivity))
    return actor_frame


def combine_actors(actor_frame, connectivity_column):
    """
    :param actor_frame:
    :param connectivity_column:
    :return:

    Examples:

    combine_actors(actor_frame, 'actor_id')
    """
    aggregator = {'name': 'first',
                  'email': 'first',
                  'author_commits': 'sum',
                  'committer_commits': 'sum'}
    return actor_frame.groupby(connectivity_column).agg(
        aggregator).reset_index()


def insert_actor_ids(commit_frame, actor_frame, drop_name_email=True):
    actor_columns = ['author_name', 'author_email',
                     'committer_name', 'committer_email']
    cf = commit_frame[actor_columns]
    af = actor_frame[['name', 'email', 'actor_id']]
    author = pd.merge(
        cf, af, left_on=actor_columns[:2],
        right_on=('name', 'email'), how='left')['actor_id']
    committer = pd.merge(
        cf, af, left_on=actor_columns[2:],
        right_on=('name', 'email'), how='left')['actor_id']
    commit_frame.insert(3, 'author', author)
    commit_frame.insert(4, 'committer', committer)
    if drop_name_email:
        commit_frame.drop(actor_columns, axis=1, inplace=True)
    return commit_frame


PARSED_EMAIL_FIELDS = ['email', 'valid', 'name', 'domain', 'parsed_name']
ParsedEmail = recordclass('ParsedEmail', PARSED_EMAIL_FIELDS)

PARSED_NAME_FIELDS = ['name', 'name_type']
ParsedName = recordclass('ParsedName', PARSED_NAME_FIELDS)


def proper(name: ParsedName):
    return name.name_type == 'proper' or name.name_type == 'personal'


class Actor:
    name: str
    email: str
    actor_id: str
    parsed_email: ParsedEmail
    parsed_name: ParsedName

    def __init__(self, name: str, email: str):
        self.name = name
        self.email = email
        self.actor_id = '{} <{}>'.format(name, email).lower()
        self.parsed_email = None
        self.parsed_name = None

    def __repr__(self):
        return "Actor('{}')".format(self.actor_id)


class ActorParser:
    role_names = None

    def __init__(self):
        self.role_names = dict()

    def add_role_names(self, name_roles):
        for name, role in name_roles:
            self.role_names[name] = role

    def parse_name(self, name: str) -> List[str]:
        """
        splits a name into parts separated by ., _, camel casing
        and similar
        :param name: potentially human name
        :return: list of name parts
        """
        parsed_name = ParsedName(**su.empty_dict(PARSED_NAME_FIELDS))
        lower_name = name.lower()
        if lower_name in self.role_names:
            parsed_name.name_type = self.role_names[lower_name]
            parsed_name.name = lower_name
        else:
            parsed_name.name_type = 'proper'
            parsed_name.name = cleanup_proper_name(name)
        return parsed_name

    def parse_email(self, email: str) -> ParsedEmail:
        lower_email = email.lower()
        parsed_email = ParsedEmail(**su.empty_dict(PARSED_EMAIL_FIELDS))
        parsed_email.email = lower_email
        parsed_email.valid = pyisemail.is_email(lower_email)
        email_parts = lower_email.split('@')
        parsed_email.name = email_parts[0]
        if len(email_parts) == 2:
            parsed_email.domain = email_parts[1]
        else:
            parsed_email.domain = ''
        parsed_email.parsed_name = self.parse_name(parsed_email.name)
        return parsed_email

    def parse_actor(self, name: str, email: str,
                    name_from_email=True) -> Actor:
        parsed_email = self.parse_email(email)
        if not name and name_from_email:
            name = parsed_email.parsed_name.name
        actor = Actor(name, email)
        actor.parsed_name = self.parse_name(name)
        actor.parsed_email = parsed_email
        return actor


ACTOR_SIMILARITY_FIELDS = ['possible', 'identical',
                           'same_name', 'same_email', 'same_email_name',
                           'name_ratio', 'email_name_ratio',
                           'email_domain_ratio',
                           'name1_email_ratio', 'name2_email_ratio',
                           'proper_name1', 'proper_name2',
                           'proper_email_name1', 'proper_email_name2',
                           'explicit']
ActorSimilarity = recordclass('ActorSimilarity', ACTOR_SIMILARITY_FIELDS)

ACTOR_SIMILARITY_SETTINGS_FIELDS = ['min_name_ratio',
                                    'min_email_domain_ratio',
                                    'min_email_name_ratio',
                                    'min_name_email_ratio']
ActorSimilaritySettings = recordclass('ActorSimilaritySettings',
                                      ACTOR_SIMILARITY_SETTINGS_FIELDS)


class ActorSimilarityGraph:
    actor_graph: nx.Graph
    settings: ActorSimilaritySettings

    def __init__(self, settings=None):
        self.actor_graph = nx.Graph()
        self.similarity_checks = [self.identical_actors,
                                  self.similar_emails,
                                  self.similar_proper_names]
        if settings is None:
            settings = ActorSimilaritySettings(min_name_ratio=55,
                                               min_email_domain_ratio=55,
                                               min_email_name_ratio=55,
                                               min_name_email_ratio=55)
        self.settings = settings

    def add_actor(self, actor: Actor, link_similar=True):
        if self.actor_graph.has_node(actor.actor_id):
            return
        self.actor_graph.add_node(actor.actor_id, actor=actor)
        for actor_id, actor_attrs in self.actor_graph.nodes_iter(data=True):
            if actor.actor_id == actor_id:
                continue
            other_actor = actor_attrs['actor']
            if link_similar:
                similarity = self.evaluate_similarity(actor, other_actor)
                if similarity.possible:
                    self.actor_graph.add_edge(actor.actor_id,
                                              other_actor.actor_id,
                                              similarity=similarity,
                                              confidence=None)

    def link_actors(self, actor1_id: str, actor2_id: str,
                    confidence: float = 1):
        self.actor_graph.add_edge(actor1_id, actor2_id,
                                  confidence=confidence)
        if 'similarity' not in self.actor_graph[actor1_id][actor2_id]:
            self.actor_graph[actor1_id][actor2_id]['similarity'] = None

    def unlink_actors(self, actor1_id: str, actor2_id: str):
        self.actor_graph.remove_edge(actor1_id, actor2_id)

    def evaluate_similarity(self, actor: Actor,
                            other_actor: Actor) -> ActorSimilarity:
        similarity = self.build_similarity(actor, other_actor)
        checks = list(self.similarity_checks)
        while not similarity.possible and len(checks):
            check = checks.pop()
            similarity.possible = check(similarity)
        return similarity

    def build_similarity(self, actor, other_actor):
        similarity = ActorSimilarity(**su.empty_dict(ACTOR_SIMILARITY_FIELDS))
        # run comparisons for similarity
        similarity.identical = (actor.actor_id == other_actor.actor_id)
        similarity.proper_name1 = proper(actor.parsed_name)
        similarity.proper_name2 = proper(other_actor.parsed_name)
        similarity.proper_email_name1 = proper(actor.parsed_email.parsed_name)
        similarity.proper_email_name2 = proper(
            other_actor.parsed_email.parsed_name)
        similarity.same_name = (actor.parsed_name.name ==
                                other_actor.parsed_name.name)
        similarity.name_ratio = self.compare_names(actor.parsed_name,
                                                   other_actor.parsed_name)
        similarity.same_email = (actor.parsed_email.email ==
                                 other_actor.parsed_email.email)
        similarity.email_domain_ratio = fuzz.ratio(
            actor.parsed_email.domain, other_actor.parsed_email.domain)
        similarity.same_email_name = (actor.parsed_email.parsed_name.name ==
                                      other_actor.parsed_email.parsed_name.name)
        similarity.email_name_ratio = self.compare_names(
            actor.parsed_email.parsed_name,
            other_actor.parsed_email.parsed_name)
        similarity.name1_email_ratio = self.compare_names(
            actor.parsed_name, other_actor.parsed_email.parsed_name)
        similarity.name2_email_ratio = self.compare_names(
            actor.parsed_email.parsed_name, other_actor.parsed_name)
        return similarity

    @staticmethod
    def compare_names(name1: ParsedName, name2: ParsedName):
        if proper(name1) and proper(name2):
            compare = fuzz.token_set_ratio
        else:
            compare = fuzz.ratio
        return compare(name1.name, name2.name)

    def similar_emails(self, s: ActorSimilarity):
        return (s.same_email
                or (s.email_domain_ratio >= self.settings.min_email_domain_ratio
                    and s.email_name_ratio >= self.settings.min_email_name_ratio))

    def similar_proper_names(self, s: ActorSimilarity):
        return (s.proper_name1 and s.proper_name2
                and (s.same_name
                     or s.name_ratio >= self.settings.min_name_ratio))

    def similar_name_to_email(self, s: ActorSimilarity):
        return (s.name1_email_ratio >= self.settings.min_name_email_ratio
                or s.name2_email_ratio >= self.settings.min_name_email_ratio)

    @staticmethod
    def identical_actors(s: ActorSimilarity):
        return s.identical

    def group_similar_actors(self):
        similar_actor_groups = [list(g) for g
                                in nx.connected_components(self.actor_graph)]
        return similar_actor_groups

    def print_similarity_groups(self):
        similar_groups = self.group_similar_actors()
        for i, group in enumerate(similar_groups):
            if len(group) < 2:
                continue
            print('=== group', i, '===')
            for actor1_id, actor2_id, data in self.actor_graph.edges_iter(
                    nbunch=group, data=True):
                print(actor1_id, '->', actor2_id, data)
ashapochka/saapy
saapy/analysis/actor.py
Python
apache-2.0
11,608
0.000258
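A brief usage sketch for the parser and similarity graph above; all names and addresses are made up, and the printed values are indicative only:

parser = ActorParser()
parser.add_role_names([('admin', 'role'), ('github', 'service')])

a1 = parser.parse_actor('Jane Doe', 'jane.doe@example.com')
a2 = parser.parse_actor('J. Doe', 'jane.doe@users.example.com')
print(a1.actor_id)               # 'jane doe <jane.doe@example.com>'
print(a1.parsed_email.domain)    # 'example.com'

graph = ActorSimilarityGraph()
graph.add_actor(a1)
graph.add_actor(a2)              # linked automatically if similar enough
print(graph.group_similar_actors())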
""" Tests for the bulk_user_activate command. """ from django.contrib.auth.models import User from django.core.management import call_command from django.test import TestCase from openedx.stanford.djangoapps.student_utils.helpers import get_users_by_email class BulkUserActivateTests(TestCase): """ Test the bulk_user_activate command. """ help = __doc__ NUMBER_USERS = 10 NUMBER_DOMAINS = 3 def setUp(self): super(BulkUserActivateTests, self).setUp() self.domain = [ "{i}.example.com".format( i=i, ) for i in xrange(BulkUserActivateTests.NUMBER_DOMAINS) ] self.users = [ User.objects.create( username="user{i}".format( i=i, ), email="user{i}@{domain}".format( i=i, domain=self.domain[(i % BulkUserActivateTests.NUMBER_DOMAINS)], ), is_active=(i % 2), ) for i in xrange(BulkUserActivateTests.NUMBER_USERS) ] def test_bulk_without_force(self): """ Verify that nothing is changed when force is set to false. """ domain = self.domain[0] users_before = get_users_by_email(domain, is_active=True) count_before = users_before.count() self.assertGreater(count_before, 0) call_command( 'bulk_user_activate', '--domain', domain, ) users_after = get_users_by_email(domain, is_active=True) count_after = users_after.count() self.assertEqual(count_before, count_after) def test_bulk_with_force(self): """ Verify that users is activated when force is set to true. """ domain = self.domain[0] users_before = get_users_by_email(domain, is_active=False) count_before = users_before.count() self.assertGreater(count_before, 0) call_command( 'bulk_user_activate', '--domain', domain, '--force', ) users_after = get_users_by_email(domain, is_active=False) count = users_after.count() self.assertEqual(count, 0)
Stanford-Online/edx-platform
openedx/stanford/djangoapps/student_utils/tests/test_bulk_user_activate.py
Python
agpl-3.0
2,256
0.000887
""" Downloads the following: - Korean Wikipedia texts - Korean """ from sqlparse import parsestream from sqlparse.sql import Parenthesis for statement in parsestream(open('data/test.sql')): texts = [str(token.tokens[1].tokens[-1]).decode('string_escape') for token in statement.tokens if isinstance(token, Parenthesis)] print texts texts = [text for text in texts if text[0] != '#'] if texts: print "\n===\n".join(texts)
carpedm20/Bias
scripts/download.py
Python
bsd-3-clause
449
0.004454
# Copyright 2020 Makani Technologies LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Perch simulator parameters."""

from makani.analysis.control import geometry
from makani.config import mconfig
from makani.config import physics_util
from makani.config.sensor_util import MakeEncoderParams
from makani.control import system_types
from makani.sim import sim_types as m
import numpy as np


@mconfig.Config(deps={
    'common_params': 'common.common_params',
    'gs_model': 'base_station.gs_model',
    'perch': 'base_station.perch',
    'sim_options': 'common.sim.sim_options',
    'flight_plan': 'common.flight_plan'
})
def MakeParams(params):
  """Make perch simulator parameters."""
  # pylint: disable=invalid-name
  A, B = physics_util.CalcTwoLinkageDynamics(
      params['perch']['I_perch_and_drum'], params['perch']['I_drum'],
      params['perch']['b_perch'], params['perch']['b_drum'])

  flight_plan = params['flight_plan']

  # Pick an initial azimuth [rad] in the ground frame to rotate the
  # wing and the perch.  azi_g is 0 when the wing is on the negative
  # xg axis.
  azi_g = 0.0

  # Set perch angle based on flight plan.
  if (flight_plan in [m.kFlightPlanStartDownwind]
      and not params['sim_options'] & m.kSimOptConstraintSystem):
    theta_p_0 = m.Wrap(azi_g, -np.pi, np.pi)
    initialize_in_crosswind_config = True
  elif flight_plan in [m.kFlightPlanDisengageEngage,
                       m.kFlightPlanHighHover,
                       m.kFlightPlanHoverInPlace,
                       m.kFlightPlanLaunchPerch,
                       m.kFlightPlanManual,
                       m.kFlightPlanTurnKey]:
    theta_p_0 = m.Wrap(azi_g + np.pi, -np.pi, np.pi)
    initialize_in_crosswind_config = False
  else:
    assert False

  # The tophat has one perch azimuth encoder; GSv1 has none.
  perch_azi_enabled = [
      params['gs_model'] == system_types.kGroundStationModelTopHat,
      False
  ]

  return {
      # Radius [m] of the levelwind.
      'levelwind_radius': 1.5,

      # Position [m] of the levelwind in perch x and y coordinates
      # when the levelwind elevation is zero.
      'levelwind_hub_p': [1.8, -1.5],

      # Minimum tension [N] for the levelwind to engage.
      'levelwind_engage_min_tension': 1e3,

      # The matrices describing the linearized dynamics of the perch and
      # winch drum system.
      'A': {'d': A.tolist()},
      'B': {'d': B.tolist()},

      # Initial angle [rad] of the perch relative to ground coordinates.
      'theta_p_0': theta_p_0,

      # Boolean [#] that describes whether the perch begins in the
      # crosswind configuration (i.e. winch drum angle is 0.0 rad) or
      # a reeled-in configuration.
      'initialize_in_crosswind_config': initialize_in_crosswind_config,

      # Properties of the perch panel.  The perch panel is modeled as
      # the union of two cylinders, which are pitched and rolled about
      # the perch axes by the specified angles, then truncated at
      # planes parallel to the perch z-plane.  The port cylinder
      # corresponds to the port wing side and vice versa.
      #
      # Parameters are from hachtmann on 2015-08-21 (with corrections on
      # 2015-09-22), and are confirmed using the drawings located here:
      # go/makaniwiki/perch-geometry.
      'panel': {
          # For each panel, the center [m] and radius [m] describe the
          # cylinder modeling it.  The z_extents_p [m] specify the planes,
          # parallel to the perch z-plane, at which the cylinders are
          # cut.
          'port': {
              'center_panel': [3.122, 0.763],
              'radius': 4.0,
              'z_extents_p': [-0.625, 2.656],
          },
          'starboard': {
              'center_panel': [3.203, -1.103],
              'radius': 4.0,
              'z_extents_p': [-0.306, 2.656],
          },
          'origin_pos_p': [0.0, 0.0, 0.0],

          # Rotation matrix from the perch frame to the panel frame.
          'dcm_p2panel': {'d': geometry.AngleToDcm(
              np.deg2rad(0.0), np.deg2rad(6.0), np.deg2rad(-7.0)).tolist()},

          # Y extents [m] of the panel apparatus in the panel coordinate
          # system.
          'y_extents_panel': [-1.9, 2.0],
      },

      # Sensor parameters for perch encoders.  The biases of one or
      # two degrees are estimated typical biases.  The noise level is
      # chosen to be pessimistic but not completely unrealistic.
      'ts': params['common_params']['ts'],
      'levelwind_ele_sensor': [MakeEncoderParams(), MakeEncoderParams()],
      'perch_azi_sensor': [
          MakeEncoderParams(bias=np.deg2rad(-1.0), noise_level_counts=0.25,
                            scale=1.0 if perch_azi_enabled[0] else 0.0),
          MakeEncoderParams(bias=np.deg2rad(2.0), noise_level_counts=0.25,
                            scale=1.0 if perch_azi_enabled[1] else 0.0)
      ]
  }
google/makani
config/base_station/sim/perch_sim.py
Python
apache-2.0
5,391
0.002968
#!/usr/bin/env python

# Copyright (c) 2014, The MITRE Corporation. All rights reserved.
# See LICENSE.txt for complete terms.

import sys

from stix.core import STIXPackage


def parse_stix(pkg):
    print("== MALWARE ==")
    for fam in pkg.ttps:
        print("---")
        print("Title : " + fam.title)
        print("ID : " + fam.id_)
        for sample in fam.behavior.malware_instances:
            print("Sample: " + str(sample.names[0]))
            print("Type: " + str(sample.types[0]))

    for ind in pkg.indicators:
        print("---")
        print("Title : " + ind.title)
        print("Type : " + str(ind.indicator_types[0]))
        print("ID -> : " + ind.indicated_ttps[0].item.idref)
        for obs in ind.observables:
            for digest in obs.object_.properties.hashes:
                print("Hash : " + str(digest))

    return 0

if __name__ == '__main__':
    # Catch only the missing-argument case instead of using a bare except,
    # and exit through sys.exit for a proper status code.
    try:
        fname = sys.argv[1]
    except IndexError:
        sys.exit(1)

    # Close the input file once the package has been parsed.
    with open(fname) as fd:
        stix_pkg = STIXPackage.from_xml(fd)
    parse_stix(stix_pkg)
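# Usage sketch (the XML filename is hypothetical): the script expects a STIX
# XML document as its only command-line argument, e.g.
#
#   python malware-indicator-for-file-hash_consumer.py sample_package.xml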
jburns12/stixproject.github.io
documentation/idioms/malware-hash/malware-indicator-for-file-hash_consumer.py
Python
bsd-3-clause
1,039
0.001925
import lxml.etree as ET from GEMEditor.model.classes.cobra import Model, Metabolite, Compartment from GEMEditor.rw import * from GEMEditor.rw.compartment import add_compartments, parse_compartments from GEMEditor.rw.test.ex_compartment import valid_compartment_list from lxml.etree import Element def test_parse_compartments(): parent_node = ET.fromstring(valid_compartment_list) model = Model() parse_compartments(parent_node, model) assert model.gem_compartments["p"] == Compartment("p", "Periplasm") assert model.gem_compartments["c"] == Compartment("c", "Cytoplasm") assert model.gem_compartments["e"] == Compartment("e", "Extracellular") def test_add_compartments(): model = Model() model.gem_compartments["c"] = Compartment("c", "Cytoplasm") root = Element("Root") add_compartments(root, model) compartment_list = root.find(sbml3_listOfCompartments) assert compartment_list is not None compartment = compartment_list.find(sbml3_compartment) assert compartment is not None assert compartment.get("id") == "c" assert compartment.get("name") == "Cytoplasm" def test_add_compartments_defined_in_metabolite(): model = Model() metabolite = Metabolite(id="test", compartment="c") model.add_metabolites([metabolite]) root = Element("Root") add_compartments(root, model) compartment_list = root.find(sbml3_listOfCompartments) assert compartment_list is not None compartment = compartment_list.find(sbml3_compartment) assert compartment is not None assert compartment.get("id") == "c" assert compartment.get("name") is None def test_add_compartment_empty_model(): model = Model() root = Element("root") add_compartments(root, model) compartment_list = root.find(sbml3_listOfCompartments) assert compartment_list is None def test_consistency_write_read(): model1 = Model() model1.gem_compartments["c"] = Compartment("c", "Cytoplasm") root = Element("Root") add_compartments(root, model1) model2 = Model() parse_compartments(root, model2) assert model2.gem_compartments == model1.gem_compartments
JuBra/GEMEditor
GEMEditor/rw/test/test_compartment_rw.py
Python
gpl-3.0
2,169
0
""" C/Cython ascii file parser tests """ from pandas.compat import StringIO, BytesIO, map from datetime import datetime from pandas import compat import csv import os import sys import re import nose from numpy import nan import numpy as np from pandas import DataFrame, Series, Index, isnull, MultiIndex import pandas.io.parsers as parsers from pandas.io.parsers import (read_csv, read_table, read_fwf, TextParser, TextFileReader) from pandas.util.testing import (assert_almost_equal, assert_frame_equal, assert_series_equal, network) import pandas.lib as lib from pandas import compat from pandas.lib import Timestamp import pandas.util.testing as tm from pandas.parser import TextReader import pandas.parser as parser class TestCParser(tm.TestCase): def setUp(self): self.dirpath = tm.get_data_path() self.csv1 = os.path.join(self.dirpath, 'test1.csv') self.csv2 = os.path.join(self.dirpath, 'test2.csv') self.xls1 = os.path.join(self.dirpath, 'test.xls') def test_file_handle(self): try: f = open(self.csv1, 'rb') reader = TextReader(f) result = reader.read() finally: f.close() def test_string_filename(self): reader = TextReader(self.csv1, header=None) result = reader.read() def test_file_handle_mmap(self): try: f = open(self.csv1, 'rb') reader = TextReader(f, memory_map=True, header=None) result = reader.read() finally: f.close() def test_StringIO(self): text = open(self.csv1, 'rb').read() src = BytesIO(text) reader = TextReader(src, header=None) result = reader.read() def test_string_factorize(self): # should this be optional? data = 'a\nb\na\nb\na' reader = TextReader(StringIO(data), header=None) result = reader.read() self.assertEqual(len(set(map(id, result[0]))), 2) def test_skipinitialspace(self): data = ('a, b\n' 'a, b\n' 'a, b\n' 'a, b') reader = TextReader(StringIO(data), skipinitialspace=True, header=None) result = reader.read() self.assert_numpy_array_equal(result[0], ['a', 'a', 'a', 'a']) self.assert_numpy_array_equal(result[1], ['b', 'b', 'b', 'b']) def test_parse_booleans(self): data = 'True\nFalse\nTrue\nTrue' reader = TextReader(StringIO(data), header=None) result = reader.read() self.assertEqual(result[0].dtype, np.bool_) def test_delimit_whitespace(self): data = 'a b\na\t\t "b"\n"a"\t \t b' reader = TextReader(StringIO(data), delim_whitespace=True, header=None) result = reader.read() self.assert_numpy_array_equal(result[0], ['a', 'a', 'a']) self.assert_numpy_array_equal(result[1], ['b', 'b', 'b']) def test_embedded_newline(self): data = 'a\n"hello\nthere"\nthis' reader = TextReader(StringIO(data), header=None) result = reader.read() expected = ['a', 'hello\nthere', 'this'] self.assert_numpy_array_equal(result[0], expected) def test_euro_decimal(self): data = '12345,67\n345,678' reader = TextReader(StringIO(data), delimiter=':', decimal=',', header=None) result = reader.read() expected = [12345.67, 345.678] tm.assert_almost_equal(result[0], expected) def test_integer_thousands(self): data = '123,456\n12,500' reader = TextReader(StringIO(data), delimiter=':', thousands=',', header=None) result = reader.read() expected = [123456, 12500] tm.assert_almost_equal(result[0], expected) def test_integer_thousands_alt(self): data = '123.456\n12.500' reader = TextFileReader(StringIO(data), delimiter=':', thousands='.', header=None) result = reader.read() expected = [123456, 12500] tm.assert_almost_equal(result[0], expected) def test_skip_bad_lines(self): # too many lines, see #2430 for why data = ('a:b:c\n' 'd:e:f\n' 'g:h:i\n' 'j:k:l:m\n' 'l:m:n\n' 'o:p:q:r') reader = TextReader(StringIO(data), 
delimiter=':', header=None) self.assertRaises(parser.CParserError, reader.read) reader = TextReader(StringIO(data), delimiter=':', header=None, error_bad_lines=False, warn_bad_lines=False) result = reader.read() expected = {0: ['a', 'd', 'g', 'l'], 1: ['b', 'e', 'h', 'm'], 2: ['c', 'f', 'i', 'n']} assert_array_dicts_equal(result, expected) stderr = sys.stderr sys.stderr = StringIO() try: reader = TextReader(StringIO(data), delimiter=':', header=None, error_bad_lines=False, warn_bad_lines=True) reader.read() val = sys.stderr.getvalue() self.assertTrue('Skipping line 4' in val) self.assertTrue('Skipping line 6' in val) finally: sys.stderr = stderr def test_header_not_enough_lines(self): data = ('skip this\n' 'skip this\n' 'a,b,c\n' '1,2,3\n' '4,5,6') reader = TextReader(StringIO(data), delimiter=',', header=2, as_recarray=True) header = reader.header expected = [['a', 'b', 'c']] self.assertEqual(header, expected) recs = reader.read() expected = {'a': [1, 4], 'b': [2, 5], 'c': [3, 6]} assert_array_dicts_equal(expected, recs) # not enough rows self.assertRaises(parser.CParserError, TextReader, StringIO(data), delimiter=',', header=5, as_recarray=True) def test_escapechar(self): data = ('\\"hello world\"\n' '\\"hello world\"\n' '\\"hello world\"') reader = TextReader(StringIO(data), delimiter=',', header=None, escapechar='\\') result = reader.read() expected = {0: ['"hello world"'] * 3} assert_array_dicts_equal(result, expected) def test_eof_has_eol(self): # handling of new line at EOF pass def test_na_substitution(self): pass def test_numpy_string_dtype(self): data = """\ a,1 aa,2 aaa,3 aaaa,4 aaaaa,5""" def _make_reader(**kwds): return TextReader(StringIO(data), delimiter=',', header=None, **kwds) reader = _make_reader(dtype='S5,i4') result = reader.read() self.assertEqual(result[0].dtype, 'S5') ex_values = np.array(['a', 'aa', 'aaa', 'aaaa', 'aaaaa'], dtype='S5') self.assertTrue((result[0] == ex_values).all()) self.assertEqual(result[1].dtype, 'i4') reader = _make_reader(dtype='S4') result = reader.read() self.assertEqual(result[0].dtype, 'S4') ex_values = np.array(['a', 'aa', 'aaa', 'aaaa', 'aaaa'], dtype='S4') self.assertTrue((result[0] == ex_values).all()) self.assertEqual(result[1].dtype, 'S4') reader = _make_reader(dtype='S4', as_recarray=True) result = reader.read() self.assertEqual(result['0'].dtype, 'S4') ex_values = np.array(['a', 'aa', 'aaa', 'aaaa', 'aaaa'], dtype='S4') self.assertTrue((result['0'] == ex_values).all()) self.assertEqual(result['1'].dtype, 'S4') def test_pass_dtype(self): data = """\ one,two 1,a 2,b 3,c 4,d""" def _make_reader(**kwds): return TextReader(StringIO(data), delimiter=',', **kwds) reader = _make_reader(dtype={'one': 'u1', 1: 'S1'}) result = reader.read() self.assertEqual(result[0].dtype, 'u1') self.assertEqual(result[1].dtype, 'S1') reader = _make_reader(dtype={'one': np.uint8, 1: object}) result = reader.read() self.assertEqual(result[0].dtype, 'u1') self.assertEqual(result[1].dtype, 'O') reader = _make_reader(dtype={'one': np.dtype('u1'), 1: np.dtype('O')}) result = reader.read() self.assertEqual(result[0].dtype, 'u1') self.assertEqual(result[1].dtype, 'O') def test_usecols(self): data = """\ a,b,c 1,2,3 4,5,6 7,8,9 10,11,12""" def _make_reader(**kwds): return TextReader(StringIO(data), delimiter=',', **kwds) reader = _make_reader(usecols=(1, 2)) result = reader.read() exp = _make_reader().read() self.assertEqual(len(result), 2) self.assertTrue((result[1] == exp[1]).all()) self.assertTrue((result[2] == exp[2]).all()) def test_cr_delimited(self): def 
_test(text, **kwargs): nice_text = text.replace('\r', '\r\n') result = TextReader(StringIO(text), **kwargs).read() expected = TextReader(StringIO(nice_text), **kwargs).read() assert_array_dicts_equal(result, expected) data = 'a,b,c\r1,2,3\r4,5,6\r7,8,9\r10,11,12' _test(data, delimiter=',') data = 'a b c\r1 2 3\r4 5 6\r7 8 9\r10 11 12' _test(data, delim_whitespace=True) data = 'a,b,c\r1,2,3\r4,5,6\r,88,9\r10,11,12' _test(data, delimiter=',') sample = ('A,B,C,D,E,F,G,H,I,J,K,L,M,N,O\r' 'AAAAA,BBBBB,0,0,0,0,0,0,0,0,0,0,0,0,0\r' ',BBBBB,0,0,0,0,0,0,0,0,0,0,0,0,0') _test(sample, delimiter=',') data = 'A B C\r 2 3\r4 5 6' _test(data, delim_whitespace=True) data = 'A B C\r2 3\r4 5 6' _test(data, delim_whitespace=True) def test_empty_field_eof(self): data = 'a,b,c\n1,2,3\n4,,' result = TextReader(StringIO(data), delimiter=',').read() expected = {0: np.array([1, 4]), 1: np.array(['2', ''], dtype=object), 2: np.array(['3', ''], dtype=object)} assert_array_dicts_equal(result, expected) def assert_array_dicts_equal(left, right): for k, v in compat.iteritems(left): assert(np.array_equal(v, right[k])) if __name__ == '__main__': nose.runmodule(argv=[__file__, '-vvs', '-x', '--pdb', '--pdb-failure'], exit=False)
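# Standalone usage sketch (illustrative, mirroring the tests above):
# TextReader.read() returns a dict mapping column index to a numpy array.
#
#   from pandas.parser import TextReader
#   from pandas.compat import StringIO
#
#   reader = TextReader(StringIO('1,2\n3,4'), delimiter=',', header=None)
#   columns = reader.read()   # {0: array of first column, 1: array of second}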
webmasterraj/FogOrNot
flask/lib/python2.7/site-packages/pandas/io/tests/test_cparser.py
Python
gpl-2.0
10,838
0.000185
# -*- coding: utf-8 -*-
# Configuration file for ShuffleIR
#===============================================
# Installation path of ShuffleMove
pathShuffleMove = '../../Shuffle-Move/'

# Stage ID of the current stage as used by ShuffleMove
# e.g. 'SP_275' for the Sweet Dreams escalation stage, '150' for Mega Mewtwo
# Note that the single quotes are required!
varStageID = 'SP_275'

# List of support Pokemon
# Format: listSupport = ('National Dex # of the Pokemon', ...)
# Note that the single quotes are required!
# Special entries: blank 'Air', metal block 'Metal', wood block 'Wood', coin 'Coin'
# For Mega Pokemon, append -m to the Dex number, e.g. Mega Aerodactyl is '142-m'
# See Supported_Icons.md for the list of supported icons
listSupport=['Air','Wood','150-m','249','488','494']

# Whether to load the frozen variants of the icons (False = no, True = yes)
varIceSupport=True

# Metal block counter
varMetalTimer=3

#===============================================
# The settings below define the crop region of the Miiverse screenshot

# Coordinates (x1, y1, x2, y2) of the puzzle block area relative to the
# window screenshot, where (x1, y1) is the top-left corner and (x2, y2)
# is the bottom-right corner
#varBox = (46, 6, 274, 234) # Old 3DS XL
varBox = (38,376,494,832) # iPhone 6p + Airserver

#===============================================
# The settings below normally should not be modified

# Path to Mask
pathMask = 'images/block_mask76.png'

# Actual path of the board file
pathBoard = pathShuffleMove + '/config/boards/board.txt'

#BlockSize = 38
BlockSize = 76
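# Illustrative sketch (not part of ShuffleIR itself) of how varBox and
# BlockSize fit together: crop the board region out of a screenshot, then
# walk the 6x6 Pokemon Shuffle grid in BlockSize steps. Note that varBox
# above spans 456x456 pixels, i.e. exactly 6 * BlockSize per side. The
# screenshot path is hypothetical.
from PIL import Image

def iter_blocks(screenshot_path):
    """Yields one BlockSize x BlockSize image per board cell."""
    board = Image.open(screenshot_path).crop(varBox)
    for row in range(6):
        for col in range(6):
            x, y = col * BlockSize, row * BlockSize
            yield board.crop((x, y, x + BlockSize, y + BlockSize))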
coeusite/ShuffleIR
config.py
Python
gpl-2.0
1,373
0.017769
import unittest, random, sys, time, os, stat, pwd, grp sys.path.extend(['.','..','../..','py']) import h2o, h2o_cmd, h2o_browse as h2b, h2o_import as h2i, h2o_exec as h2e FILENUM=100 def write_syn_dataset(csvPathname, rowCount, colCount, SEED, translateList): r1 = random.Random(SEED) dsf = open(csvPathname, "w+") roll = random.randint(0,1) # if roll==0: if 1==1: # spit out a header rowData = [] for j in range(colCount): rowData.append('h' + str(j)) rowDataCsv = ",".join(map(str,rowData)) dsf.write(rowDataCsv + "\n") for i in range(rowCount): rowData = [] for j in range(colCount): ri1 = r1.triangular(0,3,1.5) ri1Int = int(round(ri1,0)) rowData.append(ri1Int) if translateList is not None: for i, iNum in enumerate(rowData): rowData[i] = translateList[iNum] rowDataCsv = ",".join(map(str,rowData)) dsf.write(rowDataCsv + "\n") dsf.close() # print csvPathname class Basic(unittest.TestCase): def tearDown(self): h2o.check_sandbox_for_errors() @classmethod def setUpClass(cls): global SEED SEED = h2o.setup_random_seed() print "WARNING: won't work for remote h2o, because syn_datasets is created locally only, for import" h2o.init(1,java_heap_GB=14) @classmethod def tearDownClass(cls): h2o.tear_down_cloud() def test_cols_multi_permission(self): SYNDATASETS_DIR = h2o.make_syn_dir() translateList = ['a','b','c','d','e','f','g','h','i','j','k','l','m','n','o','p','q','r','s','t','u'] tryList = [ (300, 100, 'cA', 60), ] # h2b.browseTheCloud() cnum = 0 for (rowCount, colCount, hex_key, timeoutSecs) in tryList: cnum += 1 # FIX! should we add a header to them randomly??? print "Wait while", FILENUM, "synthetic files are created in", SYNDATASETS_DIR rowxcol = str(rowCount) + 'x' + str(colCount) for fileN in range(FILENUM): csvFilename = 'syn_' + str(fileN) + "_" + str(SEED) + "_" + rowxcol + '.csv' csvPathname = SYNDATASETS_DIR + '/' + csvFilename write_syn_dataset(csvPathname, rowCount, colCount, SEED, translateList) # DON"T get redirected to S3! (EC2 hack in config, remember!) # use it at the node level directly (because we gen'ed the files. # use regex. the only files in the dir will be the ones we just created with *fileN* match parseResult = h2i.import_parse(path=SYNDATASETS_DIR + '/*'+rowxcol+'*', schema='local', exclude=None, header=1, timeoutSecs=timeoutSecs) print "parseResult['destination_key']: " + parseResult['destination_key'] inspect = h2o_cmd.runInspect(None, parseResult['destination_key']) h2o_cmd.infoFromInspect(inspect, csvPathname) # FIX! 
h2o strips one of the headers, but treats all the other files with headers as data print "\n" + parseResult['destination_key'] + ":", \ " numRows:", "{:,}".format(inspect['numRows']), \ " numCols:", "{:,}".format(inspect['numCols']) # get uid/gid of files the test create (dir here) origUid = os.getuid() origGid = os.getgid() print "my uid and gid:", origUid, origGid # pick one file to flip fileList = os.listdir(SYNDATASETS_DIR) badFile = random.choice(fileList) badPathname = SYNDATASETS_DIR + "/" + badFile print "Going to use this file as the bad file:", badPathname print "checking os.chmod and parse" # os.chmod(badPathname, stat.S_IRWXU | stat.S_IRWXO) # always have to re-import because source key is deleted by h2o parseResult = h2i.import_parse(path=SYNDATASETS_DIR + '/*'+rowxcol+'*', schema='local', exclude=None, header=1, timeoutSecs=timeoutSecs) print "parseResult['destination_key']: " + parseResult['destination_key'] inspect = h2o_cmd.runInspect(None, parseResult['destination_key']) h2o_cmd.infoFromInspect(inspect, csvPathname) print "write by owner, only, and parse" os.chmod(badPathname, stat.S_IWRITE) parseResult = h2i.import_parse(path=SYNDATASETS_DIR + '/*'+rowxcol+'*', schema='local', exclude=None, header=1, timeoutSecs=timeoutSecs) inspect = h2o_cmd.runInspect(None, parseResult['destination_key']) h2o_cmd.infoFromInspect(inspect, csvPathname) print "execute by owner, only, and parse" os.chmod(badPathname, stat.S_IEXEC) h2o.nodes[0].import_files(SYNDATASETS_DIR) parseResult = h2i.import_parse(path=SYNDATASETS_DIR + '/*'+rowxcol+'*', schema='local', exclude=None, header=1, timeoutSecs=timeoutSecs) inspect = h2o_cmd.runInspect(None, parseResult['destination_key']) h2o_cmd.infoFromInspect(inspect, csvPathname) # change back to normal # os.chmod(badPathname, stat.S_IRWXU | stat.S_IRWXO) # how to make this work? disable for now if (1==0): # now change uid badUid = pwd.getpwnam("nobody").pw_uid badGid = grp.getgrnam("nogroup").gr_gid print "parsing after one bad uid" os.chown(badPathname, badUid, origGid) parseResult = h2i.import_parse(path=SYNDATASETS_DIR + '/*'+rowxcol+'*', schema='local', exclude=None, header=1, timeoutSecs=timeoutSecs) print "parsing after one bad gid" os.chown(badPathname, origUid, badGid) parseResult = h2i.import_parse(path=SYNDATASETS_DIR + '/*'+rowxcol+'*', schema='local', exclude=None, header=1, timeoutSecs=timeoutSecs) os.chown(badPathname, origUid, origGid) if __name__ == '__main__': h2o.unit_main()
vbelakov/h2o
py/testdir_single_jvm/test_enum_multi_permission.py
Python
apache-2.0
6,221
0.009805
""" Tests for Serializer Fields """ from django.core.exceptions import ImproperlyConfigured from django.test import TestCase import pytest from rest_framework.serializers import ValidationError from courses.factories import EdxAuthorFactory, CourseFactory from courses.models import EdxAuthor from courses.serializers import JsonListField as JLF from courses.serializers import StringyManyToManyField as SMMF class JsonListFieldTests(TestCase): """ Tests for JsonListField """ def test_decodes_string(self): """ Test that empty list string decodes properly """ f = JLF() self.assertEqual([], f.to_internal_value('[]')) def test_decodes_unicode(self): """ Test that empty list unicode string decodes properly """ f = JLF() self.assertEqual([], f.to_internal_value(u'[]')) def test_handles_decoding_nullable_values(self): """ Test that null is decoded to None """ f = JLF() self.assertEqual(None, f.to_internal_value('null')) def test_throws_validationerror_on_invalid_json(self): """ Test invalid JSON """ f = JLF() self.assertRaises(ValidationError, f.to_internal_value, 'testing') def test_not_list(self): """ Test that to_internal_value takes only lists """ f = JLF() self.assertRaises(ValidationError, f.to_internal_value, '{}') class StringyM2MTestCase(TestCase): """Tests for m2m stringy field serializer""" def test_requires_model(self): """Field requires a model kwarg""" self.assertRaises(ImproperlyConfigured, SMMF, lookup='test') def test_requires_lookup(self): """Field requires a lookup kwarg""" self.assertRaises(ImproperlyConfigured, SMMF, model=EdxAuthor) def test_returns_string_for_all_objects(self): # pylint: disable=no-self-use """model-to-string returns correct strings""" e1 = EdxAuthorFactory.create() e2 = EdxAuthorFactory.create() co = CourseFactory.create() co.instructors.add(e1) co.instructors.add(e2) f = SMMF(model=EdxAuthor, lookup='edx_uid') assert sorted([str(e1), str(e2)]) == sorted(f.to_representation(co.instructors)) def test_returns_model_if_string_provided(self): # pylint: disable=no-self-use """string-to-model returns correct model for single string""" uid = '2d133482b3214a119f55c3060d882ceb' CourseFactory.create() f = SMMF(model=EdxAuthor, lookup='edx_uid') ms = f.to_internal_value(uid) assert len(ms) == 1 assert ms[0].edx_uid == uid def test_returns_models_if_list_provided(self): # pylint: disable=no-self-use """string-to-model returns correct model for list""" uid = '2d133482b3214a119f55c3060d882ceb' uid2 = '3d133482b3214a119f55c3060d882ceb' CourseFactory.create() f = SMMF(model=EdxAuthor, lookup='edx_uid') ms = f.to_internal_value([uid, uid2]) assert len(ms) == 2 assert ms[0].edx_uid != ms[1].edx_uid assert ms[0].edx_uid in [uid, uid2] assert ms[1].edx_uid in [uid, uid2] def test_errors_on_invalid_input(self): # pylint: disable=no-self-use """Only deserialize known, supported types.""" CourseFactory.create() f = SMMF(model=EdxAuthor, lookup='edx_uid') with pytest.raises(ValidationError): f.to_internal_value(dict())
mitodl/ccxcon
courses/fields_test.py
Python
agpl-3.0
3,535
0.001132
#! /usr/bin/python
# https://github.com/jackchi/interview-prep

import random

# Bubble Sort
# Randomly generate 10 integers in [-100, 100); randrange excludes the stop value.
arr = [random.randrange(-100, 100) for i in range(10)]
print(f'original {arr}')

def bubbleSort(array):
    n = len(array)
    # Traverse the array; after pass i, the last i elements are in place.
    for i in range(n):
        swapped = False
        # Stop before the already-sorted tail.
        for j in range(0, n-i-1):
            # Bubble the larger element toward the end.
            if array[j] > array[j+1]:
                array[j], array[j+1] = array[j+1], array[j]
                swapped = True
        # If no two elements were swapped by the inner loop,
        # the array is already sorted.
        if not swapped:
            break
    return array

a = bubbleSort(arr)
print(f"sorted {a}")
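# Quick sanity check (illustrative addition): bubble sort should agree with
# Python's built-in sorted() on a copy of the same input.
sample = [random.randrange(-100, 100) for _ in range(10)]
assert bubbleSort(sample[:]) == sorted(sample)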
jackchi/interview-prep
sorting/bubbleSort.py
Python
mit
738
0.023035
from flask import Blueprint api = Blueprint('api', __name__, url_prefix='/rest/v1.0') from . import authentication, errors, views
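# Registration sketch (illustrative; the application setup below is
# hypothetical): a Flask app serves these views under the /rest/v1.0 prefix
# once the blueprint is registered.
#
#   from flask import Flask
#   from enma.rest import api
#
#   app = Flask(__name__)
#   app.register_blueprint(api)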
pixmeter/enma
enma/rest/__init__.py
Python
bsd-3-clause
131
0.015267
import ast
import logging
import os.path
import ConfigParser
import StringIO


class ConfigError(Exception):

    def __init__(self, msg):
        self.msg = msg

    def __str__(self):
        return self.msg


class ConfigNoOptionError(ConfigError):

    def __init__(self, option, path):
        self.option = option
        self.path = path

    def __str__(self):
        return "There's no option %s in config file %s." % (
            self.option, self.path)


class LibvirtConfigUnknownKeyTypeError(ConfigError):

    def __init__(self, key, key_type):
        self.key = key
        self.key_type = key_type

    def __str__(self):
        # Argument order fixed to match the message placeholders.
        return "Unknown type %s for key %s." % (self.key_type, self.key)


class LibvirtConfigUnknownKeyError(ConfigError):

    def __init__(self, key):
        self.key = key

    def __str__(self):
        return 'Unknown config key %s' % self.key


class SectionlessConfig(object):

    """
    This is a wrapper class around python's internal library ConfigParser,
    except that it allows manipulating a sectionless configuration file in a
    dict-like way.

    Example config file test.conf:

    ># This is a comment line.
    >a = 1
    >b = [hi, there]
    >c = hello
    >d = "hi, there"
    >e = [hi,
    >    there]

    Example script using `try...finally...` statement:

    >>> from virttest import utils_config
    >>> config = utils_config.SectionlessConfig('test.conf')
    >>> try:
    ...     print len(config)
    ...     print config
    ...     print config['a']
    ...     del config['a']
    ...     config['f'] = 'test'
    ...     print config
    ... finally:
    ...     config.restore()

    Example script using `with` statement:

    >>> from virttest import utils_config
    >>> with utils_config.SectionlessConfig('test.conf') as config:
    ...     print len(config)
    ...     print config
    ...     print config['a']
    ...     del config['a']
    ...     config['f'] = 'test'
    ...     print config
    """

    def __init__(self, path):
        self.path = path
        self.parser = ConfigParser.ConfigParser()
        # Prevent converting option names to lower case
        self.parser.optionxform = str
        self.backup_content = open(path, 'r').read()
        read_fp = StringIO.StringIO('[root]\n' + self.backup_content)
        self.parser.readfp(read_fp)

    def __sync_file(self):
        out_file = open(self.path, 'w')
        try:
            out_file.write(self.__str__())
        finally:
            out_file.close()

    def __len__(self):
        return len(self.parser.items('root'))

    def __getitem__(self, option):
        try:
            return self.parser.get('root', option)
        except ConfigParser.NoOptionError:
            raise ConfigNoOptionError(option, self.path)

    def __setitem__(self, option, value):
        self.parser.set('root', option, value)
        self.__sync_file()

    def __delitem__(self, option):
        res = self.parser.remove_option('root', option)
        if res:
            self.__sync_file()
        else:
            raise ConfigNoOptionError(option, self.path)

    def __contains__(self, item):
        return self.parser.has_option('root', item)

    def __str__(self):
        write_fp = StringIO.StringIO()
        self.parser.write(write_fp)
        return write_fp.getvalue().split('\n', 1)[1]

    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        self.restore()

    def restore(self):
        out_file = open(self.path, 'w')
        try:
            out_file.write(self.backup_content)
        finally:
            out_file.close()

    def set_raw(self, option, value):
        self[option] = "%s" % value

    def set_string(self, option, value):
        self[option] = '"%s"' % value

    def set_int(self, option, value):
        self[option] = '%d' % int(value)

    def set_float(self, option, value):
        self[option] = '%s' % float(value)

    def set_boolean(self, option, value):
        if type(value) == str:
            value = int(value)
        if bool(value):
            self[option] = '1'
        else:
            self[option] = '0'

    def set_list(self, option, value):
        # TODO: line separation
        value = ['"%s"' % i for i in list(value)]
        self[option] = '[%s]' % ', '.join(value)

    def get_raw(self, option):
        return self[option]

    def get_string(self,
                   option):
        raw_str = self[option].strip()
        if raw_str.startswith('"') and raw_str.endswith('"'):
            raw_str = raw_str[1:-1]
        elif raw_str.startswith("'") and raw_str.endswith("'"):
            raw_str = raw_str[1:-1]
        else:
            raise ValueError("Invalid value for string: %s" % raw_str)
        return raw_str

    def get_int(self, option):
        return int(self.get_raw(option))

    def get_float(self, option):
        return float(self.get_raw(option))

    def get_boolean(self, option):
        try:
            bool_str = self.get_string(option).lower()
        except ValueError:
            bool_str = str(self.get_int(option))
        if bool_str in ["1", "yes", "true", "on"]:
            return True
        if bool_str in ["0", "no", "false", "off"]:
            return False
        raise ValueError("Invalid value for boolean: %s" % bool_str)

    def get_list(self, option):
        list_str = self.get_raw(option)
        return [str(i) for i in ast.literal_eval(list_str)]


class LibvirtConfigCommon(SectionlessConfig):

    """
    An abstract class to manipulate options of libvirt-related configuration
    files via properties.

    Variables "__option_types__" and "conf_path" must be set up in the
    inherited classes before use.

    "__option_types__" is a dict containing every possible option as keys and
    their type ("boolean", "int", "string", "float" or "list") as values.

    Basic usage:
    1) Create a config file object:
    >>> # LibvirtdConfig is a subclass of LibvirtConfigCommon.
    >>> config = LibvirtdConfig()

    2) Set or update an option:
    >>> config.listen_tcp = True
    >>> config.listen_tcp = 1
    >>> config.listen_tcp = "1" # All three have the same effect.

    >>> # If the setting value doesn't meet the specified type,
    >>> config.listen_tcp = "invalid"
    >>> # it'll throw a warning message and set a raw string instead.

    >>> # Use set_* methods when you need to customize the result.
    >>> config.set_raw("'1'")

    3) Get an option:
    >>> is_listening = config.listen_tcp
    >>> print is_listening
    True

    4) Delete an option from the config file:
    >>> del config.listen_tcp

    5) Make the changes take effect in libvirt by restarting the libvirt
    daemon.
    >>> from virttest import utils_libvirtd
    >>> utils_libvirtd.Libvirtd().restart()

    6) Restore the content of the config file.
    >>> config.restore()
    """
    __option_types__ = {}
    conf_path = ''

    def __init__(self, path=''):
        if path:
            self.conf_path = path
        if not self.conf_path:
            raise ConfigError("Path for config file is not set up.")
        if not self.__option_types__:
            raise ConfigError("__option_types__ is not set up.")
        if not os.path.isfile(self.conf_path):
            raise ConfigError("Path for config file %s doesn't exist." %
                              self.conf_path)
        super(LibvirtConfigCommon, self).__init__(self.conf_path)

    def __getattr__(self, key):
        if key in self.__option_types__:
            key_type = self.__option_types__[key]
            if key_type not in ['boolean', 'int', 'float', 'string', 'list']:
                raise LibvirtConfigUnknownKeyTypeError(key, key_type)
            else:
                get_func = eval('self.get_' + key_type)
                try:
                    return get_func(key)
                except ConfigNoOptionError:
                    return None
        else:
            raise LibvirtConfigUnknownKeyError(key)

    def __setattr__(self, key, value):
        if key in self.__option_types__:
            key_type = self.__option_types__[key]
            if key_type not in ['boolean', 'int', 'float', 'string', 'list']:
                raise LibvirtConfigUnknownKeyTypeError(key, key_type)
            else:
                set_func = eval('self.set_' + key_type)
                try:
                    set_func(key, value)
                except ValueError:
                    logging.warning("Key %s might not have type %s. 
Set raw " "string instead.", key, key_type) self.set_raw(key, value) super(LibvirtConfigCommon, self).__setattr__(key, value) def __delattr__(self, key): if key in self.__option_types__: key_type = self.__option_types__[key] if key_type not in ['boolean', 'int', 'float', 'string', 'list']: raise LibvirtConfigUnknownKeyTypeError(key, key_type) else: try: del self[key] except ConfigNoOptionError: pass super(LibvirtConfigCommon, self).__setattr__(key, None) else: raise LibvirtConfigUnknownKeyError(key) class LibvirtdConfig(LibvirtConfigCommon): """ Class for libvirt daemon config file. """ conf_path = '/etc/libvirt/libvirtd.conf' __option_types__ = { 'listen_tls': 'boolean', 'listen_tcp': 'boolean', 'tls_port': 'string', 'tcp_port': 'string', 'listen_addr': 'string', 'mdns_adv': 'boolean', 'mdns_name': 'string', 'unix_sock_group': 'string', 'unix_sock_ro_perms': 'string', 'unix_sock_rw_perms': 'string', 'unix_sock_dir': 'string', 'auth_unix_ro': 'string', 'auth_unix_rw': 'string', 'auth_tcp': 'string', 'auth_tls': 'string', 'access_drivers': 'list', 'key_file': 'string', 'cert_file': 'string', 'ca_file': 'string', 'crl_file': 'string', 'tls_no_sanity_certificate': 'boolean', 'tls_no_verify_certificate': 'boolean', 'tls_allowed_dn_list': 'list', 'sasl_allowed_username_list': 'list', 'max_clients': 'int', 'max_queued_clients': 'int', 'min_workers': 'int', 'max_workers': 'int', 'prio_workers': 'int', 'max_requests': 'int', 'max_client_requests': 'int', 'log_level': 'int', 'log_filters': 'string', 'log_outputs': 'string', 'log_buffer_size': 'int', 'audit_level': 'int', 'audit_logging': 'int', 'host_uuid': 'string', 'keepalive_interval': 'int', 'keepalive_count': 'int', 'keepalive_required': 'boolean', } class LibvirtQemuConfig(LibvirtConfigCommon): """ Class for libvirt qemu config file. """ conf_path = '/etc/libvirt/qemu.conf' __option_types__ = { 'vnc_listen': 'string', 'vnc_auto_unix_socket': 'boolean', 'vnc_tls': 'boolean', 'vnc_tls_x509_cert_dir': 'string', 'vnc_tls_x509_verify': 'boolean', 'vnc_password': 'string', 'vnc_sasl': 'boolean', 'vnc_sasl_dir': 'string', 'vnc_allow_host_audio': 'boolean', 'spice_listen': 'string', 'spice_tls': 'boolean', 'spice_tls_x509_cert_dir': 'string', 'spice_password': 'string', 'remote_display_port_min': 'int', 'remote_display_port_max': 'int', 'remote_websocket_port_min': 'int', 'remote_websocket_port_max': 'int', 'security_driver': 'list', 'security_default_confined': 'boolean', 'security_require_confined': 'boolean', 'user': 'string', 'group': 'string', 'dynamic_ownership': 'boolean', 'cgroup_controllers': 'list', 'cgroup_device_acl': 'list', 'save_image_format': 'string', 'dump_image_format': 'string', 'snapshot_image_format': 'string', 'auto_dump_path': 'string', 'auto_dump_bypass_cache': 'boolean', 'auto_start_bypass_cache': 'boolean', 'hugetlbfs_mount': 'list', 'bridge_helper': 'string', 'clear_emulator_capabilities': 'boolean', 'set_process_name': 'boolean', 'max_processes': 'int', 'max_files': 'int', 'mac_filter': 'boolean', 'relaxed_acs_check': 'boolean', 'allow_disk_format_probing': 'boolean', 'lock_manager': 'string', 'max_queued': 'int', 'keepalive_interval': 'int', 'keepalive_count': 'int', 'seccomp_sandbox': 'int', 'migration_address': 'string', 'migration_port_min': 'int', 'migration_port_max': 'int', } class LibvirtdSysConfig(LibvirtConfigCommon): """ Class for sysconfig libvirtd config file. 
""" conf_path = '/etc/sysconfig/libvirtd' __option_types__ = { 'LIBVIRTD_CONFIG': 'string', 'LIBVIRTD_ARGS': 'string', 'KRB5_KTNAME': 'string', 'QEMU_AUDIO_DRV': 'string', 'SDL_AUDIODRIVER': 'string', 'LIBVIRTD_NOFILES_LIMIT': 'int', } class LibvirtGuestsConfig(LibvirtConfigCommon): """ Class for sysconfig libvirt-guests config file. """ conf_path = '/etc/sysconfig/libvirt-guests' __option_types__ = { 'URIS': 'string', 'ON_BOOT': 'string', 'START_DELAY': 'int', 'ON_SHUTDOWN': 'string', 'PARALLEL_SHUTDOWN': 'int', 'SHUTDOWN_TIMEOUT': 'int', 'BYPASS_CACHE': 'boolean' }
autotest/virt-test
virttest/utils_config.py
Python
gpl-2.0
13,647
0
""" Implementation of the trigsimp algorithm by Fu et al. The idea behind the ``fu`` algorithm is to use a sequence of rules, applied in what is heuristically known to be a smart order, to select a simpler expression that is equivalent to the input. There are transform rules in which a single rule is applied to the expression tree. The following are just mnemonic in nature; see the docstrings for examples. TR0 - simplify expression TR1 - sec-csc to cos-sin TR2 - tan-cot to sin-cos ratio TR2i - sin-cos ratio to tan TR3 - angle canonicalization TR4 - functions at special angles TR5 - powers of sin to powers of cos TR6 - powers of cos to powers of sin TR7 - reduce cos power (increase angle) TR8 - expand products of sin-cos to sums TR9 - contract sums of sin-cos to products TR10 - separate sin-cos arguments TR10i - collect sin-cos arguments TR11 - reduce double angles TR12 - separate tan arguments TR12i - collect tan arguments TR13 - expand product of tan-cot TRmorrie - prod(cos(x*2**i), (i, 0, k - 1)) -> sin(2**k*x)/(2**k*sin(x)) TR14 - factored powers of sin or cos to cos or sin power TR15 - negative powers of sin to cot power TR16 - negative powers of cos to tan power TR22 - tan-cot powers to negative powers of sec-csc functions TR111 - negative sin-cos-tan powers to csc-sec-cot There are 4 combination transforms (CTR1 - CTR4) in which a sequence of transformations are applied and the simplest expression is selected from a few options. Finally, there are the 2 rule lists (RL1 and RL2), which apply a sequence of transformations and combined transformations, and the ``fu`` algorithm itself, which applies rules and rule lists and selects the best expressions. There is also a function ``L`` which counts the number of trigonometric funcions that appear in the expression. Other than TR0, re-writing of expressions is not done by the transformations. e.g. TR10i finds pairs of terms in a sum that are in the form like ``cos(x)*cos(y) + sin(x)*sin(y)``. Such expression are targeted in a bottom-up traversal of the expression, but no manipulation to make them appear is attempted. For example, Set-up for examples below: >>> from sympy.simplify.fu import fu, L, TR9, TR10i, TR11 >>> from sympy import factor, sin, cos, powsimp >>> from sympy.abc import x, y, z, a >>> from time import time >>> eq = cos(x + y)/cos(x) >>> TR10i(eq.expand(trig=True)) -sin(x)*sin(y)/cos(x) + cos(y) If the expression is put in "normal" form (with a common denominator) then the transformation is successful: >>> TR10i(_.normal()) cos(x + y)/cos(x) TR11's behavior is similar. It rewrites double angles as smaller angles but doesn't do any simplification of the result. >>> TR11(sin(2)**a*cos(1)**(-a), 1) (2*sin(1)*cos(1))**a*cos(1)**(-a) >>> powsimp(_) (2*sin(1))**a The temptation is to try make these TR rules "smarter" but that should really be done at a higher level; the TR rules should try maintain the "do one thing well" principle. There is one exception, however. In TR10i and TR9 terms are recognized even when they are each multiplied by a common factor: >>> fu(a*cos(x)*cos(y) + a*sin(x)*sin(y)) a*cos(x - y) Factoring with ``factor_terms`` is used but it it "JIT"-like, being delayed until it is deemed necessary. 
Furthermore, if the factoring does not
help with the simplification, it is not retained, so
``a*cos(x)*cos(y) + a*sin(x)*sin(z)`` does not become the factored
(but unsimplified in the trigonometric sense) expression:

>>> fu(a*cos(x)*cos(y) + a*sin(x)*sin(z))
a*sin(x)*sin(z) + a*cos(x)*cos(y)

In some cases factoring might be a good idea, but the user is left
to make that decision. For example:

>>> expr=((15*sin(2*x) + 19*sin(x + y) + 17*sin(x + z) + 19*cos(x - z) +
... 25)*(20*sin(2*x) + 15*sin(x + y) + sin(y + z) + 14*cos(x - z) +
... 14*cos(y - z))*(9*sin(2*y) + 12*sin(y + z) + 10*cos(x - y) + 2*cos(y -
... z) + 18)).expand(trig=True).expand()

In the expanded state, there are nearly 1000 trig functions:

>>> L(expr)
932

If the expression were factored first, this would take time but the
resulting expression would be transformed very quickly:

>>> def clock(f, n=2):
...    t=time(); f(); return round(time()-t, n)
...
>>> clock(lambda: factor(expr))  # doctest: +SKIP
0.86
>>> clock(lambda: TR10i(expr), 3)  # doctest: +SKIP
0.016

If the unexpanded expression is used, the transformation takes longer but
not as long as it took to factor it and then transform it:

>>> clock(lambda: TR10i(expr), 2)  # doctest: +SKIP
0.28

So neither expansion nor factoring is used in ``TR10i``: if the
expression is already factored (or partially factored) then expansion
with ``trig=True`` would destroy what is already known and take
longer; if the expression is expanded, factoring may take longer than
simply applying the transformation itself.

Although the algorithms should be canonical, always giving the same
result, they may not yield the best result. This, in general, is the
nature of simplification where searching all possible transformation
paths is very expensive. Here is a simple example. There are 6 terms
in the following sum:

>>> expr = (sin(x)**2*cos(y)*cos(z) + sin(x)*sin(y)*cos(x)*cos(z) +
... sin(x)*sin(z)*cos(x)*cos(y) + sin(y)*sin(z)*cos(x)**2 + sin(y)*sin(z) +
... cos(y)*cos(z))
>>> args = expr.args

Serendipitously, fu gives the best result:

>>> fu(expr)
3*cos(y - z)/2 - cos(2*x + y + z)/2

But if different terms were combined, a less-optimal result might be
obtained, requiring some additional work to get better simplification,
but still less than optimal. The following shows an alternative form
of ``expr`` that resists optimal simplification once a given step
is taken since it leads to a dead end:

>>> TR9(-cos(x)**2*cos(y + z) + 3*cos(y - z)/2 +
... cos(y + z)/2 + cos(-2*x + y + z)/4 - cos(2*x + y + z)/4)
sin(2*x)*sin(y + z)/2 - cos(x)**2*cos(y + z) + 3*cos(y - z)/2 + cos(y + z)/2

Here is a smaller expression that exhibits the same behavior:

>>> a = sin(x)*sin(z)*cos(x)*cos(y) + sin(x)*sin(y)*cos(x)*cos(z)
>>> TR10i(a)
sin(x)*sin(y + z)*cos(x)
>>> newa = _
>>> TR10i(expr - a)  # this combines two more of the remaining terms
sin(x)**2*cos(y)*cos(z) + sin(y)*sin(z)*cos(x)**2 + cos(y - z)
>>> TR10i(_ + newa) == _ + newa  # but now there is no more simplification
True

Without getting lucky or trying all possible pairings of arguments, the
final result may be less than optimal and impossible to find without
better heuristics or brute force trial of all possibilities.

Notes
=====

This work was started by Dimitar Vlahovski at the Technological School
"Electronic systems" (30.11.2011).

References
==========

http://rfdz.ph-noe.ac.at/fileadmin/Mathematik_Uploads/ACDCA/
DESTIME2006/DES_contribs/Fu/simplification.pdf

http://www.sosmath.com/trig/Trig5/trig5/pdf/pdf.html gives a formula sheet.
""" from __future__ import print_function, division from collections import defaultdict from itertools import combinations from sympy.simplify.simplify import (simplify, powsimp, ratsimp, combsimp, _mexpand, bottom_up) from sympy.core.sympify import sympify from sympy.functions.elementary.trigonometric import ( cos, sin, tan, cot, sec, csc, sqrt) from sympy.functions.elementary.hyperbolic import cosh, sinh, tanh, coth from sympy.core.compatibility import ordered from sympy.core.core import C from sympy.core.mul import Mul from sympy.core.power import Pow from sympy.core.function import expand_mul, count_ops from sympy.core.add import Add from sympy.core.symbol import Dummy from sympy.core.exprtools import Factors, gcd_terms from sympy.core.rules import Transform from sympy.core.basic import S from sympy.core.numbers import Integer, pi, I from sympy.strategies.tree import greedy from sympy.strategies.core import identity, debug from sympy.polys.polytools import factor from sympy.ntheory.factor_ import perfect_power from sympy import SYMPY_DEBUG # ================== Fu-like tools =========================== def TR0(rv): """Simplification of rational polynomials, trying to simplify the expression, e.g. combine things like 3*x + 2*x, etc.... """ # although it would be nice to use cancel, it doesn't work # with noncommutatives return rv.normal().factor().expand() def TR1(rv): """Replace sec, csc with 1/cos, 1/sin Examples ======== >>> from sympy.simplify.fu import TR1, sec, csc >>> from sympy.abc import x >>> TR1(2*csc(x) + sec(x)) 1/cos(x) + 2/sin(x) """ def f(rv): if rv.func is sec: a = rv.args[0] return S.One/cos(a) elif rv.func is csc: a = rv.args[0] return S.One/sin(a) return rv return bottom_up(rv, f) def TR2(rv): """Replace tan and cot with sin/cos and cos/sin Examples ======== >>> from sympy.simplify.fu import TR2 >>> from sympy.abc import x >>> from sympy import tan, cot, sin, cos >>> TR2(tan(x)) sin(x)/cos(x) >>> TR2(cot(x)) cos(x)/sin(x) >>> TR2(tan(tan(x) - sin(x)/cos(x))) 0 """ def f(rv): if rv.func is tan: a = rv.args[0] return sin(a)/cos(a) elif rv.func is cot: a = rv.args[0] return cos(a)/sin(a) return rv return bottom_up(rv, f) def TR2i(rv, half=False): """Converts ratios involving sin and cos as follows:: sin(x)/cos(x) -> tan(x) sin(x)/(cos(x) + 1) -> tan(x/2) if half=True Examples ======== >>> from sympy.simplify.fu import TR2i >>> from sympy.abc import x, a >>> from sympy import sin, cos >>> TR2i(sin(x)/cos(x)) tan(x) Powers of the numerator and denominator are also recognized >>> TR2i(sin(x)**2/(cos(x) + 1)**2, half=True) tan(x/2)**2 The transformation does not take place unless assumptions allow (i.e. 
    the base must be positive or the exponent must be an integer for both
    numerator and denominator)

    >>> TR2i(sin(x)**a/(cos(x) + 1)**a)
    (cos(x) + 1)**(-a)*sin(x)**a

    """

    def f(rv):
        if not rv.is_Mul:
            return rv

        n, d = rv.as_numer_denom()
        if n.is_Atom or d.is_Atom:
            return rv

        def ok(k, e):
            # initial filtering of factors
            return (
                (e.is_integer or k.is_positive) and (
                k.func in (sin, cos) or (half and
                k.is_Add and
                len(k.args) >= 2 and
                any(any(ai.func is cos or ai.is_Pow and ai.base is cos
                    for ai in Mul.make_args(a)) for a in k.args))))

        n = n.as_powers_dict()
        ndone = [(k, n.pop(k)) for k in list(n.keys()) if not ok(k, n[k])]
        if not n:
            return rv

        d = d.as_powers_dict()
        ddone = [(k, d.pop(k)) for k in list(d.keys()) if not ok(k, d[k])]
        if not d:
            return rv

        # factoring if necessary

        def factorize(d, ddone):
            newk = []
            for k in d:
                if k.is_Add and len(k.args) > 2:
                    knew = factor(k) if half else factor_terms(k)
                    if knew != k:
                        newk.append((k, knew))
            if newk:
                for i, (k, knew) in enumerate(newk):
                    del d[k]
                    newk[i] = knew
                newk = Mul(*newk).as_powers_dict()
                for k in newk:
                    if ok(k, d[k]):
                        d[k] += newk[k]
                    else:
                        ddone.append((k, d[k]))
                del newk
        factorize(n, ndone)
        factorize(d, ddone)

        # joining
        t = []
        for k in n:
            if k.func is sin:
                a = cos(k.args[0], evaluate=False)
                if a in d and d[a] == n[k]:
                    t.append(tan(k.args[0])**n[k])
                    n[k] = d[a] = None
                elif half:
                    a1 = 1 + a
                    if a1 in d and d[a1] == n[k]:
                        t.append((tan(k.args[0]/2))**n[k])
                        n[k] = d[a1] = None
            elif k.func is cos:
                a = sin(k.args[0], evaluate=False)
                if a in d and d[a] == n[k]:
                    t.append(tan(k.args[0])**-n[k])
                    n[k] = d[a] = None
            elif half and k.is_Add and k.args[0] is S.One and \
                    k.args[1].func is cos:
                a = sin(k.args[1].args[0], evaluate=False)
                if a in d and d[a] == n[k] and (d[a].is_integer or \
                        a.is_positive):
                    t.append(tan(a.args[0]/2)**-n[k])
                    n[k] = d[a] = None

        if t:
            rv = Mul(*(t + [b**e for b, e in n.items() if e]))/\
                Mul(*[b**e for b, e in d.items() if e])
            rv *= Mul(*[b**e for b, e in ndone])/Mul(*[b**e for b, e in ddone])

        return rv

    return bottom_up(rv, f)


def TR3(rv):
    """Induced formula: example sin(-a) = -sin(a)

    Examples
    ========

    >>> from sympy.simplify.fu import TR3
    >>> from sympy.abc import x, y
    >>> from sympy import pi
    >>> from sympy import cos
    >>> TR3(cos(y - x*(y - x)))
    cos(x*(x - y) + y)
    >>> cos(pi/2 + x)
    -sin(x)
    >>> cos(30*pi/2 + x)
    -cos(x)

    """
    from sympy.simplify.simplify import signsimp

    # Negative argument (already automatic for funcs like sin(-x) -> -sin(x)
    # but more complicated expressions can use it, too). Also, trig angles
    # between pi/4 and pi/2 are not reduced to an angle between 0 and pi/4.
    # The following are automatically handled:
    #    Argument of type: pi/2 +/- angle
    #    Argument of type: pi +/- angle
    #    Argument of type: 2k*pi +/- angle

    def f(rv):
        if not isinstance(rv, C.TrigonometricFunction):
            return rv
        rv = rv.func(signsimp(rv.args[0]))
        if (rv.args[0] - S.Pi/4).is_positive is (S.Pi/2 - rv.args[0]).is_positive is True:
            fmap = {cos: sin, sin: cos, tan: cot, cot: tan, sec: csc, csc: sec}
            rv = fmap[rv.func](S.Pi/2 - rv.args[0])
        return rv

    return bottom_up(rv, f)


def TR4(rv):
    """Identify values of special angles.

        a= 0   pi/6        pi/4       pi/3      pi/2
    ----------------------------------------------------
    cos(a)  1   sqrt(3)/2  sqrt(2)/2  1/2        0
    sin(a)  0   1/2        sqrt(2)/2  sqrt(3)/2  1
    tan(a)  0   sqrt(3)/3  1          sqrt(3)    --

    Examples
    ========

    >>> from sympy.simplify.fu import TR4
    >>> from sympy import pi
    >>> from sympy import cos, sin, tan, cot
    >>> for s in (0, pi/6, pi/4, pi/3, pi/2):
    ...    print('%s %s %s %s' % (cos(s), sin(s), tan(s), cot(s)))
    ...
1 0 0 zoo sqrt(3)/2 1/2 sqrt(3)/3 sqrt(3) sqrt(2)/2 sqrt(2)/2 1 1 1/2 sqrt(3)/2 sqrt(3) sqrt(3)/3 0 1 zoo 0 """ # special values at 0, pi/6, pi/4, pi/3, pi/2 already handled return rv def _TR56(rv, f, g, h, max, pow): """Helper for TR5 and TR6 to replace f**2 with h(g**2) Options ======= max : controls size of exponent that can appear on f e.g. if max=4 then f**4 will be changed to h(g**2)**2. pow : controls whether the exponent must be a perfect power of 2 e.g. if pow=True (and max >= 6) then f**6 will not be changed but f**8 will be changed to h(g**2)**4 >>> from sympy.simplify.fu import _TR56 as T >>> from sympy.abc import x >>> from sympy import sin, cos >>> h = lambda x: 1 - x >>> T(sin(x)**3, sin, cos, h, 4, False) sin(x)**3 >>> T(sin(x)**6, sin, cos, h, 6, False) (-cos(x)**2 + 1)**3 >>> T(sin(x)**6, sin, cos, h, 6, True) sin(x)**6 >>> T(sin(x)**8, sin, cos, h, 10, True) (-cos(x)**2 + 1)**4 """ def _f(rv): # I'm not sure if this transformation should target all even powers # or only those expressible as powers of 2. Also, should it only # make the changes in powers that appear in sums -- making an isolated # change is not going to allow a simplification as far as I can tell. if not (rv.is_Pow and rv.base.func == f): return rv if (rv.exp < 0) is True: return rv if (rv.exp > max) is True: return rv if rv.exp == 2: return h(g(rv.base.args[0])**2) else: if rv.exp == 4: e = 2 elif not pow: if rv.exp % 2: return rv e = rv.exp//2 else: p = perfect_power(rv.exp) if not p: return rv e = rv.exp//2 return h(g(rv.base.args[0])**2)**e return bottom_up(rv, _f) def TR5(rv, max=4, pow=False): """Replacement of sin**2 with 1 - cos(x)**2. See _TR56 docstring for advanced use of ``max`` and ``pow``. Examples ======== >>> from sympy.simplify.fu import TR5 >>> from sympy.abc import x >>> from sympy import sin >>> TR5(sin(x)**2) -cos(x)**2 + 1 >>> TR5(sin(x)**-2) # unchanged sin(x)**(-2) >>> TR5(sin(x)**4) (-cos(x)**2 + 1)**2 """ return _TR56(rv, sin, cos, lambda x: 1 - x, max=max, pow=pow) def TR6(rv, max=4, pow=False): """Replacement of cos**2 with 1 - sin(x)**2. See _TR56 docstring for advanced use of ``max`` and ``pow``. Examples ======== >>> from sympy.simplify.fu import TR6 >>> from sympy.abc import x >>> from sympy import cos >>> TR6(cos(x)**2) -sin(x)**2 + 1 >>> TR6(cos(x)**-2) #unchanged cos(x)**(-2) >>> TR6(cos(x)**4) (-sin(x)**2 + 1)**2 """ return _TR56(rv, cos, sin, lambda x: 1 - x, max=max, pow=pow) def TR7(rv): """Lowering the degree of cos(x)**2 Examples ======== >>> from sympy.simplify.fu import TR7 >>> from sympy.abc import x >>> from sympy import cos >>> TR7(cos(x)**2) cos(2*x)/2 + 1/2 >>> TR7(cos(x)**2 + 1) cos(2*x)/2 + 3/2 """ def f(rv): if not (rv.is_Pow and rv.base.func == cos and rv.exp == 2): return rv return (1 + cos(2*rv.base.args[0]))/2 return bottom_up(rv, f) def TR8(rv, first=True): """Converting products of ``cos`` and/or ``sin`` to a sum or difference of ``cos`` and or ``sin`` terms. 
Examples ======== >>> from sympy.simplify.fu import TR8, TR7 >>> from sympy import cos, sin >>> TR8(cos(2)*cos(3)) cos(5)/2 + cos(1)/2 >>> TR8(cos(2)*sin(3)) sin(5)/2 + sin(1)/2 >>> TR8(sin(2)*sin(3)) -cos(5)/2 + cos(1)/2 """ def f(rv): if not ( rv.is_Mul or rv.is_Pow and rv.base.func in (cos, sin) and (rv.exp.is_integer or rv.base.is_positive)): return rv if first: n, d = [expand_mul(i) for i in rv.as_numer_denom()] newn = TR8(n, first=False) newd = TR8(d, first=False) if newn != n or newd != d: rv = gcd_terms(newn/newd) if rv.is_Mul and rv.args[0].is_Rational and \ len(rv.args) == 2 and rv.args[1].is_Add: rv = Mul(*rv.as_coeff_Mul()) return rv args = {cos: [], sin: [], None: []} for a in ordered(Mul.make_args(rv)): if a.func in (cos, sin): args[a.func].append(a.args[0]) elif (a.is_Pow and a.exp.is_Integer and a.exp > 0 and \ a.base.func in (cos, sin)): # XXX this is ok but pathological expression could be handled # more efficiently as in TRmorrie args[a.base.func].extend([a.base.args[0]]*a.exp) else: args[None].append(a) c = args[cos] s = args[sin] if not (c and s or len(c) > 1 or len(s) > 1): return rv args = args[None] n = min(len(c), len(s)) for i in range(n): a1 = s.pop() a2 = c.pop() args.append((sin(a1 + a2) + sin(a1 - a2))/2) while len(c) > 1: a1 = c.pop() a2 = c.pop() args.append((cos(a1 + a2) + cos(a1 - a2))/2) if c: args.append(cos(c.pop())) while len(s) > 1: a1 = s.pop() a2 = s.pop() args.append((-cos(a1 + a2) + cos(a1 - a2))/2) if s: args.append(sin(s.pop())) return TR8(expand_mul(Mul(*args))) return bottom_up(rv, f) def TR9(rv): """Sum of ``cos`` or ``sin`` terms as a product of ``cos`` or ``sin``. Examples ======== >>> from sympy.simplify.fu import TR9 >>> from sympy import cos, sin >>> TR9(cos(1) + cos(2)) 2*cos(1/2)*cos(3/2) >>> TR9(cos(1) + 2*sin(1) + 2*sin(2)) cos(1) + 4*sin(3/2)*cos(1/2) If no change is made by TR9, no re-arrangement of the expression will be made. For example, though factoring of common term is attempted, if the factored expression wasn't changed, the original expression will be returned: >>> TR9(cos(3) + cos(3)*cos(2)) cos(3) + cos(2)*cos(3) """ def f(rv): if not rv.is_Add: return rv def do(rv, first=True): # cos(a)+/-cos(b) can be combined into a product of cosines and # sin(a)+/-sin(b) can be combined into a product of cosine and # sine. # # If there are more than two args, the pairs which "work" will # have a gcd extractable and the remaining two terms will have # the above structure -- all pairs must be checked to find the # ones that work. args that don't have a common set of symbols # are skipped since this doesn't lead to a simpler formula and # also has the arbitrariness of combining, for example, the x # and y term instead of the y and z term in something like # cos(x) + cos(y) + cos(z). 
if not rv.is_Add: return rv args = list(ordered(rv.args)) if len(args) != 2: hit = False for i in range(len(args)): ai = args[i] if ai is None: continue for j in range(i + 1, len(args)): aj = args[j] if aj is None: continue was = ai + aj new = do(was) if new != was: args[i] = new # update in place args[j] = None hit = True break # go to next i if hit: rv = Add(*[_f for _f in args if _f]) if rv.is_Add: rv = do(rv) return rv # two-arg Add split = trig_split(*args) if not split: return rv gcd, n1, n2, a, b, iscos = split # application of rule if possible if iscos: if n1 == n2: return gcd*n1*2*cos((a + b)/2)*cos((a - b)/2) if n1 < 0: a, b = b, a return -2*gcd*sin((a + b)/2)*sin((a - b)/2) else: if n1 == n2: return gcd*n1*2*sin((a + b)/2)*cos((a - b)/2) if n1 < 0: a, b = b, a return 2*gcd*cos((a + b)/2)*sin((a - b)/2) return process_common_addends(rv, do) # DON'T sift by free symbols return bottom_up(rv, f) def TR10(rv, first=True): """Separate sums in ``cos`` and ``sin``. Examples ======== >>> from sympy.simplify.fu import TR10 >>> from sympy.abc import a, b, c >>> from sympy import cos, sin >>> TR10(cos(a + b)) -sin(a)*sin(b) + cos(a)*cos(b) >>> TR10(sin(a + b)) sin(a)*cos(b) + sin(b)*cos(a) >>> TR10(sin(a + b + c)) (-sin(a)*sin(b) + cos(a)*cos(b))*sin(c) + \ (sin(a)*cos(b) + sin(b)*cos(a))*cos(c) """ def f(rv): if not rv.func in (cos, sin): return rv f = rv.func arg = rv.args[0] if arg.is_Add: if first: args = list(ordered(arg.args)) else: args = list(arg.args) a = args.pop() b = Add._from_args(args) if b.is_Add: if f == sin: return sin(a)*TR10(cos(b), first=False) + \ cos(a)*TR10(sin(b), first=False) else: return cos(a)*TR10(cos(b), first=False) - \ sin(a)*TR10(sin(b), first=False) else: if f == sin: return sin(a)*cos(b) + cos(a)*sin(b) else: return cos(a)*cos(b) - sin(a)*sin(b) return rv return bottom_up(rv, f) def TR10i(rv): """Sum of products to function of sum. Examples ======== >>> from sympy.simplify.fu import TR10i >>> from sympy import cos, sin, pi, Add, Mul, sqrt, Symbol >>> from sympy.abc import x, y >>> TR10i(cos(1)*cos(3) + sin(1)*sin(3)) cos(2) >>> TR10i(cos(1)*sin(3) + sin(1)*cos(3) + cos(3)) cos(3) + sin(4) >>> TR10i(sqrt(2)*cos(x)*x + sqrt(6)*sin(x)*x) 2*sqrt(2)*x*sin(x + pi/6) """ global _ROOT2, _ROOT3, _invROOT3 if _ROOT2 is None: _roots() def f(rv): if not rv.is_Add: return rv def do(rv, first=True): # args which can be expressed as A*(cos(a)*cos(b)+/-sin(a)*sin(b)) # or B*(cos(a)*sin(b)+/-cos(b)*sin(a)) can be combined into # A*f(a+/-b) where f is either sin or cos. # # If there are more than two args, the pairs which "work" will have # a gcd extractable and the remaining two terms will have the above # structure -- all pairs must be checked to find the ones that # work. 
if not rv.is_Add: return rv args = list(ordered(rv.args)) if len(args) != 2: hit = False for i in range(len(args)): ai = args[i] if ai is None: continue for j in range(i + 1, len(args)): aj = args[j] if aj is None: continue was = ai + aj new = do(was) if new != was: args[i] = new # update in place args[j] = None hit = True break # go to next i if hit: rv = Add(*[_f for _f in args if _f]) if rv.is_Add: rv = do(rv) return rv # two-arg Add split = trig_split(*args, two=True) if not split: return rv gcd, n1, n2, a, b, same = split # identify and get c1 to be cos then apply rule if possible if same: # coscos, sinsin gcd = n1*gcd if n1 == n2: return gcd*cos(a - b) return gcd*cos(a + b) else: #cossin, cossin gcd = n1*gcd if n1 == n2: return gcd*sin(a + b) return gcd*sin(b - a) rv = process_common_addends( rv, do, lambda x: tuple(ordered(x.free_symbols))) # need to check for inducible pairs in ratio of sqrt(3):1 that # appeared in different lists when sorting by coefficient while rv.is_Add: byrad = defaultdict(list) for a in rv.args: hit = 0 if a.is_Mul: for ai in a.args: if ai.is_Pow and ai.exp is S.Half and \ ai.base.is_Integer: byrad[ai].append(a) hit = 1 break if not hit: byrad[S.One].append(a) # no need to check all pairs -- just check for the onees # that have the right ratio args = [] for a in byrad: for b in [_ROOT3*a, _invROOT3]: if b in byrad: for i in range(len(byrad[a])): if byrad[a][i] is None: continue for j in range(len(byrad[b])): if byrad[b][j] is None: continue was = Add(byrad[a][i] + byrad[b][j]) new = do(was) if new != was: args.append(new) byrad[a][i] = None byrad[b][j] = None break if args: rv = Add(*(args + [Add(*[_f for _f in v if _f]) for v in byrad.values()])) else: rv = do(rv) # final pass to resolve any new inducible pairs break return rv return bottom_up(rv, f) def TR11(rv, base=None): """Function of double angle to product. The ``base`` argument can be used to indicate what is the un-doubled argument, e.g. if 3*pi/7 is the base then cosine and sine functions with argument 6*pi/7 will be replaced. Examples ======== >>> from sympy.simplify.fu import TR11 >>> from sympy import cos, sin, pi >>> from sympy.abc import x >>> TR11(sin(2*x)) 2*sin(x)*cos(x) >>> TR11(cos(2*x)) -sin(x)**2 + cos(x)**2 >>> TR11(sin(4*x)) 4*(-sin(x)**2 + cos(x)**2)*sin(x)*cos(x) >>> TR11(sin(4*x/3)) 4*(-sin(x/3)**2 + cos(x/3)**2)*sin(x/3)*cos(x/3) If the arguments are simply integers, no change is made unless a base is provided: >>> TR11(cos(2)) cos(2) >>> TR11(cos(4), 2) -sin(2)**2 + cos(2)**2 There is a subtle issue here in that autosimplification will convert some higher angles to lower angles >>> cos(6*pi/7) + cos(3*pi/7) -cos(pi/7) + cos(3*pi/7) The 6*pi/7 angle is now pi/7 but can be targeted with TR11 by supplying the 3*pi/7 base: >>> TR11(_, 3*pi/7) -sin(3*pi/7)**2 + cos(3*pi/7)**2 + cos(3*pi/7) """ def f(rv): if not rv.func in (cos, sin): return rv if base: f = rv.func t = f(base*2) co = S.One if t.is_Mul: co, t = t.as_coeff_Mul() if not t.func in (cos, sin): return rv if rv.args[0] == t.args[0]: c = cos(base) s = sin(base) if f is cos: return (c**2 - s**2)/co else: return 2*c*s/co return rv elif not rv.args[0].is_Number: # make a change if the leading coefficient's numerator is # divisible by 2 c, m = rv.args[0].as_coeff_Mul(rational=True) if c.p % 2 == 0: arg = c.p//2*m/c.q c = TR11(cos(arg)) s = TR11(sin(arg)) if rv.func == sin: rv = 2*s*c else: rv = c**2 - s**2 return rv return bottom_up(rv, f) def TR12(rv, first=True): """Separate sums in ``tan``. 
Examples ======== >>> from sympy.simplify.fu import TR12 >>> from sympy.abc import x, y >>> from sympy import tan >>> from sympy.simplify.fu import TR12 >>> TR12(tan(x + y)) (tan(x) + tan(y))/(-tan(x)*tan(y) + 1) """ def f(rv): if not rv.func == tan: return rv arg = rv.args[0] if arg.is_Add: if first: args = list(ordered(arg.args)) else: args = list(arg.args) a = args.pop() b = Add._from_args(args) if b.is_Add: tb = TR12(tan(b), first=False) else: tb = tan(b) return (tan(a) + tb)/(1 - tan(a)*tb) return rv return bottom_up(rv, f) def TR12i(rv): """Combine tan arguments as (tan(y) + tan(x))/(tan(x)*tan(y) - 1) -> -tan(x + y) Examples ======== >>> from sympy.simplify.fu import TR12i >>> from sympy import tan >>> from sympy.abc import a, b, c >>> ta, tb, tc = [tan(i) for i in (a, b, c)] >>> TR12i((ta + tb)/(-ta*tb + 1)) tan(a + b) >>> TR12i((ta + tb)/(ta*tb - 1)) -tan(a + b) >>> TR12i((-ta - tb)/(ta*tb - 1)) tan(a + b) >>> eq = (ta + tb)/(-ta*tb + 1)**2*(-3*ta - 3*tc)/(2*(ta*tc - 1)) >>> TR12i(eq.expand()) -3*tan(a + b)*tan(a + c)/(2*(tan(a) + tan(b) - 1)) """ from sympy import factor, fraction, factor_terms def f(rv): if not (rv.is_Add or rv.is_Mul or rv.is_Pow): return rv n, d = rv.as_numer_denom() if not d.args or not n.args: return rv dok = {} def ok(di): m = as_f_sign_1(di) if m: g, f, s = m if s is S.NegativeOne and f.is_Mul and len(f.args) == 2 and \ all(fi.func is tan for fi in f.args): return g, f d_args = list(Mul.make_args(d)) for i, di in enumerate(d_args): m = ok(di) if m: g, t = m s = Add(*[_.args[0] for _ in t.args]) dok[s] = S.One d_args[i] = g continue if di.is_Add: di = factor(di) if di.is_Mul: d_args.extend(di.args) d_args[i] = S.One elif di.is_Pow and (di.exp.is_integer or di.base.is_positive): m = ok(di.base) if m: g, t = m s = Add(*[_.args[0] for _ in t.args]) dok[s] = di.exp d_args[i] = g**di.exp else: di = factor(di) if di.is_Mul: d_args.extend(di.args) d_args[i] = S.One if not dok: return rv def ok(ni): if ni.is_Add and len(ni.args) == 2: a, b = ni.args if a.func is tan and b.func is tan: return a, b n_args = list(Mul.make_args(factor_terms(n))) hit = False for i, ni in enumerate(n_args): m = ok(ni) if not m: m = ok(-ni) if m: n_args[i] = S.NegativeOne else: if ni.is_Add: ni = factor(ni) if ni.is_Mul: n_args.extend(ni.args) n_args[i] = S.One continue elif ni.is_Pow and ( ni.exp.is_integer or ni.base.is_positive): m = ok(ni.base) if m: n_args[i] = S.One else: ni = factor(ni) if ni.is_Mul: n_args.extend(ni.args) n_args[i] = S.One continue else: continue else: n_args[i] = S.One hit = True s = Add(*[_.args[0] for _ in m]) ed = dok[s] newed = ed.extract_additively(S.One) if newed is not None: if newed: dok[s] = newed else: dok.pop(s) n_args[i] *= -tan(s) if hit: rv = Mul(*n_args)/Mul(*d_args)/Mul(*[(Add(*[ tan(a) for a in i.args]) - 1)**e for i, e in dok.items()]) return rv return bottom_up(rv, f) def TR13(rv): """Change products of ``tan`` or ``cot``. Examples ======== >>> from sympy.simplify.fu import TR13 >>> from sympy import tan, cot, cos >>> TR13(tan(3)*tan(2)) -tan(2)/tan(5) - tan(3)/tan(5) + 1 >>> TR13(cot(3)*cot(2)) cot(2)*cot(5) + 1 + cot(3)*cot(5) """ def f(rv): if not rv.is_Mul: return rv # XXX handle products of powers? or let power-reducing handle it? 
args = {tan: [], cot: [], None: []} for a in ordered(Mul.make_args(rv)): if a.func in (tan, cot): args[a.func].append(a.args[0]) else: args[None].append(a) t = args[tan] c = args[cot] if len(t) < 2 and len(c) < 2: return rv args = args[None] while len(t) > 1: t1 = t.pop() t2 = t.pop() args.append(1 - (tan(t1)/tan(t1 + t2) + tan(t2)/tan(t1 + t2))) if t: args.append(tan(t.pop())) while len(c) > 1: t1 = c.pop() t2 = c.pop() args.append(1 + cot(t1)*cot(t1 + t2) + cot(t2)*cot(t1 + t2)) if c: args.append(cot(c.pop())) return Mul(*args) return bottom_up(rv, f) def TRmorrie(rv): """Returns cos(x)*cos(2*x)*...*cos(2**(k-1)*x) -> sin(2**k*x)/(2**k*sin(x)) Examples ======== >>> from sympy.simplify.fu import TRmorrie, TR8, TR3 >>> from sympy.abc import x >>> from sympy import Mul, cos, pi >>> TRmorrie(cos(x)*cos(2*x)) sin(4*x)/(4*sin(x)) >>> TRmorrie(7*Mul(*[cos(x) for x in range(10)])) 7*sin(12)*sin(16)*cos(5)*cos(7)*cos(9)/(64*sin(1)*sin(3)) Sometimes autosimplification will cause a power not to be recognized, e.g. in the following, cos(4*pi/7) automatically simplifies to -cos(3*pi/7) so only 2 of the 3 terms are recognized: >>> TRmorrie(cos(pi/7)*cos(2*pi/7)*cos(4*pi/7)) -sin(3*pi/7)*cos(3*pi/7)/(4*sin(pi/7)) A touch by TR8 resolves the expression to a Rational >>> TR8(_) -1/8 In this case, if eq is unsimplified, the answer is obtained directly: >>> eq = cos(pi/9)*cos(2*pi/9)*cos(3*pi/9)*cos(4*pi/9) >>> TRmorrie(eq) 1/16 But if angles are made canonical with TR3 then the answer is not simplified without further work: >>> TR3(eq) sin(pi/18)*cos(pi/9)*cos(2*pi/9)/2 >>> TRmorrie(_) sin(pi/18)*sin(4*pi/9)/(8*sin(pi/9)) >>> TR8(_) cos(7*pi/18)/(16*sin(pi/9)) >>> TR3(_) 1/16 The original expression would have resolved to 1/16 directly with TR8, however: >>> TR8(eq) 1/16 References ========== http://en.wikipedia.org/wiki/Morrie%27s_law """ def f(rv): if not rv.is_Mul: return rv args = defaultdict(list) coss = {} other = [] for c in rv.args: b, e = c.as_base_exp() if e.is_Integer and b.func is cos: co, a = b.args[0].as_coeff_Mul() args[a].append(co) coss[b] = e else: other.append(c) new = [] for a in args: c = args[a] c.sort() no = [] while c: k = 0 cc = ci = c[0] while cc in c: k += 1 cc *= 2 if k > 1: newarg = sin(2**k*ci*a)/2**k/sin(ci*a) # see how many times this can be taken take = None ccs = [] for i in range(k): cc /= 2 key = cos(a*cc, evaluate=False) ccs.append(cc) take = min(coss[key], take or coss[key]) # update exponent counts for i in range(k): cc = ccs.pop() key = cos(a*cc, evaluate=False) coss[key] -= take if not coss[key]: c.remove(cc) new.append(newarg**take) else: no.append(c.pop(0)) c[:] = no if new: rv = Mul(*(new + other + [ cos(k*a, evaluate=False) for a in args for k in args[a]])) return rv return bottom_up(rv, f) def TR14(rv, first=True): """Convert factored powers of sin and cos identities into simpler expressions.
Examples ======== >>> from sympy.simplify.fu import TR14 >>> from sympy.abc import x, y >>> from sympy import cos, sin >>> TR14((cos(x) - 1)*(cos(x) + 1)) -sin(x)**2 >>> TR14((sin(x) - 1)*(sin(x) + 1)) -cos(x)**2 >>> p1 = (cos(x) + 1)*(cos(x) - 1) >>> p2 = (cos(y) - 1)*2*(cos(y) + 1) >>> p3 = (3*(cos(y) - 1))*(3*(cos(y) + 1)) >>> TR14(p1*p2*p3*(x - 1)) -18*(x - 1)*sin(x)**2*sin(y)**4 """ def f(rv): if not rv.is_Mul: return rv if first: # sort them by location in numerator and denominator # so the code below can just deal with positive exponents n, d = rv.as_numer_denom() if d is not S.One: newn = TR14(n, first=False) newd = TR14(d, first=False) if newn != n or newd != d: rv = newn/newd return rv other = [] process = [] for a in rv.args: if a.is_Pow: b, e = a.as_base_exp() if not (e.is_integer or b.is_positive): other.append(a) continue a = b else: e = S.One m = as_f_sign_1(a) if not m or m[1].func not in (cos, sin): if e is S.One: other.append(a) else: other.append(a**e) continue g, f, si = m process.append((g, e.is_Number, e, f, si, a)) # sort them to get like terms next to each other process = list(ordered(process)) # keep track of whether there was any change nother = len(other) # access keys keys = (g, t, e, f, si, a) = list(range(6)) while process: A = process.pop(0) if process: B = process[0] if A[e].is_Number and B[e].is_Number: # both exponents are numbers if A[f] == B[f]: if A[si] != B[si]: B = process.pop(0) take = min(A[e], B[e]) # reinsert any remainder # the B will likely sort after A so check it first if B[e] != take: rem = [B[i] for i in keys] rem[e] -= take process.insert(0, rem) elif A[e] != take: rem = [A[i] for i in keys] rem[e] -= take process.insert(0, rem) if A[f].func is cos: t = sin else: t = cos other.append((-A[g]*B[g]*t(A[f].args[0])**2)**take) continue elif A[e] == B[e]: # both exponents are equal symbols if A[f] == B[f]: if A[si] != B[si]: B = process.pop(0) take = A[e] if A[f].func is cos: t = sin else: t = cos other.append((-A[g]*B[g]*t(A[f].args[0])**2)**take) continue # either we are done or neither condition above applied other.append(A[a]**A[e]) if len(other) != nother: rv = Mul(*other) return rv return bottom_up(rv, f) def TR15(rv, max=4, pow=False): """Convert sin(x)**-2 to 1 + cot(x)**2. See _TR56 docstring for advanced use of ``max`` and ``pow``. Examples ======== >>> from sympy.simplify.fu import TR15 >>> from sympy.abc import x >>> from sympy import cos, sin >>> TR15(1 - 1/sin(x)**2) -cot(x)**2 """ def f(rv): if not (isinstance(rv, Pow) and rv.base.func is sin): return rv ia = 1/rv a = _TR56(ia, sin, cot, lambda x: 1 + x, max=max, pow=pow) if a != ia: rv = a return rv return bottom_up(rv, f) def TR16(rv, max=4, pow=False): """Convert cos(x)**-2 to 1 + tan(x)**2. See _TR56 docstring for advanced use of ``max`` and ``pow``. Examples ======== >>> from sympy.simplify.fu import TR16 >>> from sympy.abc import x >>> from sympy import cos, sin >>> TR16(1 - 1/cos(x)**2) -tan(x)**2 """ def f(rv): if not (isinstance(rv, Pow) and rv.base.func is cos): return rv ia = 1/rv a = _TR56(ia, cos, tan, lambda x: 1 + x, max=max, pow=pow) if a != ia: rv = a return rv return bottom_up(rv, f) def TR111(rv): """Convert f(x)**-i to g(x)**i where either ``i`` is an integer or the base is positive and f, g are: tan, cot; sin, csc; or cos, sec.
Examples ======== >>> from sympy.simplify.fu import TR111 >>> from sympy.abc import x >>> from sympy import tan >>> TR111(1 - 1/tan(x)**2) -cot(x)**2 + 1 """ def f(rv): if not ( isinstance(rv, Pow) and (rv.base.is_positive or rv.exp.is_integer and rv.exp.is_negative)): return rv if rv.base.func is tan: return cot(rv.base.args[0])**-rv.exp elif rv.base.func is sin: return csc(rv.base.args[0])**-rv.exp elif rv.base.func is cos: return sec(rv.base.args[0])**-rv.exp return rv return bottom_up(rv, f) def TR22(rv, max=4, pow=False): """Convert tan(x)**2 to sec(x)**2 - 1 and cot(x)**2 to csc(x)**2 - 1. See _TR56 docstring for advanced use of ``max`` and ``pow``. Examples ======== >>> from sympy.simplify.fu import TR22 >>> from sympy.abc import x >>> from sympy import tan, cot >>> TR22(1 + tan(x)**2) sec(x)**2 >>> TR22(1 + cot(x)**2) csc(x)**2 """ def f(rv): if not (isinstance(rv, Pow) and rv.base.func in (cot, tan)): return rv rv = _TR56(rv, tan, sec, lambda x: x - 1, max=max, pow=pow) rv = _TR56(rv, cot, csc, lambda x: x - 1, max=max, pow=pow) return rv return bottom_up(rv, f) def L(rv): """Return count of trigonometric functions in expression. Examples ======== >>> from sympy.simplify.fu import L >>> from sympy.abc import x >>> from sympy import cos, sin >>> L(cos(x)+sin(x)) 2 """ return S(rv.count(C.TrigonometricFunction)) # ============== end of basic Fu-like tools ===================== if SYMPY_DEBUG: (TR0, TR1, TR2, TR3, TR4, TR5, TR6, TR7, TR8, TR9, TR10, TR11, TR12, TR13, TR2i, TRmorrie, TR14, TR15, TR16, TR12i, TR111, TR22 )= list(map(debug, (TR0, TR1, TR2, TR3, TR4, TR5, TR6, TR7, TR8, TR9, TR10, TR11, TR12, TR13, TR2i, TRmorrie, TR14, TR15, TR16, TR12i, TR111, TR22))) # tuples are chains -- (f, g) -> lambda x: g(f(x)) # lists are choices -- [f, g] -> lambda x: min(f(x), g(x), key=objective) CTR1 = [(TR5, TR0), (TR6, TR0), identity] CTR2 = (TR11, [(TR5, TR0), (TR6, TR0), TR0]) CTR3 = [(TRmorrie, TR8, TR0), (TRmorrie, TR8, TR10i, TR0), identity] CTR4 = [(TR4, TR10i), identity] RL1 = (TR4, TR3, TR4, TR12, TR4, TR13, TR4, TR0) # XXX it's a little unclear how this one is to be implemented # see Fu paper of reference, page 7. What is the Union symbol referring to? # The diagram shows all these as one chain of transformations, but the # text refers to them being applied independently. Also, a break # if L starts to increase has not been implemented. RL2 = [ (TR4, TR3, TR10, TR4, TR3, TR11), (TR5, TR7, TR11, TR4), (CTR3, CTR1, TR9, CTR2, TR4, TR9, TR9, CTR4), identity, ] def fu(rv, measure=lambda x: (L(x), x.count_ops())): """Attempt to simplify expression by using transformation rules given in the algorithm by Fu et al. :func:`fu` will try to minimize the objective function ``measure``. By default this first minimizes the number of trig terms and then minimizes the number of total operations.
Examples ======== >>> from sympy.simplify.fu import fu >>> from sympy import cos, sin, tan, pi, S, sqrt >>> from sympy.abc import x, y, a, b >>> fu(sin(50)**2 + cos(50)**2 + sin(pi/6)) 3/2 >>> fu(sqrt(6)*cos(x) + sqrt(2)*sin(x)) 2*sqrt(2)*sin(x + pi/3) CTR1 example >>> eq = sin(x)**4 - cos(y)**2 + sin(y)**2 + 2*cos(x)**2 >>> fu(eq) cos(x)**4 - 2*cos(y)**2 + 2 CTR2 example >>> fu(S.Half - cos(2*x)/2) sin(x)**2 CTR3 example >>> fu(sin(a)*(cos(b) - sin(b)) + cos(a)*(sin(b) + cos(b))) sqrt(2)*sin(a + b + pi/4) CTR4 example >>> fu(sqrt(3)*cos(x)/2 + sin(x)/2) sin(x + pi/3) Example 1 >>> fu(1-sin(2*x)**2/4-sin(y)**2-cos(x)**4) -cos(x)**2 + cos(y)**2 Example 2 >>> fu(cos(4*pi/9)) sin(pi/18) >>> fu(cos(pi/9)*cos(2*pi/9)*cos(3*pi/9)*cos(4*pi/9)) 1/16 Example 3 >>> fu(tan(7*pi/18)+tan(5*pi/18)-sqrt(3)*tan(5*pi/18)*tan(7*pi/18)) -sqrt(3) Objective function example >>> fu(sin(x)/cos(x)) # default objective function tan(x) >>> fu(sin(x)/cos(x), measure=lambda x: -x.count_ops()) # maximize op count sin(x)/cos(x) References ========== http://rfdz.ph-noe.ac.at/fileadmin/Mathematik_Uploads/ACDCA/ DESTIME2006/DES_contribs/Fu/simplification.pdf """ fRL1 = greedy(RL1, measure) fRL2 = greedy(RL2, measure) was = rv rv = sympify(rv) if not isinstance(rv, C.Expr): return rv.func(*[fu(a, measure=measure) for a in rv.args]) rv = TR1(rv) if rv.has(tan, cot): rv1 = fRL1(rv) if (measure(rv1) < measure(rv)): rv = rv1 if rv.has(tan, cot): rv = TR2(rv) if rv.has(sin, cos): rv1 = fRL2(rv) rv2 = TR8(TRmorrie(rv1)) rv = min([was, rv, rv1, rv2], key=measure) return min(TR2i(rv), rv, key=measure) def process_common_addends(rv, do, key2=None, key1=True): """Apply ``do`` to addends of ``rv`` that (if key1=True) share at least a common absolute value of their coefficient and the value of ``key2`` when applied to the argument. If ``key1`` is False ``key2`` must be supplied and will be the only key applied. 
""" # collect by absolute value of coefficient and key2 absc = defaultdict(list) if key1: for a in rv.args: c, a = a.as_coeff_Mul() if c < 0: c = -c a = -a # put the sign on `a` absc[(c, key2(a) if key2 else 1)].append(a) elif key2: for a in rv.args: absc[(S.One, key2(a))].append(a) else: raise ValueError('must have at least one key') args = [] hit = False for k in absc: v = absc[k] c, _ = k if len(v) > 1: e = Add(*v, evaluate=False) new = do(e) if new != e: e = new hit = True args.append(c*e) else: args.append(c*v[0]) if hit: rv = Add(*args) return rv fufuncs = ''' TR0 TR1 TR2 TR3 TR4 TR5 TR6 TR7 TR8 TR9 TR10 TR10i TR11 TR12 TR13 L TR2i TRmorrie TR12i TR14 TR15 TR16 TR111 TR22'''.split() FU = dict(list(zip(fufuncs, list(map(locals().get, fufuncs))))) def _roots(): global _ROOT2, _ROOT3, _invROOT3 _ROOT2, _ROOT3 = sqrt(2), sqrt(3) _invROOT3 = 1/_ROOT3 _ROOT2 = None def trig_split(a, b, two=False): """Return the gcd, s1, s2, a1, a2, bool where If two is False (default) then:: a + b = gcd*(s1*f(a1) + s2*f(a2)) where f = cos if bool else sin else: if bool, a + b was +/- cos(a1)*cos(a2) +/- sin(a1)*sin(a2) and equals n1*gcd*cos(a - b) if n1 == n2 else n1*gcd*cos(a + b) else a + b was +/- cos(a1)*sin(a2) +/- sin(a1)*cos(a2) and equals n1*gcd*sin(a + b) if n1 = n2 else n1*gcd*sin(b - a) Examples ======== >>> from sympy.simplify.fu import trig_split >>> from sympy.abc import x, y, z >>> from sympy import cos, sin, sqrt >>> trig_split(cos(x), cos(y)) (1, 1, 1, x, y, True) >>> trig_split(2*cos(x), -2*cos(y)) (2, 1, -1, x, y, True) >>> trig_split(cos(x)*sin(y), cos(y)*sin(y)) (sin(y), 1, 1, x, y, True) >>> trig_split(cos(x), -sqrt(3)*sin(x), two=True) (2, 1, -1, x, pi/6, False) >>> trig_split(cos(x), sin(x), two=True) (sqrt(2), 1, 1, x, pi/4, False) >>> trig_split(cos(x), -sin(x), two=True) (sqrt(2), 1, -1, x, pi/4, False) >>> trig_split(sqrt(2)*cos(x), -sqrt(6)*sin(x), two=True) (2*sqrt(2), 1, -1, x, pi/6, False) >>> trig_split(-sqrt(6)*cos(x), -sqrt(2)*sin(x), two=True) (-2*sqrt(2), 1, 1, x, pi/3, False) >>> trig_split(cos(x)/sqrt(6), sin(x)/sqrt(2), two=True) (sqrt(6)/3, 1, 1, x, pi/6, False) >>> trig_split(-sqrt(6)*cos(x)*sin(y), -sqrt(2)*sin(x)*sin(y), two=True) (-2*sqrt(2)*sin(y), 1, 1, x, pi/3, False) >>> trig_split(cos(x), sin(x)) >>> trig_split(cos(x), sin(z)) >>> trig_split(2*cos(x), -sin(x)) >>> trig_split(cos(x), -sqrt(3)*sin(x)) >>> trig_split(cos(x)*cos(y), sin(x)*sin(z)) >>> trig_split(cos(x)*cos(y), sin(x)*sin(y)) >>> trig_split(-sqrt(6)*cos(x), sqrt(2)*sin(x)*sin(y), two=True) """ global _ROOT2, _ROOT3, _invROOT3 if _ROOT2 is None: _roots() a, b = [Factors(i) for i in (a, b)] ua, ub = a.normal(b) gcd = a.gcd(b).as_expr() n1 = n2 = 1 if S.NegativeOne in ua.factors: ua = ua.quo(S.NegativeOne) n1 = -n1 elif S.NegativeOne in ub.factors: ub = ub.quo(S.NegativeOne) n2 = -n2 a, b = [i.as_expr() for i in (ua, ub)] def pow_cos_sin(a, two): """Return ``a`` as a tuple (r, c, s) such that ``a = (r or 1)*(c or 1)*(s or 1)``. Three arguments are returned (radical, c-factor, s-factor) as long as the conditions set by ``two`` are met; otherwise None is returned. If ``two`` is True there will be one or two non-None values in the tuple: c and s or c and r or s and r or s or c with c being a cosine function (if possible) else a sine, and s being a sine function (if possible) else oosine. If ``two`` is False then there will only be a c or s term in the tuple. 
``two`` also requires that either two cos and/or sin be present (with the condition that if the functions are the same the arguments are different or vice versa) or that a single cosine or a single sine be present with an optional radical. If the above conditions dictated by ``two`` are not met then None is returned. """ c = s = None co = S.One if a.is_Mul: co, a = a.as_coeff_Mul() if len(a.args) > 2 or not two: return None if a.is_Mul: args = list(a.args) else: args = [a] a = args.pop(0) if a.func is cos: c = a elif a.func is sin: s = a elif a.is_Pow and a.exp is S.Half: # autoeval doesn't allow -1/2 co *= a else: return None if args: b = args[0] if b.func is cos: if c: s = b else: c = b elif b.func is sin: if s: c = b else: s = b elif b.is_Pow and b.exp is S.Half: co *= b else: return None return co if co is not S.One else None, c, s elif a.func is cos: c = a elif a.func is sin: s = a if c is None and s is None: return co = co if co is not S.One else None return co, c, s # get the parts m = pow_cos_sin(a, two) if m is None: return coa, ca, sa = m m = pow_cos_sin(b, two) if m is None: return cob, cb, sb = m # check them if (not ca) and cb or ca and ca.func is sin: coa, ca, sa, cob, cb, sb = cob, cb, sb, coa, ca, sa n1, n2 = n2, n1 if not two: # need cos(x) and cos(y) or sin(x) and sin(y) c = ca or sa s = cb or sb if c.func is not s.func: return None return gcd, n1, n2, c.args[0], s.args[0], c.func is cos else: if not coa and not cob: if (ca and cb and sa and sb): if not ((ca.func is sa.func) is (cb.func is sb.func)): return args = set([j.args for j in (ca, sa)]) if not all(i.args in args for i in (cb, sb)): return return gcd, n1, n2, ca.args[0], sa.args[0], ca.func is sa.func if ca and sa or cb and sb or \ two and (ca is None and sa is None or cb is None and sb is None): return c = ca or sa s = cb or sb if c.args != s.args: return if not coa: coa = S.One if not cob: cob = S.One if coa is cob: gcd *= _ROOT2 return gcd, n1, n2, c.args[0], pi/4, False elif coa/cob == _ROOT3: gcd *= 2*cob return gcd, n1, n2, c.args[0], pi/3, False elif coa/cob == _invROOT3: gcd *= 2*coa return gcd, n1, n2, c.args[0], pi/6, False def as_f_sign_1(e): """If ``e`` is a sum that can be written as ``g*(a + s)`` where ``s`` is ``+/-1``, return ``g``, ``a``, and ``s`` where ``a`` does not have a leading negative coefficient. Examples ======== >>> from sympy.simplify.fu import as_f_sign_1 >>> from sympy.abc import x >>> as_f_sign_1(x + 1) (1, x, 1) >>> as_f_sign_1(x - 1) (1, x, -1) >>> as_f_sign_1(-x + 1) (-1, x, -1) >>> as_f_sign_1(-x - 1) (-1, x, 1) >>> as_f_sign_1(2*x + 2) (2, x, 1) """ if not e.is_Add or len(e.args) != 2: return # exact match a, b = e.args if a in (S.NegativeOne, S.One): g = S.One if b.is_Mul and b.args[0].is_Number and b.args[0] < 0: a, b = -a, -b g = -g return g, b, a # gcd match a, b = [Factors(i) for i in e.args] ua, ub = a.normal(b) gcd = a.gcd(b).as_expr() if S.NegativeOne in ua.factors: ua = ua.quo(S.NegativeOne) n1 = -1 n2 = 1 elif S.NegativeOne in ub.factors: ub = ub.quo(S.NegativeOne) n1 = 1 n2 = -1 else: n1 = n2 = 1 a, b = [i.as_expr() for i in (ua, ub)] if a is S.One: a, b = b, a n1, n2 = n2, n1 if n1 == -1: gcd = -gcd n2 = -n2 if b is S.One: return gcd, a, n2 def _osborne(e): """Replace all hyperbolic functions with trig functions using the Osborne rule.
References ========== http://en.wikipedia.org/wiki/Hyperbolic_function """ def f(rv): if not isinstance(rv, C.HyperbolicFunction): return rv if rv.func is sinh: return I*sin(rv.args[0]) elif rv.func is cosh: return cos(rv.args[0]) elif rv.func is tanh: return I*tan(rv.args[0]) elif rv.func is coth: return cot(rv.args[0])/I else: raise NotImplementedError('unhandled %s' % rv.func) return bottom_up(e, f) def _osbornei(e): """Replace all trig functions with hyperbolic functions using the Osborne rule. References ========== http://en.wikipedia.org/wiki/Hyperbolic_function """ def f(rv): if not isinstance(rv, C.TrigonometricFunction): return rv if rv.func is sin: return sinh(rv.args[0])/I elif rv.func is cos: return cosh(rv.args[0]) elif rv.func is tan: return tanh(rv.args[0])/I elif rv.func is cot: return coth(rv.args[0])*I elif rv.func is sec: return 1/cosh(rv.args[0]) elif rv.func is csc: return I/sinh(rv.args[0]) else: raise NotImplementedError('unhandled %s' % rv.func) return bottom_up(e, f) def hyper_as_trig(rv): """Return an expression containing hyperbolic functions in terms of trigonometric functions. Any trigonometric functions initially present are replaced with Dummy symbols and the function to undo the masking and the conversion back to hyperbolics is also returned. It should always be true that:: t, f = hyper_as_trig(expr) expr == f(t) Examples ======== >>> from sympy.simplify.fu import hyper_as_trig, fu >>> from sympy.abc import x >>> from sympy import cosh, sinh >>> eq = sinh(x)**2 + cosh(x)**2 >>> t, f = hyper_as_trig(eq) >>> f(fu(t)) cosh(2*x) References ========== http://en.wikipedia.org/wiki/Hyperbolic_function """ from sympy.simplify.simplify import signsimp # mask of trig functions trigs = rv.atoms(C.TrigonometricFunction) reps = [(t, Dummy()) for t in trigs] masked = rv.xreplace(dict(reps)) # get inversion substitutions in place reps = [(v, k) for k, v in reps] return _osborne(masked), lambda x: signsimp( _osbornei(x).xreplace(dict(reps)))
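# Illustrative usage sketch (not part of the original module; the helper name
# below is hypothetical). process_common_addends buckets addends by the
# absolute value of their coefficient (and by key2) before handing each bucket
# to ``do``; that is how TR10i locates combinable pairs such as these:
def _tr10i_example():
    TR10i(cos(1)*cos(3) + sin(1)*sin(3))      # -> cos(2), per the doctests above
    TR10i(2*cos(1)*cos(2) - 2*sin(1)*sin(2))  # -> 2*cos(3), the same rule with a gcd of 2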
alephu5/Soundbyte
environment/lib/python3.3/site-packages/sympy/simplify/fu.py
Python
gpl-3.0
63,469
0.000394
""" Graph isomorphism functions. """ import networkx as nx from networkx.exception import NetworkXError __author__ = """\n""".join(['Aric Hagberg (hagberg@lanl.gov)', 'Pieter Swart (swart@lanl.gov)', 'Christopher Ellison cellison@cse.ucdavis.edu)']) # Copyright (C) 2004-2019 by # Aric Hagberg <hagberg@lanl.gov> # Dan Schult <dschult@colgate.edu> # Pieter Swart <swart@lanl.gov> # All rights reserved. # BSD license. __all__ = ['could_be_isomorphic', 'fast_could_be_isomorphic', 'faster_could_be_isomorphic', 'is_isomorphic'] def could_be_isomorphic(G1, G2): """Returns False if graphs are definitely not isomorphic. True does NOT guarantee isomorphism. Parameters ---------- G1, G2 : graphs The two graphs G1 and G2 must be the same type. Notes ----- Checks for matching degree, triangle, and number of cliques sequences. """ # Check global properties if G1.order() != G2.order(): return False # Check local properties d1 = G1.degree() t1 = nx.triangles(G1) c1 = nx.number_of_cliques(G1) props1 = [[d, t1[v], c1[v]] for v, d in d1] props1.sort() d2 = G2.degree() t2 = nx.triangles(G2) c2 = nx.number_of_cliques(G2) props2 = [[d, t2[v], c2[v]] for v, d in d2] props2.sort() if props1 != props2: return False # OK... return True graph_could_be_isomorphic = could_be_isomorphic def fast_could_be_isomorphic(G1, G2): """Returns False if graphs are definitely not isomorphic. True does NOT guarantee isomorphism. Parameters ---------- G1, G2 : graphs The two graphs G1 and G2 must be the same type. Notes ----- Checks for matching degree and triangle sequences. """ # Check global properties if G1.order() != G2.order(): return False # Check local properties d1 = G1.degree() t1 = nx.triangles(G1) props1 = [[d, t1[v]] for v, d in d1] props1.sort() d2 = G2.degree() t2 = nx.triangles(G2) props2 = [[d, t2[v]] for v, d in d2] props2.sort() if props1 != props2: return False # OK... return True fast_graph_could_be_isomorphic = fast_could_be_isomorphic def faster_could_be_isomorphic(G1, G2): """Returns False if graphs are definitely not isomorphic. True does NOT guarantee isomorphism. Parameters ---------- G1, G2 : graphs The two graphs G1 and G2 must be the same type. Notes ----- Checks for matching degree sequences. """ # Check global properties if G1.order() != G2.order(): return False # Check local properties d1 = sorted(d for n, d in G1.degree()) d2 = sorted(d for n, d in G2.degree()) if d1 != d2: return False # OK... return True faster_graph_could_be_isomorphic = faster_could_be_isomorphic def is_isomorphic(G1, G2, node_match=None, edge_match=None): """Returns True if the graphs G1 and G2 are isomorphic and False otherwise. Parameters ---------- G1, G2: graphs The two graphs G1 and G2 must be the same type. node_match : callable A function that returns True if node n1 in G1 and n2 in G2 should be considered equal during the isomorphism test. If node_match is not specified then node attributes are not considered. The function will be called like node_match(G1.nodes[n1], G2.nodes[n2]). That is, the function will receive the node attribute dictionaries for n1 and n2 as inputs. edge_match : callable A function that returns True if the edge attribute dictionary for the pair of nodes (u1, v1) in G1 and (u2, v2) in G2 should be considered equal during the isomorphism test. If edge_match is not specified then edge attributes are not considered. The function will be called like edge_match(G1[u1][v1], G2[u2][v2]). That is, the function will receive the edge attribute dictionaries of the edges under consideration. 
Notes ----- Uses the vf2 algorithm [1]_. Examples -------- >>> import networkx.algorithms.isomorphism as iso For digraphs G1 and G2, using 'weight' edge attribute (default: 1) >>> G1 = nx.DiGraph() >>> G2 = nx.DiGraph() >>> nx.add_path(G1, [1,2,3,4], weight=1) >>> nx.add_path(G2, [10,20,30,40], weight=2) >>> em = iso.numerical_edge_match('weight', 1) >>> nx.is_isomorphic(G1, G2) # no weights considered True >>> nx.is_isomorphic(G1, G2, edge_match=em) # match weights False For multidigraphs G1 and G2, using 'fill' node attribute (default: '') >>> G1 = nx.MultiDiGraph() >>> G2 = nx.MultiDiGraph() >>> G1.add_nodes_from([1,2,3], fill='red') >>> G2.add_nodes_from([10,20,30,40], fill='red') >>> nx.add_path(G1, [1,2,3,4], weight=3, linewidth=2.5) >>> nx.add_path(G2, [10,20,30,40], weight=3) >>> nm = iso.categorical_node_match('fill', 'red') >>> nx.is_isomorphic(G1, G2, node_match=nm) True For multidigraphs G1 and G2, using 'weight' edge attribute (default: 7) >>> G1.add_edge(1,2, weight=7) 1 >>> G2.add_edge(10,20) 1 >>> em = iso.numerical_multiedge_match('weight', 7, rtol=1e-6) >>> nx.is_isomorphic(G1, G2, edge_match=em) True For multigraphs G1 and G2, using 'weight' and 'linewidth' edge attributes with default values 7 and 2.5. Also using 'fill' node attribute with default value 'red'. >>> em = iso.numerical_multiedge_match(['weight', 'linewidth'], [7, 2.5]) >>> nm = iso.categorical_node_match('fill', 'red') >>> nx.is_isomorphic(G1, G2, edge_match=em, node_match=nm) True See Also -------- numerical_node_match, numerical_edge_match, numerical_multiedge_match categorical_node_match, categorical_edge_match, categorical_multiedge_match References ---------- .. [1] L. P. Cordella, P. Foggia, C. Sansone, M. Vento, "An Improved Algorithm for Matching Large Graphs", 3rd IAPR-TC15 Workshop on Graph-based Representations in Pattern Recognition, Cuen, pp. 149-159, 2001. http://amalfi.dis.unina.it/graph/db/papers/vf-algorithm.pdf """ if G1.is_directed() and G2.is_directed(): GM = nx.algorithms.isomorphism.DiGraphMatcher elif (not G1.is_directed()) and (not G2.is_directed()): GM = nx.algorithms.isomorphism.GraphMatcher else: raise NetworkXError("Graphs G1 and G2 are not of the same type.") gm = GM(G1, G2, node_match=node_match, edge_match=edge_match) return gm.is_isomorphic()
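# Illustrative usage sketch (not part of the original module; the helper name
# is hypothetical). The could_be_isomorphic family can only rule graphs *out*:
# True means "not disproved", while False is definitive.
def _could_be_isomorphic_example():
    G1 = nx.path_graph(4)   # degree sequence [1, 2, 2, 1]
    G2 = nx.star_graph(3)   # degree sequence [3, 1, 1, 1]
    assert not faster_could_be_isomorphic(G1, G2)  # degree sequences differ
    assert is_isomorphic(G1, nx.path_graph(4))     # full VF2 check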
sserrot/champion_relationships
venv/Lib/site-packages/networkx/algorithms/isomorphism/isomorph.py
Python
mit
6,757
0
#!/usr/bin/env python # # Use the raw transactions API to spend dass received on particular addresses, # and send any change back to that same address. # # Example usage: # spendfrom.py # Lists available funds # spendfrom.py --from=ADDRESS --to=ADDRESS --amount=11.00 # # Assumes it will talk to a dasd or Das-Qt running # on localhost. # # Depends on jsonrpc # from decimal import * import getpass import math import os import os.path import platform import sys import time from jsonrpc import ServiceProxy, json BASE_FEE=Decimal("0.001") def check_json_precision(): """Make sure json library being used does not lose precision converting BTC values""" n = Decimal("20000000.00000003") satoshis = int(json.loads(json.dumps(float(n)))*1.0e8) if satoshis != 2000000000000003: raise RuntimeError("JSON encode/decode loses precision") def determine_db_dir(): """Return the default location of the das data directory""" if platform.system() == "Darwin": return os.path.expanduser("~/Library/Application Support/Das/") elif platform.system() == "Windows": return os.path.join(os.environ['APPDATA'], "Das") return os.path.expanduser("~/.das") def read_bitcoin_config(dbdir): """Read the das.conf file from dbdir, returns dictionary of settings""" from ConfigParser import SafeConfigParser class FakeSecHead(object): def __init__(self, fp): self.fp = fp self.sechead = '[all]\n' def readline(self): if self.sechead: try: return self.sechead finally: self.sechead = None else: s = self.fp.readline() if s.find('#') != -1: s = s[0:s.find('#')].strip() +"\n" return s config_parser = SafeConfigParser() config_parser.readfp(FakeSecHead(open(os.path.join(dbdir, "das.conf")))) return dict(config_parser.items("all")) def connect_JSON(config): """Connect to a das JSON-RPC server""" testnet = config.get('testnet', '0') testnet = (int(testnet) > 0) # 0/1 in config file, convert to True/False if not 'rpcport' in config: config['rpcport'] = 19998 if testnet else 9998 connect = "http://%s:%s@127.0.0.1:%s"%(config['rpcuser'], config['rpcpassword'], config['rpcport']) try: result = ServiceProxy(connect) # ServiceProxy is lazy-connect, so send an RPC command mostly to catch connection errors, # but also make sure the dasd we're talking to is/isn't testnet: if result.getmininginfo()['testnet'] != testnet: sys.stderr.write("RPC server at "+connect+" testnet setting mismatch\n") sys.exit(1) return result except: sys.stderr.write("Error connecting to RPC server at "+connect+"\n") sys.exit(1) def unlock_wallet(dasd): info = dasd.getinfo() if 'unlocked_until' not in info: return True # wallet is not encrypted t = int(info['unlocked_until']) if t <= time.time(): try: passphrase = getpass.getpass("Wallet is locked; enter passphrase: ") dasd.walletpassphrase(passphrase, 5) except: sys.stderr.write("Wrong passphrase\n") info = dasd.getinfo() return int(info['unlocked_until']) > time.time() def list_available(dasd): address_summary = dict() address_to_account = dict() for info in dasd.listreceivedbyaddress(0): address_to_account[info["address"]] = info["account"] unspent = dasd.listunspent(0) for output in unspent: # listunspent doesn't give addresses, so: rawtx = dasd.getrawtransaction(output['txid'], 1) vout = rawtx["vout"][output['vout']] pk = vout["scriptPubKey"] # This code only deals with ordinary pay-to-das-address # or pay-to-script-hash outputs right now; anything exotic is ignored. 
if pk["type"] != "pubkeyhash" and pk["type"] != "scripthash": continue address = pk["addresses"][0] if address in address_summary: address_summary[address]["total"] += vout["value"] address_summary[address]["outputs"].append(output) else: address_summary[address] = { "total" : vout["value"], "outputs" : [output], "account" : address_to_account.get(address, "") } return address_summary def select_coins(needed, inputs): # Feel free to improve this, this is good enough for my simple needs: outputs = [] have = Decimal("0.0") n = 0 while have < needed and n < len(inputs): outputs.append({ "txid":inputs[n]["txid"], "vout":inputs[n]["vout"]}) have += inputs[n]["amount"] n += 1 return (outputs, have-needed) def create_tx(dasd, fromaddresses, toaddress, amount, fee): all_coins = list_available(dasd) total_available = Decimal("0.0") needed = amount+fee potential_inputs = [] for addr in fromaddresses: if addr not in all_coins: continue potential_inputs.extend(all_coins[addr]["outputs"]) total_available += all_coins[addr]["total"] if total_available < needed: sys.stderr.write("Error, only %f BTC available, need %f\n"%(total_available, needed)); sys.exit(1) # # Note: # Python's json/jsonrpc modules have inconsistent support for Decimal numbers. # Instead of wrestling with getting json.dumps() (used by jsonrpc) to encode # Decimals, I'm casting amounts to float before sending them to dasd. # outputs = { toaddress : float(amount) } (inputs, change_amount) = select_coins(needed, potential_inputs) if change_amount > BASE_FEE: # don't bother with zero or tiny change change_address = fromaddresses[-1] if change_address in outputs: outputs[change_address] += float(change_amount) else: outputs[change_address] = float(change_amount) rawtx = dasd.createrawtransaction(inputs, outputs) signed_rawtx = dasd.signrawtransaction(rawtx) if not signed_rawtx["complete"]: sys.stderr.write("signrawtransaction failed\n") sys.exit(1) txdata = signed_rawtx["hex"] return txdata def compute_amount_in(dasd, txinfo): result = Decimal("0.0") for vin in txinfo['vin']: in_info = dasd.getrawtransaction(vin['txid'], 1) vout = in_info['vout'][vin['vout']] result = result + vout['value'] return result def compute_amount_out(txinfo): result = Decimal("0.0") for vout in txinfo['vout']: result = result + vout['value'] return result def sanity_test_fee(dasd, txdata_hex, max_fee): class FeeError(RuntimeError): pass try: txinfo = dasd.decoderawtransaction(txdata_hex) total_in = compute_amount_in(dasd, txinfo) total_out = compute_amount_out(txinfo) fee = total_in - total_out # the fee this transaction actually pays if fee > max_fee: raise FeeError("Rejecting transaction, unreasonable fee of "+str(fee)) tx_size = len(txdata_hex)/2 kb = tx_size/1000 # integer division rounds down if kb > 1 and fee < BASE_FEE: raise FeeError("Rejecting no-fee transaction, larger than 1000 bytes") if total_in < 0.01 and fee < BASE_FEE: raise FeeError("Rejecting no-fee, tiny-amount transaction") # Exercise for the reader: compute transaction priority, and # warn if this is a very-low-priority transaction except FeeError as err: sys.stderr.write((str(err)+"\n")) sys.exit(1) def main(): import optparse parser = optparse.OptionParser(usage="%prog [options]") parser.add_option("--from", dest="fromaddresses", default=None, help="addresses to get dass from") parser.add_option("--to", dest="to", default=None, help="address to send dass to") parser.add_option("--amount", dest="amount", default=None, help="amount to send") parser.add_option("--fee", dest="fee", default="0.0", help="fee to include")
parser.add_option("--datadir", dest="datadir", default=determine_db_dir(), help="location of das.conf file with RPC username/password (default: %default)") parser.add_option("--testnet", dest="testnet", default=False, action="store_true", help="Use the test network") parser.add_option("--dry_run", dest="dry_run", default=False, action="store_true", help="Don't broadcast the transaction, just create and print the transaction data") (options, args) = parser.parse_args() check_json_precision() config = read_bitcoin_config(options.datadir) if options.testnet: config['testnet'] = True dasd = connect_JSON(config) if options.amount is None: address_summary = list_available(dasd) for address,info in address_summary.iteritems(): n_transactions = len(info['outputs']) if n_transactions > 1: print("%s %.8f %s (%d transactions)"%(address, info['total'], info['account'], n_transactions)) else: print("%s %.8f %s"%(address, info['total'], info['account'])) else: fee = Decimal(options.fee) amount = Decimal(options.amount) while unlock_wallet(dasd) == False: pass # Keep asking for passphrase until they get it right txdata = create_tx(dasd, options.fromaddresses.split(","), options.to, amount, fee) sanity_test_fee(dasd, txdata, amount*Decimal("0.01")) if options.dry_run: print(txdata) else: txid = dasd.sendrawtransaction(txdata) print(txid) if __name__ == '__main__': main()
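# Illustrative sketch (not part of the original script; the helper name is
# hypothetical). select_coins() accumulates inputs greedily until the needed
# amount is covered and returns the selection plus the implied change:
def _select_coins_example():
    inputs = [{"txid": "aa", "vout": 0, "amount": Decimal("0.6")},
              {"txid": "bb", "vout": 1, "amount": Decimal("0.7")}]
    outputs, change = select_coins(Decimal("1.0"), inputs)
    assert change == Decimal("0.3")  # 0.6 + 0.7 covers 1.0 with 0.3 left over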
mainconceptx/DAS
contrib/spendfrom/spendfrom.py
Python
mit
9,887
0.005664
# -*- cpy-indent-level: 4; indent-tabs-mode: nil -*- # ex: set expandtab softtabstop=4 shiftwidth=4: # # Copyright (C) 2008-2015,2018 Contributor # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from sqlalchemy.orm.attributes import set_committed_value from aquilon.worker.broker import BrokerCommand from aquilon.worker.dbwrappers.resources import get_resource_holder class CommandShowResource(BrokerCommand): resource_class = None resource_name = None def render(self, session, logger, hostname, cluster, metacluster, all, personality=None, archetype=None, grn=None, eon_id=None, host_environment=None, **kwargs): # resourcegroup is special, because it's both a holder and a resource # itself if self.resource_name != "resourcegroup": resourcegroup = kwargs.pop("resourcegroup", None) else: resourcegroup = None q = session.query(self.resource_class) who = None if not all: if self.resource_name: name = kwargs.get(self.resource_name) else: name = self.resource_class.__mapper__.polymorphic_identity if name: q = q.filter_by(name=name) if hostname or cluster or resourcegroup or personality or \ archetype or grn or eon_id: who = get_resource_holder(session, logger, hostname, cluster, metacluster, resourcegroup, personality, archetype, grn, eon_id, host_environment, config=self.config, **kwargs) q = q.filter_by(holder=who) results = q.all() if who: for dbresource in results: set_committed_value(dbresource, 'holder', who) return results
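# Illustrative sketch (not part of the original file; the class and model
# names below are hypothetical). Concrete commands are presumably thin
# subclasses that pin down the resource type being shown:
#
#     class CommandShowFilesystem(CommandShowResource):
#         resource_class = Filesystem
#         resource_name = "filesystem"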
quattor/aquilon
lib/aquilon/worker/commands/show_resource.py
Python
apache-2.0
2,445
0
import subprocess import setup_util import os def start(args, logfile, errfile): setup_util.replace_text("hhvm/once.php.inc", "host=localhost;", "host=" + args.database_host + ";") setup_util.replace_text("hhvm/deploy/config.hdf", "SourceRoot = .*\/FrameworkBenchmarks/hhvm", "SourceRoot = " + args.troot) setup_util.replace_text("hhvm/deploy/config.hdf", "Path = .*\/.hhvm.hhbc", "Path = " + args.troot + "/.hhvm.hhbc") setup_util.replace_text("hhvm/deploy/config.hdf", "PidFile = .*\/hhvm.pid", "PidFile = " + args.troot + "/hhvm.pid") setup_util.replace_text("hhvm/deploy/config.hdf", "File = .*\/error.log", "File = " + args.troot + "/error.log") try: if os.name == 'nt': # Not supported ! return 0 subprocess.check_call("hhvm --config $TROOT/deploy/config.hdf -m daemon", shell=True, stderr=errfile, stdout=logfile) return 0 except subprocess.CalledProcessError: return 1 def stop(logfile, errfile): try: if os.name == 'nt': # Not Supported ! return 0 p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE) out, err = p.communicate() for line in out.splitlines(): if 'hhvm' in line and 'toolset' not in line and 'run-ci' not in line and 'run-tests' not in line: pid = int(line.split(None,2)[1]) os.kill(pid,15) return 0 except subprocess.CalledProcessError: return 1
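# Illustrative note (not part of the original file; paths are hypothetical).
# setup_util.replace_text presumably performs an in-place regex substitution,
# so after start() the rewritten deploy/config.hdf entries would read e.g.:
#
#     SourceRoot = /FrameworkBenchmarks/frameworks/PHP/hhvm
#     PidFile = /FrameworkBenchmarks/frameworks/PHP/hhvm/hhvm.pid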
denkab/FrameworkBenchmarks
frameworks/PHP/hhvm/setup.py
Python
bsd-3-clause
1,384
0.020954
__author__ = 'Sergey Tomin' from ocelot.rad import * from ocelot import * from ocelot.gui import * import numpy as np import time font = {'size' : 14} matplotlib.rc('font', **font) #from scipy.optimize import curve_fit from ocelot.demos.sr.k_analysis import * #from ocelot.lib.genera.src.python.radiation import generaSR font = {'size' : 14} matplotlib.rc('font', **font) beam = Beam() beam.E = 17.5 beam.I = 0.1 und = Undulator(Kx = 4., nperiods = 125, lperiod=0.04, eid= "und") lat = MagneticLattice((und)) screen = Screen() screen.z = 500.0 screen.size_x = 0. screen.size_y = 0. screen.nx = 1 screen.ny = 1 screen.start_energy = 7950 #eV screen.end_energy = 8200 #eV screen.num_energy = 1000 screen = calculate_radiation(lat, screen, beam) show_flux(screen, unit="mrad") # K-mono scan beam_energy = 17.5 # GeV b_energy_jit = 1e-4 # dE/E screen = Screen() screen.z = 500.0 screen.size_x = 0.01 screen.size_y = 0.01 screen.nx = 51 screen.ny = 51 ds = screen.size_x/screen.nx*screen.size_y/screen.ny n_scan_points = 30 n_shots = 5 scan_Kmono_energy = np.linspace(start=8000, stop=8150, num=n_scan_points) start = time.time() flux = [] Etotal = [] for n, eph in enumerate(scan_Kmono_energy): print(n, "/", n_scan_points) for i in range(n_shots): beam.E = np.random.normal(beam_energy, beam_energy*b_energy_jit, 1) print("beam energy: ", beam.E) screen.start_energy = eph # 8078.2 - 50 + i*100/30. #eV screen.num_energy = 1 screen = calculate_radiation(lat, screen, beam) flux.append(sum(screen.Total)*ds) Etotal.append(eph) print("time cpp = ", time.time() - start) e_fin, polynom = data_analysis(Etotal, flux=flux, method="least") print("Eph_fin = ", e_fin) x = np.linspace(Etotal[0], Etotal[-1], num=100) plt.plot(Etotal, flux, "r.", lw =2, label="exp data") plt.plot(x, polynom(x), "b", label="fit func") plt.plot(e_fin, polynom(e_fin), "go", lw = 3, label=r"$E_{ph}=$" + str(np.around(e_fin, decimals=2))) plt.xlabel(r"$E_{ph}$, eV") plt.grid(True) plt.legend() plt.show()
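# Illustrative sketch (not part of the original demo; the helper name is
# hypothetical). data_analysis() presumably least-squares-fits a polynomial to
# the measured flux-vs-energy points and returns the energy at its extremum;
# conceptually, for a quadratic fit:
def _peak_energy_sketch(energies, fluxes):
    c2, c1, c0 = np.polyfit(energies, fluxes, deg=2)
    return -c1/(2.0*c2)  # vertex of the fitted parabola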
sserkez/ocelot
demos/sr/k_diode.py
Python
gpl-3.0
2,071
0.011106
""" File: tree.py Author: Inokentiy Babushkin Email: inokentiy.babushkin@googlemail.com Github: ibabushkin Description: A tree used to store DFS-trees for the CFG-module. """ class Tree(object): """ The tree mentioned in the module-docstring. Probably a Gingko. """ def __init__(self, obj): self.nodes = [TreeNode(obj)] self.edges = [] self.current_node = 0 def append(self, obj): """ Append an object as a node to the tree. Use the current node as the place for insertion. """ self.nodes.append(TreeNode(obj)) self.edges.append((self.current_node, len(self.nodes) - 1)) self.current_node = len(self.nodes) - 1 # print self.current_node def get_children_of(self, index): """ Get a node's children. """ ret = [] for edge in self.edges: if edge[0] == index: ret.append(edge[1]) return ret def postorder(self, start=0): """ Get the postorder traversal of the tree. """ ret = [self.get_content(start).id] for node in self.get_children_of(start): ret = self.postorder(node) + ret return ret def get_content(self, index): """ Get the object saved in a node. """ return self.nodes[index].content class TreeNode(object): """ A node for the tree. Can save an object. """ def __init__(self, obj): self.content = obj
ibabushkin/Iridium
defines/util/tree.py
Python
gpl-3.0
1,538
0
from Centroid import Centroid from Vec2 import Vec2 from random import random from math import * from angle import angle from Seed import * Seed() class Swarm(list): def __init__(self, count): self.speed= 1.0/16.0 self.paused= False def __new__(cls, count): swarm= list.__new__(cls) for n in range(count): x= random()-random() y= random()-random() c= Centroid(x,y) c.inertia= Vec2(0,0) swarm.append(c) return swarm def repel(self, step): for i in range(1,len(self)): for j in range(i): if self[i] in self[j].neighbors: assert self[j] in self[i].neighbors a=angle(self[j],self[i]) dx,dy = self[i]-self[j] dist= sqrt(dx*dx+dy*dy) push= 1.0/dist a+=1.5707*push push= sin(a)*push*step,cos(a)*push*step self[i].inertia+= push self[j].inertia-= push def move(self, step): if self.paused: return self.repel(step) step*= self.speed for c in self: c+= c.inertia*step if abs(c.x)>=1: c.inertia.x*=-1 c.x+=c.inertia.x*2*step if abs(c.y)>=1: c.inertia.y*=-1 c.y+=c.inertia.y*2*step c.clear()
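# Illustrative usage sketch (not part of the original module; the helper name
# is hypothetical, and it assumes each Centroid's ``neighbors`` collection has
# been populated elsewhere in the project before repel() consults it):
def _swarm_example():
    swarm = Swarm(16)    # 16 centroids scattered over (-1, 1) x (-1, 1)
    for _ in range(100):
        swarm.move(1.0)  # the step is scaled internally by swarm.speed (1/16)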
bobbysoon/Taxi3
Swarm.py
Python
unlicense
1,110
0.062162
#!/usr/bin/env python import os from setuptools import setup, find_packages CURRENT_DIR = os.path.dirname(__file__) setup(name='datapot', description='Library for automatic feature extraction from JSON-datasets', long_description=open(os.path.join(CURRENT_DIR, 'README.rst')).read(), version='0.1.3', url='https://github.com/bashalex/datapot', author='Alex Bash, Yuriy Mokriy, Nikita Saveyev, Michal Rozenwald, Peter Romov', author_email='avbashlykov@gmail.com, yurymokriy@gmail.com, n.a.savelyev@gmail.com, michal.rozenwald@gmail.com, romovpa@gmail.com', license='GNU v3.0', maintainer='Nikita Savelyev', maintainer_email='n.a.savelyev@gmail.com', install_requires=[ 'numpy >= 1.6.1', 'scipy >= 0.17.0', 'pandas >= 0.17.1', 'scikit-learn >= 0.17.1', 'iso-639 >= 0.4.5', 'langdetect >= 1.0.7', 'gensim >= 2.1.0', 'nltk >= 3.2.4', 'tsfresh >= 0.7.1', 'python-dateutil >= 2.6.0', 'fastnumbers >= 2.0.1', 'pystemmer >= 1.3.0', ], classifiers=[ 'Development Status :: 3 - Alpha', 'License :: OSI Approved :: GNU General Public License v3 (GPLv3)', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 2', 'Intended Audience :: Science/Research', 'Intended Audience :: Developers', 'Topic :: Scientific/Engineering', 'Topic :: Software Development', ], packages=find_packages())
bashalex/datapot
setup.py
Python
gpl-3.0
1,582
0.001896
import pyaf.Bench.TS_datasets as tsds import tests.artificial.process_artificial_dataset as art art.process_dataset(N = 1024 , FREQ = 'D', seed = 0, trendtype = "PolyTrend", cycle_length = 5, transform = "Quantization", sigma = 0.0, exog_count = 100, ar_order = 12);
antoinecarme/pyaf
tests/artificial/transf_Quantization/trend_PolyTrend/cycle_5/ar_12/test_artificial_1024_Quantization_PolyTrend_5_12_100.py
Python
bsd-3-clause
270
0.085185
# -*- coding: utf-8 -*- # Copyright 2018 GIG Technology NV # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # @@license_version:1.4@@ from mcfw.consts import DEBUG from plugins.rogerthat_api.api import system from plugins.tff_backend.bizz import get_tf_token_api_key from plugins.tff_backend.bizz.global_stats import _get_currency_conversions from plugins.tff_backend.models.global_stats import GlobalStats from plugins.tff_backend.plugin_consts import BUY_TOKENS_TAG, BUY_TOKENS_FLOW_V5 def migrate(): for stats_model in GlobalStats.query(): # type: GlobalStats new_value = stats_model.value / 100 currencies = _get_currency_conversions(stats_model.currencies, new_value) stats_model.populate(currencies=currencies, value=new_value) stats_model.put() coords = [2, 1, 0] icon_name = 'fa-suitcase' label = 'Purchase iTokens' flow = BUY_TOKENS_FLOW_V5 api_key = get_tf_token_api_key() roles = system.list_roles(api_key) menu_item_roles = [] for role in roles: if role.name in ('invited', 'members'): menu_item_roles.append(role.id) system.put_menu_item(api_key, icon_name, BUY_TOKENS_TAG, coords, None, label, static_flow=flow, roles=[] if DEBUG else menu_item_roles, fall_through=True) system.publish_changes(api_key)
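# Illustrative note (not part of the original migration): the loop above
# rescales stored token values from cents to whole tokens -- e.g. a
# GlobalStats.value of 500 becomes 5 -- and recomputes the cached per-currency
# conversions against the rescaled value before saving each model.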
threefoldfoundation/app_backend
plugins/tff_backend/migrations/_009_change_token_value.py
Python
bsd-3-clause
1,840
0.002174
#!/usr/bin/env python ################################################## # Gnuradio Python Flow Graph # Title: USRP Feng Yun 1 HRPT Receiver # Author: POES Weather Ltd # Description: Feng Yun 1 HRPT Receiver # Generated: Fri Jan 7 15:21:35 2011 ################################################## from gnuradio import eng_notation from gnuradio import gr from gnuradio import noaa from gnuradio import window from gnuradio.eng_option import eng_option from gnuradio.gr import firdes from gnuradio.wxgui import fftsink2 from gnuradio.wxgui import forms from gnuradio.wxgui import numbersink2 from grc_gnuradio import usrp as grc_usrp from grc_gnuradio import wxgui as grc_wxgui from optparse import OptionParser from time import strftime, localtime import ConfigParser import math, os import poesweather import wx class usrp_rx_fy1_bb_hrpt(grc_wxgui.top_block_gui): def __init__(self, side="A", gain=35, sync_check=False, decim=16, satellite='FENGYUN-1D', frames_file=os.environ['HOME'] + '/FENGYUN-1D.hrpt', baseband_file=os.environ['HOME'] + '/FENGYUN-1D.dat', freq=1700.5e6): grc_wxgui.top_block_gui.__init__(self, title="USRP Feng Yun 1 HRPT Receiver") _icon_path = "/usr/share/icons/hicolor/32x32/apps/gnuradio-grc.png" self.SetIcon(wx.Icon(_icon_path, wx.BITMAP_TYPE_ANY)) ################################################## # Parameters ################################################## self.side = side self.gain = gain self.sync_check = sync_check self.decim = decim self.satellite = satellite self.frames_file = frames_file self.baseband_file = baseband_file self.freq = freq ################################################## # Variables ################################################## self.sym_rate = sym_rate = 600*1109*2 self.samp_rate = samp_rate = 64e6/decim self.config_filename = config_filename = os.environ['HOME']+'/.gnuradio/fy1_hrpt.conf' self.sps = sps = samp_rate/sym_rate self._saved_pll_alpha_config = ConfigParser.ConfigParser() self._saved_pll_alpha_config.read(config_filename) try: saved_pll_alpha = self._saved_pll_alpha_config.getfloat("satname", 'pll_alpha') except: saved_pll_alpha = 0.005 self.saved_pll_alpha = saved_pll_alpha self._saved_clock_alpha_config = ConfigParser.ConfigParser() self._saved_clock_alpha_config.read(config_filename) try: saved_clock_alpha = self._saved_clock_alpha_config.getfloat("satname", 'clock_alpha') except: saved_clock_alpha = 0.001 self.saved_clock_alpha = saved_clock_alpha self.sync_check_txt = sync_check_txt = sync_check self.side_text = side_text = side self._saved_gain_config = ConfigParser.ConfigParser() self._saved_gain_config.read(config_filename) try: saved_gain = self._saved_gain_config.getfloat("satname", 'gain') except: saved_gain = gain self.saved_gain = saved_gain self.satellite_text = satellite_text = satellite self.sample_rate_text = sample_rate_text = samp_rate self.pll_alpha = pll_alpha = saved_pll_alpha self.max_clock_offset = max_clock_offset = 0.1 self.max_carrier_offset = max_carrier_offset = 2*math.pi*100e3/samp_rate self.hs = hs = int(sps/2.0) self.gain_slider = gain_slider = gain self.freq_tb = freq_tb = freq self.frames_outfile_text = frames_outfile_text = frames_file self.decim_tb = decim_tb = decim self.datetime_text = datetime_text = strftime("%A, %B %d %Y %H:%M:%S", localtime()) self.clock_alpha = clock_alpha = saved_clock_alpha self.baseband_outfile_text = baseband_outfile_text = baseband_file ################################################## # Notebooks ################################################## self.displays = 
wx.Notebook(self.GetWin(), style=wx.NB_TOP) self.displays.AddPage(grc_wxgui.Panel(self.displays), "RX Feng Yun 1 HRPT") self.displays.AddPage(grc_wxgui.Panel(self.displays), "Information") self.Add(self.displays) ################################################## # Controls ################################################## self._sync_check_txt_static_text = forms.static_text( parent=self.GetWin(), value=self.sync_check_txt, callback=self.set_sync_check_txt, label="Sync check", converter=forms.float_converter(), ) self.GridAdd(self._sync_check_txt_static_text, 0, 2, 1, 1) self._side_text_static_text = forms.static_text( parent=self.GetWin(), value=self.side_text, callback=self.set_side_text, label="USRP Side", converter=forms.str_converter(), ) self.GridAdd(self._side_text_static_text, 0, 0, 1, 1) self._satellite_text_static_text = forms.static_text( parent=self.GetWin(), value=self.satellite_text, callback=self.set_satellite_text, label="Satellite", converter=forms.str_converter(), ) self.GridAdd(self._satellite_text_static_text, 0, 1, 1, 1) self._sample_rate_text_static_text = forms.static_text( parent=self.displays.GetPage(1).GetWin(), value=self.sample_rate_text, callback=self.set_sample_rate_text, label="Sample rate", converter=forms.float_converter(), ) self.displays.GetPage(1).GridAdd(self._sample_rate_text_static_text, 3, 0, 1, 1) _pll_alpha_sizer = wx.BoxSizer(wx.VERTICAL) self._pll_alpha_text_box = forms.text_box( parent=self.GetWin(), sizer=_pll_alpha_sizer, value=self.pll_alpha, callback=self.set_pll_alpha, label="PLL Alpha", converter=forms.float_converter(), proportion=0, ) self._pll_alpha_slider = forms.slider( parent=self.GetWin(), sizer=_pll_alpha_sizer, value=self.pll_alpha, callback=self.set_pll_alpha, minimum=0.005, maximum=0.5, num_steps=100, style=wx.SL_HORIZONTAL, cast=float, proportion=1, ) self.GridAdd(_pll_alpha_sizer, 2, 1, 1, 1) _gain_slider_sizer = wx.BoxSizer(wx.VERTICAL) self._gain_slider_text_box = forms.text_box( parent=self.GetWin(), sizer=_gain_slider_sizer, value=self.gain_slider, callback=self.set_gain_slider, label="Gain", converter=forms.int_converter(), proportion=0, ) self._gain_slider_slider = forms.slider( parent=self.GetWin(), sizer=_gain_slider_sizer, value=self.gain_slider, callback=self.set_gain_slider, minimum=0, maximum=100, num_steps=100, style=wx.SL_HORIZONTAL, cast=int, proportion=1, ) self.GridAdd(_gain_slider_sizer, 2, 0, 1, 1) self._freq_tb_text_box = forms.text_box( parent=self.GetWin(), value=self.freq_tb, callback=self.set_freq_tb, label="Frequency", converter=forms.float_converter(), ) self.GridAdd(self._freq_tb_text_box, 1, 1, 1, 1) self._frames_outfile_text_static_text = forms.static_text( parent=self.displays.GetPage(1).GetWin(), value=self.frames_outfile_text, callback=self.set_frames_outfile_text, label="Frames filename", converter=forms.str_converter(), ) self.displays.GetPage(1).GridAdd(self._frames_outfile_text_static_text, 5, 0, 1, 1) self._decim_tb_text_box = forms.text_box( parent=self.GetWin(), value=self.decim_tb, callback=self.set_decim_tb, label="Decimation", converter=forms.int_converter(), ) self.GridAdd(self._decim_tb_text_box, 1, 0, 1, 1) self._datetime_text_static_text = forms.static_text( parent=self.displays.GetPage(1).GetWin(), value=self.datetime_text, callback=self.set_datetime_text, label="Acquisition start", converter=forms.str_converter(), ) self.displays.GetPage(1).GridAdd(self._datetime_text_static_text, 2, 0, 1, 1) _clock_alpha_sizer = wx.BoxSizer(wx.VERTICAL) self._clock_alpha_text_box = 
forms.text_box( parent=self.GetWin(), sizer=_clock_alpha_sizer, value=self.clock_alpha, callback=self.set_clock_alpha, label="Clock alpha", converter=forms.float_converter(), proportion=0, ) self._clock_alpha_slider = forms.slider( parent=self.GetWin(), sizer=_clock_alpha_sizer, value=self.clock_alpha, callback=self.set_clock_alpha, minimum=0.001, maximum=0.1, num_steps=100, style=wx.SL_HORIZONTAL, cast=float, proportion=1, ) self.GridAdd(_clock_alpha_sizer, 2, 2, 1, 1) self._baseband_outfile_text_static_text = forms.static_text( parent=self.displays.GetPage(1).GetWin(), value=self.baseband_outfile_text, callback=self.set_baseband_outfile_text, label="Baseband filename", converter=forms.str_converter(), ) self.displays.GetPage(1).GridAdd(self._baseband_outfile_text_static_text, 4, 0, 1, 1) ################################################## # Blocks ################################################## self.agc = gr.agc_cc(1e-5, 1.0, 1.0/32768.0, 1.0) self.gr_binary_slicer_fb_0 = gr.binary_slicer_fb() self.gr_clock_recovery_mm_xx_0 = gr.clock_recovery_mm_ff(sps/2.0, clock_alpha**2/4.0, 0.5, clock_alpha, max_clock_offset) self.gr_file_sink_0 = gr.file_sink(gr.sizeof_short*1, "/home/jerry/feng-yun-bb.dat") self.gr_file_sink_0.set_unbuffered(False) self.gr_file_sink_0_0 = gr.file_sink(gr.sizeof_short*1, "/home/jerry/feng-yun-frames.hrpt") self.gr_file_sink_0_0.set_unbuffered(False) self.gr_interleaved_short_to_complex_0 = gr.interleaved_short_to_complex() self.gr_moving_average_xx_0 = gr.moving_average_ff(hs, 1.0/hs, 4000) self.gr_rms_xx_0 = gr.rms_cf(0.2) self.pll = noaa.hrpt_pll_cf(pll_alpha, pll_alpha**2/4.0, max_carrier_offset) self.poesweather_fy1_hrpt_decoder_0 = poesweather.fy1_hrpt_decoder(True,False) self.poesweather_fy1_hrpt_deframer_0 = poesweather.fy1_hrpt_deframer(sync_check) self.usrp_simple_source_x_0 = grc_usrp.simple_source_s(which=0, side=side, rx_ant="RXA") self.usrp_simple_source_x_0.set_decim_rate(decim) self.usrp_simple_source_x_0.set_frequency(freq, verbose=True) self.usrp_simple_source_x_0.set_gain(gain) self.wxgui_fftsink2_0 = fftsink2.fft_sink_c( self.displays.GetPage(0).GetWin(), baseband_freq=0, y_per_div=5, y_divs=10, ref_level=45, ref_scale=2.0, sample_rate=samp_rate, fft_size=1024, fft_rate=30, average=True, avg_alpha=0.1, title="Feng Yun 1 HRPT FFT Spectrum", peak_hold=False, ) self.displays.GetPage(0).Add(self.wxgui_fftsink2_0.win) self.wxgui_numbersink2_0 = numbersink2.number_sink_f( self.GetWin(), unit="Units", minval=0, maxval=5000, factor=1.0, decimal_places=4, ref_level=0, sample_rate=samp_rate, number_rate=15, average=True, avg_alpha=0.07, label="S-Meter", peak_hold=False, show_gauge=True, ) self.Add(self.wxgui_numbersink2_0.win) ################################################## # Connections ################################################## self.connect((self.poesweather_fy1_hrpt_deframer_0, 0), (self.poesweather_fy1_hrpt_decoder_0, 0)) self.connect((self.gr_binary_slicer_fb_0, 0), (self.poesweather_fy1_hrpt_deframer_0, 0)) self.connect((self.usrp_simple_source_x_0, 0), (self.gr_interleaved_short_to_complex_0, 0)) self.connect((self.gr_interleaved_short_to_complex_0, 0), (self.agc, 0)) self.connect((self.gr_clock_recovery_mm_xx_0, 0), (self.gr_binary_slicer_fb_0, 0)) self.connect((self.agc, 0), (self.pll, 0)) self.connect((self.pll, 0), (self.gr_moving_average_xx_0, 0)) self.connect((self.gr_moving_average_xx_0, 0), (self.gr_clock_recovery_mm_xx_0, 0)) self.connect((self.gr_interleaved_short_to_complex_0, 0), (self.wxgui_fftsink2_0, 0)) 
self.connect((self.gr_interleaved_short_to_complex_0, 0), (self.gr_rms_xx_0, 0)) self.connect((self.usrp_simple_source_x_0, 0), (self.gr_file_sink_0, 0)) self.connect((self.poesweather_fy1_hrpt_deframer_0, 0), (self.gr_file_sink_0_0, 0)) self.connect((self.gr_rms_xx_0, 0), (self.wxgui_numbersink2_0, 0)) def set_side(self, side): self.side = side self.set_side_text(self.side) def set_gain(self, gain): self.gain = gain self.set_saved_gain(self.gain) self._saved_gain_config = ConfigParser.ConfigParser() self._saved_gain_config.read(self.config_filename) if not self._saved_gain_config.has_section("satname"): self._saved_gain_config.add_section("satname") self._saved_gain_config.set("satname", 'gain', str(self.gain)) self._saved_gain_config.write(open(self.config_filename, 'w')) self.set_gain_slider(self.gain) self.usrp_simple_source_x_0.set_gain(self.gain) def set_sync_check(self, sync_check): self.sync_check = sync_check self.set_sync_check_txt(self.sync_check) def set_decim(self, decim): self.decim = decim self.set_samp_rate(64e6/self.decim) self.set_decim_tb(self.decim) self.usrp_simple_source_x_0.set_decim_rate(self.decim) def set_satellite(self, satellite): self.satellite = satellite self.set_satellite_text(self.satellite) def set_frames_file(self, frames_file): self.frames_file = frames_file self.set_frames_outfile_text(self.frames_file) def set_baseband_file(self, baseband_file): self.baseband_file = baseband_file self.set_baseband_outfile_text(self.baseband_file) def set_freq(self, freq): self.freq = freq self.set_freq_tb(self.freq) self.usrp_simple_source_x_0.set_frequency(self.freq) def set_sym_rate(self, sym_rate): self.sym_rate = sym_rate self.set_sps(self.samp_rate/self.sym_rate) def set_samp_rate(self, samp_rate): self.samp_rate = samp_rate self.set_max_carrier_offset(2*math.pi*100e3/self.samp_rate) self.set_sample_rate_text(self.samp_rate) self.set_sps(self.samp_rate/self.sym_rate) self.wxgui_fftsink2_0.set_sample_rate(self.samp_rate) def set_config_filename(self, config_filename): self.config_filename = config_filename self._saved_pll_alpha_config = ConfigParser.ConfigParser() self._saved_pll_alpha_config.read(self.config_filename) if not self._saved_pll_alpha_config.has_section("satname"): self._saved_pll_alpha_config.add_section("satname") self._saved_pll_alpha_config.set("satname", 'pll_alpha', str(self.pll_alpha)) self._saved_pll_alpha_config.write(open(self.config_filename, 'w')) self._saved_gain_config = ConfigParser.ConfigParser() self._saved_gain_config.read(self.config_filename) if not self._saved_gain_config.has_section("satname"): self._saved_gain_config.add_section("satname") self._saved_gain_config.set("satname", 'gain', str(self.gain)) self._saved_gain_config.write(open(self.config_filename, 'w')) self._saved_clock_alpha_config = ConfigParser.ConfigParser() self._saved_clock_alpha_config.read(self.config_filename) if not self._saved_clock_alpha_config.has_section("satname"): self._saved_clock_alpha_config.add_section("satname") self._saved_clock_alpha_config.set("satname", 'clock_alpha', str(self.clock_alpha)) self._saved_clock_alpha_config.write(open(self.config_filename, 'w')) def set_sps(self, sps): self.sps = sps self.set_hs(int(self.sps/2.0)) self.gr_clock_recovery_mm_xx_0.set_omega(self.sps/2.0) def set_saved_pll_alpha(self, saved_pll_alpha): self.saved_pll_alpha = saved_pll_alpha self.set_pll_alpha(self.saved_pll_alpha) def set_saved_clock_alpha(self, saved_clock_alpha): self.saved_clock_alpha = saved_clock_alpha self.set_clock_alpha(self.saved_clock_alpha) 
def set_sync_check_txt(self, sync_check_txt): self.sync_check_txt = sync_check_txt self._sync_check_txt_static_text.set_value(self.sync_check_txt) def set_side_text(self, side_text): self.side_text = side_text self._side_text_static_text.set_value(self.side_text) def set_saved_gain(self, saved_gain): self.saved_gain = saved_gain def set_satellite_text(self, satellite_text): self.satellite_text = satellite_text self._satellite_text_static_text.set_value(self.satellite_text) def set_sample_rate_text(self, sample_rate_text): self.sample_rate_text = sample_rate_text self._sample_rate_text_static_text.set_value(self.sample_rate_text) def set_pll_alpha(self, pll_alpha): self.pll_alpha = pll_alpha self._pll_alpha_slider.set_value(self.pll_alpha) self._pll_alpha_text_box.set_value(self.pll_alpha) self._saved_pll_alpha_config = ConfigParser.ConfigParser() self._saved_pll_alpha_config.read(self.config_filename) if not self._saved_pll_alpha_config.has_section("satname"): self._saved_pll_alpha_config.add_section("satname") self._saved_pll_alpha_config.set("satname", 'pll_alpha', str(self.pll_alpha)) self._saved_pll_alpha_config.write(open(self.config_filename, 'w')) self.pll.set_alpha(self.pll_alpha) self.pll.set_beta(self.pll_alpha**2/4.0) def set_max_clock_offset(self, max_clock_offset): self.max_clock_offset = max_clock_offset def set_max_carrier_offset(self, max_carrier_offset): self.max_carrier_offset = max_carrier_offset self.pll.set_max_offset(self.max_carrier_offset) def set_hs(self, hs): self.hs = hs self.gr_moving_average_xx_0.set_length_and_scale(self.hs, 1.0/self.hs) def set_gain_slider(self, gain_slider): self.gain_slider = gain_slider self._gain_slider_slider.set_value(self.gain_slider) self._gain_slider_text_box.set_value(self.gain_slider) def set_freq_tb(self, freq_tb): self.freq_tb = freq_tb self._freq_tb_text_box.set_value(self.freq_tb) def set_frames_outfile_text(self, frames_outfile_text): self.frames_outfile_text = frames_outfile_text self._frames_outfile_text_static_text.set_value(self.frames_outfile_text) def set_decim_tb(self, decim_tb): self.decim_tb = decim_tb self._decim_tb_text_box.set_value(self.decim_tb) def set_datetime_text(self, datetime_text): self.datetime_text = datetime_text self._datetime_text_static_text.set_value(self.datetime_text) def set_clock_alpha(self, clock_alpha): self.clock_alpha = clock_alpha self._saved_clock_alpha_config = ConfigParser.ConfigParser() self._saved_clock_alpha_config.read(self.config_filename) if not self._saved_clock_alpha_config.has_section("satname"): self._saved_clock_alpha_config.add_section("satname") self._saved_clock_alpha_config.set("satname", 'clock_alpha', str(self.clock_alpha)) self._saved_clock_alpha_config.write(open(self.config_filename, 'w')) self._clock_alpha_slider.set_value(self.clock_alpha) self._clock_alpha_text_box.set_value(self.clock_alpha) self.gr_clock_recovery_mm_xx_0.set_gain_omega(self.clock_alpha**2/4.0) self.gr_clock_recovery_mm_xx_0.set_gain_mu(self.clock_alpha) def set_baseband_outfile_text(self, baseband_outfile_text): self.baseband_outfile_text = baseband_outfile_text self._baseband_outfile_text_static_text.set_value(self.baseband_outfile_text) if __name__ == '__main__': parser = OptionParser(option_class=eng_option, usage="%prog: [options]") parser.add_option("-R", "--side", dest="side", type="string", default="A", help="Set Side [default=%default]") parser.add_option("-g", "--gain", dest="gain", type="eng_float", default=eng_notation.num_to_str(35), help="Set Gain [default=%default]") 
parser.add_option("-c", "--sync-check", dest="sync_check", type="intx", default=False, help="Set Sync check [default=%default]") parser.add_option("-d", "--decim", dest="decim", type="intx", default=16, help="Set Decimation [default=%default]") parser.add_option("-S", "--satellite", dest="satellite", type="string", default='FENGYUN-1D', help="Set Satellite [default=%default]") parser.add_option("-o", "--frames-file", dest="frames_file", type="string", default=os.environ['HOME'] + '/FENGYUN-1D.hrpt', help="Set Frames output filename [default=%default]") parser.add_option("-F", "--baseband-file", dest="baseband_file", type="string", default=os.environ['HOME'] + '/FENGYUN-1D.dat', help="Set Baseband output filename [default=%default]") parser.add_option("-f", "--freq", dest="freq", type="eng_float", default=eng_notation.num_to_str(1700.5e6), help="Set Frequency [default=%default]") (options, args) = parser.parse_args() tb = usrp_rx_fy1_bb_hrpt(side=options.side, gain=options.gain, sync_check=options.sync_check, decim=options.decim, satellite=options.satellite, frames_file=options.frames_file, baseband_file=options.baseband_file, freq=options.freq) tb.Run(True)
mbsat/gr-poes-weather
apps/FY1/gui/usrp_rx_fy1_bb_hrpt.py
Python
gpl-3.0
19,883
0.024342
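# A minimal standalone sketch (not part of the flowgraph above) showing how the
# FY-1 HRPT receiver derives its DSP parameters. The symbol rate below is a
# placeholder: the real value comes from the flowgraph's variable block, which
# is outside this excerpt.
import math

decim = 16
samp_rate = 64e6 / decim           # USRP basic RX: 64 MS/s ADC divided by decim
sym_rate = 600e3                   # hypothetical; supplied by the flowgraph in practice
sps = samp_rate / sym_rate         # samples per symbol
hs = int(sps / 2.0)                # half-symbol moving-average length

# Gains mirror the gr.clock_recovery_mm_ff / noaa.hrpt_pll_cf constructor calls:
clock_alpha = 0.05
pll_alpha = 0.05
mm_omega = sps / 2.0
mm_gain_omega = clock_alpha ** 2 / 4.0
mm_gain_mu = clock_alpha
pll_beta = pll_alpha ** 2 / 4.0
max_carrier_offset = 2 * math.pi * 100e3 / samp_rate

print(samp_rate, sps, mm_omega, mm_gain_omega, pll_beta)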
__version__ = (1, 0, 0, 'final', 0)
aek/pgbouncer-ng
pgbouncerlib/__init__.py
Python
bsd-3-clause
36
0
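# A hedged sketch (assumed convention, not from the pgbouncer-ng source) of how
# a Django-style version tuple like the one above is commonly rendered:
def get_version(version=(1, 0, 0, 'final', 0)):
    major, minor, micro, releaselevel, serial = version
    s = '%d.%d.%d' % (major, minor, micro)
    if releaselevel != 'final':
        s += '%s%d' % (releaselevel[0], serial)  # e.g. '1.0.0a1' for an alpha
    return s

assert get_version() == '1.0.0'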
# -*- coding: utf-8 -*- ''' Covenant Add-on This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. ''' import re,urllib,urlparse,json,base64 from resources.lib.modules import cleantitle from resources.lib.modules import client from resources.lib.modules import directstream from resources.lib.modules import jsunfuck from resources.lib.modules import source_utils from resources.lib.modules import dom_parser from resources.lib.modules import log_utils CODE = '''def retA(): class Infix: def __init__(self, function): self.function = function def __ror__(self, other): return Infix(lambda x, self=self, other=other: self.function(other, x)) def __or__(self, other): return self.function(other) def __rlshift__(self, other): return Infix(lambda x, self=self, other=other: self.function(other, x)) def __rshift__(self, other): return self.function(other) def __call__(self, value1, value2): return self.function(value1, value2) def my_add(x, y): try: return x + y except Exception: return str(x) + str(y) x = Infix(my_add) return %s param = retA()''' class source: def __init__(self): self.priority = 1 self.language = ['en'] self.domains = ['yesmovies.to'] self.base_link = 'https://yesmovies.to' self.search_link = '/movie/search/%s.html' self.info_link = '/ajax/movie_info/%s.html?is_login=false' self.server_link = '/ajax/v4_movie_episodes/%s' self.embed_link = '/ajax/movie_embed/%s' self.token_link = '/ajax/movie_token?eid=%s&mid=%s' self.source_link = '/ajax/movie_sources/%s?x=%s&y=%s' def matchAlias(self, title, aliases): try: for alias in aliases: if cleantitle.get(title) == cleantitle.get(alias['title']): return True except: return False def movie(self, imdb, title, localtitle, aliases, year): try: aliases.append({'country': 'us', 'title': title}) url = {'imdb': imdb, 'title': title, 'year': year, 'aliases': aliases} url = urllib.urlencode(url) return url except: return def tvshow(self, imdb, tvdb, tvshowtitle, localtvshowtitle, aliases, year): try: aliases.append({'country': 'us', 'title': tvshowtitle}) url = {'imdb': imdb, 'tvdb': tvdb, 'tvshowtitle': tvshowtitle, 'year': year, 'aliases': aliases} url = urllib.urlencode(url) return url except: return def episode(self, url, imdb, tvdb, title, premiered, season, episode): try: if url == None: return url = urlparse.parse_qs(url) url = dict([(i, url[i][0]) if url[i] else (i, '') for i in url]) url['title'], url['premiered'], url['season'], url['episode'] = title, premiered, season, episode url = urllib.urlencode(url) return url except: return def searchShow(self, title, season, aliases, headers): try: title = cleantitle.normalize(title) search = '%s Season %01d' % (title, int(season)) url = urlparse.urljoin(self.base_link, self.search_link % urllib.quote_plus(cleantitle.getsearch(search))) log_utils.log('shit Returned: %s' % str(url), log_utils.LOGNOTICE) r = client.request(url, headers=headers, timeout='15') r = client.parseDOM(r, 'div', attrs={'class': 'ml-item'}) r = zip(client.parseDOM(r, 'a', ret='href'), 
client.parseDOM(r, 'a', ret='title')) r = [(i[0], i[1], re.findall('(.*?)\s+-\s+Season\s+(\d)', i[1])) for i in r] r = [(i[0], i[1], i[2][0]) for i in r if len(i[2]) > 0] url = [i[0] for i in r if self.matchAlias(i[2][0], aliases) and i[2][1] == season][0] return url except: return def searchMovie(self, title, year, aliases, headers): try: title = cleantitle.normalize(title) url = urlparse.urljoin(self.base_link, self.search_link % urllib.quote_plus(cleantitle.getsearch(title))) r = client.request(url, headers=headers, timeout='15') r = client.parseDOM(r, 'div', attrs={'class': 'ml-item'}) r = zip(client.parseDOM(r, 'a', ret='href'), client.parseDOM(r, 'a', ret='title')) results = [(i[0], i[1], re.findall('\((\d{4})', i[1])) for i in r] try: r = [(i[0], i[1], i[2][0]) for i in results if len(i[2]) > 0] url = [i[0] for i in r if self.matchAlias(i[1], aliases) and (year == i[2])][0] except: url = None pass if (url == None): url = [i[0] for i in results if self.matchAlias(i[1], aliases)][0] return url except: return def sources(self, url, hostDict, hostprDict): try: sources = [] if url is None: return sources data = urlparse.parse_qs(url) data = dict([(i, data[i][0]) if data[i] else (i, '') for i in data]) aliases = eval(data['aliases']) headers = {} if 'tvshowtitle' in data: episode = int(data['episode']) url = self.searchShow(data['tvshowtitle'], data['season'], aliases, headers) else: episode = 0 url = self.searchMovie(data['title'], data['year'], aliases, headers) mid = re.findall('-(\d+)', url)[-1] try: headers = {'Referer': url} u = urlparse.urljoin(self.base_link, self.server_link % mid) r = client.request(u, headers=headers, XHR=True) r = json.loads(r)['html'] r = client.parseDOM(r, 'div', attrs = {'class': 'pas-list'}) ids = client.parseDOM(r, 'li', ret='data-id') servers = client.parseDOM(r, 'li', ret='data-server') labels = client.parseDOM(r, 'a', ret='title') r = zip(ids, servers, labels) u = urlparse.urljoin(self.base_link, self.info_link % mid) quality = client.request(u, headers=headers) quality = dom_parser.parse_dom(quality, 'div', attrs={'class': 'jtip-quality'})[0].content if quality == "HD": quality = "720p" for eid in r: try: try: ep = re.findall('episode.*?(\d+).*?',eid[2].lower())[0] except: ep = 0 if (episode == 0) or (int(ep) == episode): if eid[1] != '6': url = urlparse.urljoin(self.base_link, self.embed_link % eid[0]) link = client.request(url) link = json.loads(link)['src'] valid, host = source_utils.is_host_valid(link, hostDict) sources.append({'source':host,'quality':quality,'language': 'en','url':link,'info':[],'direct':False,'debridonly':False}) else: url = urlparse.urljoin(self.base_link, self.token_link % (eid[0], mid)) script = client.request(url) if '$_$' in script: params = self.uncensored1(script) elif script.startswith('[]') and script.endswith('()'): params = self.uncensored2(script) elif '_x=' in script: x = re.search('''_x=['"]([^"']+)''', script).group(1) y = re.search('''_y=['"]([^"']+)''', script).group(1) params = {'x': x, 'y': y} else: raise Exception() u = urlparse.urljoin(self.base_link, self.source_link % (eid[0], params['x'], params['y'])) r = client.request(u, XHR=True) url = json.loads(r)['playlist'][0]['sources'] url = [i['file'] for i in url if 'file' in i] url = [directstream.googletag(i) for i in url] url = [i[0] for i in url if i] for s in url: if 'lh3.googleusercontent.com' in s['url']: s['url'] = directstream.googleredirect(s['url']) sources.append({'source': 'gvideo', 'quality': s['quality'], 'language': 'en', 'url': s['url'], 
'direct': True, 'debridonly': False}) except: pass except: pass return sources except: return sources def resolve(self, url): try: if self.embed_link in url: result = client.request(url, XHR=True) url = json.loads(result)['embed_url'] return url return url except: return def uncensored(a, b): x = '' ; i = 0 for i, y in enumerate(a): z = b[i % len(b) - 1] y = int(ord(str(y)[0])) + int(ord(str(z)[0])) x += chr(y) x = base64.b64encode(x) return x def uncensored1(self, script): try: script = '(' + script.split("(_$$)) ('_');")[0].split("/* `$$` */")[-1].strip() script = script.replace('(__$)[$$$]', '\'"\'') script = script.replace('(__$)[_$]', '"\\\\"') script = script.replace('(o^_^o)', '3') script = script.replace('(c^_^o)', '0') script = script.replace('(_$$)', '1') script = script.replace('($$_)', '4') vGlobals = {"__builtins__": None, '__name__': __name__, 'str': str, 'Exception': Exception} vLocals = {'param': None} exec (CODE % script.replace('+', '|x|'), vGlobals, vLocals) data = vLocals['param'].decode('string_escape') x = re.search('''_x=['"]([^"']+)''', data).group(1) y = re.search('''_y=['"]([^"']+)''', data).group(1) return {'x': x, 'y': y} except: pass def uncensored2(self, script): try: js = jsunfuck.JSUnfuck(script).decode() x = re.search('''_x=['"]([^"']+)''', js).group(1) y = re.search('''_y=['"]([^"']+)''', js).group(1) return {'x': x, 'y': y} except: pass
RuiNascimento/krepo
script.module.lambdascrapers/lib/lambdascrapers/sources_incursion/en_incursion-1.20(final)/ymovies.py
Python
gpl-2.0
11,750
0.007064
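# A minimal sketch, extracted and simplified from the CODE template in the
# scraper above, showing the Infix-operator trick it relies on: '+' in the
# deobfuscated token script is rewritten to '|x|' so that mixed str/int
# additions coerce to strings instead of raising TypeError.
class Infix:
    def __init__(self, function):
        self.function = function
    def __ror__(self, other):
        return Infix(lambda y, self=self, other=other: self.function(other, y))
    def __or__(self, other):
        return self.function(other)

def my_add(a, b):
    try:
        return a + b
    except Exception:
        return str(a) + str(b)

x = Infix(my_add)

assert (1 |x| 2) == 3        # plain addition still works
assert ('a' |x| 1) == 'a1'   # int is coerced to str instead of raising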
from django.contrib import admin

from blog.models import Blog_post

# Register your models here.
admin.site.register(Blog_post)
Extintor/DjangoBlog
blog/admin.py
Python
gpl-2.0
129
0
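# A hedged sketch (the field names are hypothetical, not taken from the app's
# models) of the customized registration that typically replaces the bare
# admin.site.register call above once the admin needs list columns and search:
from django.contrib import admin

from blog.models import Blog_post


@admin.register(Blog_post)
class BlogPostAdmin(admin.ModelAdmin):
    list_display = ('title', 'created')   # hypothetical model fields
    search_fields = ('title',)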
#!/usr/bin/python # -*- coding: latin-1 -*- usage = """\ usage: %prog [options] connection_string Unit tests for SQL Server. To use, pass a connection string as the parameter. The tests will create and drop tables t1 and t2 as necessary. These run using the version from the 'build' directory, not the version installed into the Python directories. You must run python setup.py build before running the tests. You can also put the connection string into a setup.cfg file in the root of the project (the same one setup.py would use) like so: [sqlservertests] connection-string=DRIVER={SQL Server};SERVER=localhost;UID=uid;PWD=pwd;DATABASE=db The connection string above will use the 2000/2005 driver, even if SQL Server 2008 is installed: 2000: DRIVER={SQL Server} 2005: DRIVER={SQL Server} 2008: DRIVER={SQL Server Native Client 10.0} """ import sys, os, re import unittest from decimal import Decimal from datetime import datetime, date, time from os.path import join, getsize, dirname, abspath from testutils import * _TESTSTR = '0123456789-abcdefghijklmnopqrstuvwxyz-' def _generate_test_string(length): """ Returns a string of `length` characters, constructed by repeating _TESTSTR as necessary. To enhance performance, there are 3 ways data is read, based on the length of the value, so most data types are tested with 3 lengths. This function helps us generate the test data. We use a recognizable data set instead of a single character to make it less likely that "overlap" errors will be hidden and to help us manually identify where a break occurs. """ if length <= len(_TESTSTR): return _TESTSTR[:length] c = (length + len(_TESTSTR)-1) / len(_TESTSTR) v = _TESTSTR * c return v[:length] class SqlServerTestCase(unittest.TestCase): SMALL_FENCEPOST_SIZES = [ 0, 1, 255, 256, 510, 511, 512, 1023, 1024, 2047, 2048, 4000 ] LARGE_FENCEPOST_SIZES = [ 4095, 4096, 4097, 10 * 1024, 20 * 1024 ] MAX_FENCEPOST_SIZES = [ 5 * 1024 * 1024 ] #, 50 * 1024 * 1024 ] ANSI_SMALL_FENCEPOSTS = [ _generate_test_string(size) for size in SMALL_FENCEPOST_SIZES ] UNICODE_SMALL_FENCEPOSTS = [ unicode(s) for s in ANSI_SMALL_FENCEPOSTS ] ANSI_LARGE_FENCEPOSTS = ANSI_SMALL_FENCEPOSTS + [ _generate_test_string(size) for size in LARGE_FENCEPOST_SIZES ] UNICODE_LARGE_FENCEPOSTS = UNICODE_SMALL_FENCEPOSTS + [ unicode(s) for s in [_generate_test_string(size) for size in LARGE_FENCEPOST_SIZES ]] ANSI_MAX_FENCEPOSTS = ANSI_LARGE_FENCEPOSTS + [ _generate_test_string(size) for size in MAX_FENCEPOST_SIZES ] UNICODE_MAX_FENCEPOSTS = UNICODE_LARGE_FENCEPOSTS + [ unicode(s) for s in [_generate_test_string(size) for size in MAX_FENCEPOST_SIZES ]] def __init__(self, method_name, connection_string): unittest.TestCase.__init__(self, method_name) self.connection_string = connection_string def get_sqlserver_version(self): """ Returns the major version: 8-->2000, 9-->2005, 10-->2008 """ self.cursor.execute("exec master..xp_msver 'ProductVersion'") row = self.cursor.fetchone() return int(row.Character_Value.split('.', 1)[0]) def setUp(self): self.cnxn = pyodbc.connect(self.connection_string) self.cursor = self.cnxn.cursor() for i in range(3): try: self.cursor.execute("drop table t%d" % i) self.cnxn.commit() except: pass for i in range(3): try: self.cursor.execute("drop procedure proc%d" % i) self.cnxn.commit() except: pass try: self.cursor.execute('drop function func1') self.cnxn.commit() except: pass self.cnxn.rollback() def tearDown(self): try: self.cursor.close() self.cnxn.close() except: # If we've already closed the cursor or connection, exceptions are 
thrown. pass def test_binary_type(self): if sys.hexversion >= 0x02060000: self.assertIs(pyodbc.BINARY, bytearray) else: self.assertIs(pyodbc.BINARY, buffer) def test_multiple_bindings(self): "More than one bind and select on a cursor" self.cursor.execute("create table t1(n int)") self.cursor.execute("insert into t1 values (?)", 1) self.cursor.execute("insert into t1 values (?)", 2) self.cursor.execute("insert into t1 values (?)", 3) for i in range(3): self.cursor.execute("select n from t1 where n < ?", 10) self.cursor.execute("select n from t1 where n < 3") def test_different_bindings(self): self.cursor.execute("create table t1(n int)") self.cursor.execute("create table t2(d datetime)") self.cursor.execute("insert into t1 values (?)", 1) self.cursor.execute("insert into t2 values (?)", datetime.now()) def test_datasources(self): p = pyodbc.dataSources() self.assert_(isinstance(p, dict)) def test_getinfo_string(self): value = self.cnxn.getinfo(pyodbc.SQL_CATALOG_NAME_SEPARATOR) self.assert_(isinstance(value, str)) def test_getinfo_bool(self): value = self.cnxn.getinfo(pyodbc.SQL_ACCESSIBLE_TABLES) self.assert_(isinstance(value, bool)) def test_getinfo_int(self): value = self.cnxn.getinfo(pyodbc.SQL_DEFAULT_TXN_ISOLATION) self.assert_(isinstance(value, (int, long))) def test_getinfo_smallint(self): value = self.cnxn.getinfo(pyodbc.SQL_CONCAT_NULL_BEHAVIOR) self.assert_(isinstance(value, int)) def test_noscan(self): self.assertEqual(self.cursor.noscan, False) self.cursor.noscan = True self.assertEqual(self.cursor.noscan, True) def test_guid(self): self.cursor.execute("create table t1(g1 uniqueidentifier)") self.cursor.execute("insert into t1 values (newid())") v = self.cursor.execute("select * from t1").fetchone()[0] self.assertEqual(type(v), str) self.assertEqual(len(v), 36) def test_nextset(self): self.cursor.execute("create table t1(i int)") for i in range(4): self.cursor.execute("insert into t1(i) values(?)", i) self.cursor.execute("select i from t1 where i < 2 order by i; select i from t1 where i >= 2 order by i") for i, row in enumerate(self.cursor): self.assertEqual(i, row.i) self.assertEqual(self.cursor.nextset(), True) for i, row in enumerate(self.cursor): self.assertEqual(i + 2, row.i) def test_nextset_with_raiserror(self): self.cursor.execute("select i = 1; RAISERROR('c', 16, 1);") row = next(self.cursor) self.assertEqual(1, row.i) self.assertRaises(pyodbc.ProgrammingError, self.cursor.nextset) def test_fixed_unicode(self): value = u"t\xebsting" self.cursor.execute("create table t1(s nchar(7))") self.cursor.execute("insert into t1 values(?)", u"t\xebsting") v = self.cursor.execute("select * from t1").fetchone()[0] self.assertEqual(type(v), unicode) self.assertEqual(len(v), len(value)) # If we alloc'd wrong, the test below might work because of an embedded NULL self.assertEqual(v, value) def _test_strtype(self, sqltype, value, resulttype=None, colsize=None): """ The implementation for string, Unicode, and binary tests. 
""" assert colsize in (None, 'max') or isinstance(colsize, int), colsize assert colsize in (None, 'max') or (value is None or colsize >= len(value)) if colsize: sql = "create table t1(s %s(%s))" % (sqltype, colsize) else: sql = "create table t1(s %s)" % sqltype if resulttype is None: resulttype = type(value) self.cursor.execute(sql) self.cursor.execute("insert into t1 values(?)", value) v = self.cursor.execute("select * from t1").fetchone()[0] self.assertEqual(type(v), resulttype) if value is not None: self.assertEqual(len(v), len(value)) # To allow buffer --> db --> bytearray tests, always convert the input to the expected result type before # comparing. if type(value) is not resulttype: value = resulttype(value) self.assertEqual(v, value) def _test_strliketype(self, sqltype, value, resulttype=None, colsize=None): """ The implementation for text, image, ntext, and binary. These types do not support comparison operators. """ assert colsize is None or isinstance(colsize, int), colsize assert colsize is None or (value is None or colsize >= len(value)) if colsize: sql = "create table t1(s %s(%s))" % (sqltype, colsize) else: sql = "create table t1(s %s)" % sqltype if resulttype is None: resulttype = type(value) self.cursor.execute(sql) self.cursor.execute("insert into t1 values(?)", value) v = self.cursor.execute("select * from t1").fetchone()[0] self.assertEqual(type(v), resulttype) if value is not None: self.assertEqual(len(v), len(value)) # To allow buffer --> db --> bytearray tests, always convert the input to the expected result type before # comparing. if type(value) is not resulttype: value = resulttype(value) self.assertEqual(v, value) # # varchar # def test_varchar_null(self): self._test_strtype('varchar', None, colsize=100) # Generate a test for each fencepost size: test_varchar_0, etc. def _maketest(value): def t(self): self._test_strtype('varchar', value, colsize=len(value)) return t for value in ANSI_SMALL_FENCEPOSTS: locals()['test_varchar_%s' % len(value)] = _maketest(value) # Also test varchar(max) def _maketest(value): def t(self): self._test_strtype('varchar', value, colsize='max') return t for value in ANSI_MAX_FENCEPOSTS: locals()['test_varcharmax_%s' % len(value)] = _maketest(value) def test_varchar_many(self): self.cursor.execute("create table t1(c1 varchar(300), c2 varchar(300), c3 varchar(300))") v1 = 'ABCDEFGHIJ' * 30 v2 = '0123456789' * 30 v3 = '9876543210' * 30 self.cursor.execute("insert into t1(c1, c2, c3) values (?,?,?)", v1, v2, v3); row = self.cursor.execute("select c1, c2, c3, len(c1) as l1, len(c2) as l2, len(c3) as l3 from t1").fetchone() self.assertEqual(v1, row.c1) self.assertEqual(v2, row.c2) self.assertEqual(v3, row.c3) def test_varchar_upperlatin(self): self._test_strtype('varchar', 'á') # # unicode # def test_unicode_null(self): self._test_strtype('nvarchar', None, colsize=100) # Generate a test for each fencepost size: test_unicode_0, etc. 
def _maketest(value): def t(self): self._test_strtype('nvarchar', value, colsize=len(value)) return t for value in UNICODE_SMALL_FENCEPOSTS: locals()['test_unicode_%s' % len(value)] = _maketest(value) # Also test nvarchar(max) def _maketest(value): def t(self): self._test_strtype('nvarchar', value, colsize='max') return t for value in UNICODE_MAX_FENCEPOSTS: locals()['test_nvarcharmax_%s' % len(value)] = _maketest(value) def test_unicode_upperlatin(self): self._test_strtype('nvarchar', u'á') def test_unicode_longmax(self): # Issue 188: Segfault when fetching NVARCHAR(MAX) data over 511 bytes ver = self.get_sqlserver_version() if ver < 9: # 2005+ return # so pass / ignore self.cursor.execute("select cast(replicate(N'x', 512) as nvarchar(max))") # # binary # def test_binary_null(self): self._test_strtype('varbinary', None, colsize=100) def test_large_binary_null(self): # Bug 1575064 self._test_strtype('varbinary', None, colsize=4000) def test_binaryNull_object(self): self.cursor.execute("create table t1(n varbinary(10))") self.cursor.execute("insert into t1 values (?)", pyodbc.BinaryNull); # buffer def _maketest(value): def t(self): self._test_strtype('varbinary', buffer(value), resulttype=pyodbc.BINARY, colsize=len(value)) return t for value in ANSI_SMALL_FENCEPOSTS: locals()['test_binary_buffer_%s' % len(value)] = _maketest(value) # bytearray if sys.hexversion >= 0x02060000: def _maketest(value): def t(self): self._test_strtype('varbinary', bytearray(value), colsize=len(value)) return t for value in ANSI_SMALL_FENCEPOSTS: locals()['test_binary_bytearray_%s' % len(value)] = _maketest(value) # varbinary(max) def _maketest(value): def t(self): self._test_strtype('varbinary', buffer(value), resulttype=pyodbc.BINARY, colsize='max') return t for value in ANSI_MAX_FENCEPOSTS: locals()['test_binarymax_buffer_%s' % len(value)] = _maketest(value) # bytearray if sys.hexversion >= 0x02060000: def _maketest(value): def t(self): self._test_strtype('varbinary', bytearray(value), colsize='max') return t for value in ANSI_MAX_FENCEPOSTS: locals()['test_binarymax_bytearray_%s' % len(value)] = _maketest(value) # # image # def test_image_null(self): self._test_strliketype('image', None, type(None)) # Generate a test for each fencepost size: test_unicode_0, etc. def _maketest(value): def t(self): self._test_strliketype('image', buffer(value), pyodbc.BINARY) return t for value in ANSI_LARGE_FENCEPOSTS: locals()['test_image_buffer_%s' % len(value)] = _maketest(value) if sys.hexversion >= 0x02060000: # Python 2.6+ supports bytearray, which pyodbc considers varbinary. # Generate a test for each fencepost size: test_unicode_0, etc. def _maketest(value): def t(self): self._test_strtype('image', bytearray(value)) return t for value in ANSI_LARGE_FENCEPOSTS: locals()['test_image_bytearray_%s' % len(value)] = _maketest(value) def test_image_upperlatin(self): self._test_strliketype('image', buffer('á'), pyodbc.BINARY) # # text # # def test_empty_text(self): # self._test_strliketype('text', bytearray('')) def test_null_text(self): self._test_strliketype('text', None, type(None)) # Generate a test for each fencepost size: test_unicode_0, etc. 
def _maketest(value): def t(self): self._test_strliketype('text', value) return t for value in ANSI_SMALL_FENCEPOSTS: locals()['test_text_buffer_%s' % len(value)] = _maketest(value) def test_text_upperlatin(self): self._test_strliketype('text', 'á') # # xml # # def test_empty_xml(self): # self._test_strliketype('xml', bytearray('')) def test_null_xml(self): self._test_strliketype('xml', None, type(None)) # Generate a test for each fencepost size: test_unicode_0, etc. def _maketest(value): def t(self): self._test_strliketype('xml', value) return t for value in ANSI_SMALL_FENCEPOSTS: locals()['test_xml_buffer_%s' % len(value)] = _maketest(value) def test_xml_upperlatin(self): self._test_strliketype('xml', 'á') # # bit # def test_bit(self): value = True self.cursor.execute("create table t1(b bit)") self.cursor.execute("insert into t1 values (?)", value) v = self.cursor.execute("select b from t1").fetchone()[0] self.assertEqual(type(v), bool) self.assertEqual(v, value) # # decimal # def _decimal(self, precision, scale, negative): # From test provided by planders (thanks!) in Issue 91 self.cursor.execute("create table t1(d decimal(%s, %s))" % (precision, scale)) # Construct a decimal that uses the maximum precision and scale. decStr = '9' * (precision - scale) if scale: decStr = decStr + "." + '9' * scale if negative: decStr = "-" + decStr value = Decimal(decStr) self.cursor.execute("insert into t1 values(?)", value) v = self.cursor.execute("select d from t1").fetchone()[0] self.assertEqual(v, value) def _maketest(p, s, n): def t(self): self._decimal(p, s, n) return t for (p, s, n) in [ (1, 0, False), (1, 0, True), (6, 0, False), (6, 2, False), (6, 4, True), (6, 6, True), (38, 0, False), (38, 10, False), (38, 38, False), (38, 0, True), (38, 10, True), (38, 38, True) ]: locals()['test_decimal_%s_%s_%s' % (p, s, n and 'n' or 'p')] = _maketest(p, s, n) def test_decimal_e(self): """Ensure exponential notation decimals are properly handled""" value = Decimal((0, (1, 2, 3), 5)) # prints as 1.23E+7 self.cursor.execute("create table t1(d decimal(10, 2))") self.cursor.execute("insert into t1 values (?)", value) result = self.cursor.execute("select * from t1").fetchone()[0] self.assertEqual(result, value) def test_subquery_params(self): """Ensure parameter markers work in a subquery""" self.cursor.execute("create table t1(id integer, s varchar(20))") self.cursor.execute("insert into t1 values (?,?)", 1, 'test') row = self.cursor.execute(""" select x.id from ( select id from t1 where s = ? and id between ? and ? ) x """, 'test', 1, 10).fetchone() self.assertNotEqual(row, None) self.assertEqual(row[0], 1) def _exec(self): self.cursor.execute(self.sql) def test_close_cnxn(self): """Make sure using a Cursor after closing its connection doesn't crash.""" self.cursor.execute("create table t1(id integer, s varchar(20))") self.cursor.execute("insert into t1 values (?,?)", 1, 'test') self.cursor.execute("select * from t1") self.cnxn.close() # Now that the connection is closed, we expect an exception. (If the code attempts to use # the HSTMT, we'll get an access violation instead.) 
self.sql = "select * from t1" self.assertRaises(pyodbc.ProgrammingError, self._exec) def test_empty_string(self): self.cursor.execute("create table t1(s varchar(20))") self.cursor.execute("insert into t1 values(?)", "") def test_fixed_str(self): value = "testing" self.cursor.execute("create table t1(s char(7))") self.cursor.execute("insert into t1 values(?)", "testing") v = self.cursor.execute("select * from t1").fetchone()[0] self.assertEqual(type(v), str) self.assertEqual(len(v), len(value)) # If we alloc'd wrong, the test below might work because of an embedded NULL self.assertEqual(v, value) def test_empty_unicode(self): self.cursor.execute("create table t1(s nvarchar(20))") self.cursor.execute("insert into t1 values(?)", u"") def test_unicode_query(self): self.cursor.execute(u"select 1") def test_negative_row_index(self): self.cursor.execute("create table t1(s varchar(20))") self.cursor.execute("insert into t1 values(?)", "1") row = self.cursor.execute("select * from t1").fetchone() self.assertEquals(row[0], "1") self.assertEquals(row[-1], "1") def test_version(self): self.assertEquals(3, len(pyodbc.version.split('.'))) # 1.3.1 etc. # # date, time, datetime # def test_datetime(self): value = datetime(2007, 1, 15, 3, 4, 5) self.cursor.execute("create table t1(dt datetime)") self.cursor.execute("insert into t1 values (?)", value) result = self.cursor.execute("select dt from t1").fetchone()[0] self.assertEquals(type(value), datetime) self.assertEquals(value, result) def test_datetime_fraction(self): # SQL Server supports milliseconds, but Python's datetime supports nanoseconds, so the most granular datetime # supported is xxx000. value = datetime(2007, 1, 15, 3, 4, 5, 123000) self.cursor.execute("create table t1(dt datetime)") self.cursor.execute("insert into t1 values (?)", value) result = self.cursor.execute("select dt from t1").fetchone()[0] self.assertEquals(type(value), datetime) self.assertEquals(result, value) def test_datetime_fraction_rounded(self): # SQL Server supports milliseconds, but Python's datetime supports nanoseconds. pyodbc rounds down to what the # database supports. full = datetime(2007, 1, 15, 3, 4, 5, 123456) rounded = datetime(2007, 1, 15, 3, 4, 5, 123000) self.cursor.execute("create table t1(dt datetime)") self.cursor.execute("insert into t1 values (?)", full) result = self.cursor.execute("select dt from t1").fetchone()[0] self.assertEquals(type(result), datetime) self.assertEquals(result, rounded) def test_date(self): ver = self.get_sqlserver_version() if ver < 10: # 2008 only return # so pass / ignore value = date.today() self.cursor.execute("create table t1(d date)") self.cursor.execute("insert into t1 values (?)", value) result = self.cursor.execute("select d from t1").fetchone()[0] self.assertEquals(type(value), date) self.assertEquals(value, result) def test_time(self): ver = self.get_sqlserver_version() if ver < 10: # 2008 only return # so pass / ignore value = datetime.now().time() # We aren't yet writing values using the new extended time type so the value written to the database is only # down to the second. 
value = value.replace(microsecond=0) self.cursor.execute("create table t1(t time)") self.cursor.execute("insert into t1 values (?)", value) result = self.cursor.execute("select t from t1").fetchone()[0] self.assertEquals(type(value), time) self.assertEquals(value, result) def test_datetime2(self): value = datetime(2007, 1, 15, 3, 4, 5) self.cursor.execute("create table t1(dt datetime2)") self.cursor.execute("insert into t1 values (?)", value) result = self.cursor.execute("select dt from t1").fetchone()[0] self.assertEquals(type(value), datetime) self.assertEquals(value, result) # # ints and floats # def test_int(self): value = 1234 self.cursor.execute("create table t1(n int)") self.cursor.execute("insert into t1 values (?)", value) result = self.cursor.execute("select n from t1").fetchone()[0] self.assertEquals(result, value) def test_negative_int(self): value = -1 self.cursor.execute("create table t1(n int)") self.cursor.execute("insert into t1 values (?)", value) result = self.cursor.execute("select n from t1").fetchone()[0] self.assertEquals(result, value) def test_bigint(self): input = 3000000000 self.cursor.execute("create table t1(d bigint)") self.cursor.execute("insert into t1 values (?)", input) result = self.cursor.execute("select d from t1").fetchone()[0] self.assertEqual(result, input) def test_float(self): value = 1234.567 self.cursor.execute("create table t1(n float)") self.cursor.execute("insert into t1 values (?)", value) result = self.cursor.execute("select n from t1").fetchone()[0] self.assertEquals(result, value) def test_negative_float(self): value = -200 self.cursor.execute("create table t1(n float)") self.cursor.execute("insert into t1 values (?)", value) result = self.cursor.execute("select n from t1").fetchone()[0] self.assertEqual(value, result) # # stored procedures # # def test_callproc(self): # "callproc with a simple input-only stored procedure" # pass def test_sp_results(self): self.cursor.execute( """ Create procedure proc1 AS select top 10 name, id, xtype, refdate from sysobjects """) rows = self.cursor.execute("exec proc1").fetchall() self.assertEquals(type(rows), list) self.assertEquals(len(rows), 10) # there has to be at least 10 items in sysobjects self.assertEquals(type(rows[0].refdate), datetime) def test_sp_results_from_temp(self): # Note: I've used "set nocount on" so that we don't get the number of rows deleted from #tmptable. # If you don't do this, you'd need to call nextset() once to skip it. 
self.cursor.execute( """ Create procedure proc1 AS set nocount on select top 10 name, id, xtype, refdate into #tmptable from sysobjects select * from #tmptable """) self.cursor.execute("exec proc1") self.assert_(self.cursor.description is not None) self.assert_(len(self.cursor.description) == 4) rows = self.cursor.fetchall() self.assertEquals(type(rows), list) self.assertEquals(len(rows), 10) # there has to be at least 10 items in sysobjects self.assertEquals(type(rows[0].refdate), datetime) def test_sp_results_from_vartbl(self): self.cursor.execute( """ Create procedure proc1 AS set nocount on declare @tmptbl table(name varchar(100), id int, xtype varchar(4), refdate datetime) insert into @tmptbl select top 10 name, id, xtype, refdate from sysobjects select * from @tmptbl """) self.cursor.execute("exec proc1") rows = self.cursor.fetchall() self.assertEquals(type(rows), list) self.assertEquals(len(rows), 10) # there has to be at least 10 items in sysobjects self.assertEquals(type(rows[0].refdate), datetime) def test_sp_with_dates(self): # Reported in the forums that passing two datetimes to a stored procedure doesn't work. self.cursor.execute( """ if exists (select * from dbo.sysobjects where id = object_id(N'[test_sp]') and OBJECTPROPERTY(id, N'IsProcedure') = 1) drop procedure [dbo].[test_sp] """) self.cursor.execute( """ create procedure test_sp(@d1 datetime, @d2 datetime) AS declare @d as int set @d = datediff(year, @d1, @d2) select @d """) self.cursor.execute("exec test_sp ?, ?", datetime.now(), datetime.now()) rows = self.cursor.fetchall() self.assert_(rows is not None) self.assert_(rows[0][0] == 0) # 0 years apart def test_sp_with_none(self): # Reported in the forums that passing None caused an error. self.cursor.execute( """ if exists (select * from dbo.sysobjects where id = object_id(N'[test_sp]') and OBJECTPROPERTY(id, N'IsProcedure') = 1) drop procedure [dbo].[test_sp] """) self.cursor.execute( """ create procedure test_sp(@x varchar(20)) AS declare @y varchar(20) set @y = @x select @y """) self.cursor.execute("exec test_sp ?", None) rows = self.cursor.fetchall() self.assert_(rows is not None) self.assert_(rows[0][0] == None) # 0 years apart # # rowcount # def test_rowcount_delete(self): self.assertEquals(self.cursor.rowcount, -1) self.cursor.execute("create table t1(i int)") count = 4 for i in range(count): self.cursor.execute("insert into t1 values (?)", i) self.cursor.execute("delete from t1") self.assertEquals(self.cursor.rowcount, count) def test_rowcount_nodata(self): """ This represents a different code path than a delete that deleted something. The return value is SQL_NO_DATA and code after it was causing an error. We could use SQL_NO_DATA to step over the code that errors out and drop down to the same SQLRowCount code. On the other hand, we could hardcode a zero return value. """ self.cursor.execute("create table t1(i int)") # This is a different code path internally. self.cursor.execute("delete from t1") self.assertEquals(self.cursor.rowcount, 0) def test_rowcount_select(self): """ Ensure Cursor.rowcount is set properly after a select statement. pyodbc calls SQLRowCount after each execute and sets Cursor.rowcount, but SQL Server 2005 returns -1 after a select statement, so we'll test for that behavior. This is valid behavior according to the DB API specification, but people don't seem to like it. 
""" self.cursor.execute("create table t1(i int)") count = 4 for i in range(count): self.cursor.execute("insert into t1 values (?)", i) self.cursor.execute("select * from t1") self.assertEquals(self.cursor.rowcount, -1) rows = self.cursor.fetchall() self.assertEquals(len(rows), count) self.assertEquals(self.cursor.rowcount, -1) def test_rowcount_reset(self): "Ensure rowcount is reset to -1" self.cursor.execute("create table t1(i int)") count = 4 for i in range(count): self.cursor.execute("insert into t1 values (?)", i) self.assertEquals(self.cursor.rowcount, 1) self.cursor.execute("create table t2(i int)") self.assertEquals(self.cursor.rowcount, -1) # # always return Cursor # # In the 2.0.x branch, Cursor.execute sometimes returned the cursor and sometimes the rowcount. This proved very # confusing when things went wrong and added very little value even when things went right since users could always # use: cursor.execute("...").rowcount def test_retcursor_delete(self): self.cursor.execute("create table t1(i int)") self.cursor.execute("insert into t1 values (1)") v = self.cursor.execute("delete from t1") self.assertEquals(v, self.cursor) def test_retcursor_nodata(self): """ This represents a different code path than a delete that deleted something. The return value is SQL_NO_DATA and code after it was causing an error. We could use SQL_NO_DATA to step over the code that errors out and drop down to the same SQLRowCount code. """ self.cursor.execute("create table t1(i int)") # This is a different code path internally. v = self.cursor.execute("delete from t1") self.assertEquals(v, self.cursor) def test_retcursor_select(self): self.cursor.execute("create table t1(i int)") self.cursor.execute("insert into t1 values (1)") v = self.cursor.execute("select * from t1") self.assertEquals(v, self.cursor) # # misc # def table_with_spaces(self): "Ensure we can select using [x z] syntax" try: self.cursor.execute("create table [test one](int n)") self.cursor.execute("insert into [test one] values(1)") self.cursor.execute("select * from [test one]") v = self.cursor.fetchone()[0] self.assertEquals(v, 1) finally: self.cnxn.rollback() def test_lower_case(self): "Ensure pyodbc.lowercase forces returned column names to lowercase." # Has to be set before creating the cursor, so we must recreate self.cursor. pyodbc.lowercase = True self.cursor = self.cnxn.cursor() self.cursor.execute("create table t1(Abc int, dEf int)") self.cursor.execute("select * from t1") names = [ t[0] for t in self.cursor.description ] names.sort() self.assertEquals(names, [ "abc", "def" ]) # Put it back so other tests don't fail. pyodbc.lowercase = False def test_row_description(self): """ Ensure Cursor.description is accessible as Row.cursor_description. """ self.cursor = self.cnxn.cursor() self.cursor.execute("create table t1(a int, b char(3))") self.cnxn.commit() self.cursor.execute("insert into t1 values(1, 'abc')") row = self.cursor.execute("select * from t1").fetchone() self.assertEquals(self.cursor.description, row.cursor_description) def test_temp_select(self): # A project was failing to create temporary tables via select into. 
self.cursor.execute("create table t1(s char(7))") self.cursor.execute("insert into t1 values(?)", "testing") v = self.cursor.execute("select * from t1").fetchone()[0] self.assertEqual(type(v), str) self.assertEqual(v, "testing") self.cursor.execute("select s into t2 from t1") v = self.cursor.execute("select * from t1").fetchone()[0] self.assertEqual(type(v), str) self.assertEqual(v, "testing") def test_money(self): d = Decimal('123456.78') self.cursor.execute("create table t1(i int identity(1,1), m money)") self.cursor.execute("insert into t1(m) values (?)", d) v = self.cursor.execute("select m from t1").fetchone()[0] self.assertEqual(v, d) def test_executemany(self): self.cursor.execute("create table t1(a int, b varchar(10))") params = [ (i, str(i)) for i in range(1, 6) ] self.cursor.executemany("insert into t1(a, b) values (?,?)", params) count = self.cursor.execute("select count(*) from t1").fetchone()[0] self.assertEqual(count, len(params)) self.cursor.execute("select a, b from t1 order by a") rows = self.cursor.fetchall() self.assertEqual(count, len(rows)) for param, row in zip(params, rows): self.assertEqual(param[0], row[0]) self.assertEqual(param[1], row[1]) def test_executemany_one(self): "Pass executemany a single sequence" self.cursor.execute("create table t1(a int, b varchar(10))") params = [ (1, "test") ] self.cursor.executemany("insert into t1(a, b) values (?,?)", params) count = self.cursor.execute("select count(*) from t1").fetchone()[0] self.assertEqual(count, len(params)) self.cursor.execute("select a, b from t1 order by a") rows = self.cursor.fetchall() self.assertEqual(count, len(rows)) for param, row in zip(params, rows): self.assertEqual(param[0], row[0]) self.assertEqual(param[1], row[1]) def test_executemany_failure(self): """ Ensure that an exception is raised if one query in an executemany fails. """ self.cursor.execute("create table t1(a int, b varchar(10))") params = [ (1, 'good'), ('error', 'not an int'), (3, 'good') ] self.failUnlessRaises(pyodbc.Error, self.cursor.executemany, "insert into t1(a, b) value (?, ?)", params) def test_row_slicing(self): self.cursor.execute("create table t1(a int, b int, c int, d int)"); self.cursor.execute("insert into t1 values(1,2,3,4)") row = self.cursor.execute("select * from t1").fetchone() result = row[:] self.failUnless(result is row) result = row[:-1] self.assertEqual(result, (1,2,3)) result = row[0:4] self.failUnless(result is row) def test_row_repr(self): self.cursor.execute("create table t1(a int, b int, c int, d int)"); self.cursor.execute("insert into t1 values(1,2,3,4)") row = self.cursor.execute("select * from t1").fetchone() result = str(row) self.assertEqual(result, "(1, 2, 3, 4)") result = str(row[:-1]) self.assertEqual(result, "(1, 2, 3)") result = str(row[:1]) self.assertEqual(result, "(1,)") def test_concatenation(self): v2 = '0123456789' * 30 v3 = '9876543210' * 30 self.cursor.execute("create table t1(c1 int identity(1, 1), c2 varchar(300), c3 varchar(300))") self.cursor.execute("insert into t1(c2, c3) values (?,?)", v2, v3) row = self.cursor.execute("select c2, c3, c2 + c3 as both from t1").fetchone() self.assertEqual(row.both, v2 + v3) def test_view_select(self): # Reported in forum: Can't select from a view? I think I do this a lot, but another test never hurts. # Create a table (t1) with 3 rows and a view (t2) into it. 
self.cursor.execute("create table t1(c1 int identity(1, 1), c2 varchar(50))") for i in range(3): self.cursor.execute("insert into t1(c2) values (?)", "string%s" % i) self.cursor.execute("create view t2 as select * from t1") # Select from the view self.cursor.execute("select * from t2") rows = self.cursor.fetchall() self.assert_(rows is not None) self.assert_(len(rows) == 3) def test_autocommit(self): self.assertEqual(self.cnxn.autocommit, False) othercnxn = pyodbc.connect(self.connection_string, autocommit=True) self.assertEqual(othercnxn.autocommit, True) othercnxn.autocommit = False self.assertEqual(othercnxn.autocommit, False) def test_cursorcommit(self): "Ensure cursor.commit works" othercnxn = pyodbc.connect(self.connection_string) othercursor = othercnxn.cursor() othercnxn = None othercursor.execute("create table t1(s varchar(20))") othercursor.execute("insert into t1 values(?)", 'test') othercursor.commit() value = self.cursor.execute("select s from t1").fetchone()[0] self.assertEqual(value, 'test') def test_unicode_results(self): "Ensure unicode_results forces Unicode" othercnxn = pyodbc.connect(self.connection_string, unicode_results=True) othercursor = othercnxn.cursor() # ANSI data in an ANSI column ... othercursor.execute("create table t1(s varchar(20))") othercursor.execute("insert into t1 values(?)", 'test') # ... should be returned as Unicode value = othercursor.execute("select s from t1").fetchone()[0] self.assertEqual(value, u'test') def test_sqlserver_callproc(self): try: self.cursor.execute("drop procedure pyodbctest") self.cnxn.commit() except: pass self.cursor.execute("create table t1(s varchar(10))") self.cursor.execute("insert into t1 values(?)", "testing") self.cursor.execute(""" create procedure pyodbctest @var1 varchar(32) as begin select s from t1 return end """) self.cnxn.commit() # for row in self.cursor.procedureColumns('pyodbctest'): # print row.procedure_name, row.column_name, row.column_type, row.type_name self.cursor.execute("exec pyodbctest 'hi'") # print self.cursor.description # for row in self.cursor: # print row.s def test_skip(self): # Insert 1, 2, and 3. Fetch 1, skip 2, fetch 3. self.cursor.execute("create table t1(id int)"); for i in range(1, 5): self.cursor.execute("insert into t1 values(?)", i) self.cursor.execute("select id from t1 order by id") self.assertEqual(self.cursor.fetchone()[0], 1) self.cursor.skip(2) self.assertEqual(self.cursor.fetchone()[0], 4) def test_timeout(self): self.assertEqual(self.cnxn.timeout, 0) # defaults to zero (off) self.cnxn.timeout = 30 self.assertEqual(self.cnxn.timeout, 30) self.cnxn.timeout = 0 self.assertEqual(self.cnxn.timeout, 0) def test_sets_execute(self): # Only lists and tuples are allowed. def f(): self.cursor.execute("create table t1 (word varchar (100))") words = set (['a']) self.cursor.execute("insert into t1 (word) VALUES (?)", [words]) self.assertRaises(pyodbc.ProgrammingError, f) def test_sets_executemany(self): # Only lists and tuples are allowed. 
def f(): self.cursor.execute("create table t1 (word varchar (100))") words = set (['a']) self.cursor.executemany("insert into t1 (word) values (?)", [words]) self.assertRaises(TypeError, f) def test_row_execute(self): "Ensure we can use a Row object as a parameter to execute" self.cursor.execute("create table t1(n int, s varchar(10))") self.cursor.execute("insert into t1 values (1, 'a')") row = self.cursor.execute("select n, s from t1").fetchone() self.assertNotEqual(row, None) self.cursor.execute("create table t2(n int, s varchar(10))") self.cursor.execute("insert into t2 values (?, ?)", row) def test_row_executemany(self): "Ensure we can use a Row object as a parameter to executemany" self.cursor.execute("create table t1(n int, s varchar(10))") for i in range(3): self.cursor.execute("insert into t1 values (?, ?)", i, chr(ord('a')+i)) rows = self.cursor.execute("select n, s from t1").fetchall() self.assertNotEqual(len(rows), 0) self.cursor.execute("create table t2(n int, s varchar(10))") self.cursor.executemany("insert into t2 values (?, ?)", rows) def test_description(self): "Ensure cursor.description is correct" self.cursor.execute("create table t1(n int, s varchar(8), d decimal(5,2))") self.cursor.execute("insert into t1 values (1, 'abc', '1.23')") self.cursor.execute("select * from t1") # (I'm not sure the precision of an int is constant across different versions, bits, so I'm hand checking the # items I do know. # int t = self.cursor.description[0] self.assertEqual(t[0], 'n') self.assertEqual(t[1], int) self.assertEqual(t[5], 0) # scale self.assertEqual(t[6], True) # nullable # varchar(8) t = self.cursor.description[1] self.assertEqual(t[0], 's') self.assertEqual(t[1], str) self.assertEqual(t[4], 8) # precision self.assertEqual(t[5], 0) # scale self.assertEqual(t[6], True) # nullable # decimal(5, 2) t = self.cursor.description[2] self.assertEqual(t[0], 'd') self.assertEqual(t[1], Decimal) self.assertEqual(t[4], 5) # precision self.assertEqual(t[5], 2) # scale self.assertEqual(t[6], True) # nullable def test_none_param(self): "Ensure None can be used for params other than the first" # Some driver/db versions would fail if NULL was not the first parameter because SQLDescribeParam (only used # with NULL) could not be used after the first call to SQLBindParameter. This means None always worked for the # first column, but did not work for later columns. # # If SQLDescribeParam doesn't work, pyodbc would use VARCHAR which almost always worked. However, # binary/varbinary won't allow an implicit conversion. self.cursor.execute("create table t1(n int, blob varbinary(max))") self.cursor.execute("insert into t1 values (1, newid())") row = self.cursor.execute("select * from t1").fetchone() self.assertEqual(row.n, 1) self.assertEqual(type(row.blob), bytearray) self.cursor.execute("update t1 set n=?, blob=?", 2, None) row = self.cursor.execute("select * from t1").fetchone() self.assertEqual(row.n, 2) self.assertEqual(row.blob, None) def test_output_conversion(self): def convert(value): # `value` will be a string. We'll simply add an X at the beginning at the end. return 'X' + value + 'X' self.cnxn.add_output_converter(pyodbc.SQL_VARCHAR, convert) self.cursor.execute("create table t1(n int, v varchar(10))") self.cursor.execute("insert into t1 values (1, '123.45')") value = self.cursor.execute("select v from t1").fetchone()[0] self.assertEqual(value, 'X123.45X') # Now clear the conversions and try again. There should be no Xs this time. 
self.cnxn.clear_output_converters() value = self.cursor.execute("select v from t1").fetchone()[0] self.assertEqual(value, '123.45') def test_too_large(self): """Ensure error raised if insert fails due to truncation""" value = 'x' * 1000 self.cursor.execute("create table t1(s varchar(800))") def test(): self.cursor.execute("insert into t1 values (?)", value) self.assertRaises(pyodbc.DataError, test) def test_geometry_null_insert(self): def convert(value): return value self.cnxn.add_output_converter(-151, convert) # -151 is SQL Server's geometry self.cursor.execute("create table t1(n int, v geometry)") self.cursor.execute("insert into t1 values (?, ?)", 1, None) value = self.cursor.execute("select v from t1").fetchone()[0] self.assertEqual(value, None) self.cnxn.clear_output_converters() def test_login_timeout(self): # This can only test setting since there isn't a way to cause it to block on the server side. cnxns = pyodbc.connect(self.connection_string, timeout=2) def test_row_equal(self): self.cursor.execute("create table t1(n int, s varchar(20))") self.cursor.execute("insert into t1 values (1, 'test')") row1 = self.cursor.execute("select n, s from t1").fetchone() row2 = self.cursor.execute("select n, s from t1").fetchone() b = (row1 == row2) self.assertEqual(b, True) def test_row_gtlt(self): self.cursor.execute("create table t1(n int, s varchar(20))") self.cursor.execute("insert into t1 values (1, 'test1')") self.cursor.execute("insert into t1 values (1, 'test2')") rows = self.cursor.execute("select n, s from t1 order by s").fetchall() self.assert_(rows[0] < rows[1]) self.assert_(rows[0] <= rows[1]) self.assert_(rows[1] > rows[0]) self.assert_(rows[1] >= rows[0]) self.assert_(rows[0] != rows[1]) rows = list(rows) rows.sort() # uses < def test_context_manager_success(self): """ Ensure a successful with statement causes a commit. """ self.cursor.execute("create table t1(n int)") self.cnxn.commit() with pyodbc.connect(self.connection_string) as cnxn: cursor = cnxn.cursor() cursor.execute("insert into t1 values (1)") cnxn = None cursor = None rows = self.cursor.execute("select n from t1").fetchall() self.assertEquals(len(rows), 1) self.assertEquals(rows[0][0], 1) def test_context_manager_fail(self): """ Ensure an exception in a with statement causes a rollback. """ self.cursor.execute("create table t1(n int)") self.cnxn.commit() try: with pyodbc.connect(self.connection_string) as cnxn: cursor = cnxn.cursor() cursor.execute("insert into t1 values (1)") raise Exception("Testing failure") except Exception: pass cnxn = None cursor = None count = self.cursor.execute("select count(*) from t1").fetchone()[0] self.assertEquals(count, 0) def test_cursor_context_manager_success(self): """ Ensure a successful with statement using a cursor causes a commit. """ self.cursor.execute("create table t1(n int)") self.cnxn.commit() with pyodbc.connect(self.connection_string).cursor() as cursor: cursor.execute("insert into t1 values (1)") cursor = None rows = self.cursor.execute("select n from t1").fetchall() self.assertEquals(len(rows), 1) self.assertEquals(rows[0][0], 1) def test_cursor_context_manager_fail(self): """ Ensure an exception in a with statement using a cursor causes a rollback. 
""" self.cursor.execute("create table t1(n int)") self.cnxn.commit() try: with pyodbc.connect(self.connection_string).cursor() as cursor: cursor.execute("insert into t1 values (1)") raise Exception("Testing failure") except Exception: pass cursor = None count = self.cursor.execute("select count(*) from t1").fetchone()[0] self.assertEquals(count, 0) def test_untyped_none(self): # From issue 129 value = self.cursor.execute("select ?", None).fetchone()[0] self.assertEqual(value, None) def test_large_update_nodata(self): self.cursor.execute('create table t1(a varbinary(max))') hundredkb = bytearray('x'*100*1024) self.cursor.execute('update t1 set a=? where 1=0', (hundredkb,)) def test_func_param(self): self.cursor.execute(''' create function func1 (@testparam varchar(4)) returns @rettest table (param varchar(4)) as begin insert @rettest select @testparam return end ''') self.cnxn.commit() value = self.cursor.execute("select * from func1(?)", 'test').fetchone()[0] self.assertEquals(value, 'test') def test_no_fetch(self): # Issue 89 with FreeTDS: Multiple selects (or catalog functions that issue selects) without fetches seem to # confuse the driver. self.cursor.execute('select 1') self.cursor.execute('select 1') self.cursor.execute('select 1') def test_drivers(self): drivers = pyodbc.drivers() self.assertEqual(list, type(drivers)) self.assert_(len(drivers) > 1) m = re.search('DRIVER={([^}]+)}', self.connection_string, re.IGNORECASE) current = m.group(1) self.assert_(current in drivers) def test_prepare_cleanup(self): # When statement is prepared, it is kept in case the next execute uses the same statement. This must be # removed when a non-execute statement is used that returns results, such as SQLTables. self.cursor.execute("select top 1 name from sysobjects where name = ?", "bogus") self.cursor.fetchone() self.cursor.tables("bogus") self.cursor.execute("select top 1 name from sysobjects where name = ?", "bogus") self.cursor.fetchone() def main(): from optparse import OptionParser parser = OptionParser(usage=usage) parser.add_option("-v", "--verbose", action="count", help="Increment test verbosity (can be used multiple times)") parser.add_option("-d", "--debug", action="store_true", default=False, help="Print debugging items") parser.add_option("-t", "--test", help="Run only the named test") (options, args) = parser.parse_args() if len(args) > 1: parser.error('Only one argument is allowed. Do you need quotes around the connection string?') if not args: connection_string = load_setup_connection_string('sqlservertests') if not connection_string: parser.print_help() raise SystemExit() else: connection_string = args[0] cnxn = pyodbc.connect(connection_string) print_library_info(cnxn) cnxn.close() suite = load_tests(SqlServerTestCase, options.test, connection_string) testRunner = unittest.TextTestRunner(verbosity=options.verbose) result = testRunner.run(suite) if __name__ == '__main__': # Add the build directory to the path so we're testing the latest build, not the installed version. add_to_path() import pyodbc main()
FlipperPA/pyodbc
tests2/sqlservertests.py
Python
mit
54,094
0.003605
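The test file above exercises pyodbc's output-converter hooks (test_output_conversion, test_geometry_null_insert). A minimal standalone sketch of that pattern, assuming only that CONN_STR holds a valid ODBC connection string (a hypothetical placeholder, not part of the tests):

import pyodbc

CONN_STR = "DSN=test"   # hypothetical; supply a real connection string

cnxn = pyodbc.connect(CONN_STR)
# Register a converter for every VARCHAR column. Under Python 3 the raw
# value arrives as bytes; the tests above ran under Python 2, where it is str.
cnxn.add_output_converter(pyodbc.SQL_VARCHAR, lambda v: b'X' + v + b'X')
row = cnxn.cursor().execute("select cast('abc' as varchar(10))").fetchone()
print(row[0])                   # the wrapped value
cnxn.clear_output_converters()  # restore the driver's default decoding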
#!/usr/bin/env python
# Licensed under a 3-clause BSD style license - see LICENSE.rst

import glob
import os
import sys

import ah_bootstrap
from setuptools import setup

# A dirty hack to get around some early import/configurations ambiguities
if sys.version_info[0] >= 3:
    import builtins
else:
    import __builtin__ as builtins
builtins._ASTROPY_SETUP_ = True

from astropy_helpers.setup_helpers import (register_commands, adjust_compiler,
                                           get_debug_option, get_package_info,
                                           add_command_option)
from astropy_helpers.git_helpers import get_git_devstr
from astropy_helpers.version_helpers import generate_version_py

# Get some values from the setup.cfg
from distutils import config
conf = config.ConfigParser()
conf.read(['setup.cfg'])
metadata = dict(conf.items('metadata'))

PACKAGENAME = metadata.get('package_name', 'packagename')
DESCRIPTION = metadata.get('description', 'Astropy affiliated package')
AUTHOR = metadata.get('author', '')
AUTHOR_EMAIL = metadata.get('author_email', '')
LICENSE = metadata.get('license', 'unknown')
URL = metadata.get('url', 'http://astropy.org')

# Get the long description from the package's docstring
#__import__(PACKAGENAME)
#package = sys.modules[PACKAGENAME]
LONG_DESCRIPTION = ""  #package.__doc__

# Store the package name in a built-in variable so it's easy
# to get from other parts of the setup infrastructure
builtins._ASTROPY_PACKAGE_NAME_ = PACKAGENAME

# VERSION should be PEP386 compatible (http://www.python.org/dev/peps/pep-0386)
VERSION = '1.5.dev'

# Indicates if this version is a release version
RELEASE = 'dev' not in VERSION

if not RELEASE:
    VERSION += get_git_devstr(False)

# Populate the dict of setup command overrides; this should be done before
# invoking any other functionality from distutils since it can potentially
# modify distutils' behavior.
cmdclassd = register_commands(PACKAGENAME, VERSION, RELEASE)

add_command_option('install', 'with-openmp', 'compile TARDIS with OpenMP',
                   is_bool=True)
add_command_option('build', 'with-openmp', 'compile TARDIS with OpenMP',
                   is_bool=True)
add_command_option('develop', 'with-openmp', 'compile TARDIS with OpenMP',
                   is_bool=True)

# Adjust the compiler in case the default on this platform is to use a
# broken one.
adjust_compiler(PACKAGENAME)

# Freeze build information in version.py
generate_version_py(PACKAGENAME, VERSION, RELEASE,
                    get_debug_option(PACKAGENAME))

# Treat everything in scripts except README.rst as a script to be installed
scripts = [fname for fname in glob.glob(os.path.join('scripts', '*'))
           if os.path.basename(fname) != 'README.rst']

# Get configuration information from all of the various subpackages.
# See the docstring for setup_helpers.update_package_files for more
# details.
package_info = get_package_info()

# Add the project-global data
package_info['package_data'].setdefault(PACKAGENAME, [])
package_info['package_data'][PACKAGENAME].append('data/*')

# Define entry points for command-line scripts
entry_points = {}
for hook in [('prereleaser', 'middle'), ('releaser', 'middle'),
             ('postreleaser', 'before'), ('postreleaser', 'middle')]:
    hook_ep = 'zest.releaser.' + '.'.join(hook)
    hook_name = 'astropy.release.' + '.'.join(hook)
    hook_func = 'astropy.utils.release:' + '_'.join(hook)
    entry_points[hook_ep] = ['%s = %s' % (hook_name, hook_func)]

# Include all .c files, recursively, including those generated by
# Cython, since we can not do this in MANIFEST.in with a "dynamic"
# directory name.
c_files = [] for root, dirs, files in os.walk(PACKAGENAME): for filename in files: if filename.endswith('.c'): c_files.append( os.path.join( os.path.relpath(root, PACKAGENAME), filename)) package_info['package_data'][PACKAGENAME].extend(c_files) setup(name=PACKAGENAME + '-sn', version=VERSION, description=DESCRIPTION, scripts=scripts, requires=['astropy'], install_requires=['astropy'], provides=[PACKAGENAME], author=AUTHOR, author_email=AUTHOR_EMAIL, license=LICENSE, url=URL, long_description=LONG_DESCRIPTION, cmdclass=cmdclassd, zip_safe=False, use_2to3=True, entry_points=entry_points, **package_info )
utkbansal/tardis
setup.py
Python
bsd-3-clause
4,442
0.002701
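The setup script above pulls its metadata out of setup.cfg through distutils' ConfigParser. The same read, sketched with the plain standard-library parser (the Python 3 name):

from configparser import ConfigParser

conf = ConfigParser()
conf.read(['setup.cfg'])  # silently skips missing files
metadata = dict(conf.items('metadata')) if conf.has_section('metadata') else {}

# Mirrors the lookups in the script above
print(metadata.get('package_name', 'packagename'))
print(metadata.get('license', 'unknown'))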
np.identity(3)
jorisvandenbossche/DS-python-data-analysis
notebooks/python_recap/_solutions/05-numpy35.py
Python
bsd-3-clause
14
0.071429
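The one-line solution above builds a 3x3 identity matrix; a quick check of what it returns:

import numpy as np

I = np.identity(3)         # float64 by default, ones on the diagonal
x = np.arange(3.0)
assert (I @ x == x).all()  # multiplying by I leaves a vector unchanged
print(I.shape, I.dtype)    # (3, 3) float64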
import os import glob from pathlib import Path import flask import yaml class VersionManager(object): def __init__(self, versions=None): self._versions = versions @property def versions(self): if self._versions is None: version_paths = Path('versions').glob('**/*.yaml') version_files = map(str, version_paths) versions = map(Version.fromfile, version_files) versions = sorted(versions, key=lambda v: v.version) self._versions = list(versions) return self._versions def all(self): return self def filter(self, version=None, pre_release=None, snapshots=None, platform=None): versions = self.versions if version: versions = [v for v in versions if v.version == version] if pre_release is True: versions = [v for v in versions if v.is_pre_release] if pre_release is False: versions = [v for v in versions if not v.is_pre_release] if snapshots is True: versions = [v for v in versions if v.is_snapshot] if snapshots is False: versions = [v for v in versions if not v.is_snapshot] if platform: versions = [v for v in versions if v.supports_platform(platform)] return VersionManager(versions) def get(self, **kwargs): if kwargs: versions = self.filter(**kwargs) return versions.get() if len(self.versions) == 1: return self.versions[0] raise flask.abort(404) class Version(object): objects = VersionManager() @classmethod def fromfile(cls, path): version = os.path.splitext(os.path.basename(path))[0] with open(path) as fp: content = yaml.safe_load(fp.read()) binaries = {} for (key, value) in content['binaries'].items(): # convert between old and new schema if isinstance(value, str): binaries[key] = { 'x86_64': value, } else: binaries[key] = value if 'version' in content: version = content['version'] return cls(version, binaries) def __init__(self, version, binaries): self.version = version self.binaries = binaries def __str__(self): return self.version def __eq__(self, other): if isinstance(other, Version): return self.version == other.version and self.binaries == other.binaries return False @property def is_pre_release(self): return '-' in self.version @property def is_snapshot(self): return 'SNAPSHOT' in self.version def supports_platform(self, platform): """ Returns if the version has a binary release for the given platform. """ return platform in self.binaries.keys() @property def path(self): if self.version.startswith('DEVELOPMENT-SNAPSHOT-'): version = self.version[len('DEVELOPMENT-SNAPSHOT-'):] (year, month, rest) = version.split('-', 2) return os.path.join('versions', 'DEVELOPMENT-SNAPSHOT', year, month, '{}.yaml'.format(rest)) if '-' in self.version: version, rest = self.version.split('-', 1) else: version = self.version rest = None major = version.split('.', 1)[0] if rest: if rest.startswith('DEVELOPMENT-SNAPSHOT-'): rest = rest[len('DEVELOPMENT-SNAPSHOT-'):] return os.path.join('versions', major, '{}-DEVELOPMENT-SNAPSHOT'.format(version), '{}.yaml'.format(rest)) return os.path.join('versions', major, '{}.yaml'.format(self.version)) def save(self): path = Path(os.path.split(self.path)[0]) path.mkdir(parents=True, exist_ok=True) with open(self.path, 'w') as fp: yaml.dump({'version': self.version, 'binaries': self.binaries}, fp, default_flow_style=False)
kylef/swiftenv-api
versions.py
Python
bsd-2-clause
4,108
0.001217
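The path property above encodes three layouts: plain releases, snapshot pre-releases of a version, and bare DEVELOPMENT-SNAPSHOT builds. A few hand-checked examples, assuming the module is importable as versions (it sits at the repository root); the version strings are illustrative:

from versions import Version

print(Version('5.0.1', {}).path)
# versions/5/5.0.1.yaml
print(Version('5.1-DEVELOPMENT-SNAPSHOT-2019-04-09-a', {}).path)
# versions/5/5.1-DEVELOPMENT-SNAPSHOT/2019-04-09-a.yaml
print(Version('DEVELOPMENT-SNAPSHOT-2019-04-09-a', {}).path)
# versions/DEVELOPMENT-SNAPSHOT/2019/04/09-a.yaml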
""" A library of useful helper classes to the saxlib classes, for the convenience of application and driver writers. $Id: saxutils.py,v 1.19 2001/03/20 07:19:46 loewis Exp $ """ import types, sys, urllib, urlparse, os, string import handler, _exceptions, xmlreader try: _StringTypes = [types.StringType, types.UnicodeType] except AttributeError: # 1.5 compatibility:UnicodeType not defined _StringTypes = [types.StringType] def escape(data, entities={}): """Escape &, <, and > in a string of data. You can escape other strings of data by passing a dictionary as the optional entities parameter. The keys and values must all be strings; each key will be replaced with its corresponding value. """ data = string.replace(data, "&", "&amp;") data = string.replace(data, "<", "&lt;") data = string.replace(data, ">", "&gt;") for chars, entity in entities.items(): data = string.replace(data, chars, entity) return data # --- DefaultHandler class DefaultHandler(handler.EntityResolver, handler.DTDHandler, handler.ContentHandler, handler.ErrorHandler): """Default base class for SAX2 event handlers. Implements empty methods for all callback methods, which can be overridden by application implementors. Replaces the deprecated SAX1 HandlerBase class.""" # --- Location class Location: """Represents a location in an XML entity. Initialized by being passed a locator, from which it reads off the current location, which is then stored internally.""" def __init__(self, locator): self.__col = locator.getColumnNumber() self.__line = locator.getLineNumber() self.__pubid = locator.getPublicId() self.__sysid = locator.getSystemId() def getColumnNumber(self): return self.__col def getLineNumber(self): return self.__line def getPublicId(self): return self.__pubid def getSystemId(self): return self.__sysid # --- ErrorPrinter class ErrorPrinter: "A simple class that just prints error messages to standard out." def __init__(self, level=0, outfile=sys.stderr): self._level = level self._outfile = outfile def warning(self, exception): if self._level <= 0: self._outfile.write("WARNING in %s: %s\n" % (self.__getpos(exception), exception.getMessage())) def error(self, exception): if self._level <= 1: self._outfile.write("ERROR in %s: %s\n" % (self.__getpos(exception), exception.getMessage())) def fatalError(self, exception): if self._level <= 2: self._outfile.write("FATAL ERROR in %s: %s\n" % (self.__getpos(exception), exception.getMessage())) def __getpos(self, exception): if isinstance(exception, _exceptions.SAXParseException): return "%s:%s:%s" % (exception.getSystemId(), exception.getLineNumber(), exception.getColumnNumber()) else: return "<unknown>" # --- ErrorRaiser class ErrorRaiser: "A simple class that just raises the exceptions it is passed." 
def __init__(self, level = 0): self._level = level def error(self, exception): if self._level <= 1: raise exception def fatalError(self, exception): if self._level <= 2: raise exception def warning(self, exception): if self._level <= 0: raise exception # --- AttributesImpl now lives in xmlreader from xmlreader import AttributesImpl # --- XMLGenerator is the SAX2 ContentHandler for writing back XML try: import codecs def _outputwrapper(stream,encoding): writerclass = codecs.lookup(encoding)[3] return writerclass(stream) except ImportError: # 1.5 compatibility: fall back to do-nothing def _outputwrapper(stream,encoding): return stream class XMLGenerator(handler.ContentHandler): def __init__(self, out=None, encoding="iso-8859-1"): if out is None: import sys out = sys.stdout handler.ContentHandler.__init__(self) self._out = _outputwrapper(out,encoding) self._ns_contexts = [{}] # contains uri -> prefix dicts self._current_context = self._ns_contexts[-1] self._undeclared_ns_maps = [] self._encoding = encoding # ContentHandler methods def startDocument(self): self._out.write('<?xml version="1.0" encoding="%s"?>\n' % self._encoding) def startPrefixMapping(self, prefix, uri): self._ns_contexts.append(self._current_context.copy()) self._current_context[uri] = prefix self._undeclared_ns_maps.append((prefix, uri)) def endPrefixMapping(self, prefix): self._current_context = self._ns_contexts[-1] del self._ns_contexts[-1] def startElement(self, name, attrs): self._out.write('<' + name) for (name, value) in attrs.items(): self._out.write(' %s="%s"' % (name, escape(value))) self._out.write('>') def endElement(self, name): self._out.write('</%s>' % name) def startElementNS(self, name, qname, attrs): if name[0] is None: name = name[1] elif self._current_context[name[0]] is None: # default namespace name = name[1] else: name = self._current_context[name[0]] + ":" + name[1] self._out.write('<' + name) for k,v in self._undeclared_ns_maps: if k is None: self._out.write(' xmlns="%s"' % v) else: self._out.write(' xmlns:%s="%s"' % (k,v)) self._undeclared_ns_maps = [] for (name, value) in attrs.items(): name = self._current_context[name[0]] + ":" + name[1] self._out.write(' %s="%s"' % (name, escape(value))) self._out.write('>') def endElementNS(self, name, qname): # XXX: if qname is not None, we better use it. # Python 2.0b2 requires us to use the recorded prefix for # name[0], though if name[0] is None: qname = name[1] elif self._current_context[name[0]] is None: qname = name[1] else: qname = self._current_context[name[0]] + ":" + name[1] self._out.write('</%s>' % qname) def characters(self, content): self._out.write(escape(content)) def ignorableWhitespace(self, content): self._out.write(content) def processingInstruction(self, target, data): self._out.write('<?%s %s?>' % (target, data)) # --- ContentGenerator is the SAX1 DocumentHandler for writing back XML class ContentGenerator(XMLGenerator): def characters(self, str, start, end): # In SAX1, characters receives start and end; in SAX2, it receives # a string. For plain strings, we may want to use a buffer object. return XMLGenerator.characters(self, str[start:start+end]) # --- XMLFilterImpl class XMLFilterBase(xmlreader.XMLReader): """This class is designed to sit between an XMLReader and the client application's event handlers. 
By default, it does nothing but pass requests up to the reader and events on to the handlers unmodified, but subclasses can override specific methods to modify the event stream or the configuration requests as they pass through.""" # ErrorHandler methods def error(self, exception): self._err_handler.error(exception) def fatalError(self, exception): self._err_handler.fatalError(exception) def warning(self, exception): self._err_handler.warning(exception) # ContentHandler methods def setDocumentLocator(self, locator): self._cont_handler.setDocumentLocator(locator) def startDocument(self): self._cont_handler.startDocument() def endDocument(self): self._cont_handler.endDocument() def startPrefixMapping(self, prefix, uri): self._cont_handler.startPrefixMapping(prefix, uri) def endPrefixMapping(self, prefix): self._cont_handler.endPrefixMapping(prefix) def startElement(self, name, attrs): self._cont_handler.startElement(name, attrs) def endElement(self, name): self._cont_handler.endElement(name) def startElementNS(self, name, qname, attrs): self._cont_handler.startElementNS(name, qname, attrs) def endElementNS(self, name, qname): self._cont_handler.endElementNS(name, qname) def characters(self, content): self._cont_handler.characters(content) def ignorableWhitespace(self, chars): self._cont_handler.ignorableWhitespace(chars) def processingInstruction(self, target, data): self._cont_handler.processingInstruction(target, data) def skippedEntity(self, name): self._cont_handler.skippedEntity(name) # DTDHandler methods def notationDecl(self, name, publicId, systemId): self._dtd_handler.notationDecl(name, publicId, systemId) def unparsedEntityDecl(self, name, publicId, systemId, ndata): self._dtd_handler.unparsedEntityDecl(name, publicId, systemId, ndata) # EntityResolver methods def resolveEntity(self, publicId, systemId): self._ent_handler.resolveEntity(publicId, systemId) # XMLReader methods def parse(self, source): self._parent.setContentHandler(self) self._parent.setErrorHandler(self) self._parent.setEntityResolver(self) self._parent.setDTDHandler(self) self._parent.parse(source) def setLocale(self, locale): self._parent.setLocale(locale) def getFeature(self, name): return self._parent.getFeature(name) def setFeature(self, name, state): self._parent.setFeature(name, state) def getProperty(self, name): return self._parent.getProperty(name) def setProperty(self, name, value): self._parent.setProperty(name, value) # FIXME: remove this backward compatibility hack when not needed anymore XMLFilterImpl = XMLFilterBase # --- BaseIncrementalParser class BaseIncrementalParser(xmlreader.IncrementalParser): """This class implements the parse method of the XMLReader interface using the feed, close and reset methods of the IncrementalParser interface as a convenience to SAX 2.0 driver writers.""" def parse(self, source): source = prepare_input_source(source) self.prepareParser(source) self._cont_handler.startDocument() # FIXME: what about char-stream? 
inf = source.getByteStream() buffer = inf.read(16384) while buffer != "": self.feed(buffer) buffer = inf.read(16384) self.close() self.reset() self._cont_handler.endDocument() def prepareParser(self, source): """This method is called by the parse implementation to allow the SAX 2.0 driver to prepare itself for parsing.""" raise NotImplementedError("prepareParser must be overridden!") # --- Utility functions def prepare_input_source(source, base = ""): """This function takes an InputSource and an optional base URL and returns a fully resolved InputSource object ready for reading.""" if type(source) in _StringTypes: source = xmlreader.InputSource(source) elif hasattr(source, "read"): f = source source = xmlreader.InputSource() source.setByteStream(f) if hasattr(f, "name"): source.setSystemId(f.name) if source.getByteStream() is None: sysid = source.getSystemId() if os.path.isfile(sysid): basehead = os.path.split(os.path.normpath(base))[0] source.setSystemId(os.path.join(basehead, sysid)) f = open(sysid, "rb") else: source.setSystemId(urlparse.urljoin(base, sysid)) f = urllib.urlopen(source.getSystemId()) source.setByteStream(f) return source # =========================================================================== # # DEPRECATED SAX 1.0 CLASSES # # =========================================================================== # --- AttributeMap class AttributeMap: """An implementation of AttributeList that takes an (attr,val) hash and uses it to implement the AttributeList interface.""" def __init__(self, map): self.map=map def getLength(self): return len(self.map.keys()) def getName(self, i): try: return self.map.keys()[i] except IndexError,e: return None def getType(self, i): return "CDATA" def getValue(self, i): try: if type(i)==types.IntType: return self.map[self.getName(i)] else: return self.map[i] except KeyError,e: return None def __len__(self): return len(self.map) def __getitem__(self, key): if type(key)==types.IntType: return self.map.keys()[key] else: return self.map[key] def items(self): return self.map.items() def keys(self): return self.map.keys() def has_key(self,key): return self.map.has_key(key) def get(self, key, alternative=None): return self.map.get(key, alternative) def copy(self): return AttributeMap(self.map.copy()) def values(self): return self.map.values() # --- Event broadcasting object class EventBroadcaster: """Takes a list of objects and forwards any method calls received to all objects in the list. The attribute list holds the list and can freely be modified by clients.""" class Event: "Helper objects that represent event methods." def __init__(self,list,name): self.list=list self.name=name def __call__(self,*rest): for obj in self.list: apply(getattr(obj,self.name), rest) def __init__(self,list): self.list=list def __getattr__(self,name): return self.Event(self.list,name) def __repr__(self): return "<EventBroadcaster instance at %d>" % id(self) # --- ESIS document handler import saxlib class ESISDocHandler(saxlib.HandlerBase): "A SAX document handler that produces naive ESIS output." def __init__(self,writer=sys.stdout): self.writer=writer def processingInstruction (self,target, remainder): """Receive an event signalling that a processing instruction has been found.""" self.writer.write("?"+target+" "+remainder+"\n") def startElement(self,name,amap): "Receive an event signalling the start of an element." 
self.writer.write("("+name+"\n") for a_name in amap.keys(): self.writer.write("A"+a_name+" "+amap[a_name]+"\n") def endElement(self,name): "Receive an event signalling the end of an element." self.writer.write(")"+name+"\n") def characters(self,data,start_ix,length): "Receive an event signalling that character data has been found." self.writer.write("-"+data[start_ix:start_ix+length]+"\n") # --- XML canonizer class Canonizer(saxlib.HandlerBase): "A SAX document handler that produces canonized XML output." def __init__(self,writer=sys.stdout): self.elem_level=0 self.writer=writer def processingInstruction (self,target, remainder): if not target=="xml": self.writer.write("<?"+target+" "+remainder+"?>") def startElement(self,name,amap): self.writer.write("<"+name) a_names=amap.keys() a_names.sort() for a_name in a_names: self.writer.write(" "+a_name+"=\"") self.write_data(amap[a_name]) self.writer.write("\"") self.writer.write(">") self.elem_level=self.elem_level+1 def endElement(self,name): self.writer.write("</"+name+">") self.elem_level=self.elem_level-1 def ignorableWhitespace(self,data,start_ix,length): self.characters(data,start_ix,length) def characters(self,data,start_ix,length): if self.elem_level>0: self.write_data(data[start_ix:start_ix+length]) def write_data(self,data): "Writes datachars to writer." data=string.replace(data,"&","&amp;") data=string.replace(data,"<","&lt;") data=string.replace(data,"\"","&quot;") data=string.replace(data,">","&gt;") data=string.replace(data,chr(9),"&#9;") data=string.replace(data,chr(10),"&#10;") data=string.replace(data,chr(13),"&#13;") self.writer.write(data) # --- mllib class mllib: """A re-implementation of the htmllib, sgmllib and xmllib interfaces as a SAX DocumentHandler.""" # Unsupported: # - setnomoretags # - setliteral # - translate_references # - handle_xml # - handle_doctype # - handle_charref # - handle_entityref # - handle_comment # - handle_cdata # - tag_attributes def __init__(self): self.reset() def reset(self): import saxexts # only used here self.parser=saxexts.XMLParserFactory.make_parser() self.handler=mllib.Handler(self.parser,self) self.handler.reset() def feed(self,data): self.parser.feed(data) def close(self): self.parser.close() def get_stack(self): return self.handler.get_stack() # --- Handler methods (to be overridden) def handle_starttag(self,name,method,atts): method(atts) def handle_endtag(self,name,method): method() def handle_data(self,data): pass def handle_proc(self,target,data): pass def unknown_starttag(self,name,atts): pass def unknown_endtag(self,name): pass def syntax_error(self,message): pass # --- The internal handler class class Handler(saxlib.DocumentHandler,saxlib.ErrorHandler): """An internal class to handle SAX events and translate them to mllib events.""" def __init__(self,driver,handler): self.driver=driver self.driver.setDocumentHandler(self) self.driver.setErrorHandler(self) self.handler=handler self.reset() def get_stack(self): return self.stack def reset(self): self.stack=[] # --- DocumentHandler methods def characters(self, ch, start, length): self.handler.handle_data(ch[start:start+length]) def endElement(self, name): if hasattr(self.handler,"end_"+name): self.handler.handle_endtag(name, getattr(self.handler,"end_"+name)) else: self.handler.unknown_endtag(name) del self.stack[-1] def ignorableWhitespace(self, ch, start, length): self.handler.handle_data(ch[start:start+length]) def processingInstruction(self, target, data): self.handler.handle_proc(target,data) def startElement(self, name, 
atts): self.stack.append(name) if hasattr(self.handler,"start_"+name): self.handler.handle_starttag(name, getattr(self.handler, "start_"+name), atts) else: self.handler.unknown_starttag(name,atts) # --- ErrorHandler methods def error(self, exception): self.handler.syntax_error(str(exception)) def fatalError(self, exception): raise RuntimeError(str(exception))
Integral-Technology-Solutions/ConfigNOW
Lib/xml/sax/saxutils.py
Python
mit
20,106
0.006864
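This pre-2.0 helper library was later folded into the standard library; its escape() survives as xml.sax.saxutils.escape with the same signature, including the optional entities map:

from xml.sax.saxutils import escape

assert escape('a < b & c') == 'a &lt; b &amp; c'
# extra replacements ride along via the entities dict
assert escape('"hi"', {'"': '&quot;'}) == '&quot;hi&quot;'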
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: t -*- # vi: set ft=python sts=4 ts=4 sw=4 noet : # This file is part of Fail2Ban. # # Fail2Ban is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # Fail2Ban is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Fail2Ban; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. # Author: Cyril Jaquier # __author__ = "Cyril Jaquier" __copyright__ = "Copyright (c) 2004 Cyril Jaquier" __license__ = "GPL" import time import json from ..helpers import getLogger from .. import version # Gets the instance of the logger. logSys = getLogger(__name__) class Transmitter: ## # Constructor. # # @param The server reference def __init__(self, server): self.__server = server ## # Proceeds a command. # # Proceeds an incoming command. # @param command The incoming command def proceed(self, command): # Deserialize object logSys.debug("Command: " + `command`) try: ret = self.__commandHandler(command) ack = 0, ret except Exception, e: logSys.warning("Command %r has failed. Received %r" % (command, e)) ack = 1, e return ack ## # Handle an command. # # def __commandHandler(self, command): if command[0] == "ping": return "pong" elif command[0] == "add": name = command[1] if name == "all": raise Exception("Reserved name") try: backend = command[2] except IndexError: backend = "auto" self.__server.addJail(name, backend) return name elif command[0] == "start": name = command[1] self.__server.startJail(name) return None elif command[0] == "stop": if len(command) == 1: self.__server.quit() elif command[1] == "all": self.__server.stopAllJail() else: name = command[1] self.__server.stopJail(name) return None elif command[0] == "sleep": value = command[1] time.sleep(int(value)) return None elif command[0] == "flushlogs": return self.__server.flushLogs() elif command[0] == "set": return self.__commandSet(command[1:]) elif command[0] == "get": return self.__commandGet(command[1:]) elif command[0] == "status": return self.status(command[1:]) elif command[0] == "version": return version.version raise Exception("Invalid command") def __commandSet(self, command): name = command[0] # Logging if name == "loglevel": value = command[1] self.__server.setLogLevel(value) return self.__server.getLogLevel() elif name == "logtarget": value = command[1] if self.__server.setLogTarget(value): return self.__server.getLogTarget() else: raise Exception("Failed to change log target") #Database elif name == "dbfile": self.__server.setDatabase(command[1]) db = self.__server.getDatabase() if db is None: return None else: return db.filename elif name == "dbpurgeage": db = self.__server.getDatabase() if db is None: return None else: db.purgeage = command[1] return db.purgeage # Jail elif command[1] == "idle": if command[2] == "on": self.__server.setIdleJail(name, True) elif command[2] == "off": self.__server.setIdleJail(name, False) else: raise Exception("Invalid idle option, must be 'on' or 'off'") return self.__server.getIdleJail(name) # Filter elif command[1] == "addignoreip": value = 
command[2] self.__server.addIgnoreIP(name, value) return self.__server.getIgnoreIP(name) elif command[1] == "delignoreip": value = command[2] self.__server.delIgnoreIP(name, value) return self.__server.getIgnoreIP(name) elif command[1] == "ignorecommand": value = command[2] self.__server.setIgnoreCommand(name, value) return self.__server.getIgnoreCommand(name) elif command[1] == "addlogpath": value = command[2] tail = False if len(command) == 4: if command[3].lower() == "tail": tail = True elif command[3].lower() != "head": raise ValueError("File option must be 'head' or 'tail'") elif len(command) > 4: raise ValueError("Only one file can be added at a time") self.__server.addLogPath(name, value, tail) return self.__server.getLogPath(name) elif command[1] == "dellogpath": value = command[2] self.__server.delLogPath(name, value) return self.__server.getLogPath(name) elif command[1] == "logencoding": value = command[2] self.__server.setLogEncoding(name, value) return self.__server.getLogEncoding(name) elif command[1] == "addjournalmatch": # pragma: systemd no cover value = command[2:] self.__server.addJournalMatch(name, value) return self.__server.getJournalMatch(name) elif command[1] == "deljournalmatch": # pragma: systemd no cover value = command[2:] self.__server.delJournalMatch(name, value) return self.__server.getJournalMatch(name) elif command[1] == "addfailregex": value = command[2] self.__server.addFailRegex(name, value) return self.__server.getFailRegex(name) elif command[1] == "delfailregex": value = int(command[2]) self.__server.delFailRegex(name, value) return self.__server.getFailRegex(name) elif command[1] == "addignoreregex": value = command[2] self.__server.addIgnoreRegex(name, value) return self.__server.getIgnoreRegex(name) elif command[1] == "delignoreregex": value = int(command[2]) self.__server.delIgnoreRegex(name, value) return self.__server.getIgnoreRegex(name) elif command[1] == "usedns": value = command[2] self.__server.setUseDns(name, value) return self.__server.getUseDns(name) elif command[1] == "findtime": value = command[2] self.__server.setFindTime(name, int(value)) return self.__server.getFindTime(name) elif command[1] == "datepattern": value = command[2] self.__server.setDatePattern(name, value) return self.__server.getDatePattern(name) elif command[1] == "maxretry": value = command[2] self.__server.setMaxRetry(name, int(value)) return self.__server.getMaxRetry(name) elif command[1] == "maxlines": value = command[2] self.__server.setMaxLines(name, int(value)) return self.__server.getMaxLines(name) # command elif command[1] == "bantime": value = command[2] self.__server.setBanTime(name, int(value)) return self.__server.getBanTime(name) elif command[1] == "banip": value = command[2] return self.__server.setBanIP(name,value) elif command[1] == "unbanip": value = command[2] self.__server.setUnbanIP(name, value) return value elif command[1] == "addaction": args = [command[2]] if len(command) > 3: args.extend([command[3], json.loads(command[4])]) self.__server.addAction(name, *args) return args[0] elif command[1] == "delaction": value = command[2] self.__server.delAction(name, value) return None elif command[1] == "action": actionname = command[2] actionkey = command[3] action = self.__server.getAction(name, actionname) if callable(getattr(action, actionkey, None)): actionvalue = json.loads(command[4]) if len(command)>4 else {} return getattr(action, actionkey)(**actionvalue) else: actionvalue = command[4] setattr(action, actionkey, actionvalue) return 
getattr(action, actionkey) raise Exception("Invalid command (no set action or not yet implemented)") def __commandGet(self, command): name = command[0] # Logging if name == "loglevel": return self.__server.getLogLevel() elif name == "logtarget": return self.__server.getLogTarget() #Database elif name == "dbfile": db = self.__server.getDatabase() if db is None: return None else: return db.filename elif name == "dbpurgeage": db = self.__server.getDatabase() if db is None: return None else: return db.purgeage # Filter elif command[1] == "logpath": return self.__server.getLogPath(name) elif command[1] == "logencoding": return self.__server.getLogEncoding(name) elif command[1] == "journalmatch": # pragma: systemd no cover return self.__server.getJournalMatch(name) elif command[1] == "ignoreip": return self.__server.getIgnoreIP(name) elif command[1] == "ignorecommand": return self.__server.getIgnoreCommand(name) elif command[1] == "failregex": return self.__server.getFailRegex(name) elif command[1] == "ignoreregex": return self.__server.getIgnoreRegex(name) elif command[1] == "usedns": return self.__server.getUseDns(name) elif command[1] == "findtime": return self.__server.getFindTime(name) elif command[1] == "datepattern": return self.__server.getDatePattern(name) elif command[1] == "maxretry": return self.__server.getMaxRetry(name) elif command[1] == "maxlines": return self.__server.getMaxLines(name) # Action elif command[1] == "bantime": return self.__server.getBanTime(name) elif command[1] == "actions": return self.__server.getActions(name).keys() elif command[1] == "action": actionname = command[2] actionvalue = command[3] action = self.__server.getAction(name, actionname) return getattr(action, actionvalue) elif command[1] == "actionproperties": actionname = command[2] action = self.__server.getAction(name, actionname) return [ key for key in dir(action) if not key.startswith("_") and not callable(getattr(action, key))] elif command[1] == "actionmethods": actionname = command[2] action = self.__server.getAction(name, actionname) return [ key for key in dir(action) if not key.startswith("_") and callable(getattr(action, key))] raise Exception("Invalid command (no get action or not yet implemented)") def status(self, command): if len(command) == 0: return self.__server.status() elif len(command) == 1: name = command[0] return self.__server.statusJail(name) raise Exception("Invalid command (no status)")
TonyThompson/fail2ban-patch
fail2ban/server/transmitter.py
Python
gpl-2.0
10,359
0.03147
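proceed() above wraps every command in a (status, payload) pair: 0 with the handler's result on success, 1 with the exception on failure. The same shape, reduced to a few lines as a sketch:

def proceed(command, handlers):
    try:
        return 0, handlers[command[0]](*command[1:])
    except Exception as e:   # failures are reported to the client, not raised
        return 1, e

handlers = {'ping': lambda: 'pong'}
assert proceed(['ping'], handlers) == (0, 'pong')
status, err = proceed(['bogus'], handlers)
assert status == 1 and isinstance(err, KeyError)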
class ResourceManager: def __init__(self): self.resourceList = {} def addResource(self, name, resource): print "adding Resource", name self.resourceList[name] = resource print "resources:", self.resourceList def getResource(self, name): if not self.hasResource(name): return None return self.resourceList[name] def hasResource(self, name): return name in self.resourceList def removeResource(self, name): if self.hasResource(name): del self.resourceList[name] resourcemanager = ResourceManager()
Taapat/enigma2-openpli-vuplus
lib/python/Components/ResourceManager.py
Python
gpl-2.0
529
0.032136
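Usage of the module-level singleton above (Python 2, matching the module; the import path assumes the enigma2 layout shown in the metadata, and the resource object is an arbitrary stand-in):

from Components.ResourceManager import resourcemanager

resourcemanager.addResource("Scanner", object())
if resourcemanager.hasResource("Scanner"):
    scanner = resourcemanager.getResource("Scanner")
resourcemanager.removeResource("Scanner")
assert resourcemanager.getResource("Scanner") is None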
# Copyright (C) 2009, Benjamin Berg, Sebastian Berg # Copyright (C) 2010, Walter Bender # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA. import numpy as np class RingBuffer1d(object): """This class implements an array being written in as a ring and that can be read from continuously ending with the newest data or starting with the oldest. It returns a numpy array copy of the data; """ def __init__(self, length, dtype=None): """Initialize the 1 dimensional ring buffer with the given lengths. The initial values are all 0s """ self.offset = 0 self._data = np.zeros(length, dtype=dtype) self.stored = 0 def fill(self, number): self._data.fill(number) self.offset = 0 def append(self, data): """Append to the ring buffer (and overwrite old data). If len(data) is greater then the ring buffers length, the newest data takes precedence. """ data = np.asarray(data) if len(self._data) == 0: return if len(data) >= len(self._data): self._data[:] = data[-len(self._data):] self.offset = 0 self.stored = len(self._data) elif len(self._data) - self.offset >= len(data): self._data[self.offset: self.offset + len(data)] = data self.offset = self.offset + len(data) self.stored += len(data) else: self._data[self.offset:] = data[:len(self._data) - self.offset] self._data[:len(data) - (len(self._data) - self.offset)] = \ data[-len(data) + (len(self._data) - self.offset):] self.offset = len(data) - (len(self._data) - self.offset) self.stored += len(data) if len(self._data) <= self.stored: self.read = self._read def read(self, number=None, step=1): """Read the ring Buffer. Number can be positive or negative. Positive values will give the latest information, negative values will give the newest added information from the buffer. (in normal order) Before the buffer is filled once: This returns just None """ return np.array([]) def _read(self, number=None, step=1): """Read the ring Buffer. Number can be positive or negative. Positive values will give the latest information, negative values will give the newest added information from the buffer. (in normal order) """ if number == None: number = len(self._data) // step number *= step assert abs(number) <= len(self._data), \ 'Number to read*step must be smaller then length' if number < 0: if abs(number) <= self.offset: return self._data[self.offset + number:self.offset:step] spam = (self.offset - 1) % step return np.concatenate( (self._data[step - spam - 1 + self.offset + number::step], self._data[spam:self.offset:step])) if number - (len(self._data) - self.offset) > 0: spam = ((self.offset + number) - self.offset - 1) % step return np.concatenate( (self._data[self.offset:self.offset + number:step], self._data[spam:number - (len(self._data) - self.offset):step])) return self._data[self.offset:self.offset + number:step].copy()
walterbender/turtle3D
plugins/audio_sensors/ringbuffer.py
Python
mit
4,149
0.000241
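A short walk through the buffer above, assuming RingBuffer1d from the module is in scope; note that read() returns an empty array until the buffer has been filled once:

rb = RingBuffer1d(4, dtype=int)
rb.append([1, 2, 3])
print(rb.read())        # [] -- only 3 of the 4 slots written so far
rb.append([4, 5, 6])    # wraps around; buffer now holds 3, 4, 5, 6
print(rb.read())        # [3 4 5 6], oldest to newest
print(rb.read(-2))      # [5 6], the two newest samples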
#
# MLDB-2100_fetcher_timeout_test.py
# Francois-Michel L'Heureux, 2016-11-20
# This file is part of MLDB. Copyright 2016 mldb.ai inc. All rights reserved.
#

import socket
import threading
import time

class MyThread(threading.Thread):
    def run(self):
        try:
            threading.Thread.run(self)
        except Exception as exc:
            # keep the exception so the main thread can re-raise it later
            self.err = exc
        else:
            self.err = None

# timeout in case MLDB fails to connect to the socket, the test won't hang
socket.setdefaulttimeout(10)

from mldb import mldb

serversocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
serversocket.bind(('127.0.0.1', 0))
serversocket.listen(1)
port_num = serversocket.getsockname()[1]

keep_going = threading.Event()

def sleeper():
    while not keep_going.is_set():
        time.sleep(1)

def client_thread(clientsocket):
    return threading.Thread(target=sleeper)

def mldb_test():
    mldb.log("MLDB querying")
    res = mldb.query(
        "SELECT fetcher('http://localhost:{}/toto')".format(port_num))
    assert res[1][2].find("Timeout was reached") != -1

mldb_thread = MyThread(target=mldb_test)
mldb_thread.start()

# accept connections from outside
try:
    (clientsocket, address) = serversocket.accept()
except socket.timeout:
    mldb.log("MLDB did not contact the socket")
    raise

# now do something with the clientsocket
# in this case, we'll pretend this is a threaded server
ct = client_thread(clientsocket)
ct.start()

mldb_thread.join()
keep_going.set()
ct.join()

if mldb_thread.err:
    raise mldb_thread.err

request.set_return("success")
mldbai/mldb
testing/MLDB-2100_fetcher_timeout_test.py
Python
apache-2.0
1,628
0.003686
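The test above leans on socket.setdefaulttimeout so a missing connection fails fast instead of hanging. That guard in isolation, as a self-contained sketch:

import socket

socket.setdefaulttimeout(10)   # applies to sockets created from here on
srv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
srv.bind(('127.0.0.1', 0))     # port 0 asks the OS for any free port
srv.listen(1)
print('listening on', srv.getsockname()[1])
try:
    conn, addr = srv.accept()  # raises socket.timeout after 10 s
except socket.timeout:
    print('no client connected in time')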
# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. # # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. # -------------------------------------------------------------------------- from msrest.paging import Paged class PacketCaptureResultPaged(Paged): """ A paging container for iterating over a list of :class:`PacketCaptureResult <azure.mgmt.network.v2017_08_01.models.PacketCaptureResult>` object """ _attribute_map = { 'next_link': {'key': 'nextLink', 'type': 'str'}, 'current_page': {'key': 'value', 'type': '[PacketCaptureResult]'} } def __init__(self, *args, **kwargs): super(PacketCaptureResultPaged, self).__init__(*args, **kwargs)
lmazuel/azure-sdk-for-python
azure-mgmt-network/azure/mgmt/network/v2017_08_01/models/packet_capture_result_paged.py
Python
mit
987
0.001013
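Paged containers like this one are iterators; msrest follows next_link transparently as the caller consumes items. A hedged usage sketch, left as comments because the client construction and resource names are hypothetical:

# network_client = NetworkManagementClient(credentials, subscription_id)
# for capture in network_client.packet_captures.list('my_rg', 'my_watcher'):
#     print(capture.name)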
# -*- coding: UTF-8 -*-
from ..model.interfacebuilder import InterfaceBuilder
from ..models import ITLog, ITStatement
import interface_platform.settings as settings
import logging.config
import os
from datetime import datetime


# Runs an interface.
# Takes an it_id and validation data.
class InterfaceRunner(object):
    def __init__(self, it_id):
        print "InterfaceRunner.__init__()"
        self._it = ITStatement.objects.get(id=it_id)
        self._log_name = str(datetime.now()) + ".log"
        self.set_log_file(self._log_name)
        self._logger = logging.getLogger(settings.LOGGER_NAME)
        self._interface = None
        self._interfacebuilder = InterfaceBuilder(self._it, self._logger)

    @property
    def interfacebuilder(self):
        return self._interfacebuilder

    @property
    def logger(self):
        return self._logger

    @property
    def interface(self):
        return self._interface

    # Returns the execution result after running: failed, passed, or not executed
    def runner(self):
        print "InterfaceRunner.runner()"
        self._interface = self._interfacebuilder.build()
        self._interface.run()
        self._interface.validate()
        self._logger.info("Interface execution status: " + self._interface.status)
        self._logger.info("Interface build finished!")
        return self._interface.status

    # This method takes the name of the file to log to as its argument
    # (the default is the filename configured under LOGGING in settings.py) and:
    # 1. Sets the log storage path.
    # 2. Creates the per-case log record, or reuses the stored log file path.
    def set_log_file(self, filename):
        print "InterfaceRunner.set_log_file()"
        # Log storage path
        log_path = os.path.join(settings.LOG_ROOT, filename.decode("utf-8"))
        # Store the log file path in the database.
        # Paths containing Chinese characters are awkward to use here.
        logs = ITLog.objects.filter(name=filename)
        if logs.exists() is False:
            log = ITLog.objects.create(it=self._it, name=filename, log_path=log_path)
            log.save()
        else:
            # Delete the file and recreate it.
            # Deletion may fail if the file is in use.
            # remove_log(log_path)
            pass
        logging_dic = settings.LOGGING
        logging_dic['handlers']['eat']['filename'] = log_path
        logging.config.dictConfig(logging_dic)

    # Reads the log content.
    # Remember to close the file after opening it.
    def get_log(self):
        print "InterfaceRunner.get_log()"
        log_path = os.path.join(settings.LOG_ROOT, self._log_name.decode('utf-8'))
        f_log = open(log_path)
        content = f_log.readlines()
        f_log.close()
        return content
longmazhanfeng/interface_web
interface_platform/management/interfacerunner.py
Python
mit
2,680
0.000856
# A file to contain exclusively dependencies of the NeuroML package. # See: # https://github.com/NeuralEnsemble/libNeuroML # http://neuroml.org from __future__ import print_function from collections import defaultdict try: from neuroml import Cell, Segment, SegmentParent, Morphology, \ NeuroMLDocument, Point3DWithDiam except ImportError: print("NeuroML module could not be loaded.") def neuroml_single_cell(skeleton_id, nodes, pre, post): """ Encapsulate a single skeleton into a NeuroML Cell instance. skeleton_id: the ID of the skeleton to which all nodes belong. nodes: a dictionary of node ID vs tuple of node parent ID, location as a tuple of 3 floats, and radius. In nanometers. pre: a dictionary of node ID vs list of connector ID post: a dictionary of node ID vs list of connector ID Returns a Cell with id=skeleton_id. """ # Collect the children of every node successors = defaultdict(list) # parent node ID vs list of children node IDs rootID = None for nodeID, props in nodes.iteritems(): parentID = props[0] if not parentID: rootID = nodeID continue successors[parentID].append(nodeID) # Cache of Point3DWithDiam points = {} def asPoint(nodeID): """ Return the node as a Point3DWithDiam, in micrometers. """ p = points.get(nodeID) if not p: props = nodes[nodeID] radius = props[2] if radius < 0: radius = 0.1 # FUTURE Will have to change loc = props[1] # Point in micrometers p = Point3DWithDiam(loc[0] / 1000.0, loc[1] / 1000.0, loc[2] / 1000.0, radius) points[nodeID] = p return p # Starting from the root node, iterate towards the end nodes, adding a segment # for each parent-child pair. segments = [] segment_id = 1 todo = [rootID] # VERY CONFUSINGLY, the Segment.parent is a SegmentParent with the same id as the parent Segment. An unseemly overheady way to reference the parent Segment. while todo: nodeID = todo.pop() children = successors[nodeID] if not children: continue p1 = asPoint(nodeID) parent = segments[-1] if segments else None segment_parent = SegmentParent(segments=parent.id) if parent else None for childID in children: p2 = asPoint(childID) segment_id += 1 segment = Segment(proximal=p1, distal=p2, parent=segment_parent) segment.id = segment_id segment.name = "%s-%s" % (nodeID, childID) segments.append(segment) todo.append(childID) # Pack the segments into a Cell morphology = Morphology() morphology.segments.extend(segments) morphology.id = "Skeleton #%s" % skeleton_id # Synapses: TODO requires input from Padraig Gleeson cell = Cell() cell.name = 'Cell' cell.id = skeleton_id cell.morphology = morphology return cell def neuroml_network(cells, response): """ Write a list of Cell instances. cells: a list of Cell instances. response: somewhere to write to, like an HttpResponse Returns nothing. """ doc = NeuroMLDocument() doc.cells.extend(cells) doc.id = "NeuroMLDocument" namespacedef = 'xmlns="http://www.neuroml.org/schema/neuroml2"' \ + ' xmlns:xi="http://www.w3.org/2001/XInclude"' \ + ' xmlns:xs="http://www.w3.org/2001/XMLSchema"' \ + ' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"' \ + ' xsi:schemaLocation="http://www.w3.org/2001/XMLSchema"' doc.export( response, 0, name_="neuroml", namespacedef_=namespacedef) return response
dwitvliet/CATMAID
django/applications/catmaid/control/exportneuroml.py
Python
gpl-3.0
3,874
0.003356
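neuroml_single_cell() above first inverts the parent pointers into a children map, then walks it with an explicit stack. That traversal on its own, with a toy four-node tree:

from collections import defaultdict

nodes = {1: (None,), 2: (1,), 3: (1,), 4: (2,)}  # node id -> (parent id, ...)

successors = defaultdict(list)
root = None
for node_id, props in nodes.items():
    if props[0] is None:
        root = node_id
    else:
        successors[props[0]].append(node_id)

todo = [root]
while todo:                    # depth-first, like the exporter's segment loop
    node_id = todo.pop()
    todo.extend(successors[node_id])
    print(node_id)             # visits the root, then one branch before the other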
from __future__ import unicode_literals import copy import inspect import sys import warnings from django.apps import apps from django.apps.config import MODELS_MODULE_NAME from django.conf import settings from django.core import checks from django.core.exceptions import (ObjectDoesNotExist, MultipleObjectsReturned, FieldError, ValidationError, NON_FIELD_ERRORS) from django.db import (router, connections, transaction, DatabaseError, DEFAULT_DB_ALIAS, DJANGO_VERSION_PICKLE_KEY) from django.db.models.deletion import Collector from django.db.models.fields import AutoField, FieldDoesNotExist from django.db.models.fields.related import (ForeignObjectRel, ManyToOneRel, OneToOneField, add_lazy_relation) from django.db.models.manager import ensure_default_manager from django.db.models.options import Options from django.db.models.query import Q from django.db.models.query_utils import DeferredAttribute, deferred_class_factory from django.db.models import signals from django.utils import six from django.utils.deprecation import RemovedInDjango19Warning from django.utils.encoding import force_str, force_text from django.utils.functional import curry from django.utils.six.moves import zip from django.utils.text import get_text_list, capfirst from django.utils.translation import ugettext_lazy as _ from django.utils.version import get_version def subclass_exception(name, parents, module, attached_to=None): """ Create exception subclass. Used by ModelBase below. If 'attached_to' is supplied, the exception will be created in a way that allows it to be pickled, assuming the returned exception class will be added as an attribute to the 'attached_to' class. """ class_dict = {'__module__': module} if attached_to is not None: def __reduce__(self): # Exceptions are special - they've got state that isn't # in self.__dict__. We assume it is all in self.args. return (unpickle_inner_exception, (attached_to, name), self.args) def __setstate__(self, args): self.args = args class_dict['__reduce__'] = __reduce__ class_dict['__setstate__'] = __setstate__ return type(name, parents, class_dict) class ModelBase(type): """ Metaclass for all models. """ def __new__(cls, name, bases, attrs): super_new = super(ModelBase, cls).__new__ # Also ensure initialization is only performed for subclasses of Model # (excluding Model class itself). parents = [b for b in bases if isinstance(b, ModelBase)] if not parents: return super_new(cls, name, bases, attrs) # Create the class. module = attrs.pop('__module__') new_class = super_new(cls, name, bases, {'__module__': module}) attr_meta = attrs.pop('Meta', None) abstract = getattr(attr_meta, 'abstract', False) if not attr_meta: meta = getattr(new_class, 'Meta', None) else: meta = attr_meta base_meta = getattr(new_class, '_meta', None) # Look for an application configuration to attach the model to. app_config = apps.get_containing_app_config(module) if getattr(meta, 'app_label', None) is None: if app_config is None: # If the model is imported before the configuration for its # application is created (#21719), or isn't in an installed # application (#21680), use the legacy logic to figure out the # app_label by looking one level up from the package or module # named 'models'. If no such package or module exists, fall # back to looking one level up from the module this model is # defined in. # For 'django.contrib.sites.models', this would be 'sites'. # For 'geo.models.places' this would be 'geo'. 
msg = ( "Model class %s.%s doesn't declare an explicit app_label " "and either isn't in an application in INSTALLED_APPS or " "else was imported before its application was loaded. " "This will no longer be supported in Django 1.9." % (module, name)) if not abstract: warnings.warn(msg, RemovedInDjango19Warning, stacklevel=2) model_module = sys.modules[new_class.__module__] package_components = model_module.__name__.split('.') package_components.reverse() # find the last occurrence of 'models' try: app_label_index = package_components.index(MODELS_MODULE_NAME) + 1 except ValueError: app_label_index = 1 kwargs = {"app_label": package_components[app_label_index]} else: kwargs = {"app_label": app_config.label} else: kwargs = {} new_class.add_to_class('_meta', Options(meta, **kwargs)) if not abstract: new_class.add_to_class( 'DoesNotExist', subclass_exception( str('DoesNotExist'), tuple( x.DoesNotExist for x in parents if hasattr(x, '_meta') and not x._meta.abstract ) or (ObjectDoesNotExist,), module, attached_to=new_class)) new_class.add_to_class( 'MultipleObjectsReturned', subclass_exception( str('MultipleObjectsReturned'), tuple( x.MultipleObjectsReturned for x in parents if hasattr(x, '_meta') and not x._meta.abstract ) or (MultipleObjectsReturned,), module, attached_to=new_class)) if base_meta and not base_meta.abstract: # Non-abstract child classes inherit some attributes from their # non-abstract parent (unless an ABC comes before it in the # method resolution order). if not hasattr(meta, 'ordering'): new_class._meta.ordering = base_meta.ordering if not hasattr(meta, 'get_latest_by'): new_class._meta.get_latest_by = base_meta.get_latest_by is_proxy = new_class._meta.proxy # If the model is a proxy, ensure that the base class # hasn't been swapped out. if is_proxy and base_meta and base_meta.swapped: raise TypeError("%s cannot proxy the swapped model '%s'." % (name, base_meta.swapped)) if getattr(new_class, '_default_manager', None): if not is_proxy: # Multi-table inheritance doesn't inherit default manager from # parents. new_class._default_manager = None new_class._base_manager = None else: # Proxy classes do inherit parent's default manager, if none is # set explicitly. new_class._default_manager = new_class._default_manager._copy_to_model(new_class) new_class._base_manager = new_class._base_manager._copy_to_model(new_class) # Add all attributes to the class. for obj_name, obj in attrs.items(): new_class.add_to_class(obj_name, obj) # All the fields of any type declared on this model new_fields = ( new_class._meta.local_fields + new_class._meta.local_many_to_many + new_class._meta.virtual_fields ) field_names = set(f.name for f in new_fields) # Basic setup for proxy models. if is_proxy: base = None for parent in [kls for kls in parents if hasattr(kls, '_meta')]: if parent._meta.abstract: if parent._meta.fields: raise TypeError( "Abstract base class containing model fields not " "permitted for proxy model '%s'." % name ) else: continue if base is not None: raise TypeError("Proxy model '%s' has more than one non-abstract model base class." % name) else: base = parent if base is None: raise TypeError("Proxy model '%s' has no non-abstract model base class." % name) new_class._meta.setup_proxy(base) new_class._meta.concrete_model = base._meta.concrete_model else: new_class._meta.concrete_model = new_class # Collect the parent links for multi-table inheritance. parent_links = {} for base in reversed([new_class] + parents): # Conceptually equivalent to `if base is Model`. 
if not hasattr(base, '_meta'): continue # Skip concrete parent classes. if base != new_class and not base._meta.abstract: continue # Locate OneToOneField instances. for field in base._meta.local_fields: if isinstance(field, OneToOneField): parent_links[field.rel.to] = field # Do the appropriate setup for any model parents. for base in parents: original_base = base if not hasattr(base, '_meta'): # Things without _meta aren't functional models, so they're # uninteresting parents. continue parent_fields = base._meta.local_fields + base._meta.local_many_to_many # Check for clashes between locally declared fields and those # on the base classes (we cannot handle shadowed fields at the # moment). for field in parent_fields: if field.name in field_names: raise FieldError( 'Local field %r in class %r clashes ' 'with field of similar name from ' 'base class %r' % (field.name, name, base.__name__) ) if not base._meta.abstract: # Concrete classes... base = base._meta.concrete_model if base in parent_links: field = parent_links[base] elif not is_proxy: attr_name = '%s_ptr' % base._meta.model_name field = OneToOneField(base, name=attr_name, auto_created=True, parent_link=True) # Only add the ptr field if it's not already present; # e.g. migrations will already have it specified if not hasattr(new_class, attr_name): new_class.add_to_class(attr_name, field) else: field = None new_class._meta.parents[base] = field else: # .. and abstract ones. for field in parent_fields: new_class.add_to_class(field.name, copy.deepcopy(field)) # Pass any non-abstract parent classes onto child. new_class._meta.parents.update(base._meta.parents) # Inherit managers from the abstract base classes. new_class.copy_managers(base._meta.abstract_managers) # Proxy models inherit the non-abstract managers from their base, # unless they have redefined any of them. if is_proxy: new_class.copy_managers(original_base._meta.concrete_managers) # Inherit virtual fields (like GenericForeignKey) from the parent # class for field in base._meta.virtual_fields: if base._meta.abstract and field.name in field_names: raise FieldError( 'Local field %r in class %r clashes ' 'with field of similar name from ' 'abstract base class %r' % (field.name, name, base.__name__) ) new_class.add_to_class(field.name, copy.deepcopy(field)) if abstract: # Abstract base models can't be instantiated and don't appear in # the list of models for an app. We do the final setup for them a # little differently from normal models. attr_meta.abstract = False new_class.Meta = attr_meta return new_class new_class._prepare() new_class._meta.apps.register_model(new_class._meta.app_label, new_class) return new_class def copy_managers(cls, base_managers): # This is in-place sorting of an Options attribute, but that's fine. base_managers.sort() for _, mgr_name, manager in base_managers: # NOQA (redefinition of _) val = getattr(cls, mgr_name, None) if not val or val is manager: new_manager = manager._copy_to_model(cls) cls.add_to_class(mgr_name, new_manager) def add_to_class(cls, name, value): # We should call the contribute_to_class method only if it's bound if not inspect.isclass(value) and hasattr(value, 'contribute_to_class'): value.contribute_to_class(cls, name) else: setattr(cls, name, value) def _prepare(cls): """ Creates some methods once self._meta has been populated. 
""" opts = cls._meta opts._prepare(cls) if opts.order_with_respect_to: cls.get_next_in_order = curry(cls._get_next_or_previous_in_order, is_next=True) cls.get_previous_in_order = curry(cls._get_next_or_previous_in_order, is_next=False) # defer creating accessors on the foreign class until we are # certain it has been created def make_foreign_order_accessors(field, model, cls): setattr( field.rel.to, 'get_%s_order' % cls.__name__.lower(), curry(method_get_order, cls) ) setattr( field.rel.to, 'set_%s_order' % cls.__name__.lower(), curry(method_set_order, cls) ) add_lazy_relation( cls, opts.order_with_respect_to, opts.order_with_respect_to.rel.to, make_foreign_order_accessors ) # Give the class a docstring -- its definition. if cls.__doc__ is None: cls.__doc__ = "%s(%s)" % (cls.__name__, ", ".join(f.attname for f in opts.fields)) get_absolute_url_override = settings.ABSOLUTE_URL_OVERRIDES.get( '%s.%s' % (opts.app_label, opts.model_name) ) if get_absolute_url_override: setattr(cls, 'get_absolute_url', get_absolute_url_override) ensure_default_manager(cls) signals.class_prepared.send(sender=cls) class ModelState(object): """ A class for storing instance state """ def __init__(self, db=None): self.db = db # If true, uniqueness validation checks will consider this a new, as-yet-unsaved object. # Necessary for correct validation of new instances of objects with explicit (non-auto) PKs. # This impacts validation only; it has no effect on the actual save. self.adding = True class Model(six.with_metaclass(ModelBase)): _deferred = False def __init__(self, *args, **kwargs): signals.pre_init.send(sender=self.__class__, args=args, kwargs=kwargs) # Set up the storage for instance state self._state = ModelState() # There is a rather weird disparity here; if kwargs, it's set, then args # overrides it. It should be one or the other; don't duplicate the work # The reason for the kwargs check is that standard iterator passes in by # args, and instantiation for iteration is 33% faster. args_len = len(args) if args_len > len(self._meta.concrete_fields): # Daft, but matches old exception sans the err msg. raise IndexError("Number of args exceeds number of fields") if not kwargs: fields_iter = iter(self._meta.concrete_fields) # The ordering of the zip calls matter - zip throws StopIteration # when an iter throws it. So if the first iter throws it, the second # is *not* consumed. We rely on this, so don't change the order # without changing the logic. for val, field in zip(args, fields_iter): setattr(self, field.attname, val) else: # Slower, kwargs-ready version. fields_iter = iter(self._meta.fields) for val, field in zip(args, fields_iter): setattr(self, field.attname, val) kwargs.pop(field.name, None) # Maintain compatibility with existing calls. if isinstance(field.rel, ManyToOneRel): kwargs.pop(field.attname, None) # Now we're left with the unprocessed fields that *must* come from # keywords, or default. for field in fields_iter: is_related_object = False # This slightly odd construct is so that we can access any # data-descriptor object (DeferredAttribute) without triggering its # __get__ method. if (field.attname not in kwargs and (isinstance(self.__class__.__dict__.get(field.attname), DeferredAttribute) or field.column is None)): # This field will be populated on request. continue if kwargs: if isinstance(field.rel, ForeignObjectRel): try: # Assume object instance was passed in. 
rel_obj = kwargs.pop(field.name) is_related_object = True except KeyError: try: # Object instance wasn't passed in -- must be an ID. val = kwargs.pop(field.attname) except KeyError: val = field.get_default() else: # Object instance was passed in. Special case: You can # pass in "None" for related objects if it's allowed. if rel_obj is None and field.null: val = None else: try: val = kwargs.pop(field.attname) except KeyError: # This is done with an exception rather than the # default argument on pop because we don't want # get_default() to be evaluated, and then not used. # Refs #12057. val = field.get_default() else: val = field.get_default() if is_related_object: # If we are passed a related instance, set it using the # field.name instead of field.attname (e.g. "user" instead of # "user_id") so that the object gets properly cached (and type # checked) by the RelatedObjectDescriptor. setattr(self, field.name, rel_obj) else: setattr(self, field.attname, val) if kwargs: for prop in list(kwargs): try: if isinstance(getattr(self.__class__, prop), property): setattr(self, prop, kwargs.pop(prop)) except AttributeError: pass if kwargs: raise TypeError("'%s' is an invalid keyword argument for this function" % list(kwargs)[0]) super(Model, self).__init__() signals.post_init.send(sender=self.__class__, instance=self) @classmethod def from_db(cls, db, field_names, values): if cls._deferred: new = cls(**dict(zip(field_names, values))) else: new = cls(*values) new._state.adding = False new._state.db = db return new def __repr__(self): try: u = six.text_type(self) except (UnicodeEncodeError, UnicodeDecodeError): u = '[Bad Unicode data]' return force_str('<%s: %s>' % (self.__class__.__name__, u)) def __str__(self): if six.PY2 and hasattr(self, '__unicode__'): return force_text(self).encode('utf-8') return '%s object' % self.__class__.__name__ def __eq__(self, other): if not isinstance(other, Model): return False if self._meta.concrete_model != other._meta.concrete_model: return False my_pk = self._get_pk_val() if my_pk is None: return self is other return my_pk == other._get_pk_val() def __ne__(self, other): return not self.__eq__(other) def __hash__(self): if self._get_pk_val() is None: raise TypeError("Model instances without primary key value are unhashable") return hash(self._get_pk_val()) def __reduce__(self): """ Provides pickling support. Normally, this just dispatches to Python's standard handling. However, for models with deferred field loading, we need to do things manually, as they're dynamically created classes and only module-level classes can be pickled by the default path. """ data = self.__dict__ data[DJANGO_VERSION_PICKLE_KEY] = get_version() if not self._deferred: class_id = self._meta.app_label, self._meta.object_name return model_unpickle, (class_id, [], simple_class_factory), data defers = [] for field in self._meta.fields: if isinstance(self.__class__.__dict__.get(field.attname), DeferredAttribute): defers.append(field.attname) model = self._meta.proxy_for_model class_id = model._meta.app_label, model._meta.object_name return (model_unpickle, (class_id, defers, deferred_class_factory), data) def __setstate__(self, state): msg = None pickled_version = state.get(DJANGO_VERSION_PICKLE_KEY) if pickled_version: current_version = get_version() if current_version != pickled_version: msg = ("Pickled model instance's Django version %s does" " not match the current version %s." % (pickled_version, current_version)) else: msg = "Pickled model instance's Django version is not specified." 
if msg: warnings.warn(msg, RuntimeWarning, stacklevel=2) self.__dict__.update(state) def _get_pk_val(self, meta=None): if not meta: meta = self._meta return getattr(self, meta.pk.attname) def _set_pk_val(self, value): return setattr(self, self._meta.pk.attname, value) pk = property(_get_pk_val, _set_pk_val) def serializable_value(self, field_name): """ Returns the value of the field name for this instance. If the field is a foreign key, returns the id value, instead of the object. If there's no Field object with this name on the model, the model attribute's value is returned directly. Used to serialize a field's value (in the serializer, or form output, for example). Normally, you would just access the attribute directly and not use this method. """ try: field = self._meta.get_field_by_name(field_name)[0] except FieldDoesNotExist: return getattr(self, field_name) return getattr(self, field.attname) def save(self, force_insert=False, force_update=False, using=None, update_fields=None): """ Saves the current instance. Override this in a subclass if you want to control the saving process. The 'force_insert' and 'force_update' parameters can be used to insist that the "save" must be an SQL insert or update (or equivalent for non-SQL backends), respectively. Normally, they should not be set. """ using = using or router.db_for_write(self.__class__, instance=self) if force_insert and (force_update or update_fields): raise ValueError("Cannot force both insert and updating in model saving.") if update_fields is not None: # If update_fields is empty, skip the save. We do also check for # no-op saves later on for inheritance cases. This bailout is # still needed for skipping signal sending. if len(update_fields) == 0: return update_fields = frozenset(update_fields) field_names = set() for field in self._meta.fields: if not field.primary_key: field_names.add(field.name) if field.name != field.attname: field_names.add(field.attname) non_model_fields = update_fields.difference(field_names) if non_model_fields: raise ValueError("The following fields do not exist in this " "model or are m2m fields: %s" % ', '.join(non_model_fields)) # If saving to the same database, and this model is deferred, then # automatically do a "update_fields" save on the loaded fields. elif not force_insert and self._deferred and using == self._state.db: field_names = set() for field in self._meta.concrete_fields: if not field.primary_key and not hasattr(field, 'through'): field_names.add(field.attname) deferred_fields = [ f.attname for f in self._meta.fields if (f.attname not in self.__dict__ and isinstance(self.__class__.__dict__[f.attname], DeferredAttribute)) ] loaded_fields = field_names.difference(deferred_fields) if loaded_fields: update_fields = frozenset(loaded_fields) self.save_base(using=using, force_insert=force_insert, force_update=force_update, update_fields=update_fields) save.alters_data = True def save_base(self, raw=False, force_insert=False, force_update=False, using=None, update_fields=None): """ Handles the parts of saving which should be done only once per save, yet need to be done in raw saves, too. This includes some sanity checks and signal sending. The 'raw' argument is telling save_base not to save any parent models and not to do any changes to the values before save. This is used by fixture loading. 
""" using = using or router.db_for_write(self.__class__, instance=self) assert not (force_insert and (force_update or update_fields)) assert update_fields is None or len(update_fields) > 0 cls = origin = self.__class__ # Skip proxies, but keep the origin as the proxy model. if cls._meta.proxy: cls = cls._meta.concrete_model meta = cls._meta if not meta.auto_created: signals.pre_save.send(sender=origin, instance=self, raw=raw, using=using, update_fields=update_fields) with transaction.atomic(using=using, savepoint=False): if not raw: self._save_parents(cls, using, update_fields) updated = self._save_table(raw, cls, force_insert, force_update, using, update_fields) # Store the database on which the object was saved self._state.db = using # Once saved, this is no longer a to-be-added instance. self._state.adding = False # Signal that the save is complete if not meta.auto_created: signals.post_save.send(sender=origin, instance=self, created=(not updated), update_fields=update_fields, raw=raw, using=using) save_base.alters_data = True def _save_parents(self, cls, using, update_fields): """ Saves all the parents of cls using values from self. """ meta = cls._meta for parent, field in meta.parents.items(): # Make sure the link fields are synced between parent and self. if (field and getattr(self, parent._meta.pk.attname) is None and getattr(self, field.attname) is not None): setattr(self, parent._meta.pk.attname, getattr(self, field.attname)) self._save_parents(cls=parent, using=using, update_fields=update_fields) self._save_table(cls=parent, using=using, update_fields=update_fields) # Set the parent's PK value to self. if field: setattr(self, field.attname, self._get_pk_val(parent._meta)) # Since we didn't have an instance of the parent handy set # attname directly, bypassing the descriptor. Invalidate # the related object cache, in case it's been accidentally # populated. A fresh instance will be re-built from the # database if necessary. cache_name = field.get_cache_name() if hasattr(self, cache_name): delattr(self, cache_name) def _save_table(self, raw=False, cls=None, force_insert=False, force_update=False, using=None, update_fields=None): """ Does the heavy-lifting involved in saving. Updates or inserts the data for a single table. """ meta = cls._meta non_pks = [f for f in meta.local_concrete_fields if not f.primary_key] if update_fields: non_pks = [f for f in non_pks if f.name in update_fields or f.attname in update_fields] pk_val = self._get_pk_val(meta) pk_set = pk_val is not None if not pk_set and (force_update or update_fields): raise ValueError("Cannot force an update in save() with no primary key.") updated = False # If possible, try an UPDATE. If that doesn't update anything, do an INSERT. 
if pk_set and not force_insert: base_qs = cls._base_manager.using(using) values = [(f, None, (getattr(self, f.attname) if raw else f.pre_save(self, False))) for f in non_pks] forced_update = update_fields or force_update updated = self._do_update(base_qs, using, pk_val, values, update_fields, forced_update) if force_update and not updated: raise DatabaseError("Forced update did not affect any rows.") if update_fields and not updated: raise DatabaseError("Save with update_fields did not affect any rows.") if not updated: if meta.order_with_respect_to: # If this is a model with an order_with_respect_to # autopopulate the _order field field = meta.order_with_respect_to order_value = cls._base_manager.using(using).filter( **{field.name: getattr(self, field.attname)}).count() self._order = order_value fields = meta.local_concrete_fields if not pk_set: fields = [f for f in fields if not isinstance(f, AutoField)] update_pk = bool(meta.has_auto_field and not pk_set) result = self._do_insert(cls._base_manager, using, fields, update_pk, raw) if update_pk: setattr(self, meta.pk.attname, result) return updated def _do_update(self, base_qs, using, pk_val, values, update_fields, forced_update): """ This method will try to update the model. If the model was updated (in the sense that an update query was done and a matching row was found from the DB) the method will return True. """ filtered = base_qs.filter(pk=pk_val) if not values: # We can end up here when saving a model in inheritance chain where # update_fields doesn't target any field in current model. In that # case we just say the update succeeded. Another case ending up here # is a model with just PK - in that case check that the PK still # exists. return update_fields is not None or filtered.exists() if self._meta.select_on_save and not forced_update: if filtered.exists(): # It may happen that the object is deleted from the DB right after # this check, causing the subsequent UPDATE to return zero matching # rows. The same result can occur in some rare cases when the # database returns zero despite the UPDATE being executed # successfully (a row is matched and updated). In order to # distinguish these two cases, the object's existence in the # database is again checked for if the UPDATE query returns 0. return filtered._update(values) > 0 or filtered.exists() else: return False return filtered._update(values) > 0 def _do_insert(self, manager, using, fields, update_pk, raw): """ Do an INSERT. If update_pk is defined then this method should return the new pk for the model. """ return manager._insert([self], fields=fields, return_id=update_pk, using=using, raw=raw) def delete(self, using=None): using = using or router.db_for_write(self.__class__, instance=self) assert self._get_pk_val() is not None, ( "%s object can't be deleted because its %s attribute is set to None." 
% (self._meta.object_name, self._meta.pk.attname) ) collector = Collector(using=using) collector.collect([self]) collector.delete() delete.alters_data = True def _get_FIELD_display(self, field): value = getattr(self, field.attname) return force_text(dict(field.flatchoices).get(value, value), strings_only=True) def _get_next_or_previous_by_FIELD(self, field, is_next, **kwargs): if not self.pk: raise ValueError("get_next/get_previous cannot be used on unsaved objects.") op = 'gt' if is_next else 'lt' order = '' if is_next else '-' param = force_text(getattr(self, field.attname)) q = Q(**{'%s__%s' % (field.name, op): param}) q = q | Q(**{field.name: param, 'pk__%s' % op: self.pk}) qs = self.__class__._default_manager.using(self._state.db).filter(**kwargs).filter(q).order_by( '%s%s' % (order, field.name), '%spk' % order ) try: return qs[0] except IndexError: raise self.DoesNotExist("%s matching query does not exist." % self.__class__._meta.object_name) def _get_next_or_previous_in_order(self, is_next): cachename = "__%s_order_cache" % is_next if not hasattr(self, cachename): op = 'gt' if is_next else 'lt' order = '_order' if is_next else '-_order' order_field = self._meta.order_with_respect_to obj = self._default_manager.filter(**{ order_field.name: getattr(self, order_field.attname) }).filter(**{ '_order__%s' % op: self._default_manager.values('_order').filter(**{ self._meta.pk.name: self.pk }) }).order_by(order)[:1].get() setattr(self, cachename, obj) return getattr(self, cachename) def prepare_database_save(self, field): if self.pk is None: raise ValueError("Unsaved model instance %r cannot be used in an ORM query." % self) return getattr(self, field.rel.field_name) def clean(self): """ Hook for doing any extra model-wide validation after clean() has been called on every field by self.clean_fields. Any ValidationError raised by this method will not be associated with a particular field; it will have a special-case association with the field defined by NON_FIELD_ERRORS. """ pass def validate_unique(self, exclude=None): """ Checks unique constraints on the model and raises ``ValidationError`` if any failed. """ unique_checks, date_checks = self._get_unique_checks(exclude=exclude) errors = self._perform_unique_checks(unique_checks) date_errors = self._perform_date_checks(date_checks) for k, v in date_errors.items(): errors.setdefault(k, []).extend(v) if errors: raise ValidationError(errors) def _get_unique_checks(self, exclude=None): """ Gather a list of checks to perform. Since validate_unique could be called from a ModelForm, some fields may have been excluded; we can't perform a unique check on a model that is missing fields involved in that check. Fields that did not validate should also be excluded, but they need to be passed in via the exclude argument. """ if exclude is None: exclude = [] unique_checks = [] unique_togethers = [(self.__class__, self._meta.unique_together)] for parent_class in self._meta.parents.keys(): if parent_class._meta.unique_together: unique_togethers.append((parent_class, parent_class._meta.unique_together)) for model_class, unique_together in unique_togethers: for check in unique_together: for name in check: # If this is an excluded field, don't add this check. if name in exclude: break else: unique_checks.append((model_class, tuple(check))) # These are checks for the unique_for_<date/year/month>. date_checks = [] # Gather a list of checks for fields declared as unique and add them to # the list of checks. 
fields_with_class = [(self.__class__, self._meta.local_fields)] for parent_class in self._meta.parents.keys(): fields_with_class.append((parent_class, parent_class._meta.local_fields)) for model_class, fields in fields_with_class: for f in fields: name = f.name if name in exclude: continue if f.unique: unique_checks.append((model_class, (name,))) if f.unique_for_date and f.unique_for_date not in exclude: date_checks.append((model_class, 'date', name, f.unique_for_date)) if f.unique_for_year and f.unique_for_year not in exclude: date_checks.append((model_class, 'year', name, f.unique_for_year)) if f.unique_for_month and f.unique_for_month not in exclude: date_checks.append((model_class, 'month', name, f.unique_for_month)) return unique_checks, date_checks def _perform_unique_checks(self, unique_checks): errors = {} for model_class, unique_check in unique_checks: # Try to look up an existing object with the same values as this # object's values for all the unique field. lookup_kwargs = {} for field_name in unique_check: f = self._meta.get_field(field_name) lookup_value = getattr(self, f.attname) if lookup_value is None: # no value, skip the lookup continue if f.primary_key and not self._state.adding: # no need to check for unique primary key when editing continue lookup_kwargs[str(field_name)] = lookup_value # some fields were skipped, no reason to do the check if len(unique_check) != len(lookup_kwargs): continue qs = model_class._default_manager.filter(**lookup_kwargs) # Exclude the current object from the query if we are editing an # instance (as opposed to creating a new one) # Note that we need to use the pk as defined by model_class, not # self.pk. These can be different fields because model inheritance # allows single model to have effectively multiple primary keys. # Refs #17615. 
model_class_pk = self._get_pk_val(model_class._meta) if not self._state.adding and model_class_pk is not None: qs = qs.exclude(pk=model_class_pk) if qs.exists(): if len(unique_check) == 1: key = unique_check[0] else: key = NON_FIELD_ERRORS errors.setdefault(key, []).append(self.unique_error_message(model_class, unique_check)) return errors def _perform_date_checks(self, date_checks): errors = {} for model_class, lookup_type, field, unique_for in date_checks: lookup_kwargs = {} # there's a ticket to add a date lookup, we can remove this special # case if that makes it's way in date = getattr(self, unique_for) if date is None: continue if lookup_type == 'date': lookup_kwargs['%s__day' % unique_for] = date.day lookup_kwargs['%s__month' % unique_for] = date.month lookup_kwargs['%s__year' % unique_for] = date.year else: lookup_kwargs['%s__%s' % (unique_for, lookup_type)] = getattr(date, lookup_type) lookup_kwargs[field] = getattr(self, field) qs = model_class._default_manager.filter(**lookup_kwargs) # Exclude the current object from the query if we are editing an # instance (as opposed to creating a new one) if not self._state.adding and self.pk is not None: qs = qs.exclude(pk=self.pk) if qs.exists(): errors.setdefault(field, []).append( self.date_error_message(lookup_type, field, unique_for) ) return errors def date_error_message(self, lookup_type, field_name, unique_for): opts = self._meta field = opts.get_field(field_name) return ValidationError( message=field.error_messages['unique_for_date'], code='unique_for_date', params={ 'model': self, 'model_name': six.text_type(capfirst(opts.verbose_name)), 'lookup_type': lookup_type, 'field': field_name, 'field_label': six.text_type(capfirst(field.verbose_name)), 'date_field': unique_for, 'date_field_label': six.text_type(capfirst(opts.get_field(unique_for).verbose_name)), } ) def unique_error_message(self, model_class, unique_check): opts = model_class._meta params = { 'model': self, 'model_class': model_class, 'model_name': six.text_type(capfirst(opts.verbose_name)), 'unique_check': unique_check, } # A unique field if len(unique_check) == 1: field = opts.get_field(unique_check[0]) params['field_label'] = six.text_type(capfirst(field.verbose_name)) return ValidationError( message=field.error_messages['unique'], code='unique', params=params, ) # unique_together else: field_labels = [capfirst(opts.get_field(f).verbose_name) for f in unique_check] params['field_labels'] = six.text_type(get_text_list(field_labels, _('and'))) return ValidationError( message=_("%(model_name)s with this %(field_labels)s already exists."), code='unique_together', params=params, ) def full_clean(self, exclude=None, validate_unique=True): """ Calls clean_fields, clean, and validate_unique, on the model, and raises a ``ValidationError`` for any errors that occurred. """ errors = {} if exclude is None: exclude = [] else: exclude = list(exclude) try: self.clean_fields(exclude=exclude) except ValidationError as e: errors = e.update_error_dict(errors) # Form.clean() is run even if other validation fails, so do the # same with Model.clean() for consistency. try: self.clean() except ValidationError as e: errors = e.update_error_dict(errors) # Run unique checks, but only for fields that passed validation. 
if validate_unique: for name in errors.keys(): if name != NON_FIELD_ERRORS and name not in exclude: exclude.append(name) try: self.validate_unique(exclude=exclude) except ValidationError as e: errors = e.update_error_dict(errors) if errors: raise ValidationError(errors) def clean_fields(self, exclude=None): """ Cleans all fields and raises a ValidationError containing a dict of all validation errors if any occur. """ if exclude is None: exclude = [] errors = {} for f in self._meta.fields: if f.name in exclude: continue # Skip validation for empty fields with blank=True. The developer # is responsible for making sure they have a valid value. raw_value = getattr(self, f.attname) if f.blank and raw_value in f.empty_values: continue try: setattr(self, f.attname, f.clean(raw_value, self)) except ValidationError as e: errors[f.name] = e.error_list if errors: raise ValidationError(errors) @classmethod def check(cls, **kwargs): errors = [] errors.extend(cls._check_swappable()) errors.extend(cls._check_model()) errors.extend(cls._check_managers(**kwargs)) if not cls._meta.swapped: errors.extend(cls._check_fields(**kwargs)) errors.extend(cls._check_m2m_through_same_relationship()) errors.extend(cls._check_long_column_names()) clash_errors = cls._check_id_field() + cls._check_field_name_clashes() errors.extend(clash_errors) # If there are field name clashes, hide consequent column name # clashes. if not clash_errors: errors.extend(cls._check_column_name_clashes()) errors.extend(cls._check_index_together()) errors.extend(cls._check_unique_together()) errors.extend(cls._check_ordering()) return errors @classmethod def _check_swappable(cls): """ Check if the swapped model exists. """ errors = [] if cls._meta.swapped: try: apps.get_model(cls._meta.swapped) except ValueError: errors.append( checks.Error( "'%s' is not of the form 'app_label.app_name'." % cls._meta.swappable, hint=None, obj=None, id='models.E001', ) ) except LookupError: app_label, model_name = cls._meta.swapped.split('.') errors.append( checks.Error( ("'%s' references '%s.%s', which has not been installed, or is abstract.") % ( cls._meta.swappable, app_label, model_name ), hint=None, obj=None, id='models.E002', ) ) return errors @classmethod def _check_model(cls): errors = [] if cls._meta.proxy: if cls._meta.local_fields or cls._meta.local_many_to_many: errors.append( checks.Error( "Proxy model '%s' contains model fields." % cls.__name__, hint=None, obj=None, id='models.E017', ) ) return errors @classmethod def _check_managers(cls, **kwargs): """ Perform all manager checks. """ errors = [] managers = cls._meta.concrete_managers + cls._meta.abstract_managers for __, __, manager in managers: errors.extend(manager.check(**kwargs)) return errors @classmethod def _check_fields(cls, **kwargs): """ Perform all field checks. """ errors = [] for field in cls._meta.local_fields: errors.extend(field.check(**kwargs)) for field in cls._meta.local_many_to_many: errors.extend(field.check(from_model=cls, **kwargs)) return errors @classmethod def _check_m2m_through_same_relationship(cls): """ Check if no relationship model is used by more than one m2m field. """ errors = [] seen_intermediary_signatures = [] fields = cls._meta.local_many_to_many # Skip when the target model wasn't found. fields = (f for f in fields if isinstance(f.rel.to, ModelBase)) # Skip when the relationship model wasn't found. 
fields = (f for f in fields if isinstance(f.rel.through, ModelBase)) for f in fields: signature = (f.rel.to, cls, f.rel.through) if signature in seen_intermediary_signatures: errors.append( checks.Error( ("The model has two many-to-many relations through " "the intermediate model '%s.%s'.") % ( f.rel.through._meta.app_label, f.rel.through._meta.object_name ), hint=None, obj=cls, id='models.E003', ) ) else: seen_intermediary_signatures.append(signature) return errors @classmethod def _check_id_field(cls): """ Check if `id` field is a primary key. """ fields = list(f for f in cls._meta.local_fields if f.name == 'id' and f != cls._meta.pk) # fields is empty or consists of the invalid "id" field if fields and not fields[0].primary_key and cls._meta.pk.name == 'id': return [ checks.Error( ("'id' can only be used as a field name if the field also " "sets 'primary_key=True'."), hint=None, obj=cls, id='models.E004', ) ] else: return [] @classmethod def _check_field_name_clashes(cls): """ Ref #17673. """ errors = [] used_fields = {} # name or attname -> field # Check that multi-inheritance doesn't cause field name shadowing. for parent in cls._meta.parents: for f in parent._meta.local_fields: clash = used_fields.get(f.name) or used_fields.get(f.attname) or None if clash: errors.append( checks.Error( ("The field '%s' from parent model " "'%s' clashes with the field '%s' " "from parent model '%s'.") % ( clash.name, clash.model._meta, f.name, f.model._meta ), hint=None, obj=cls, id='models.E005', ) ) used_fields[f.name] = f used_fields[f.attname] = f # Check that fields defined in the model don't clash with fields from # parents. for f in cls._meta.local_fields: clash = used_fields.get(f.name) or used_fields.get(f.attname) or None # Note that we may detect clash between user-defined non-unique # field "id" and automatically added unique field "id", both # defined at the same model. This special case is considered in # _check_id_field and here we ignore it. id_conflict = (f.name == "id" and clash and clash.name == "id" and clash.model == cls) if clash and not id_conflict: errors.append( checks.Error( ("The field '%s' clashes with the field '%s' " "from model '%s'.") % ( f.name, clash.name, clash.model._meta ), hint=None, obj=f, id='models.E006', ) ) used_fields[f.name] = f used_fields[f.attname] = f return errors @classmethod def _check_column_name_clashes(cls): # Store a list of column names which have already been used by other fields. used_column_names = [] errors = [] for f in cls._meta.local_fields: _, column_name = f.get_attname_column() # Ensure the column name is not already in use. if column_name and column_name in used_column_names: errors.append( checks.Error( "Field '%s' has column name '%s' that is used by another field." % (f.name, column_name), hint="Specify a 'db_column' for the field.", obj=cls, id='models.E007' ) ) else: used_column_names.append(column_name) return errors @classmethod def _check_index_together(cls): """ Check the value of "index_together" option. 
""" if not isinstance(cls._meta.index_together, (tuple, list)): return [ checks.Error( "'index_together' must be a list or tuple.", hint=None, obj=cls, id='models.E008', ) ] elif any(not isinstance(fields, (tuple, list)) for fields in cls._meta.index_together): return [ checks.Error( "All 'index_together' elements must be lists or tuples.", hint=None, obj=cls, id='models.E009', ) ] else: errors = [] for fields in cls._meta.index_together: errors.extend(cls._check_local_fields(fields, "index_together")) return errors @classmethod def _check_unique_together(cls): """ Check the value of "unique_together" option. """ if not isinstance(cls._meta.unique_together, (tuple, list)): return [ checks.Error( "'unique_together' must be a list or tuple.", hint=None, obj=cls, id='models.E010', ) ] elif any(not isinstance(fields, (tuple, list)) for fields in cls._meta.unique_together): return [ checks.Error( "All 'unique_together' elements must be lists or tuples.", hint=None, obj=cls, id='models.E011', ) ] else: errors = [] for fields in cls._meta.unique_together: errors.extend(cls._check_local_fields(fields, "unique_together")) return errors @classmethod def _check_local_fields(cls, fields, option): from django.db import models errors = [] for field_name in fields: try: field = cls._meta.get_field(field_name, many_to_many=True) except models.FieldDoesNotExist: errors.append( checks.Error( "'%s' refers to the non-existent field '%s'." % (option, field_name), hint=None, obj=cls, id='models.E012', ) ) else: if isinstance(field.rel, models.ManyToManyRel): errors.append( checks.Error( ("'%s' refers to a ManyToManyField '%s', but " "ManyToManyFields are not permitted in '%s'.") % ( option, field_name, option ), hint=None, obj=cls, id='models.E013', ) ) elif field not in cls._meta.local_fields: errors.append( checks.Error( ("'%s' refers to field '%s' which is not local " "to model '%s'.") % ( option, field_name, cls._meta.object_name ), hint=("This issue may be caused by multi-table " "inheritance."), obj=cls, id='models.E016', ) ) return errors @classmethod def _check_ordering(cls): """ Check "ordering" option -- is it a list of strings and do all fields exist? """ from django.db.models import FieldDoesNotExist if not cls._meta.ordering: return [] if not isinstance(cls._meta.ordering, (list, tuple)): return [ checks.Error( ("'ordering' must be a tuple or list " "(even if you want to order by only one field)."), hint=None, obj=cls, id='models.E014', ) ] errors = [] fields = cls._meta.ordering # Skip '?' fields. fields = (f for f in fields if f != '?') # Convert "-field" to "field". fields = ((f[1:] if f.startswith('-') else f) for f in fields) fields = (f for f in fields if f != '_order' or not cls._meta.order_with_respect_to) # Skip ordering in the format field1__field2 (FIXME: checking # this format would be nice, but it's a little fiddly). fields = (f for f in fields if '__' not in f) # Skip ordering on pk. This is always a valid order_by field # but is an alias and therefore won't be found by opts.get_field. fields = (f for f in fields if f != 'pk') for field_name in fields: try: cls._meta.get_field(field_name, many_to_many=False) except FieldDoesNotExist: if field_name.endswith('_id'): try: field = cls._meta.get_field(field_name[:-3], many_to_many=False) except FieldDoesNotExist: pass else: if field.attname == field_name: continue errors.append( checks.Error( "'ordering' refers to the non-existent field '%s'." 
% field_name, hint=None, obj=cls, id='models.E015', ) ) return errors @classmethod def _check_long_column_names(cls): """ Check that any auto-generated column names are shorter than the limits for each database in which the model will be created. """ errors = [] allowed_len = None db_alias = None # Find the minimum max allowed length among all specified db_aliases. for db in settings.DATABASES.keys(): # skip databases where the model won't be created if not router.allow_migrate(db, cls): continue connection = connections[db] max_name_length = connection.ops.max_name_length() if max_name_length is None or connection.features.truncates_names: continue else: if allowed_len is None: allowed_len = max_name_length db_alias = db elif max_name_length < allowed_len: allowed_len = max_name_length db_alias = db if allowed_len is None: return errors for f in cls._meta.local_fields: _, column_name = f.get_attname_column() # Check if auto-generated name for the field is too long # for the database. if (f.db_column is None and column_name is not None and len(column_name) > allowed_len): errors.append( checks.Error( 'Autogenerated column name too long for field "%s". ' 'Maximum length is "%s" for database "%s".' % (column_name, allowed_len, db_alias), hint="Set the column name manually using 'db_column'.", obj=cls, id='models.E018', ) ) for f in cls._meta.local_many_to_many: # Check if auto-generated name for the M2M field is too long # for the database. for m2m in f.rel.through._meta.local_fields: _, rel_name = m2m.get_attname_column() if (m2m.db_column is None and rel_name is not None and len(rel_name) > allowed_len): errors.append( checks.Error( 'Autogenerated column name too long for M2M field ' '"%s". Maximum length is "%s" for database "%s".' % (rel_name, allowed_len, db_alias), hint=("Use 'through' to create a separate model " "for M2M and then set column_name using " "'db_column'."), obj=cls, id='models.E019', ) ) return errors ############################################ # HELPER FUNCTIONS (CURRIED MODEL METHODS) # ############################################ # ORDERING METHODS ######################### def method_set_order(ordered_obj, self, id_list, using=None): if using is None: using = DEFAULT_DB_ALIAS rel_val = getattr(self, ordered_obj._meta.order_with_respect_to.rel.field_name) order_name = ordered_obj._meta.order_with_respect_to.name # FIXME: It would be nice if there was an "update many" version of update # for situations like this. with transaction.atomic(using=using, savepoint=False): for i, j in enumerate(id_list): ordered_obj.objects.filter(**{'pk': j, order_name: rel_val}).update(_order=i) def method_get_order(ordered_obj, self): rel_val = getattr(self, ordered_obj._meta.order_with_respect_to.rel.field_name) order_name = ordered_obj._meta.order_with_respect_to.name pk_name = ordered_obj._meta.pk.name return [r[pk_name] for r in ordered_obj.objects.filter(**{order_name: rel_val}).values(pk_name)] ######## # MISC # ######## def simple_class_factory(model, attrs): """ Needed for dynamic classes. """ return model def model_unpickle(model_id, attrs, factory): """ Used to unpickle Model subclasses with deferred fields. """ if isinstance(model_id, tuple): model = apps.get_model(*model_id) else: # Backwards compat - the model was cached directly in earlier versions. 
model = model_id cls = factory(model, attrs) return cls.__new__(cls) model_unpickle.__safe_for_unpickle__ = True def unpickle_inner_exception(klass, exception_name): # Get the exception class from the class it is attached to: exception = getattr(klass, exception_name) return exception.__new__(exception)
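# ---------------------------------------------------------------------------
# Illustrative sketch (not part of Django itself): the order_with_respect_to
# plumbing above (_prepare plus the curried method_get_order and
# method_set_order helpers) surfaces in user code roughly as follows. The
# model names are hypothetical assumptions for the example.
#
#     class Question(models.Model):
#         text = models.CharField(max_length=200)
#
#     class Answer(models.Model):
#         question = models.ForeignKey(Question)
#
#         class Meta:
#             order_with_respect_to = 'question'
#
#     # Django then attaches accessors to the *related* model:
#     question.get_answer_order()          # ordered list of Answer pks
#     question.set_answer_order([3, 1, 2]) # rewrite the _order column
# ---------------------------------------------------------------------------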
helenst/django
django/db/models/base.py
Python
bsd-3-clause
66,310
0.001508
""" Convenient way to expose filepaths to scripts. Also, important constants are centralized here to avoid multiple copies. """ import sys, os sys.path.append(os.path.join(os.path.dirname(__file__), '..')) sys.path.append(os.path.join(os.path.dirname(__file__))) sys.path.append(os.path.join(os.path.dirname(__file__), 'utils')) sys.path.append(os.path.join(os.path.dirname(__file__), 'utils', 'tests')) TR = 2.5 #we choose cutoff value values by inspecting the histogram of data values of the standard mni brain MNI_CUTOFF = 5000 MIN_STD_SHAPE = (91, 109, 91)
nhejazi/project-gamma
code/project_config.py
Python
bsd-3-clause
562
0.007117
#!/usr/bin/env python # -*- coding: utf-8 -*- import os import sys import shutil from migrate import exceptions from migrate.versioning import version, repository from migrate.versioning.script import * from migrate.versioning.util import * from migrate.tests import fixture from migrate.tests.fixture.models import tmp_sql_table class TestBaseScript(fixture.Pathed): def test_all(self): """Testing all basic BaseScript operations""" # verify / source / run src = self.tmp() open(src, 'w').close() bscript = BaseScript(src) BaseScript.verify(src) self.assertEqual(bscript.source(), '') self.assertRaises(NotImplementedError, bscript.run, 'foobar') class TestPyScript(fixture.Pathed, fixture.DB): cls = PythonScript def test_create(self): """We can create a migration script""" path = self.tmp_py() # Creating a file that doesn't exist should succeed self.cls.create(path) self.assertTrue(os.path.exists(path)) # Created file should be a valid script (If not, raises an error) self.cls.verify(path) # Can't create it again: it already exists self.assertRaises(exceptions.PathFoundError,self.cls.create,path) @fixture.usedb(supported='sqlite') def test_run(self): script_path = self.tmp_py() pyscript = PythonScript.create(script_path) pyscript.run(self.engine, 1) pyscript.run(self.engine, -1) self.assertRaises(exceptions.ScriptError, pyscript.run, self.engine, 0) self.assertRaises(exceptions.ScriptError, pyscript._func, 'foobar') # clean pyc file os.remove(script_path + 'c') # test deprecated upgrade/downgrade with no arguments contents = open(script_path, 'r').read() f = open(script_path, 'w') f.write(contents.replace("upgrade(migrate_engine)", "upgrade()")) f.close() pyscript = PythonScript(script_path) pyscript._module = None try: pyscript.run(self.engine, 1) pyscript.run(self.engine, -1) except exceptions.ScriptError: pass else: self.fail() def test_verify_notfound(self): """Correctly verify a python migration script: nonexistant file""" path = self.tmp_py() self.assertFalse(os.path.exists(path)) # Fails on empty path self.assertRaises(exceptions.InvalidScriptError,self.cls.verify,path) self.assertRaises(exceptions.InvalidScriptError,self.cls,path) def test_verify_invalidpy(self): """Correctly verify a python migration script: invalid python file""" path=self.tmp_py() # Create empty file f = open(path,'w') f.write("def fail") f.close() self.assertRaises(Exception,self.cls.verify_module,path) # script isn't verified on creation, but on module reference py = self.cls(path) self.assertRaises(Exception,(lambda x: x.module),py) def test_verify_nofuncs(self): """Correctly verify a python migration script: valid python file; no upgrade func""" path = self.tmp_py() # Create empty file f = open(path, 'w') f.write("def zergling():\n\tprint 'rush'") f.close() self.assertRaises(exceptions.InvalidScriptError, self.cls.verify_module, path) # script isn't verified on creation, but on module reference py = self.cls(path) self.assertRaises(exceptions.InvalidScriptError,(lambda x: x.module),py) @fixture.usedb(supported='sqlite') def test_preview_sql(self): """Preview SQL abstract from ORM layer (sqlite)""" path = self.tmp_py() f = open(path, 'w') content = ''' from migrate import * from sqlalchemy import * metadata = MetaData() UserGroup = Table('Link', metadata, Column('link1ID', Integer), Column('link2ID', Integer), UniqueConstraint('link1ID', 'link2ID')) def upgrade(migrate_engine): metadata.create_all(migrate_engine) ''' f.write(content) f.close() pyscript = self.cls(path) SQL = pyscript.preview_sql(self.url, 1) 
self.assertEqualIgnoreWhitespace(""" CREATE TABLE "Link" ("link1ID" INTEGER, "link2ID" INTEGER, UNIQUE ("link1ID", "link2ID")) """, SQL) # TODO: test: No SQL should be executed! def test_verify_success(self): """Correctly verify a python migration script: success""" path = self.tmp_py() # Succeeds after creating self.cls.create(path) self.cls.verify(path) # test for PythonScript.make_update_script_for_model @fixture.usedb() def test_make_update_script_for_model(self): """Construct script source from differences of two models""" self.setup_model_params() self.write_file(self.first_model_path, self.base_source) self.write_file(self.second_model_path, self.base_source + self.model_source) source_script = self.pyscript.make_update_script_for_model( engine=self.engine, oldmodel=load_model('testmodel_first:meta'), model=load_model('testmodel_second:meta'), repository=self.repo_path, ) self.assertTrue("['User'].create()" in source_script) self.assertTrue("['User'].drop()" in source_script) @fixture.usedb() def test_make_update_script_for_equal_models(self): """Try to make update script from two identical models""" self.setup_model_params() self.write_file(self.first_model_path, self.base_source + self.model_source) self.write_file(self.second_model_path, self.base_source + self.model_source) source_script = self.pyscript.make_update_script_for_model( engine=self.engine, oldmodel=load_model('testmodel_first:meta'), model=load_model('testmodel_second:meta'), repository=self.repo_path, ) self.assertFalse('User.create()' in source_script) self.assertFalse('User.drop()' in source_script) @fixture.usedb() def test_make_update_script_direction(self): """Check update scripts go in the right direction""" self.setup_model_params() self.write_file(self.first_model_path, self.base_source) self.write_file(self.second_model_path, self.base_source + self.model_source) source_script = self.pyscript.make_update_script_for_model( engine=self.engine, oldmodel=load_model('testmodel_first:meta'), model=load_model('testmodel_second:meta'), repository=self.repo_path, ) self.assertTrue(0 < source_script.find('upgrade') < source_script.find("['User'].create()") < source_script.find('downgrade') < source_script.find("['User'].drop()")) def setup_model_params(self): self.script_path = self.tmp_py() self.repo_path = self.tmp() self.first_model_path = os.path.join(self.temp_usable_dir, 'testmodel_first.py') self.second_model_path = os.path.join(self.temp_usable_dir, 'testmodel_second.py') self.base_source = """from sqlalchemy import *\nmeta = MetaData()\n""" self.model_source = """ User = Table('User', meta, Column('id', Integer, primary_key=True), Column('login', Unicode(40)), Column('passwd', String(40)), )""" self.repo = repository.Repository.create(self.repo_path, 'repo') self.pyscript = PythonScript.create(self.script_path) sys.modules.pop('testmodel_first', None) sys.modules.pop('testmodel_second', None) def write_file(self, path, contents): f = open(path, 'w') f.write(contents) f.close() class TestSqlScript(fixture.Pathed, fixture.DB): @fixture.usedb() def test_error(self): """Test if exception is raised on wrong script source""" src = self.tmp() f = open(src, 'w') f.write("""foobar""") f.close() sqls = SqlScript(src) self.assertRaises(Exception, sqls.run, self.engine) @fixture.usedb() def test_success(self): """Test sucessful SQL execution""" # cleanup and prepare python script tmp_sql_table.metadata.drop_all(self.engine, checkfirst=True) script_path = self.tmp_py() pyscript = PythonScript.create(script_path) # 
populate python script contents = open(script_path, 'r').read() contents = contents.replace("pass", "tmp_sql_table.create(migrate_engine)") contents = 'from migrate.tests.fixture.models import tmp_sql_table\n' + contents f = open(script_path, 'w') f.write(contents) f.close() # write SQL script from python script preview pyscript = PythonScript(script_path) src = self.tmp() f = open(src, 'w') f.write(pyscript.preview_sql(self.url, 1)) f.close() # run the change sqls = SqlScript(src) sqls.run(self.engine, executemany=False) tmp_sql_table.metadata.drop_all(self.engine, checkfirst=True)
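# A condensed, hedged recap of the script API exercised by these tests
# (engine and url come from the fixtures, so they are placeholders here):
#
#     pyscript = PythonScript.create(path)  # writes a skeleton script
#     pyscript.run(engine, 1)               # step 1 upgrades, -1 downgrades
#     sql = pyscript.preview_sql(url, 1)    # render the SQL without executing
#     SqlScript(sql_path).run(engine)       # replay a captured SQL file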
razzius/sqlalchemy-migrate
migrate/tests/versioning/test_script.py
Python
mit
9,323
0.002789
import maya.cmds as cmds
import pymel.all as pm
import traceback

controlCurve = pm.PyNode('control_curve')

## to make a numerical 'floating point'
## attribute, we use at='double', keyable=True
controlCurve.addAttr( 'allCurl', at='double', keyable=True )
controlCurve.addAttr( 'pointerAllCurl', at='double', keyable=True )
controlCurve.addAttr( 'middleAllCurl', at='double', keyable=True )
controlCurve.addAttr( 'pinkyAllCurl', at='double', keyable=True )
controlCurve.addAttr( 'pointerACurl', at='double', keyable=True )
controlCurve.addAttr( 'pointerBCurl', at='double', keyable=True )
controlCurve.addAttr( 'pointerCCurl', at='double', keyable=True )
controlCurve.addAttr( 'middleACurl', at='double', keyable=True )
controlCurve.addAttr( 'middleBCurl', at='double', keyable=True )
controlCurve.addAttr( 'middleCCurl', at='double', keyable=True )
controlCurve.addAttr( 'pinkyACurl', at='double', keyable=True )
controlCurve.addAttr( 'pinkyBCurl', at='double', keyable=True )
controlCurve.addAttr( 'pinkyCCurl', at='double', keyable=True )

pointerA = pm.PyNode('pointer_a')
pointerB = pm.PyNode('pointer_b')
pointerC = pm.PyNode('pointer_c')
middleA = pm.PyNode('middle_a')
middleB = pm.PyNode('middle_b')
middleC = pm.PyNode('middle_c')
pinkyA = pm.PyNode('pinky_a')
pinkyB = pm.PyNode('pinky_b')
pinkyC = pm.PyNode('pinky_c')

pointerAll = [ pointerA, pointerB, pointerC ]
middleAll = [ middleA, middleB, middleC ]
pinkyAll = [ pinkyA, pinkyB, pinkyC ]
all = pointerAll + middleAll + pinkyAll

adds = { }
for jnt in all:
    addNodeY = pm.createNode( 'plusMinusAverage' )
    addNodeZ = pm.createNode( 'plusMinusAverage' )
    addNodeY.rename( jnt.name()+'_addY' )
    addNodeZ.rename( jnt.name()+'_addZ' )
    ## the operator >> means "connect" for pymel
    addNodeY.output1D >> jnt.rotateY
    addNodeZ.output1D >> jnt.rotateZ
    adds[ jnt.name()+'Y' ] = addNodeY
    adds[ jnt.name()+'Z' ] = addNodeZ

## We can't hard-code the target node name because Maya might change it:
#controlCurve.pointerAllCurl >> pm.PyNode('pointer_a_addZ').input1D
## so we use our adds dictionary to get the right answer.

## Every connection follows the same pattern: find the next free input1D
## element on the joint's Z add node, then connect the control attribute
## into it. A mapping from control attribute name to target joints lets
## one loop stand in for the long run of copy-pasted blocks; the
## plusMinusAverage nodes sum their inputs regardless of slot order.
attrToTargets = {
    'allCurl': all,
    'pointerAllCurl': pointerAll,
    'middleAllCurl': middleAll,
    'pinkyAllCurl': pinkyAll,
    'pointerACurl': [ pointerA ],
    'pointerBCurl': [ pointerB ],
    'pointerCCurl': [ pointerC ],
    'middleACurl': [ middleA ],
    'middleBCurl': [ middleB ],
    'middleCCurl': [ middleC ],
    'pinkyACurl': [ pinkyA ],
    'pinkyBCurl': [ pinkyB ],
    'pinkyCCurl': [ pinkyC ],
}
for attrName, targets in attrToTargets.items():
    for jnt in targets:
        target = adds[ jnt.name() + 'Z' ]
        num = target.input1D.getNumElements()
        controlCurve.attr( attrName ) >> target.input1D[num]
joetainment/mmmmtools
MmmmToolsMod/script_file_runner_scripts/hand_auto_rigging.py
Python
gpl-3.0
5,762
0.020305
"""Define the InverseProblem class. Author: Ilias Bilionis Date: 1/14/2013 1/21/2013 """ __all__ = ['InverseProblem'] import numpy as np import itertools from ..random import StudentTLikelihoodFunction from ..random import RandomWalkProposal from ..random import SequentialMonteCarlo class InverseProblem(object): """The general inverse problem class.""" # The SMC object _smc = None # The final particles _r = None # The final weights _w = None # A resampled version of the particles _resampled_r = None # The mean of the particles _mean = None # The variance of the particles _variance = None @property def smc(self): """Get the SMC object.""" return self._smc @property def alpha(self): """Get the alpha parameter of the Gamma dist. for the precision.""" return self._alpha @property def beta(self): """Get the beta parameter of the Gamma dist. for the precision.""" return self._beta @property def particles(self): """Get the final particles.""" return self._r @property def weights(self): """Get the final weights.""" return self._w @property def resampled_particles(self): """Get the resampled particles.""" return self._resampled_r @property def mean(self): """Get the mean of the particles.""" return self._mean @property def variance(self): """Get the variance of the particles.""" return self._variance def __init__(self, solver=None, prior=None, data=None, alpha=1e-2, beta=1e-2, verbose=True, mpi=None, comm=None, num_particles=100, num_mcmc=10, proposal=RandomWalkProposal(dt=0.2), store_intermediate_samples=False): """Initialize the object. Keyword Arguments: solver --- The forward solver you wish to use. prior --- The prior distribution of the parameters. proposal--- The MCMC proposal. alpha --- The alpha parameter (shape) of the Gamma distribution of the precision of the forward solver. beta --- The beta parameter (rate) of the Gamma distribution of the precision of the forward solver. verbose --- Be verbose ir not. mpi --- Use MPI or not. comm --- The MPI communicator. num_particles --- The number of particles. num_mcmc --- The number of MCMC steps per SMC step. proposal --- The MCMC proposal. """ if solver is None: raise ValueError('The forward solver must be specified.') if data is None: raise ValueError('The data must be specified.') if prior is None: raise ValueError('The prior must be specified.') likelihood = StudentTLikelihoodFunction(2. * alpha, num_input=prior.num_input, data=data, mean_function=solver, cov=(beta / alpha)) self._smc = SequentialMonteCarlo(prior=prior, likelihood=likelihood, verbose=verbose, num_particles=num_particles, num_mcmc=num_mcmc, proposal=proposal, store_intermediate_samples=store_intermediate_samples, mpi=mpi, comm=comm) def solve(self): """Solve the inverse problem.""" r, w = self.smc.sample() self._r = r self._w = w idf = lambda(x): x self._mean = self.mean_of(idf) self._variance = self.variance_of(idf, self.mean) return r, w def mean_of(self, function): """Calculate the mean of a function of the particles.""" y = np.array([self._w[i] * function(self._r[i,:]) for i in xrange(self._r.shape[0])]) return np.mean(y, axis=0) def variance_of(self, function, mean=None): """Calculate the variance of a function""" if mean is None: mean = self.mean_of(function) v = np.array([self._w[i] * (function(self._r[i, :]) - mean) ** 2 for i in xrange(self._r.shape[0])]) return np.mean(v, axis=0)
ebilionis/py-best
best/inverse/_inverse_problem.py
Python
lgpl-3.0
4,531
0.002207
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import print_function


class PSDispatcher(object):
    """
    PSDispatcher is the base class for dispatching vars
    into different pserver instance.
    You need to implement the `dispatch` interface.
    """

    def __init__(self, pserver_endpoints):
        self._eps = pserver_endpoints
        self._step = 0

    @property
    def eps(self):
        return self._eps

    def reset(self):
        """
        Reset the step counter, i.e. set it to zero.
        """
        self._step = 0

    def dispatch(self, varlist):
        """
        Args:
            varlist(list): a list of Variables

        Returns:
            a list of endpoints, one for each variable in `varlist`
        """
        raise NotImplementedError("Interface has not been implemented.")


class HashName(PSDispatcher):
    """
    Hash variable names to several endpoints using python
    "hash()" function.

    Args:
        pserver_endpoints (list): list of endpoint(ip:port).

    Examples:
        .. code-block:: python

            pserver_endpoints = ["127.0.0.1:6007", "127.0.0.1:6008"]
            # each element of vars must expose a name() method
            vars = ["var1","var2","var3","var4","var5"]

            hash_dispatcher = HashName(pserver_endpoints)
            hash_dispatcher.dispatch(vars)
    """

    def __init__(self, pserver_endpoints):
        super(HashName, self).__init__(pserver_endpoints)

    def _hash_block(self, block_str, total):
        return hash(block_str) % total

    def dispatch(self, varlist):
        """
        Use name hashing to assign each variable to a parameter server.

        Args:
            varlist (list): a list of Variables

        Returns:
            a list of endpoints, one for each variable in `varlist`
        """
        eplist = []
        for var in varlist:
            server_id = self._hash_block(var.name(), len(self._eps))
            server_for_param = self._eps[server_id]
            eplist.append(server_for_param)
        return eplist


class RoundRobin(PSDispatcher):
    """
    Distribute variables to several endpoints using round-robin
    scheduling <https://en.wikipedia.org/wiki/Round-robin_scheduling>.

    Args:
        pserver_endpoints (list): list of endpoint(ip:port).

    Examples:
        .. code-block:: python

            pserver_endpoints = ["127.0.0.1:6007", "127.0.0.1:6008"]
            vars = ["var1","var2","var3","var4","var5"]

            rr = RoundRobin(pserver_endpoints)
            rr.dispatch(vars)
    """

    def __init__(self, pserver_endpoints):
        super(RoundRobin, self).__init__(pserver_endpoints)

    def dispatch(self, varlist):
        """
        Use round-robin scheduling to assign each variable to a
        parameter server.

        Args:
            varlist (list): a list of Variables

        Returns:
            a list of endpoints, one for each variable in `varlist`
        """
        eplist = []
        for var in varlist:
            server_for_param = self._eps[self._step]
            eplist.append(server_for_param)
            self._step += 1
            if self._step >= len(self._eps):
                self._step = 0
        return eplist
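if __name__ == "__main__":
    # Minimal, hedged demo (the endpoints are placeholders; _FakeVar stands
    # in for a real Variable, which must expose a name() method so that
    # HashName can hash it).
    class _FakeVar(object):
        def __init__(self, name):
            self._name = name

        def name(self):
            return self._name

    eps = ["127.0.0.1:6007", "127.0.0.1:6008"]
    variables = [_FakeVar("var%d" % i) for i in range(5)]
    # RoundRobin alternates endpoints; HashName is sticky per variable name.
    print(RoundRobin(eps).dispatch(variables))
    print(HashName(eps).dispatch(variables))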
luotao1/Paddle
python/paddle/fluid/incubate/fleet/parameter_server/ir/ps_dispatcher.py
Python
apache-2.0
3,500
0.000286
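A self-contained sketch exercising both dispatchers above. FakeVar is a hypothetical stand-in for a Fluid Variable, supplying only the name() accessor that HashName calls; it assumes the two classes above are in scope:

class FakeVar(object):
    def __init__(self, name):
        self._name = name

    def name(self):
        return self._name


endpoints = ["127.0.0.1:6007", "127.0.0.1:6008"]
variables = [FakeVar("var%d" % i) for i in range(5)]

rr = RoundRobin(endpoints)
print(rr.dispatch(variables))  # endpoints alternate: 6007, 6008, 6007, ...

hn = HashName(endpoints)
print(hn.dispatch(variables))  # stable assignment via hash(name) % len(endpoints)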
# Generated by Django 1.11.15 on 2018-09-25 15:40 # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. from django.db import migrations class Migration(migrations.Migration): dependencies = [] operations = []
lonnen/socorro
webapp-django/crashstats/crashstats/migrations/0001_initial.py
Python
mpl-2.0
369
0
import argparse import xml.etree.cElementTree as ET import jobset argp = argparse.ArgumentParser(description='Run interop tests.') argp.add_argument('-l', '--language', default='c++') args = argp.parse_args() # build job build_job = jobset.JobSpec(cmdline=['tools/run_tests/run_interops_build.sh', '%s' % args.language], shortname='build') # test jobs, each test is a separate job to run in parallel _TESTS = ['large_unary', 'empty_unary', 'ping_pong', 'client_streaming', 'server_streaming'] jobs = [] jobNumber = 0 for test in _TESTS: test_job = jobset.JobSpec( cmdline=['tools/run_tests/run_interops_test.sh', '%s' % args.language, '%s' % test], shortname=test, timeout_seconds=15*60) jobs.append(test_job) jobNumber+=1 root = ET.Element('testsuites') testsuite = ET.SubElement(root, 'testsuite', id='1', package='grpc', name='tests') # always do the build of docker first, and then all the tests can run in parallel jobset.run([build_job], maxjobs=1, xml_report=testsuite) jobset.run(jobs, maxjobs=jobNumber, xml_report=testsuite) tree = ET.ElementTree(root) tree.write('report.xml', encoding='UTF-8')
crast/grpc
tools/run_tests/run_interops.py
Python
bsd-3-clause
1,163
0.009458
# Copyright 2021 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import filecmp import os import tempfile import unittest from .preprocess_csv import create_formatted_csv_file from .preprocess_csv import create_tmcf_file # module_dir_ is the path to where this test is running from. module_dir_ = os.path.dirname(__file__) class TestPreprocessCsvTest(unittest.TestCase): def test_create_csv(self): with tempfile.TemporaryDirectory() as tmp_dir: f = os.path.join(module_dir_, 'test_data/test_data.csv') expected_csv_file = os.path.join( module_dir_, 'test_data/expected_formatted_data.csv') with open(f, "r") as f_in: result_csv_file = os.path.join(tmp_dir, 'OurWorldInData_Covid19.csv') create_formatted_csv_file(f_in, result_csv_file) with open(result_csv_file, "r") as result_f: result_str: str = result_f.read() with open(expected_csv_file, "r") as expect_f: expect_str: str = expect_f.read() self.assertEqual(result_str, expect_str) os.remove(result_csv_file) def test_create_tmcf(self): with tempfile.TemporaryDirectory() as tmp_dir: expected_tmcf_file = os.path.join( module_dir_, 'test_data/expected_covid19.tmcf') result_tmcf_file = os.path.join(tmp_dir, 'OurWorldInData_Covid19.tmcf') create_tmcf_file(result_tmcf_file) with open(result_tmcf_file, "r") as result_f: result_str: str = result_f.read() with open(expected_tmcf_file, "r") as expect_f: expect_str: str = expect_f.read() self.assertEqual(result_str, expect_str) os.remove(result_tmcf_file) if __name__ == '__main__': unittest.main()
datacommonsorg/data
scripts/ourworldindata/covid19/preprocess_csv_test.py
Python
apache-2.0
2,497
0
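The test above follows the golden-file pattern: generate output into a temporary directory, then compare it byte-for-byte against a checked-in expected file. A minimal self-contained sketch of the same pattern (the writer function and file names are hypothetical):

import os
import tempfile
import unittest


def write_report(path):
    with open(path, "w") as f:
        f.write("a,b\n1,2\n")


class GoldenFileTest(unittest.TestCase):

    def test_report_matches_golden(self):
        with tempfile.TemporaryDirectory() as tmp_dir:
            out = os.path.join(tmp_dir, "report.csv")
            write_report(out)
            with open(out, "r") as f:
                self.assertEqual(f.read(), "a,b\n1,2\n")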
import unittest import os from xml.etree.ElementTree import ParseError from programy.parser.aiml_parser import AIMLParser from programy.parser.exceptions import ParserException from programy.parser.pattern.nodes.root import PatternRootNode from programy.parser.pattern.nodes.topic import PatternTopicNode from programy.parser.pattern.nodes.that import PatternThatNode from programy.parser.pattern.nodes.word import PatternWordNode from programy.parser.pattern.nodes.oneormore import PatternOneOrMoreWildCardNode from programy.parser.pattern.nodes.template import PatternTemplateNode from programy.dialog import Sentence class AIMLParserTests(unittest.TestCase): def setUp(self): self.parser = AIMLParser(supress_warnings=True, stop_on_invalid=True) self.assertIsNotNone(self.parser) def test_parse_from_file_valid(self): filename = os.path.dirname(__file__)+ '/valid.aiml' self.parser.parse_from_file(filename) def test_parse_from_file_invalid(self): filename = os.path.dirname(__file__)+ '/invalid.aiml' self.parser.parse_from_file(filename) def test_crud(self): with self.assertRaises(ParseError) as raised: self.parser.parse_from_text( """Blah Blah Blah """) def test_no_aiml(self): with self.assertRaises(ParseError) as raised: self.parser.parse_from_text( """<?xml version="1.0" encoding="UTF-8"?> """) self.assertTrue(str(raised.exception).startswith("no element found:")) def test_no_content(self): with self.assertRaises(ParseError) as raised: self.parser.parse_from_text( """ """) self.assertTrue(str(raised.exception).startswith("no element found:")) def test_base_aiml_no_content(self): with self.assertRaises(ParserException) as raised: self.parser.parse_from_text( """<?xml version="1.0" encoding="UTF-8"?> <aiml> </aiml> """) self.assertEqual(raised.exception.message, "Error, no categories in aiml file") def test_base_aiml_topic_no_name(self): with self.assertRaises(ParserException) as raised: self.parser.parse_from_text( """<?xml version="1.0" encoding="UTF-8"?> <aiml> <topic> </topic> </aiml> """) self.assertEqual(raised.exception.message, "Error, missing name attribute for topic") def test_base_aiml_topic_no_category(self): with self.assertRaises(ParserException) as raised: self.parser.parse_from_text( """<?xml version="1.0" encoding="UTF-8"?> <aiml> <topic name="test"> </topic> </aiml> """) self.assertEqual(raised.exception.message, "Error, no categories in topic") def test_base_aiml_topic_category_no_content(self): with self.assertRaises(ParserException) as raised: self.parser.parse_from_text( """<?xml version="1.0" encoding="UTF-8"?> <aiml> <topic name="test"> <category> </category> </topic> </aiml> """) self.assertEqual(raised.exception.message, "Error, no template node found in category") def test_base_aiml_topic_at_multiple_levels(self): with self.assertRaises(ParserException) as raised: self.parser.parse_from_text( """<?xml version="1.0" encoding="UTF-8"?> <aiml> <topic name="test"> <category> <topic name="test2" /> <pattern>*</pattern> <template>RESPONSE</template> </category> </topic> </aiml> """) self.assertEqual(raised.exception.message, "Error, topic exists in category AND as parent node") def test_base_aiml_topic_category_no_template(self): with self.assertRaises(ParserException) as raised: self.parser.parse_from_text( """<?xml version="1.0" encoding="UTF-8"?> <aiml> <topic name="test"> <category> <pattern>*</pattern> </category> </topic> </aiml> """) self.assertEqual(raised.exception.message, "Error, no template node found in category") def test_base_aiml_category_no_content(self): with 
self.assertRaises(ParserException) as raised: self.parser.parse_from_text( """<?xml version="1.0" encoding="UTF-8"?> <aiml> <category> </category> </aiml> """) self.assertEqual(raised.exception.message, "Error, no template node found in category") def test_base_aiml_category_no_template(self): with self.assertRaises(ParserException) as raised: self.parser.parse_from_text( """<?xml version="1.0" encoding="UTF-8"?> <aiml> <category> <pattern>*</pattern> </category> </aiml> """) self.assertEqual(raised.exception.message, "Error, no template node found in category") def test_base_aiml_topic_empty_parent_node(self): with self.assertRaises(ParserException) as raised: self.parser.parse_from_text( """<?xml version="1.0" encoding="UTF-8"?> <aiml> <topic name=""> <category> <pattern>*</pattern> <template>RESPONSE</template> </category> </topic> </aiml> """) self.assertEqual(raised.exception.message, "Topic name empty or null") def test_base_aiml_topic_with_something_else(self): with self.assertRaises(ParserException) as raised: self.parser.parse_from_text( """<?xml version="1.0" encoding="UTF-8"?> <aiml> <topic name="test"> <xxxx> <pattern>*</pattern> <template>RESPONSE</template> </xxxx> </topic> </aiml> """) self.assertEqual(raised.exception.message, "Error unknown child node of topic, xxxx") def test_base_aiml_topic_empty_child_node1(self): with self.assertRaises(ParserException) as raised: self.parser.parse_from_text( """<?xml version="1.0" encoding="UTF-8"?> <aiml> <category> <topic name="" /> <pattern>*</pattern> <template>RESPONSE</template> </category> </aiml> """) self.assertEqual(raised.exception.message, "Topic node text is empty") def test_base_aiml_topic_empty_child_node2(self): with self.assertRaises(ParserException) as raised: self.parser.parse_from_text( """<?xml version="1.0" encoding="UTF-8"?> <aiml> <category> <topic></topic> <pattern>*</pattern> <template>RESPONSE</template> </category> </aiml> """) self.assertEqual(raised.exception.message, "Topic node text is empty") def test_base_aiml_that_empty_child_node(self): with self.assertRaises(ParserException) as raised: self.parser.parse_from_text( """<?xml version="1.0" encoding="UTF-8"?> <aiml> <category> <that></that> <pattern>*</pattern> <template>RESPONSE</template> </category> </aiml> """) self.assertEqual(raised.exception.message, "That node text is empty") def test_base_aiml_topic_category_template(self): self.parser.parse_from_text( """<?xml version="1.0" encoding="UTF-8"?> <aiml> <topic name="test"> <category> <pattern>*</pattern> <template>RESPONSE</template> </category> </topic> </aiml> """) self.assertIsNotNone(self.parser.pattern_parser) self.assertIsNotNone(self.parser.pattern_parser.root) self.assertIsInstance(self.parser.pattern_parser.root, PatternRootNode) self.assertTrue(self.parser.pattern_parser.root.has_one_or_more()) node = self.parser.pattern_parser.root.star self.assertIsNotNone(node) self.assertIsInstance(node, PatternOneOrMoreWildCardNode) self.assertEquals(node.wildcard, "*") topic = node.topic self.assertIsNotNone(topic) self.assertIsInstance(topic, PatternTopicNode) self.assertEqual(len(topic.children), 1) self.assertIsNotNone(topic.children[0]) self.assertIsInstance(topic.children[0], PatternWordNode) self.assertEqual(topic.children[0].word, "test") that = topic.children[0].that self.assertIsNotNone(that) self.assertIsInstance(that, PatternThatNode) self.assertTrue(that.has_one_or_more()) self.assertIsInstance(that.star, PatternOneOrMoreWildCardNode) self.assertEquals(that.star.wildcard, "*") template 
= that.star.template self.assertIsNotNone(template) self.assertIsInstance(template, PatternTemplateNode) self.assertEqual(template.template.resolve(bot=None, clientid="test"), "RESPONSE") def test_base_aiml_topic_category_template_multi_line(self): self.parser.parse_from_text( """<?xml version="1.0" encoding="UTF-8"?> <aiml> <topic name="test"> <category> <pattern>*</pattern> <template> RESPONSE1, RESPONSE2. RESPONSE3 </template> </category> </topic> </aiml> """) self.assertIsNotNone(self.parser.pattern_parser) self.assertIsNotNone(self.parser.pattern_parser.root) self.assertIsInstance(self.parser.pattern_parser.root, PatternRootNode) self.assertTrue(self.parser.pattern_parser.root.has_one_or_more()) node = self.parser.pattern_parser.root.star self.assertIsNotNone(node) self.assertIsInstance(node, PatternOneOrMoreWildCardNode) self.assertEquals(node.wildcard, "*") topic = node.topic self.assertIsNotNone(topic) self.assertIsInstance(topic, PatternTopicNode) self.assertEqual(len(topic.children), 1) self.assertIsNotNone(topic.children[0]) self.assertIsInstance(topic.children[0], PatternWordNode) self.assertEqual(topic.children[0].word, "test") that = topic.children[0].that self.assertIsNotNone(that) self.assertIsInstance(that, PatternThatNode) self.assertTrue(that.has_one_or_more()) self.assertIsInstance(that.star, PatternOneOrMoreWildCardNode) self.assertEquals(that.star.wildcard, "*") template = that.star.template self.assertIsNotNone(template) self.assertIsInstance(template, PatternTemplateNode) self.assertEqual(template.template.resolve(bot=None, clientid="test"), "RESPONSE1, RESPONSE2. RESPONSE3") def test_base_aiml_category_template(self): self.parser.parse_from_text( """<?xml version="1.0" encoding="UTF-8"?> <aiml> <category> <pattern>*</pattern> <template>RESPONSE</template> </category> </aiml> """) self.assertIsNotNone(self.parser.pattern_parser) self.assertIsNotNone(self.parser.pattern_parser.root) self.assertIsInstance(self.parser.pattern_parser.root, PatternRootNode) self.assertTrue(self.parser.pattern_parser.root.has_one_or_more()) node = self.parser.pattern_parser.root.star self.assertIsNotNone(node) self.assertIsInstance(node, PatternOneOrMoreWildCardNode) self.assertEquals(node.wildcard, "*") topic = node.topic self.assertIsNotNone(topic) self.assertIsInstance(topic, PatternTopicNode) self.assertTrue(topic.has_one_or_more()) self.assertIsInstance(topic.star, PatternOneOrMoreWildCardNode) self.assertEquals(topic.star.wildcard, "*") that = topic.star.that self.assertIsNotNone(that) self.assertIsInstance(that, PatternThatNode) self.assertTrue(that.has_one_or_more()) self.assertIsInstance(that.star, PatternOneOrMoreWildCardNode) self.assertEquals(that.star.wildcard, "*") template = that.star.template self.assertIsNotNone(template) self.assertIsInstance(template, PatternTemplateNode) self.assertEqual(template.template.resolve(bot=None, clientid="test"), "RESPONSE") def test_base_aiml_category_template_that(self): self.parser.parse_from_text( """<?xml version="1.0" encoding="UTF-8"?> <aiml> <category> <pattern>*</pattern> <that>something</that> <template>RESPONSE</template> </category> </aiml> """) self.assertIsNotNone(self.parser.pattern_parser) self.assertIsNotNone(self.parser.pattern_parser.root) self.assertIsInstance(self.parser.pattern_parser.root, PatternRootNode) self.assertTrue(self.parser.pattern_parser.root.has_one_or_more()) node = self.parser.pattern_parser.root.star self.assertIsNotNone(node) self.assertIsInstance(node, PatternOneOrMoreWildCardNode) 
self.assertEquals(node.wildcard, "*") topic = node.topic self.assertIsNotNone(topic) self.assertIsInstance(topic, PatternTopicNode) self.assertTrue(topic.has_one_or_more()) self.assertIsInstance(topic.star, PatternOneOrMoreWildCardNode) self.assertEquals(topic.star.wildcard, "*") that = topic.star.that self.assertIsNotNone(that) self.assertIsInstance(that, PatternThatNode) self.assertEqual(len(that.children), 1) self.assertIsNotNone(that.children[0]) self.assertIsInstance(that.children[0], PatternWordNode) self.assertEqual(that.children[0].word, "something") template = that.children[0].template self.assertIsNotNone(template) self.assertIsInstance(template, PatternTemplateNode) self.assertEqual(template.template.resolve(bot=None, clientid="test"), "RESPONSE") def test_base_aiml_category_template_topic(self): self.parser.parse_from_text( """<?xml version="1.0" encoding="UTF-8"?> <aiml> <category> <pattern>*</pattern> <topic>something</topic> <template>RESPONSE</template> </category> </aiml> """) self.assertIsNotNone(self.parser.pattern_parser) self.assertIsNotNone(self.parser.pattern_parser.root) self.assertIsInstance(self.parser.pattern_parser.root, PatternRootNode) self.assertTrue(self.parser.pattern_parser.root.has_one_or_more()) node = self.parser.pattern_parser.root.star self.assertIsNotNone(node) self.assertIsInstance(node, PatternOneOrMoreWildCardNode) self.assertEquals(node.wildcard, "*") topic = node.topic self.assertIsNotNone(topic) self.assertIsInstance(topic, PatternTopicNode) self.assertEqual(len(topic.children), 1) self.assertIsNotNone(topic.children[0]) self.assertIsInstance(topic.children[0], PatternWordNode) self.assertEqual(topic.children[0].word, "something") that = topic.children[0].that self.assertIsNotNone(that) self.assertIsInstance(that, PatternThatNode) self.assertTrue(that.has_one_or_more()) self.assertIsInstance(that.star, PatternOneOrMoreWildCardNode) self.assertEquals(that.star.wildcard, "*") template = that.star.template self.assertIsNotNone(template) self.assertIsInstance(template, PatternTemplateNode) self.assertEqual(template.template.resolve(bot=None, clientid="test"), "RESPONSE") def test_base_aiml_category_template_topic_that(self): self.parser.parse_from_text( """<?xml version="1.0" encoding="UTF-8"?> <aiml> <category> <pattern>*</pattern> <that>something</that> <topic>other</topic> <template>RESPONSE</template> </category> </aiml> """) self.assertIsNotNone(self.parser.pattern_parser) self.assertIsNotNone(self.parser.pattern_parser.root) self.assertIsInstance(self.parser.pattern_parser.root, PatternRootNode) self.assertTrue(self.parser.pattern_parser.root.has_one_or_more()) node = self.parser.pattern_parser.root.star self.assertIsNotNone(node) self.assertIsInstance(node, PatternOneOrMoreWildCardNode) self.assertEquals(node.wildcard, "*") topic = node.topic self.assertIsNotNone(topic) self.assertIsInstance(topic, PatternTopicNode) self.assertEqual(len(topic.children), 1) self.assertIsNotNone(topic.children[0]) self.assertIsInstance(topic.children[0], PatternWordNode) self.assertEqual(topic.children[0].word, "other") that = topic.children[0].that self.assertIsNotNone(that) self.assertIsInstance(that, PatternThatNode) self.assertEqual(len(that.children), 1) self.assertIsNotNone(that.children[0]) self.assertIsInstance(that.children[0], PatternWordNode) self.assertEqual(that.children[0].word, "something") template = that.children[0].template self.assertIsNotNone(template) self.assertIsInstance(template, PatternTemplateNode) 
self.assertEqual(template.template.resolve(bot=None, clientid="test"), "RESPONSE") def test_base_aiml_multiple_categories(self): self.parser.parse_from_text( """<?xml version="1.0" encoding="UTF-8"?> <aiml> <category> <pattern>Hello</pattern> <template>Hiya</template> </category> <category> <pattern>Goodbye</pattern> <template>See ya</template> </category> </aiml> """) self.assertIsNotNone(self.parser.pattern_parser) self.assertIsNotNone(self.parser.pattern_parser.root) self.assertIsInstance(self.parser.pattern_parser.root, PatternRootNode) self.assertEqual(2, len(self.parser.pattern_parser.root.children)) node = self.parser.pattern_parser.root.children[1] self.assertIsNotNone(node) self.assertIsInstance(node, PatternWordNode) self.assertEquals(node.word, "Hello") topic = node.topic self.assertIsNotNone(topic) self.assertIsInstance(topic, PatternTopicNode) self.assertTrue(topic.has_one_or_more()) self.assertIsInstance(topic.star, PatternOneOrMoreWildCardNode) self.assertEquals(topic.star.wildcard, "*") that = topic.star.that self.assertIsNotNone(that) self.assertIsInstance(that, PatternThatNode) self.assertTrue(that.has_one_or_more()) self.assertIsInstance(that.star, PatternOneOrMoreWildCardNode) self.assertEquals(that.star.wildcard, "*") node = self.parser.pattern_parser.root.children[0] self.assertIsNotNone(node) self.assertIsInstance(node, PatternWordNode) self.assertEquals(node.word, "Goodbye") topic = node.topic self.assertIsNotNone(topic) self.assertIsInstance(topic, PatternTopicNode) self.assertTrue(topic.has_one_or_more()) self.assertIsInstance(topic.star, PatternOneOrMoreWildCardNode) self.assertEquals(topic.star.wildcard, "*") that = topic.star.that self.assertIsNotNone(that) self.assertIsInstance(that, PatternThatNode) self.assertTrue(that.has_one_or_more()) self.assertIsInstance(that.star, PatternOneOrMoreWildCardNode) self.assertEquals(that.star.wildcard, "*") def test_base_aiml_multiple_categories_in_a_topic(self): self.parser.parse_from_text( """<?xml version="1.0" encoding="UTF-8"?> <aiml> <topic name="test"> <category> <pattern>Hello</pattern> <template>Hiya</template> </category> <category> <pattern>Goodbye</pattern> <template>See ya</template> </category> </topic> </aiml> """) self.assertIsNotNone(self.parser.pattern_parser.root) self.assertEqual(2, len(self.parser.pattern_parser.root.children)) node = self.parser.pattern_parser.root.children[1] self.assertIsNotNone(node) self.assertIsInstance(node, PatternWordNode) self.assertEquals(node.word, "Hello") topic = node.topic self.assertIsNotNone(topic) self.assertIsInstance(topic, PatternTopicNode) self.assertEqual(len(topic.children), 1) self.assertIsNotNone(topic.children[0]) self.assertIsInstance(topic.children[0], PatternWordNode) self.assertEqual(topic.children[0].word, "test") that = topic.children[0].that self.assertIsNotNone(that) self.assertIsInstance(that, PatternThatNode) self.assertTrue(that.has_one_or_more()) self.assertIsInstance(that.star, PatternOneOrMoreWildCardNode) self.assertEquals(that.star.wildcard, "*") node = self.parser.pattern_parser.root.children[0] self.assertIsNotNone(node) self.assertIsInstance(node, PatternWordNode) self.assertEquals(node.word, "Goodbye") topic = node.topic self.assertIsNotNone(topic) self.assertIsInstance(topic, PatternTopicNode) self.assertEqual(len(topic.children), 1) self.assertIsNotNone(topic.children[0]) self.assertIsInstance(topic.children[0], PatternWordNode) self.assertEqual(topic.children[0].word, "test") that = topic.children[0].that self.assertIsNotNone(that) 
self.assertIsInstance(that, PatternThatNode) self.assertTrue(that.has_one_or_more()) self.assertIsInstance(that.star, PatternOneOrMoreWildCardNode) self.assertEquals(that.star.wildcard, "*") def test_base_aiml_multiple_categories_in_and_out_of_topic(self): self.parser.parse_from_text( """<?xml version="1.0" encoding="UTF-8"?> <aiml> <category> <pattern>Welcome</pattern> <template>Hello there</template> </category> <topic name="test"> <category> <pattern>Hello</pattern> <template>Hiya</template> </category> <category> <pattern>Goodbye</pattern> <template>See ya</template> </category> </topic> <category> <pattern>Interesting</pattern> <template>Yes</template> </category> </aiml> """) self.assertIsNotNone(self.parser.pattern_parser.root) self.assertEqual(4, len(self.parser.pattern_parser.root.children)) node1 = self.parser.pattern_parser.root.children[0] self.assertIsNotNone(node1) self.assertIsInstance(node1, PatternWordNode) self.assertEquals(node1.word, "Interesting") topic = node1.topic self.assertIsNotNone(topic) self.assertIsInstance(topic, PatternTopicNode) self.assertTrue(topic.has_one_or_more()) self.assertIsInstance(topic.star, PatternOneOrMoreWildCardNode) self.assertEquals(topic.star.wildcard, "*") that = topic.star.that self.assertIsNotNone(that) self.assertIsInstance(that, PatternThatNode) self.assertTrue(that.has_one_or_more()) self.assertIsInstance(that.star, PatternOneOrMoreWildCardNode) self.assertEquals(that.star.wildcard, "*") node2 = self.parser.pattern_parser.root.children[1] self.assertIsNotNone(node2) self.assertIsInstance(node2, PatternWordNode) self.assertEquals(node2.word, "Goodbye") topic = node2.topic self.assertIsNotNone(topic) self.assertIsInstance(topic, PatternTopicNode) self.assertEqual(len(topic.children), 1) self.assertIsNotNone(topic.children[0]) self.assertIsInstance(topic.children[0], PatternWordNode) self.assertEqual(topic.children[0].word, "test") that = topic.children[0].that self.assertIsNotNone(that) self.assertIsInstance(that, PatternThatNode) self.assertTrue(that.has_one_or_more()) self.assertIsInstance(that.star, PatternOneOrMoreWildCardNode) self.assertEquals(that.star.wildcard, "*") node3 = self.parser.pattern_parser.root.children[2] self.assertIsNotNone(node3) self.assertIsInstance(node3, PatternWordNode) self.assertEquals(node3.word, "Hello") topic = node3.topic self.assertIsNotNone(topic) self.assertIsInstance(topic, PatternTopicNode) self.assertEqual(len(topic.children), 1) self.assertIsNotNone(topic.children[0]) self.assertIsInstance(topic.children[0], PatternWordNode) self.assertEqual(topic.children[0].word, "test") that = topic.children[0].that self.assertIsNotNone(that) self.assertIsInstance(that, PatternThatNode) self.assertTrue(that.has_one_or_more()) self.assertIsInstance(that.star, PatternOneOrMoreWildCardNode) self.assertEquals(that.star.wildcard, "*") node4 = self.parser.pattern_parser.root.children[3] self.assertIsNotNone(node4) self.assertIsInstance(node4, PatternWordNode) self.assertEquals(node4.word, "Welcome") topic = node4.topic self.assertIsNotNone(topic) self.assertIsInstance(topic, PatternTopicNode) self.assertTrue(topic.has_one_or_more()) self.assertIsInstance(topic.star, PatternOneOrMoreWildCardNode) self.assertEquals(topic.star.wildcard, "*") that = topic.star.that self.assertIsNotNone(that) self.assertIsInstance(that, PatternThatNode) self.assertTrue(that.has_one_or_more()) self.assertIsInstance(that.star, PatternOneOrMoreWildCardNode) self.assertEquals(that.star.wildcard, "*") def test_match_sentence(self): 
self.parser.parse_from_text( """<?xml version="1.0" encoding="UTF-8"?> <aiml> <category> <pattern>HELLO</pattern> <template>Hiya</template> </category> </aiml> """) self.parser.pattern_parser.dump() context = self.parser.match_sentence(None, "test", Sentence("HELLO"), "*", "*") self.assertIsNotNone(context) self.assertEqual("Hiya", context.template_node().template.resolve(None, None)) def test_inline_br_html(self): self.parser.parse_from_text( """<?xml version="1.0" encoding="UTF-8"?> <aiml> <category> <pattern>HELLO</pattern> <template>Hello <br/> World</template> </category> </aiml> """) def test_inline_bold_html(self): self.parser.parse_from_text( """<?xml version="1.0" encoding="UTF-8"?> <aiml> <category> <pattern>HELLO</pattern> <template>Hello <bold>You</bold> World</template> </category> </aiml> """) def test_iset(self): self.parser.parse_from_text( """<?xml version="1.0" encoding="UTF-8"?> <aiml> <category> <pattern>Hello</pattern> <template>Hi There</template> </category> <category> <pattern># <iset>who, what</iset> are you</pattern> <template>OK thanks</template> </category> <category> <pattern># <iset>who, what</iset> is he</pattern> <template>OK thanks</template> </category> </aiml> """) if __name__ == '__main__': unittest.main()
dkamotsky/program-y
src/test/parser/test_aiml_parser.py
Python
mit
30,276
0.000727
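Distilled from the tests above, a short usage sketch of the parser API they exercise (reusing the AIMLParser and Sentence imports from the test module): parse one category, then match a sentence against it.

parser = AIMLParser(supress_warnings=True, stop_on_invalid=True)
parser.parse_from_text(
    """<?xml version="1.0" encoding="UTF-8"?>
    <aiml>
        <category>
            <pattern>HELLO</pattern>
            <template>Hiya</template>
        </category>
    </aiml>
    """)
context = parser.match_sentence(None, "test", Sentence("HELLO"), "*", "*")
print(context.template_node().template.resolve(None, None))  # -> Hiya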
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import argparse
import logging
import json
import os
import traceback
import time
import datetime
from socket import AddressFamily

try:
    import psutil
except ImportError:
    print("The reporter requires psutil to run.")
    import sys
    sys.exit(1)

import ray.gcs_utils
import ray.ray_constants as ray_constants
import ray.services
import ray.utils

# Logger for this module. It should be configured at the entry point
# into the program using Ray. Ray provides a default configuration at
# entry/init points.
logger = logging.getLogger(__name__)


def recursive_asdict(o):
    if isinstance(o, tuple) and hasattr(o, "_asdict"):
        return recursive_asdict(o._asdict())

    if isinstance(o, (tuple, list)):
        L = []
        for k in o:
            L.append(recursive_asdict(k))
        return L

    if isinstance(o, dict):
        D = {k: recursive_asdict(v) for k, v in o.items()}
        return D

    return o


def jsonify_asdict(o):
    return json.dumps(recursive_asdict(o))


def is_worker(cmdline):
    return cmdline and cmdline[0].startswith("ray_")


def determine_ip_address():
    """Return the first IP address for an ethernet interface on the system."""
    addrs = [
        x.address for k, v in psutil.net_if_addrs().items() if k[0] == "e"
        for x in v if x.family == AddressFamily.AF_INET
    ]
    return addrs[0]


def to_posix_time(dt):
    return (dt - datetime.datetime(1970, 1, 1)).total_seconds()


class Reporter(object):
    """A monitor process for monitoring Ray nodes.

    Attributes:
        hostname (str): The hostname of this machine. Used to improve the log
            messages published to Redis.
        redis_client: A client used to communicate with the Redis server.
    """

    def __init__(self, redis_address, redis_password=None):
        """Initialize the reporter object."""
        self.cpu_counts = (psutil.cpu_count(), psutil.cpu_count(logical=False))
        self.ip_addr = determine_ip_address()
        self.hostname = os.uname().nodename

        _ = psutil.cpu_percent()  # For initialization

        self.redis_key = "{}.{}".format(ray.gcs_utils.REPORTER_CHANNEL,
                                        self.hostname)
        self.redis_client = ray.services.create_redis_client(
            redis_address, password=redis_password)

        self.network_stats_hist = [(0, (0.0, 0.0))]  # time, (sent, recv)

    @staticmethod
    def get_cpu_percent():
        return psutil.cpu_percent()

    @staticmethod
    def get_boot_time():
        return psutil.boot_time()

    @staticmethod
    def get_network_stats():
        ifaces = [
            v for k, v in psutil.net_io_counters(pernic=True).items()
            if k[0] == "e"
        ]

        sent = sum((iface.bytes_sent for iface in ifaces))
        recv = sum((iface.bytes_recv for iface in ifaces))
        return sent, recv

    @staticmethod
    def get_mem_usage():
        vm = psutil.virtual_memory()
        return vm.total, vm.available, vm.percent

    @staticmethod
    def get_disk_usage():
        return {x: psutil.disk_usage(x) for x in ["/", "/tmp"]}

    @staticmethod
    def get_workers():
        return [
            x.as_dict(attrs=[
                "pid", "create_time", "cpu_percent", "cpu_times", "name",
                "cmdline", "memory_info", "memory_full_info"
            ]) for x in psutil.process_iter(attrs=["cmdline"])
            if is_worker(x.info["cmdline"])
        ]

    def get_load_avg(self):
        load = os.getloadavg()
        per_cpu_load = tuple((round(x / self.cpu_counts[0], 2) for x in load))
        return load, per_cpu_load

    def get_all_stats(self):
        now = to_posix_time(datetime.datetime.utcnow())
        network_stats = self.get_network_stats()

        self.network_stats_hist.append((now, network_stats))
        self.network_stats_hist = self.network_stats_hist[-7:]
        then, prev_network_stats = self.network_stats_hist[0]
        netstats = ((network_stats[0] - prev_network_stats[0]) / (now - then),
                    (network_stats[1] - prev_network_stats[1]) / (now - then))

        return {
            "now": now,
            "hostname": self.hostname,
"ip": self.ip_addr, "cpu": self.get_cpu_percent(), "cpus": self.cpu_counts, "mem": self.get_mem_usage(), "workers": self.get_workers(), "boot_time": self.get_boot_time(), "load_avg": self.get_load_avg(), "disk": self.get_disk_usage(), "net": netstats, } def perform_iteration(self): """Get any changes to the log files and push updates to Redis.""" stats = self.get_all_stats() self.redis_client.publish( self.redis_key, jsonify_asdict(stats), ) def run(self): """Run the reporter.""" while True: try: self.perform_iteration() except Exception: traceback.print_exc() pass time.sleep(ray_constants.REPORTER_UPDATE_INTERVAL_MS / 1000) if __name__ == "__main__": parser = argparse.ArgumentParser( description=("Parse Redis server for the " "reporter to connect to.")) parser.add_argument( "--redis-address", required=True, type=str, help="The address to use for Redis.") parser.add_argument( "--redis-password", required=False, type=str, default=None, help="the password to use for Redis") parser.add_argument( "--logging-level", required=False, type=str, default=ray_constants.LOGGER_LEVEL, choices=ray_constants.LOGGER_LEVEL_CHOICES, help=ray_constants.LOGGER_LEVEL_HELP) parser.add_argument( "--logging-format", required=False, type=str, default=ray_constants.LOGGER_FORMAT, help=ray_constants.LOGGER_FORMAT_HELP) args = parser.parse_args() ray.utils.setup_logger(args.logging_level, args.logging_format) reporter = Reporter(args.redis_address, redis_password=args.redis_password) try: reporter.run() except Exception as e: # Something went wrong, so push an error to all drivers. redis_client = ray.services.create_redis_client( args.redis_address, password=args.redis_password) traceback_str = ray.utils.format_error_message(traceback.format_exc()) message = ("The reporter on node {} failed with the following " "error:\n{}".format(os.uname()[1], traceback_str)) ray.utils.push_error_to_driver_through_redis( redis_client, ray_constants.REPORTER_DIED_ERROR, message) raise e
ujvl/ray-ng
python/ray/reporter.py
Python
apache-2.0
6,847
0
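get_all_stats() above derives network throughput from a sliding window of at most seven (timestamp, (sent, recv)) samples, dividing the byte deltas by the elapsed time against the oldest retained sample. A standalone sketch of that windowed-rate math, with invented sample values:

history = [(0.0, (0.0, 0.0))]  # (time, (bytes_sent, bytes_recv))


def window_rate(now, sent, recv, keep=7):
    history.append((now, (sent, recv)))
    del history[:-keep]                 # keep only the last `keep` samples
    then, (sent0, recv0) = history[0]
    elapsed = now - then
    return (sent - sent0) / elapsed, (recv - recv0) / elapsed


print(window_rate(1.0, 1000.0, 2000.0))  # -> (1000.0, 2000.0) bytes/sec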
def __bootstrap__(): global __bootstrap__, __loader__, __file__ import sys, pkg_resources, imp __file__ = pkg_resources.resource_filename(__name__,'_seqmapping.so') __loader__ = None; del __bootstrap__, __loader__ imp.load_dynamic(__name__,__file__) __bootstrap__()
mikel-egana-aranguren/SADI-Galaxy-Docker
galaxy-dist/eggs/bx_python-0.7.2-py2.7-linux-x86_64-ucs4.egg/bx/_seqmapping.py
Python
gpl-3.0
281
0.035587
#!/usr/bin/env python import sys import re import getopt from typing import List, Tuple from feed_maker_util import IO def main(): num_of_recent_feeds = 1000 optlist, _ = getopt.getopt(sys.argv[1:], "n:") for o, a in optlist: if o == '-n': num_of_recent_feeds = int(a) line_list = IO.read_stdin_as_line_list() result_list: List[Tuple[str, str]] = [] for line in line_list: m = re.search(r'a href="(?P<link>[^"]+)"[^>]*title="(?P<title>[^"]+)"', line) if m: link = m.group("link") title = m.group("title") result_list.append((link, title)) for (link, title) in result_list[:num_of_recent_feeds]: print("%s\t%s" % (link, title)) if __name__ == "__main__": sys.exit(main())
terzeron/FeedMakerApplications
study/_javabeat/capture_item_link_title.py
Python
gpl-2.0
791
0.001264
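For reference, the link/title regex above exercised on a hypothetical input line (the URL and title are invented for illustration):

import re

line = '<a href="https://example.com/post" class="c" title="A Post">read</a>'
m = re.search(r'a href="(?P<link>[^"]+)"[^>]*title="(?P<title>[^"]+)"', line)
if m:
    print("%s\t%s" % (m.group("link"), m.group("title")))
# -> https://example.com/post	A Post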
from django.conf import settings from django.contrib.auth.models import User from django.db import connection from django.test import TestCase from brabeion import badges from brabeion.base import Badge, BadgeAwarded from brabeion.tests.models import PlayerStat class PointsBadge(Badge): slug = "points" levels = [ "Bronze", "Silver", "Gold", ] events = [ "points_awarded", ] multiple = False def award(self, **state): user = state["user"] points = user.stats.points if points > 10000: return BadgeAwarded(3) elif points > 7500: return BadgeAwarded(2) elif points > 5000: return BadgeAwarded(1) badges.register(PointsBadge) class BaseTestCase(TestCase): def assert_num_queries(self, n, func): current_debug = settings.DEBUG settings.DEBUG = True current = len(connection.queries) func() self.assertEqual(current + n, len(connection.queries), connection.queries[current:]) settings.DEBUG = current_debug class BadgesTests(BaseTestCase): def test_award(self): u = User.objects.create_user("Lars Bak", "lars@hotspot.com", "x864lyfe") PlayerStat.objects.create(user=u) badges.possibly_award_badge("points_awarded", user=u) self.assertEqual(u.badges_earned.count(), 0) u.stats.points += 5001 u.stats.save() badges.possibly_award_badge("points_awarded", user=u) self.assertEqual(u.badges_earned.count(), 1) self.assertEqual(u.badges_earned.all()[0].badge.name, "Bronze") badges.possibly_award_badge("points_awarded", user=u) self.assertEqual(u.badges_earned.count(), 1) u.stats.points += 2500 badges.possibly_award_badge("points_awarded", user=u) self.assertEqual(u.badges_earned.count(), 2) def test_lazy_user(self): u = User.objects.create_user("Lars Bak", "lars@hotspot.com", "x864lyfe") PlayerStat.objects.create(user=u, points=5001) badges.possibly_award_badge("points_awarded", user=u) self.assertEqual(u.badges_earned.count(), 1) self.assert_num_queries(1, lambda: u.badges_earned.get().badge)
kinsights/brabeion
brabeion/tests/tests.py
Python
bsd-3-clause
2,258
0.001329
# This file is part of wger Workout Manager. # # wger Workout Manager is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # wger Workout Manager is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with Workout Manager. If not, see <http://www.gnu.org/licenses/>. # wger from wger.core.models import RepetitionUnit from wger.core.tests import api_base_test from wger.core.tests.base_testcase import ( WorkoutManagerAccessTestCase, WorkoutManagerAddTestCase, WorkoutManagerDeleteTestCase, WorkoutManagerEditTestCase, WorkoutManagerTestCase ) class RepresentationTestCase(WorkoutManagerTestCase): ''' Test the representation of a model ''' def test_representation(self): ''' Test that the representation of an object is correct ''' self.assertEqual("{0}".format(RepetitionUnit.objects.get(pk=1)), 'Repetitions') class OverviewTest(WorkoutManagerAccessTestCase): ''' Tests the settings unit overview page ''' url = 'core:repetition-unit:list' anonymous_fail = True class AddTestCase(WorkoutManagerAddTestCase): ''' Tests adding a new unit ''' object_class = RepetitionUnit url = 'core:repetition-unit:add' data = {'name': 'Furlongs'} user_success = 'admin', user_fail = ('general_manager1', 'general_manager2', 'member1', 'member2', 'trainer2', 'trainer3', 'trainer4', 'manager3') class DeleteTestCase(WorkoutManagerDeleteTestCase): ''' Tests deleting a unit ''' pk = 1 object_class = RepetitionUnit url = 'core:repetition-unit:delete' user_success = 'admin', user_fail = ('general_manager1', 'general_manager2', 'member1', 'member2', 'trainer2', 'trainer3', 'trainer4', 'manager3') class EditTestCase(WorkoutManagerEditTestCase): ''' Tests editing a unit ''' pk = 1 object_class = RepetitionUnit url = 'core:repetition-unit:edit' data = {'name': 'Furlongs'} user_success = 'admin', user_fail = ('general_manager1', 'general_manager2', 'member1', 'member2', 'trainer2', 'trainer3', 'trainer4', 'manager3') class ApiTestCase(api_base_test.ApiBaseResourceTestCase): ''' Tests the unit resource ''' pk = 1 resource = RepetitionUnit private_resource = False def get_resource_name(self): return 'setting-repetitionunit'
petervanderdoes/wger
wger/core/tests/test_repetition_unit.py
Python
agpl-3.0
3,162
0.000316
# -*- coding: utf-8 -*- import simplejson from django.core.management.base import BaseCommand from django.contrib.gis.geos import MultiPolygon, Polygon from ...models import State class Command(BaseCommand): args = 'filename' help = 'Import states from a GeoJSON file' def handle(self, *args, **options): for filename in args: data_json = open(filename, 'r').read() data = simplejson.loads(data_json) for feature in data['features']: state = State( name=feature['properties'].get('name'), code=feature['properties'].get('code'), ) if feature['geometry'].get('type') == 'MultiPolygon': state.geom = MultiPolygon( [Polygon(poly) for poly in feature['geometry'].get('coordinates')[0]] ) else: state.geom = MultiPolygon(Polygon(feature['geometry'].get('coordinates')[0])) state.save()
ibamacsr/routes_registry_api
routes_registry_api/routes/management/commands/importstates.py
Python
agpl-3.0
1,055
0.002844
from multiprocessing import Pool import h5py as hdf import numpy as np from calc_cluster_props import * from data_handler import mkTruth, mkHalo import os import sys class AsyncFactory: def __init__(self, func, cb_func): self.func = func self.cb_func = cb_func self.pool = Pool() def call(self,*args, **kwargs): self.pool.apply_async(self.func, args, kwargs, self.cb_func) def wait(self): self.pool.close() self.pool.join() def worker(pos, data, center, tZ): #print "IN:PID: %d \t Value: %d" % (os.getpid(), pos) data = updateArray(data) #data = findClusterRedshift(data) data['CLUSZ'] = tZ data = findSeperationSpatial(data, center) data = findLOSV(data) # make initial cuts mask = abs(data['LOSV']) < 5000 data = data[mask] while True: try: if size == data.size: break except NameError: pass size = data.size #print 'size', data.size #data = rejectInterlopers(data) try: x = shifty_gapper(data['SEP'], data['Z'], tZ, ngap=15, glimit=500) data = data[x] except: break #data = findLOSVD(data) data = findLOSVDgmm(data) data['LOSVD'] = data['LOSVDgmm'] data = findR200(data) mask = data['SEP'] < data['R200'][0] data = data[mask] data = findClusterRedshift(data) data = findSeperationSpatial(data, center) #data = findLOSVDgmm(data) data = calc_mass_Evrard(data, A1D = 1177, alpha = 0.364) #print "OUT:PID: %d \t Value: %d" % (os.getpid(), pos) return pos, data def cb_func((pos, data)): if pos % 1000 == 0: print pos results['IDX'][pos] = pos results['CLUSZ'][pos] = data['CLUSZ'][0] results['LOSVD'][pos] = data['LOSVD'][0] results['LOSVDgmm'][pos] = data['LOSVDgmm'][0] results['MASS'][pos] = data['MASS'][0] results['R200'][pos] = data['R200'][0] results['NGAL'][pos] = data.size if __name__ == "__main__": async_worker = AsyncFactory(worker, cb_func) halo = mkHalo() truth = mkTruth() mask = truth['g'] < 23. truth = truth[mask] mask = (halo['m200c']/0.72 >= 1e13) & (halo['upid'] == -1) maskedHalo = halo[mask] hids, uniqueIdx = np.unique(maskedHalo['id'], return_index=True) # limit the cases print sys.argv[1], sys.argv[2] if int(sys.argv[1]) == 0: hids = hids[:int(sys.argv[2])] uniqueIdx = uniqueIdx[:int(sys.argv[2])] elif int(sys.argv[2]) == 9: hids = hids[int(sys.argv[1]):] uniqueIdx = uniqueIdx[int(sys.argv[1]):] else: hids = hids[int(sys.argv[1]):int(sys.argv[2])] uniqueIdx = uniqueIdx[int(sys.argv[1]):int(sys.argv[2])] # make the results container results = np.zeros((hids.size,), dtype=[('IDX', '>i4'), ('HALOID', '>i8'), ('ZSPEC', '>f4'), ('VRMS', '>f4'), ('M200c', '>f4'), ('RVIR', '>f4'), ('CLUSZ', '>f4'), ('LOSVD', '>f4'), ('LOSVDgmm', '>f4'), ('MASS', '>f4'), ('R200', '>f4'), ('NGAL', '>i4')]) results['HALOID'] = hids # now we have to make some initial cuts and then make final spatial cuts for i, SH in enumerate(hids): center = (maskedHalo['ra'][uniqueIdx[i]], maskedHalo['dec'][uniqueIdx[i]]) raMask = (center[0] - 0.5 < truth['RA']) & (truth['RA'] < center[0] + 0.5) decMask = (center[1] - 0.5 < truth['DEC']) & (truth['DEC'] < center[1] + 0.5) async_worker.call(i, truth[raMask & decMask], center, maskedHalo['zspec'][uniqueIdx[i]]) results['ZSPEC'][i] = maskedHalo['zspec'][uniqueIdx[i]] results['VRMS'][i] = maskedHalo['vrms'][uniqueIdx[i]]/np.sqrt(3) results['M200c'][i] = maskedHalo['m200c'][uniqueIdx[i]]/0.72 results['RVIR'][i] = maskedHalo['rvir'][uniqueIdx[i]]/0.72 async_worker.wait() with hdf.File('result_targetedRealistic'+str(os.environ['LSB_JOBID'])+'.hdf5', 'w') as f: f['result_targetedRealistic'] = results
boada/desCluster
mkTargeted/legacy/targetedRealistic_async.py
Python
mit
4,107
0.006331
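The AsyncFactory wrapper above is the standard multiprocessing fan-out pattern: submit work with apply_async, gather results in the parent process through a callback, then close and join the pool. A minimal self-contained version of the same pattern, with a toy work function:

from multiprocessing import Pool

results = {}


def work(pos):
    return pos, pos * pos


def collect(result):
    # Runs in the parent process as each task completes.
    pos, value = result
    results[pos] = value


if __name__ == "__main__":
    pool = Pool()
    for i in range(8):
        pool.apply_async(work, (i,), callback=collect)
    pool.close()
    pool.join()
    print(results)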
from fontTools.misc.py23 import strjoin, tobytes, tostr from . import asciiTable class table_T_S_I_V_(asciiTable.asciiTable): def toXML(self, writer, ttFont): data = tostr(self.data) # removing null bytes. XXX needed?? data = data.split('\0') data = strjoin(data) writer.begintag("source") writer.newline() writer.write_noindent(data.replace("\r", "\n")) writer.newline() writer.endtag("source") writer.newline() def fromXML(self, name, attrs, content, ttFont): lines = strjoin(content).split("\n") self.data = tobytes("\r".join(lines[1:-1]))
google/material-design-icons
update/venv/lib/python3.9/site-packages/fontTools/ttLib/tables/T_S_I_V_.py
Python
apache-2.0
572
0.026224
# Copyright 2015 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_serialization import jsonutils import six from nova.db.sqlalchemy import api as db from nova.db.sqlalchemy import api_models from nova import exception from nova import objects from nova.objects import base from nova.objects import fields from nova.objects import instance as obj_instance from nova.virt import hardware @base.NovaObjectRegistry.register class RequestSpec(base.NovaObject): # Version 1.0: Initial version # Version 1.1: ImageMeta version 1.6 # Version 1.2: SchedulerRetries version 1.1 # Version 1.3: InstanceGroup version 1.10 # Version 1.4: ImageMeta version 1.7 # Version 1.5: Added get_by_instance_uuid(), create(), save() VERSION = '1.5' fields = { 'id': fields.IntegerField(), 'image': fields.ObjectField('ImageMeta', nullable=True), 'numa_topology': fields.ObjectField('InstanceNUMATopology', nullable=True), 'pci_requests': fields.ObjectField('InstancePCIRequests', nullable=True), 'project_id': fields.StringField(nullable=True), 'availability_zone': fields.StringField(nullable=True), 'flavor': fields.ObjectField('Flavor', nullable=False), 'num_instances': fields.IntegerField(default=1), 'ignore_hosts': fields.ListOfStringsField(nullable=True), 'force_hosts': fields.ListOfStringsField(nullable=True), 'force_nodes': fields.ListOfStringsField(nullable=True), 'retry': fields.ObjectField('SchedulerRetries', nullable=True), 'limits': fields.ObjectField('SchedulerLimits', nullable=True), 'instance_group': fields.ObjectField('InstanceGroup', nullable=True), # NOTE(sbauza): Since hints are depending on running filters, we prefer # to leave the API correctly validating the hints per the filters and # just provide to the RequestSpec object a free-form dictionary 'scheduler_hints': fields.DictOfListOfStringsField(nullable=True), 'instance_uuid': fields.UUIDField(), } @property def vcpus(self): return self.flavor.vcpus @property def memory_mb(self): return self.flavor.memory_mb @property def root_gb(self): return self.flavor.root_gb @property def ephemeral_gb(self): return self.flavor.ephemeral_gb @property def swap(self): return self.flavor.swap def _image_meta_from_image(self, image): if isinstance(image, objects.ImageMeta): self.image = image elif isinstance(image, dict): # NOTE(sbauza): Until Nova is fully providing an ImageMeta object # for getting properties, we still need to hydrate it here # TODO(sbauza): To be removed once all RequestSpec hydrations are # done on the conductor side and if the image is an ImageMeta self.image = objects.ImageMeta.from_dict(image) else: self.image = None def _from_instance(self, instance): if isinstance(instance, obj_instance.Instance): # NOTE(sbauza): Instance should normally be a NovaObject... getter = getattr elif isinstance(instance, dict): # NOTE(sbauza): ... 
but there are some cases where request_spec # has an instance key as a dictionary, just because # select_destinations() is getting a request_spec dict made by # sched_utils.build_request_spec() # TODO(sbauza): To be removed once all RequestSpec hydrations are # done on the conductor side getter = lambda x, y: x.get(y) else: # If the instance is None, there is no reason to set the fields return instance_fields = ['numa_topology', 'pci_requests', 'uuid', 'project_id', 'availability_zone'] for field in instance_fields: if field == 'uuid': setattr(self, 'instance_uuid', getter(instance, field)) elif field == 'pci_requests': self._from_instance_pci_requests(getter(instance, field)) elif field == 'numa_topology': self._from_instance_numa_topology(getter(instance, field)) else: setattr(self, field, getter(instance, field)) def _from_instance_pci_requests(self, pci_requests): if isinstance(pci_requests, dict): pci_req_cls = objects.InstancePCIRequests self.pci_requests = pci_req_cls.from_request_spec_instance_props( pci_requests) else: self.pci_requests = pci_requests def _from_instance_numa_topology(self, numa_topology): if isinstance(numa_topology, dict): self.numa_topology = hardware.instance_topology_from_instance( dict(numa_topology=numa_topology)) else: self.numa_topology = numa_topology def _from_flavor(self, flavor): if isinstance(flavor, objects.Flavor): self.flavor = flavor elif isinstance(flavor, dict): # NOTE(sbauza): Again, request_spec is primitived by # sched_utils.build_request_spec() and passed to # select_destinations() like this # TODO(sbauza): To be removed once all RequestSpec hydrations are # done on the conductor side self.flavor = objects.Flavor(**flavor) def _from_retry(self, retry_dict): self.retry = (SchedulerRetries.from_dict(self._context, retry_dict) if retry_dict else None) def _populate_group_info(self, filter_properties): if filter_properties.get('instance_group'): # New-style group information as a NovaObject, we can directly set # the field self.instance_group = filter_properties.get('instance_group') elif filter_properties.get('group_updated') is True: # Old-style group information having ugly dict keys containing sets # NOTE(sbauza): Can be dropped once select_destinations is removed policies = list(filter_properties.get('group_policies')) hosts = list(filter_properties.get('group_hosts')) members = list(filter_properties.get('group_members')) self.instance_group = objects.InstanceGroup(policies=policies, hosts=hosts, members=members) # hosts has to be not part of the updates for saving the object self.instance_group.obj_reset_changes(['hosts']) else: # Set the value anyway to avoid any call to obj_attr_is_set for it self.instance_group = None def _from_limits(self, limits_dict): self.limits = SchedulerLimits.from_dict(limits_dict) def _from_hints(self, hints_dict): if hints_dict is None: self.scheduler_hints = None return self.scheduler_hints = { hint: value if isinstance(value, list) else [value] for hint, value in six.iteritems(hints_dict)} @classmethod def from_primitives(cls, context, request_spec, filter_properties): """Returns a new RequestSpec object by hydrating it from legacy dicts. Deprecated. A RequestSpec object is created early in the boot process using the from_components method. That object will either be passed to places that require it, or it can be looked up with get_by_instance_uuid. This method can be removed when there are no longer any callers. Because the method is not remotable it is not tied to object versioning. 
That helper is not intended to leave the legacy dicts kept in the nova codebase, but is rather just for giving a temporary solution for populating the Spec object until we get rid of scheduler_utils' build_request_spec() and the filter_properties hydratation in the conductor. :param context: a context object :param request_spec: An old-style request_spec dictionary :param filter_properties: An old-style filter_properties dictionary """ num_instances = request_spec.get('num_instances', 1) spec = cls(context, num_instances=num_instances) # Hydrate from request_spec first image = request_spec.get('image') spec._image_meta_from_image(image) instance = request_spec.get('instance_properties') spec._from_instance(instance) flavor = request_spec.get('instance_type') spec._from_flavor(flavor) # Hydrate now from filter_properties spec.ignore_hosts = filter_properties.get('ignore_hosts') spec.force_hosts = filter_properties.get('force_hosts') spec.force_nodes = filter_properties.get('force_nodes') retry = filter_properties.get('retry', {}) spec._from_retry(retry) limits = filter_properties.get('limits', {}) spec._from_limits(limits) spec._populate_group_info(filter_properties) scheduler_hints = filter_properties.get('scheduler_hints', {}) spec._from_hints(scheduler_hints) return spec def get_scheduler_hint(self, hint_name, default=None): """Convenient helper for accessing a particular scheduler hint since it is hydrated by putting a single item into a list. In order to reduce the complexity, that helper returns a string if the requested hint is a list of only one value, and if not, returns the value directly (ie. the list). If the hint is not existing (or scheduler_hints is None), then it returns the default value. :param hint_name: name of the hint :param default: the default value if the hint is not there """ if (not self.obj_attr_is_set('scheduler_hints') or self.scheduler_hints is None): return default hint_val = self.scheduler_hints.get(hint_name, default) return (hint_val[0] if isinstance(hint_val, list) and len(hint_val) == 1 else hint_val) def _to_legacy_image(self): return base.obj_to_primitive(self.image) if ( self.obj_attr_is_set('image') and self.image) else {} def _to_legacy_instance(self): # NOTE(sbauza): Since the RequestSpec only persists a few Instance # fields, we can only return a dict. instance = {} instance_fields = ['numa_topology', 'pci_requests', 'project_id', 'availability_zone', 'instance_uuid'] for field in instance_fields: if not self.obj_attr_is_set(field): continue if field == 'instance_uuid': instance['uuid'] = getattr(self, field) else: instance[field] = getattr(self, field) flavor_fields = ['root_gb', 'ephemeral_gb', 'memory_mb', 'vcpus'] if not self.obj_attr_is_set('flavor'): return instance for field in flavor_fields: instance[field] = getattr(self.flavor, field) return instance def _to_legacy_group_info(self): # NOTE(sbauza): Since this is only needed until the AffinityFilters are # modified by using directly the RequestSpec object, we need to keep # the existing dictionary as a primitive. return {'group_updated': True, 'group_hosts': set(self.instance_group.hosts), 'group_policies': set(self.instance_group.policies)} def to_legacy_request_spec_dict(self): """Returns a legacy request_spec dict from the RequestSpec object. 
Since we need to manage backwards compatibility and rolling upgrades within our RPC API, we need to accept to provide an helper for primitiving the right RequestSpec object into a legacy dict until we drop support for old Scheduler RPC API versions. If you don't understand why this method is needed, please don't use it. """ req_spec = {} if not self.obj_attr_is_set('num_instances'): req_spec['num_instances'] = self.fields['num_instances'].default else: req_spec['num_instances'] = self.num_instances req_spec['image'] = self._to_legacy_image() req_spec['instance_properties'] = self._to_legacy_instance() if self.obj_attr_is_set('flavor'): req_spec['instance_type'] = self.flavor else: req_spec['instance_type'] = {} return req_spec def to_legacy_filter_properties_dict(self): """Returns a legacy filter_properties dict from the RequestSpec object. Since we need to manage backwards compatibility and rolling upgrades within our RPC API, we need to accept to provide an helper for primitiving the right RequestSpec object into a legacy dict until we drop support for old Scheduler RPC API versions. If you don't understand why this method is needed, please don't use it. """ filt_props = {} if self.obj_attr_is_set('ignore_hosts') and self.ignore_hosts: filt_props['ignore_hosts'] = self.ignore_hosts if self.obj_attr_is_set('force_hosts') and self.force_hosts: filt_props['force_hosts'] = self.force_hosts if self.obj_attr_is_set('force_nodes') and self.force_nodes: filt_props['force_nodes'] = self.force_nodes if self.obj_attr_is_set('retry') and self.retry: filt_props['retry'] = self.retry.to_dict() if self.obj_attr_is_set('limits') and self.limits: filt_props['limits'] = self.limits.to_dict() if self.obj_attr_is_set('instance_group') and self.instance_group: filt_props.update(self._to_legacy_group_info()) if self.obj_attr_is_set('scheduler_hints') and self.scheduler_hints: # NOTE(sbauza): We need to backport all the hints correctly since # we had to hydrate the field by putting a single item into a list. filt_props['scheduler_hints'] = {hint: self.get_scheduler_hint( hint) for hint in self.scheduler_hints} return filt_props @classmethod def from_components(cls, context, instance_uuid, image, flavor, numa_topology, pci_requests, filter_properties, instance_group, availability_zone): """Returns a new RequestSpec object hydrated by various components. This helper is useful in creating the RequestSpec from the various objects that are assembled early in the boot process. This method creates a complete RequestSpec object with all properties set or intentionally left blank. 
:param context: a context object :param instance_uuid: the uuid of the instance to schedule :param image: a dict of properties for an image or volume :param flavor: a flavor NovaObject :param numa_topology: InstanceNUMATopology or None :param pci_requests: InstancePCIRequests :param filter_properties: a dict of properties for scheduling :param instance_group: None or an instance group NovaObject :param availability_zone: an availability_zone string """ spec_obj = cls(context) spec_obj.num_instances = 1 spec_obj.instance_uuid = instance_uuid spec_obj.instance_group = instance_group spec_obj.project_id = context.project_id spec_obj._image_meta_from_image(image) spec_obj._from_flavor(flavor) spec_obj._from_instance_pci_requests(pci_requests) spec_obj._from_instance_numa_topology(numa_topology) spec_obj.ignore_hosts = filter_properties.get('ignore_hosts') spec_obj.force_hosts = filter_properties.get('force_hosts') spec_obj.force_nodes = filter_properties.get('force_nodes') spec_obj._from_retry(filter_properties.get('retry', {})) spec_obj._from_limits(filter_properties.get('limits', {})) spec_obj._from_hints(filter_properties.get('scheduler_hints', {})) spec_obj.availability_zone = availability_zone return spec_obj @staticmethod def _from_db_object(context, spec, db_spec): spec_obj = spec.obj_from_primitive(jsonutils.loads(db_spec['spec'])) for key in spec.fields: # Load these from the db model not the serialized object within, # though they should match. if key in ['id', 'instance_uuid']: setattr(spec, key, db_spec[key]) else: setattr(spec, key, getattr(spec_obj, key)) spec._context = context spec.obj_reset_changes() return spec @staticmethod @db.api_context_manager.reader def _get_by_instance_uuid_from_db(context, instance_uuid): db_spec = context.session.query(api_models.RequestSpec).filter_by( instance_uuid=instance_uuid).first() if not db_spec: raise exception.RequestSpecNotFound( instance_uuid=instance_uuid) return db_spec @base.remotable_classmethod def get_by_instance_uuid(cls, context, instance_uuid): db_spec = cls._get_by_instance_uuid_from_db(context, instance_uuid) return cls._from_db_object(context, cls(), db_spec) @staticmethod @db.api_context_manager.writer def _create_in_db(context, updates): db_spec = api_models.RequestSpec() db_spec.update(updates) db_spec.save(context.session) return db_spec def _get_update_primitives(self): """Serialize object to match the db model. We store copies of embedded objects rather than references to these objects because we want a snapshot of the request at this point. If the references changed or were deleted we would not be able to reschedule this instance under the same conditions as it was originally scheduled with. """ updates = self.obj_get_changes() # NOTE(alaski): The db schema is the full serialized object in a # 'spec' column. If anything has changed we rewrite the full thing. 
        # Initialize first so an unchanged object returns None instead of
        # raising UnboundLocalError.
        db_updates = None
        if updates:
            db_updates = {'spec': jsonutils.dumps(self.obj_to_primitive())}
            if 'instance_uuid' in updates:
                db_updates['instance_uuid'] = updates['instance_uuid']
        return db_updates

    @base.remotable
    def create(self):
        if self.obj_attr_is_set('id'):
            raise exception.ObjectActionError(action='create',
                                              reason='already created')

        updates = self._get_update_primitives()
        db_spec = self._create_in_db(self._context, updates)
        self._from_db_object(self._context, self, db_spec)

    @staticmethod
    @db.api_context_manager.writer
    def _save_in_db(context, instance_uuid, updates):
        # FIXME(sbauza): Provide a classmethod when oslo.db bug #1520195 is
        # fixed and released
        db_spec = RequestSpec._get_by_instance_uuid_from_db(context,
                                                            instance_uuid)
        db_spec.update(updates)
        db_spec.save(context.session)
        return db_spec

    @base.remotable
    def save(self):
        updates = self._get_update_primitives()
        db_spec = self._save_in_db(self._context, self.instance_uuid, updates)
        self._from_db_object(self._context, self, db_spec)
        self.obj_reset_changes()


@base.NovaObjectRegistry.register
class SchedulerRetries(base.NovaObject):
    # Version 1.0: Initial version
    # Version 1.1: ComputeNodeList version 1.14
    VERSION = '1.1'

    fields = {
        'num_attempts': fields.IntegerField(),
        # NOTE(sbauza): Even if we are only using host/node strings, we need
        # to know which compute nodes were tried
        'hosts': fields.ObjectField('ComputeNodeList'),
    }

    @classmethod
    def from_dict(cls, context, retry_dict):
        # NOTE(sbauza): We are not persisting the user context since it's
        # only needed for hydrating the Retry object
        retry_obj = cls()
        # Check each required key explicitly; the previous expression
        # ('num_attempts' and 'hosts') only ever tested 'hosts'.
        if 'num_attempts' not in retry_dict or 'hosts' not in retry_dict:
            # NOTE(sbauza): We prefer to return an empty object if the
            # primitive is not good enough
            return retry_obj
        retry_obj.num_attempts = retry_dict.get('num_attempts')
        # NOTE(sbauza): each retry_dict['hosts'] item is a list of [host, node]
        computes = [objects.ComputeNode(context=context, host=host,
                                        hypervisor_hostname=node)
                    for host, node in retry_dict.get('hosts')]
        retry_obj.hosts = objects.ComputeNodeList(objects=computes)
        return retry_obj

    def to_dict(self):
        legacy_hosts = [[cn.host, cn.hypervisor_hostname] for cn in self.hosts]
        return {'num_attempts': self.num_attempts,
                'hosts': legacy_hosts}


@base.NovaObjectRegistry.register
class SchedulerLimits(base.NovaObject):
    # Version 1.0: Initial version
    VERSION = '1.0'

    fields = {
        'numa_topology': fields.ObjectField('NUMATopologyLimits',
                                            nullable=True,
                                            default=None),
        'vcpu': fields.IntegerField(nullable=True, default=None),
        'disk_gb': fields.IntegerField(nullable=True, default=None),
        'memory_mb': fields.IntegerField(nullable=True, default=None),
    }

    @classmethod
    def from_dict(cls, limits_dict):
        limits = cls(**limits_dict)
        # NOTE(sbauza): Since the limits can be set for each field or not, we
        # prefer to have the fields nullable, but default the value to None.
        # Here we accept that the object is always generated from a primitive
        # hence the use of obj_set_defaults exceptionally.
        limits.obj_set_defaults()
        return limits

    def to_dict(self):
        limits = {}
        for field in self.fields:
            if getattr(self, field) is not None:
                limits[field] = getattr(self, field)
        return limits
dims/nova
nova/objects/request_spec.py
Python
apache-2.0
22,987
0.000131
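The SchedulerLimits code above relies on a to_dict/from_dict round-trip over nullable fields. Below is a minimal, dependency-free sketch of that same pattern; the Limits class, its field set, and the sample values are illustrative inventions for this sketch, not Nova's API.

class Limits(object):
    FIELDS = ('vcpu', 'disk_gb', 'memory_mb')

    def __init__(self, **kwargs):
        for field in self.FIELDS:
            # Unset limits default to None, mirroring obj_set_defaults().
            setattr(self, field, kwargs.get(field))

    @classmethod
    def from_dict(cls, limits_dict):
        return cls(**limits_dict)

    def to_dict(self):
        # Serialize only the limits that were actually set, as the Nova
        # version does.
        return dict((f, getattr(self, f)) for f in self.FIELDS
                    if getattr(self, f) is not None)


limits = Limits.from_dict({'vcpu': 4, 'memory_mb': 2048})
assert limits.to_dict() == {'vcpu': 4, 'memory_mb': 2048}
assert limits.disk_gb is None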
from django.test import TestCase
import time

from .models import SimpleTree, MPTTTree, TBMP, TBNS


def timeit(method):
    """ Measure a method's execution time. """

    def timed(*args, **kw):
        ts = time.time()
        result = method(*args, **kw)
        te = time.time()
        print '\n%r: %2.2f sec' % \
              (method.__name__, te - ts)
        return result

    return timed

CYCLES = 8


class Benchmark(object):

    @timeit
    def test_creation(self):
        self._create_tree()

    def test_delete(self):
        self._create_tree(cycles=7)

        @timeit
        def test_deletion():
            for _ in xrange(pow(2, CYCLES) / 2):
                self._delete_last()

        test_deletion()

    def test_get(self):
        self._create_tree(cycles=7)

        @timeit
        def test_get_tree():
            root = self._get_root()
            for _ in xrange(100):
                self._get_tree(root)

        test_get_tree()

    def _create_tree(self, cycles=CYCLES):
        root = self._create_root(title='root1')
        nodes = [root]
        # Iterate over the 'cycles' argument, not the module-level CYCLES
        # constant, so callers can actually control the tree depth.
        for _ in xrange(cycles):
            new_nodes = []
            for node in nodes:
                new_nodes.append(self._create_child(parent=node))
                new_nodes.append(self._create_child(parent=node))
            nodes = new_nodes
        return nodes

    def _create_root(self, **params):
        pass

    def _create_child(self, parent, **params):
        pass

    def _delete_last(self):
        pass

    def _get_root(self):
        pass

    def _get_tree(self, parent):
        pass


class SimpleTest(TestCase, Benchmark):

    def setUp(self):
        print "\nSimpleTree benchmark"

    def _create_root(self, **params):
        return SimpleTree.objects.create(**params)

    def _create_child(self, parent, **params):
        return SimpleTree.objects.create(parent=parent, **params)

    def _delete_last(self):
        SimpleTree.objects.order_by('-id')[0].delete()

    def _get_root(self):
        return SimpleTree.objects.get(parent=None)

    def _get_tree(self, parent):
        return parent.get_tree()


class MPTTTest(TestCase, Benchmark):

    def setUp(self):
        print "\nMPTT benchmark"

    def _create_root(self, **params):
        return MPTTTree.objects.create(**params)

    def _create_child(self, parent, **params):
        return MPTTTree.objects.create(parent=parent, **params)

    def _delete_last(self):
        MPTTTree.objects.order_by('-id')[0].delete()

    def _get_root(self):
        return MPTTTree.objects.get(parent=None)

    def _get_tree(self, parent):
        return list(parent.get_ancestors()) + list(parent.get_descendants(include_self=False))


class TreeBeardMP(TestCase, Benchmark):

    def setUp(self):
        print "\nTreebeard MP benchmark"

    def _create_root(self, **params):
        return TBMP.add_root(**params)

    def _create_child(self, parent, **params):
        return parent.add_child(**params)

    def _delete_last(self):
        TBMP.objects.order_by('-id')[0].delete()

    def _get_root(self):
        return TBMP.get_root_nodes()[0]

    def _get_tree(self, parent):
        TBMP.get_tree(parent=parent)


class TreeBeardNS(TreeBeardMP):

    def setUp(self):
        print "\nTreebeard NS benchmark"

    def _create_root(self, **params):
        return TBNS.add_root(**params)

    def _delete_last(self):
        TBNS.objects.order_by('-id')[0].delete()

    def _get_root(self):
        return TBNS.get_root_nodes()[0]

    def _get_tree(self, parent):
        TBNS.get_tree(parent=parent)
klen/simpletree
benchmark/main/tests.py
Python
bsd-3-clause
3,596
0.000278
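The timing decorator in the benchmark above is a self-contained pattern worth isolating. Here is a hedged, dependency-free version, renamed stopwatch to avoid shadowing the stdlib timeit module; the build_list function is a made-up workload for demonstration.

import time

def stopwatch(method):
    # Same idea as the benchmark's timeit(): wrap a callable and report
    # wall-clock duration after each call.
    def timed(*args, **kwargs):
        start = time.time()
        result = method(*args, **kwargs)
        print('%r: %2.2f sec' % (method.__name__, time.time() - start))
        return result
    return timed

@stopwatch
def build_list(n):
    return [i * i for i in range(n)]

build_list(1000000)  # prints something like "'build_list': 0.05 sec"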
#!/usr/bin/env python # Copyright 2014 the V8 project authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. # This script retrieves the history of all V8 branches and trunk revisions and # their corresponding Chromium revisions. # Requires a chromium checkout with branch heads: # gclient sync --with_branch_heads # gclient fetch import argparse import csv import itertools import json import os import re import sys from common_includes import * CONFIG = { "BRANCHNAME": "retrieve-v8-releases", "PERSISTFILE_BASENAME": "/tmp/v8-releases-tempfile", } # Expression for retrieving the bleeding edge revision from a commit message. PUSH_MSG_SVN_RE = re.compile(r".* \(based on bleeding_edge revision r(\d+)\)$") PUSH_MSG_GIT_RE = re.compile(r".* \(based on ([a-fA-F0-9]+)\)$") # Expression for retrieving the merged patches from a merge commit message # (old and new format). MERGE_MESSAGE_RE = re.compile(r"^.*[M|m]erged (.+)(\)| into).*$", re.M) CHERRY_PICK_TITLE_GIT_RE = re.compile(r"^.* \(cherry\-pick\)\.?$") # New git message for cherry-picked CLs. One message per line. MERGE_MESSAGE_GIT_RE = re.compile(r"^Merged ([a-fA-F0-9]+)\.?$") # Expression for retrieving reverted patches from a commit message (old and # new format). ROLLBACK_MESSAGE_RE = re.compile(r"^.*[R|r]ollback of (.+)(\)| in).*$", re.M) # New git message for reverted CLs. One message per line. ROLLBACK_MESSAGE_GIT_RE = re.compile(r"^Rollback of ([a-fA-F0-9]+)\.?$") # Expression for retrieving the code review link. REVIEW_LINK_RE = re.compile(r"^Review URL: (.+)$", re.M) # Expression with three versions (historical) for extracting the v8 revision # from the chromium DEPS file. DEPS_RE = re.compile(r"""^\s*(?:["']v8_revision["']: ["']""" """|\(Var\("googlecode_url"\) % "v8"\) \+ "\/trunk@""" """|"http\:\/\/v8\.googlecode\.com\/svn\/trunk@)""" """([^"']+)["'].*$""", re.M) # Expression to pick tag and revision for bleeding edge tags. To be used with # output of 'svn log'. BLEEDING_EDGE_TAGS_RE = re.compile( r"A \/tags\/([^\s]+) \(from \/branches\/bleeding_edge\:(\d+)\)") def SortBranches(branches): """Sort branches with version number names.""" return sorted(branches, key=SortingKey, reverse=True) def FilterDuplicatesAndReverse(cr_releases): """Returns the chromium releases in reverse order filtered by v8 revision duplicates. cr_releases is a list of [cr_rev, v8_rev] reverse-sorted by cr_rev. """ last = "" result = [] for release in reversed(cr_releases): if last == release[1]: continue last = release[1] result.append(release) return result def BuildRevisionRanges(cr_releases): """Returns a mapping of v8 revision -> chromium ranges. The ranges are comma-separated, each range has the form R1:R2. The newest entry is the only one of the form R1, as there is no end range. cr_releases is a list of [cr_rev, v8_rev] reverse-sorted by cr_rev. cr_rev either refers to a chromium svn revision or a chromium branch number. """ range_lists = {} cr_releases = FilterDuplicatesAndReverse(cr_releases) # Visit pairs of cr releases from oldest to newest. for cr_from, cr_to in itertools.izip( cr_releases, itertools.islice(cr_releases, 1, None)): # Assume the chromium revisions are all different. assert cr_from[0] != cr_to[0] # TODO(machenbach): Subtraction is not git friendly. ran = "%s:%d" % (cr_from[0], int(cr_to[0]) - 1) # Collect the ranges in lists per revision. range_lists.setdefault(cr_from[1], []).append(ran) # Add the newest revision. 
if cr_releases: range_lists.setdefault(cr_releases[-1][1], []).append(cr_releases[-1][0]) # Stringify and comma-separate the range lists. return dict((rev, ", ".join(ran)) for rev, ran in range_lists.iteritems()) def MatchSafe(match): if match: return match.group(1) else: return "" class Preparation(Step): MESSAGE = "Preparation." def RunStep(self): self.CommonPrepare() self.PrepareBranch() class RetrieveV8Releases(Step): MESSAGE = "Retrieve all V8 releases." def ExceedsMax(self, releases): return (self._options.max_releases > 0 and len(releases) > self._options.max_releases) def GetBleedingEdgeFromPush(self, title): return MatchSafe(PUSH_MSG_SVN_RE.match(title)) def GetBleedingEdgeGitFromPush(self, title): return MatchSafe(PUSH_MSG_GIT_RE.match(title)) def GetMergedPatches(self, body): patches = MatchSafe(MERGE_MESSAGE_RE.search(body)) if not patches: patches = MatchSafe(ROLLBACK_MESSAGE_RE.search(body)) if patches: # Indicate reverted patches with a "-". patches = "-%s" % patches return patches def GetMergedPatchesGit(self, body): patches = [] for line in body.splitlines(): patch = MatchSafe(MERGE_MESSAGE_GIT_RE.match(line)) if patch: patches.append(patch) patch = MatchSafe(ROLLBACK_MESSAGE_GIT_RE.match(line)) if patch: patches.append("-%s" % patch) return ", ".join(patches) def GetReleaseDict( self, git_hash, bleeding_edge_rev, bleeding_edge_git, branch, version, patches, cl_body): revision = self.vc.GitSvn(git_hash) return { # The SVN revision on the branch. "revision": revision, # The git revision on the branch. "revision_git": git_hash, # The SVN revision on bleeding edge (only for newer trunk pushes). "bleeding_edge": bleeding_edge_rev, # The same for git. "bleeding_edge_git": bleeding_edge_git, # The branch name. "branch": branch, # The version for displaying in the form 3.26.3 or 3.26.3.12. "version": version, # The date of the commit. "date": self.GitLog(n=1, format="%ci", git_hash=git_hash), # Merged patches if available in the form 'r1234, r2345'. "patches_merged": patches, # Default for easier output formatting. "chromium_revision": "", # Default for easier output formatting. "chromium_branch": "", # Link to the CL on code review. Trunk pushes are not uploaded, so this # field will be populated below with the recent roll CL link. "review_link": MatchSafe(REVIEW_LINK_RE.search(cl_body)), # Link to the commit message on google code. 
"revision_link": ("https://code.google.com/p/v8/source/detail?r=%s" % revision), } def GetRelease(self, git_hash, branch): self.ReadAndPersistVersion() base_version = [self["major"], self["minor"], self["build"]] version = ".".join(base_version) body = self.GitLog(n=1, format="%B", git_hash=git_hash) patches = "" if self["patch"] != "0": version += ".%s" % self["patch"] if CHERRY_PICK_TITLE_GIT_RE.match(body.splitlines()[0]): patches = self.GetMergedPatchesGit(body) else: patches = self.GetMergedPatches(body) title = self.GitLog(n=1, format="%s", git_hash=git_hash) bleeding_edge_revision = self.GetBleedingEdgeFromPush(title) bleeding_edge_git = "" if bleeding_edge_revision: bleeding_edge_git = self.vc.SvnGit(bleeding_edge_revision, self.vc.RemoteMasterBranch()) else: bleeding_edge_git = self.GetBleedingEdgeGitFromPush(title) return self.GetReleaseDict( git_hash, bleeding_edge_revision, bleeding_edge_git, branch, version, patches, body), self["patch"] def GetReleasesFromMaster(self): tag_text = self.SVN("log https://v8.googlecode.com/svn/tags -v --limit 20") releases = [] for (tag, revision) in re.findall(BLEEDING_EDGE_TAGS_RE, tag_text): git_hash = self.vc.SvnGit(revision) # Add bleeding edge release. It does not contain patches or a code # review link, as tags are not uploaded. releases.append(self.GetReleaseDict( git_hash, revision, git_hash, self.vc.MasterBranch(), tag, "", "")) return releases def GetReleasesFromBranch(self, branch): self.GitReset(self.vc.RemoteBranch(branch)) if branch == self.vc.MasterBranch(): return self.GetReleasesFromMaster() releases = [] try: for git_hash in self.GitLog(format="%H").splitlines(): if VERSION_FILE not in self.GitChangedFiles(git_hash): continue if self.ExceedsMax(releases): break # pragma: no cover if not self.GitCheckoutFileSafe(VERSION_FILE, git_hash): break # pragma: no cover release, patch_level = self.GetRelease(git_hash, branch) releases.append(release) # Follow branches only until their creation point. # TODO(machenbach): This omits patches if the version file wasn't # manipulated correctly. Find a better way to detect the point where # the parent of the branch head leads to the trunk branch. if branch != self.vc.CandidateBranch() and patch_level == "0": break # Allow Ctrl-C interrupt. except (KeyboardInterrupt, SystemExit): # pragma: no cover pass # Clean up checked-out version file. self.GitCheckoutFileSafe(VERSION_FILE, "HEAD") return releases def RunStep(self): self.GitCreateBranch(self._config["BRANCHNAME"]) branches = self.vc.GetBranches() releases = [] if self._options.branch == 'recent': # Get only recent development on trunk, beta and stable. if self._options.max_releases == 0: # pragma: no cover self._options.max_releases = 10 beta, stable = SortBranches(branches)[0:2] releases += self.GetReleasesFromBranch(stable) releases += self.GetReleasesFromBranch(beta) releases += self.GetReleasesFromBranch(self.vc.CandidateBranch()) releases += self.GetReleasesFromBranch(self.vc.MasterBranch()) elif self._options.branch == 'all': # pragma: no cover # Retrieve the full release history. for branch in branches: releases += self.GetReleasesFromBranch(branch) releases += self.GetReleasesFromBranch(self.vc.CandidateBranch()) releases += self.GetReleasesFromBranch(self.vc.MasterBranch()) else: # pragma: no cover # Retrieve history for a specified branch. 
      assert self._options.branch in (branches +
          [self.vc.CandidateBranch(), self.vc.MasterBranch()])
      releases += self.GetReleasesFromBranch(self._options.branch)

    self["releases"] = sorted(releases,
                              key=lambda r: SortingKey(r["version"]),
                              reverse=True)


class SwitchChromium(Step):
  MESSAGE = "Switch to Chromium checkout."

  def RunStep(self):
    cwd = self._options.chromium
    # Check for a clean workdir.
    if not self.GitIsWorkdirClean(cwd=cwd):  # pragma: no cover
      self.Die("Workspace is not clean. Please commit or undo your changes.")
    # Assert that the DEPS file is there.
    if not os.path.exists(os.path.join(cwd, "DEPS")):  # pragma: no cover
      self.Die("DEPS file not present.")


class UpdateChromiumCheckout(Step):
  MESSAGE = "Update the checkout and create a new branch."

  def RunStep(self):
    cwd = self._options.chromium
    self.GitCheckout("master", cwd=cwd)
    self.GitPull(cwd=cwd)
    self.GitCreateBranch(self.Config("BRANCHNAME"), cwd=cwd)


def ConvertToCommitNumber(step, revision):
  # Simple check for git hashes.
  if revision.isdigit() and len(revision) < 8:
    return revision
  return step.GitConvertToSVNRevision(
      revision, cwd=os.path.join(step._options.chromium, "v8"))


class RetrieveChromiumV8Releases(Step):
  MESSAGE = "Retrieve V8 releases from Chromium DEPS."

  def RunStep(self):
    cwd = self._options.chromium
    releases = filter(
        lambda r: r["branch"] in [self.vc.CandidateBranch(),
                                  self.vc.MasterBranch()],
        self["releases"])
    if not releases:  # pragma: no cover
      print "No releases detected. Skipping chromium history."
      return True

    # Update v8 checkout in chromium.
    self.GitFetchOrigin(cwd=os.path.join(cwd, "v8"))

    oldest_v8_rev = int(releases[-1]["revision"])

    cr_releases = []
    try:
      for git_hash in self.GitLog(
          format="%H", grep="V8", cwd=cwd).splitlines():
        if "DEPS" not in self.GitChangedFiles(git_hash, cwd=cwd):
          continue
        if not self.GitCheckoutFileSafe("DEPS", git_hash, cwd=cwd):
          break  # pragma: no cover
        deps = FileToText(os.path.join(cwd, "DEPS"))
        match = DEPS_RE.search(deps)
        if match:
          cr_rev = self.GetCommitPositionNumber(git_hash, cwd=cwd)
          if cr_rev:
            v8_rev = ConvertToCommitNumber(self, match.group(1))
            cr_releases.append([cr_rev, v8_rev])

            # Stop after reaching beyond the last v8 revision we want to
            # update. We need a small buffer for possible revert/reland
            # frenzies.
            # TODO(machenbach): Subtraction is not git friendly.
            if int(v8_rev) < oldest_v8_rev - 100:
              break  # pragma: no cover

    # Allow Ctrl-C interrupt.
    except (KeyboardInterrupt, SystemExit):  # pragma: no cover
      pass

    # Clean up.
    self.GitCheckoutFileSafe("DEPS", "HEAD", cwd=cwd)

    # Add the chromium ranges to the v8 trunk and bleeding_edge releases.
    all_ranges = BuildRevisionRanges(cr_releases)
    releases_dict = dict((r["revision"], r) for r in releases)
    for revision, ranges in all_ranges.iteritems():
      releases_dict.get(revision, {})["chromium_revision"] = ranges


# TODO(machenbach): Unify common code with method above.
class RetrieveChromiumBranches(Step):
  MESSAGE = "Retrieve Chromium branch information."

  def RunStep(self):
    cwd = self._options.chromium
    trunk_releases = filter(lambda r: r["branch"] == self.vc.CandidateBranch(),
                            self["releases"])
    if not trunk_releases:  # pragma: no cover
      print "No trunk releases detected. Skipping chromium history."
      return True

    oldest_v8_rev = int(trunk_releases[-1]["revision"])

    # Filter out irrelevant branches.
    branches = filter(lambda r: re.match(r"branch-heads/\d+", r),
                      self.GitRemotes(cwd=cwd))

    # Transform into pure branch numbers.
    branches = map(lambda r: int(re.match(r"branch-heads/(\d+)", r).group(1)),
                   branches)
    branches = sorted(branches, reverse=True)

    cr_branches = []
    try:
      for branch in branches:
        if not self.GitCheckoutFileSafe("DEPS",
                                        "branch-heads/%d" % branch,
                                        cwd=cwd):
          break  # pragma: no cover
        deps = FileToText(os.path.join(cwd, "DEPS"))
        match = DEPS_RE.search(deps)
        if match:
          v8_rev = ConvertToCommitNumber(self, match.group(1))
          cr_branches.append([str(branch), v8_rev])

          # Stop after reaching beyond the last v8 revision we want to
          # update. We need a small buffer for possible revert/reland
          # frenzies.
          # TODO(machenbach): Subtraction is not git friendly.
          if int(v8_rev) < oldest_v8_rev - 100:
            break  # pragma: no cover

    # Allow Ctrl-C interrupt.
    except (KeyboardInterrupt, SystemExit):  # pragma: no cover
      pass

    # Clean up.
    self.GitCheckoutFileSafe("DEPS", "HEAD", cwd=cwd)

    # Add the chromium branches to the v8 trunk releases.
    all_ranges = BuildRevisionRanges(cr_branches)
    trunk_dict = dict((r["revision"], r) for r in trunk_releases)
    for revision, ranges in all_ranges.iteritems():
      trunk_dict.get(revision, {})["chromium_branch"] = ranges


class CleanUp(Step):
  MESSAGE = "Clean up."

  def RunStep(self):
    self.GitCheckout("master", cwd=self._options.chromium)
    self.GitDeleteBranch(self.Config("BRANCHNAME"), cwd=self._options.chromium)
    self.CommonCleanup()


class WriteOutput(Step):
  MESSAGE = "Print output."

  def Run(self):
    if self._options.csv:
      with open(self._options.csv, "w") as f:
        writer = csv.DictWriter(f,
                                ["version", "branch", "revision",
                                 "chromium_revision", "patches_merged"],
                                restval="",
                                extrasaction="ignore")
        for release in self["releases"]:
          writer.writerow(release)
    if self._options.json:
      with open(self._options.json, "w") as f:
        f.write(json.dumps(self["releases"]))
    if not self._options.csv and not self._options.json:
      print self["releases"]  # pragma: no cover


class Releases(ScriptsBase):
  def _PrepareOptions(self, parser):
    parser.add_argument("-b", "--branch", default="recent",
                        help=("The branch to analyze. If 'all' is specified, "
                              "analyze all branches. If 'recent' (default) "
                              "is specified, track beta, stable and trunk."))
    parser.add_argument("-c", "--chromium",
                        help=("The path to your Chromium src/ "
                              "directory to automate the V8 roll."))
    parser.add_argument("--csv", help="Path to a CSV file for export.")
    parser.add_argument("-m", "--max-releases", type=int, default=0,
                        help="The maximum number of releases to track.")
    parser.add_argument("--json", help="Path to a JSON file for export.")

  def _ProcessOptions(self, options):  # pragma: no cover
    return True

  def _Config(self):
    return {
      "BRANCHNAME": "retrieve-v8-releases",
      "PERSISTFILE_BASENAME": "/tmp/v8-releases-tempfile",
    }

  def _Steps(self):
    return [
      Preparation,
      RetrieveV8Releases,
      SwitchChromium,
      UpdateChromiumCheckout,
      RetrieveChromiumV8Releases,
      RetrieveChromiumBranches,
      CleanUp,
      WriteOutput,
    ]


if __name__ == "__main__":  # pragma: no cover
  sys.exit(Releases().Run())
hellotomfan/v8-coroutine
deps/v8/tools/push-to-trunk/releases.py
Python
gpl-2.0
18,090
0.007573
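BuildRevisionRanges in the script above is pure logic and easy to exercise in isolation. The sketch below reimplements its core under the same assumptions (input is [cr_rev, v8_rev] pairs, reverse-sorted by cr_rev); it is an illustration, not the script's code, and drops the itertools.izip dependency for portability.

def build_revision_ranges(cr_releases):
    # Oldest first, with consecutive duplicates on v8_rev removed.
    releases = []
    last = ""
    for cr_rev, v8_rev in reversed(cr_releases):
        if v8_rev != last:
            releases.append((cr_rev, v8_rev))
            last = v8_rev
    range_lists = {}
    # Each v8 revision covers chromium revisions up to (next release - 1).
    for (cr_from, v8_rev), (cr_to, _) in zip(releases, releases[1:]):
        range_lists.setdefault(v8_rev, []).append(
            '%s:%d' % (cr_from, int(cr_to) - 1))
    # The newest release has no end range.
    if releases:
        cr_rev, v8_rev = releases[-1]
        range_lists.setdefault(v8_rev, []).append(cr_rev)
    return dict((rev, ', '.join(ranges))
                for rev, ranges in range_lists.items())

print(build_revision_ranges([['310', 'r3'], ['305', 'r2'], ['300', 'r1']]))
# e.g. {'r1': '300:304', 'r2': '305:309', 'r3': '310'}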
#!/usr/bin/env python # # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # # Michael A.G. Aivazis # California Institute of Technology # (C) 1998-2005 All Rights Reserved # # <LicenseText> # # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # from pyre.components.Component import Component class Quadratic(Component): class Inventory(Component.Inventory): import pyre.inventory a = pyre.inventory.float("a", default=0.0) b = pyre.inventory.float("b", default=0.0) c = pyre.inventory.float("c", default=0.0) def initialize(self): self.a = self.inventory.a self.b = self.inventory.b self.c = self.inventory.c import tabulator._tabulator tabulator._tabulator.quadraticSet(self.a, self.b, self.c) return def __init__(self): Component.__init__(self, "quadratic", "functor") self.a = 0.0 self.b = 0.0 self.c = 0.0 import tabulator._tabulator self.handle = tabulator._tabulator.quadratic() return def _init(self): Component._init(self) self.initialize() return # version __id__ = "$Id: Quadratic.py,v 1.1.1.1 2005/03/17 20:03:02 aivazis Exp $" # End of file
bmi-forum/bmi-pyre
pythia-0.8/examples/tabulator/tabulator/Quadratic.py
Python
gpl-2.0
1,399
0.005004
# -*- coding: utf-8 -*-
'''
Module for Sending Messages via XMPP (a.k.a. Jabber)

.. versionadded:: 2014.1.0

:depends:   - sleekxmpp python module
:configuration: This module can be used by either passing a jid and password
    directly to send_msg, or by specifying the name of a configuration
    profile in the minion config, minion pillar, or master config.

    For example:

    .. code-block:: yaml

        my-xmpp-login:
            xmpp.jid: myuser@jabber.example.org/resourcename
            xmpp.password: verybadpass

    The resourcename refers to the resource that is using this account. It is
    user-definable, and optional. The following configurations are both valid:

    .. code-block:: yaml

        my-xmpp-login:
            xmpp.jid: myuser@jabber.example.org/salt
            xmpp.password: verybadpass

        my-xmpp-login:
            xmpp.jid: myuser@jabber.example.org
            xmpp.password: verybadpass

'''

# Import Python Libs
from __future__ import absolute_import
import logging

HAS_LIBS = False
try:
    from sleekxmpp import ClientXMPP as _ClientXMPP
    from sleekxmpp.exceptions import XMPPError
    HAS_LIBS = True
except ImportError:
    class _ClientXMPP(object):
        '''
        Fake class in order not to raise errors
        '''

log = logging.getLogger(__name__)

__virtualname__ = 'xmpp'

MUC_DEPRECATED = "Use of send mask waiters is deprecated."


def __virtual__():
    '''
    Only load this module if sleekxmpp is installed on this minion.
    '''
    if HAS_LIBS:
        return __virtualname__
    return False


class SleekXMPPMUC(logging.Filter):
    def filter(self, record):
        return not record.getMessage() == MUC_DEPRECATED


class SendMsgBot(_ClientXMPP):

    def __init__(self, jid, password, recipient, msg):  # pylint: disable=E1002
        # PyLint wrongly reports an error when calling super, hence the above
        # disable call
        super(SendMsgBot, self).__init__(jid, password)

        self.recipients = [] if recipient is None else [recipient]
        self.rooms = []

        self.msg = msg

        self.add_event_handler('session_start', self.start)

    @classmethod
    def create_multi(cls, jid, password, msg, recipients=None, rooms=None,
                     nick="SaltStack Bot"):
        '''
        Alternate constructor that accepts multiple recipients and rooms
        '''
        obj = SendMsgBot(jid, password, None, msg)
        obj.recipients = [] if recipients is None else recipients
        obj.rooms = [] if rooms is None else rooms
        obj.nick = nick
        return obj

    def start(self, event):
        self.send_presence()
        self.get_roster()

        for recipient in self.recipients:
            self.send_message(mto=recipient,
                              mbody=self.msg,
                              mtype='chat')

        for room in self.rooms:
            self.plugin['xep_0045'].joinMUC(room,
                                            self.nick,
                                            wait=True)
            self.send_message(mto=room,
                              mbody=self.msg,
                              mtype='groupchat')

        self.disconnect(wait=True)


def send_msg(recipient, message, jid=None, password=None, profile=None):
    '''
    Send a message to an XMPP recipient. Designed for use in states.

    CLI Examples::

        xmpp.send_msg 'admins@xmpp.example.com' 'This is a salt module test' \
            profile='my-xmpp-account'
        xmpp.send_msg 'admins@xmpp.example.com' 'This is a salt module test' \
            jid='myuser@xmpp.example.com/salt' password='verybadpass'
    '''
    if profile:
        creds = __salt__['config.option'](profile)
        jid = creds.get('xmpp.jid')
        password = creds.get('xmpp.password')

    xmpp = SendMsgBot(jid, password, recipient, message)
    xmpp.register_plugin('xep_0030')  # Service Discovery
    xmpp.register_plugin('xep_0199')  # XMPP Ping

    if xmpp.connect():
        xmpp.process(block=True)
        return True
    return False


def send_msg_multi(message,
                   recipients=None,
                   rooms=None,
                   jid=None,
                   password=None,
                   nick="SaltStack Bot",
                   profile=None):
    '''
    Send a message to one or more XMPP recipients, chat rooms, or both.

    CLI Examples::

        xmpp.send_msg_multi recipients=['admins@xmpp.example.com'] \
            rooms=['secret@conference.xmpp.example.com'] \
            'This is a salt module test' \
            profile='my-xmpp-account'
        xmpp.send_msg_multi recipients=['admins@xmpp.example.com'] \
            rooms=['secret@conference.xmpp.example.com'] \
            'This is a salt module test' \
            jid='myuser@xmpp.example.com/salt' password='verybadpass'
    '''

    # Remove: [WARNING ] Use of send mask waiters is deprecated.
    for handler in logging.root.handlers:
        handler.addFilter(SleekXMPPMUC())

    if profile:
        creds = __salt__['config.option'](profile)
        jid = creds.get('xmpp.jid')
        password = creds.get('xmpp.password')

    # Forward the nick so the caller-supplied value is actually used when
    # joining rooms, instead of silently falling back to the default.
    xmpp = SendMsgBot.create_multi(
        jid, password, message, recipients=recipients, rooms=rooms, nick=nick)

    if rooms:
        xmpp.register_plugin('xep_0045')  # MUC plugin
    if xmpp.connect():
        try:
            xmpp.process(block=True)
            return True
        except XMPPError as err:
            log.error("Could not send message, error: %s", err)
    else:
        log.error("Could not connect to XMPP server")
    return False
rtx3/saltstack-deyunio
srv/salt/modules/xmpp.py
Python
apache-2.0
5,646
0
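As a hedged illustration of driving the SendMsgBot class above directly, outside of Salt: the snippet below assumes sleekxmpp is installed and that the JID, password, addresses, and server implied by them are placeholders for a real, reachable XMPP account.

# Hypothetical direct use of SendMsgBot (credentials are placeholders).
xmpp = SendMsgBot.create_multi(
    'bot@xmpp.example.com/salt',            # jid (placeholder)
    'verybadpass',                          # password (placeholder)
    'deploy finished',                      # message body
    recipients=['admins@xmpp.example.com'],
    rooms=['ops@conference.xmpp.example.com'])
xmpp.register_plugin('xep_0030')  # Service Discovery
xmpp.register_plugin('xep_0045')  # MUC, needed because rooms are given
if xmpp.connect():
    xmpp.process(block=True)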
# RUN: python %s | llvm-mc -filetype=obj -triple i686-pc-win32 - | llvm-readobj -h | FileCheck %s from __future__ import print_function # This test checks that the COFF object emitter can produce objects with # more than 65279 sections. # While we only generate 65277 sections, an implicit .text, .data and .bss will # also be emitted. This brings the total to 65280. num_sections = 65277 # CHECK: ImageFileHeader { # CHECK-NEXT: Machine: IMAGE_FILE_MACHINE_I386 # CHECK-NEXT: SectionCount: 65280 # CHECK-NEXT: TimeDateStamp: {{[0-9]+}} # CHECK-NEXT: PointerToSymbolTable: 0x{{[0-9A-F]+}} # CHECK-NEXT: SymbolCount: 195837 # CHECK-NEXT: OptionalHeaderSize: 0 # CHECK-NEXT: Characteristics [ (0x0) # CHECK-NEXT: ] # CHECK-NEXT: } for i in range(0, num_sections): print(""" .section .bss,"bw",discard,_b%d .globl _b%d # @b%d _b%d: .byte 0 # 0x0 """ % (i, i, i, i))
endlessm/chromium-browser
third_party/llvm/llvm/test/MC/COFF/bigobj.py
Python
bsd-3-clause
935
0.004278
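The section arithmetic the test's comments describe (65277 explicit sections plus the implicit .text, .data and .bss) can be sanity-checked with a trivial, purely illustrative snippet:

num_sections = 65277
implicit = ('.text', '.data', '.bss')
assert num_sections + len(implicit) == 65280  # matches SectionCount in CHECK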
import os

# toolchains options
ARCH = 'arm'
CPU = 'cortex-m3'
CROSS_TOOL = 'gcc'

if os.getenv('RTT_CC'):
    CROSS_TOOL = os.getenv('RTT_CC')

if CROSS_TOOL == 'gcc':
    PLATFORM = 'gcc'
    # NOTE: raw strings keep the backslashes literal; in a plain string the
    # '\b' in '\bin' would be parsed as a backspace character and corrupt
    # the path.
    EXEC_PATH = r'C:\Program Files (x86)\CodeSourcery\Sourcery G++ Lite\bin'
    #EXEC_PATH = r'C:\Program Files (x86)\yagarto\bin'
elif CROSS_TOOL == 'keil':
    print '================ERROR============================'
    print 'Not support keil yet!'
    print '================================================='
    exit(0)
elif CROSS_TOOL == 'iar':
    print '================ERROR============================'
    print 'Not support iar yet!'
    print '================================================='
    exit(0)

if os.getenv('RTT_EXEC_PATH'):
    EXEC_PATH = os.getenv('RTT_EXEC_PATH')

BUILD = 'debug'

# EFM32_BOARD = 'EFM32_G8XX_STK'
# EFM32_BOARD = 'EFM32_GXXX_DK'
EFM32_BOARD = 'EFM32GG_DK3750'

if EFM32_BOARD == 'EFM32_G8XX_STK':
    EFM32_FAMILY = 'Gecko'
    EFM32_TYPE = 'EFM32G890F128'
    EFM32_LCD = 'none'
elif EFM32_BOARD == 'EFM32_GXXX_DK':
    EFM32_FAMILY = 'Gecko'
    EFM32_TYPE = 'EFM32G290F128'
    EFM32_LCD = 'none'
elif EFM32_BOARD == 'EFM32GG_DK3750':
    EFM32_FAMILY = 'Giant Gecko'
    EFM32_TYPE = 'EFM32GG990F1024'
#    EFM32_LCD = 'LCD_MAPPED'
    EFM32_LCD = 'LCD_DIRECT'

if PLATFORM == 'gcc':
    # toolchains
    PREFIX = 'arm-none-eabi-'
    CC = PREFIX + 'gcc'
    AS = PREFIX + 'gcc'
    AR = PREFIX + 'ar'
    LINK = PREFIX + 'gcc'
    TARGET_EXT = 'axf'
    SIZE = PREFIX + 'size'
    OBJDUMP = PREFIX + 'objdump'
    OBJCPY = PREFIX + 'objcopy'

    DEVICE = ' -mcpu=cortex-m3 -mthumb -ffunction-sections -fdata-sections'
    CFLAGS = DEVICE
    AFLAGS = ' -c' + DEVICE + ' -x assembler-with-cpp'
    LFLAGS = DEVICE + ' -Wl,--gc-sections,-Map=rtthread-efm32.map,-cref,-u,__cs3_reset -T'
    if EFM32_BOARD == 'EFM32_G8XX_STK' or EFM32_BOARD == 'EFM32_GXXX_DK':
        LFLAGS += ' efm32g_rom.ld'
    elif EFM32_BOARD == 'EFM32GG_DK3750':
        LFLAGS += ' efm32gg_rom.ld'

    CPATH = ''
    LPATH = ''

    if BUILD == 'debug':
        CFLAGS += ' -O0 -gdwarf-2'
        AFLAGS += ' -gdwarf-2'
    else:
        CFLAGS += ' -O2'

    POST_ACTION = OBJCPY + ' -O binary $TARGET rtthread.bin\n' + SIZE + ' $TARGET \n'
poranmeloge/test-github
stm32_rtt_wifi/bsp/efm32/rtconfig.py
Python
gpl-2.0
2,289
0.009611
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""
By default, this module uses the pre-built Ember model from
https://pubdata.endgame.com/ember/ember_dataset.tar.bz2.
Documentation about training a new model
can be found on the Ember GitHub page (https://github.com/endgameinc/ember).
After training a new model, place the resulting txt file
in `multiscanner/etc` and update `config.ini` with the new filename.
"""

from __future__ import division, absolute_import, with_statement, print_function, unicode_literals

import os
from pathlib import Path

from multiscanner import CONFIG

__authors__ = "Patrick Copeland"
__license__ = "MPL 2.0"

TYPE = "MachineLearning"
NAME = "EndgameEmber"
REQUIRES = ['libmagic']
DEFAULTCONF = {
    'ENABLED': False,
    'path-to-model': os.path.join(os.path.split(CONFIG)[0], 'etc', 'ember', 'ember_model_2017.txt'),
}
LGBM_MODEL = None

try:
    import ember
    has_ember = True
except ImportError as e:
    print("ember module not installed...")
    has_ember = False
try:
    import lightgbm as lgb
except ImportError as e:
    print("lightgbm module needed for ember. Not installed...")
    has_ember = False


def check(conf=DEFAULTCONF):
    if not conf['ENABLED']:
        return False
    if not has_ember:
        return False
    if not Path(conf['path-to-model']).is_file():
        print("'{}' does not exist. Check config.ini for model location.".format(conf['path-to-model']))
        return False
    try:
        global LGBM_MODEL
        LGBM_MODEL = lgb.Booster(model_file=conf['path-to-model'])
    except lgb.LightGBMError as e:
        print("Unable to load model, {}. ({})".format(conf['path-to-model'], e))
        return False
    return True


def scan(filelist, conf=DEFAULTCONF):
    results = []

    for fname in filelist:
        # Ensure libmagic returns results. At runtime the framework replaces
        # each REQUIRES entry with that module's (results, metadata) output,
        # so REQUIRES[0][0] is libmagic's result list.
        if REQUIRES[0] is not None:
            # only run the analytic if the sample is a PE file
            file_type = _get_libmagicresults(REQUIRES[0][0], fname)
            if file_type.startswith('PE32'):
                with open(fname, 'rb') as fh:
                    ember_result = ember.predict_sample(LGBM_MODEL, fh.read())
                results.append(
                    (fname, {'Prediction': ember_result})
                )

    metadata = {}
    metadata["Name"] = NAME
    metadata["Type"] = TYPE
    return (results, metadata)


def _get_libmagicresults(results, fname):
    libmagicdict = dict(results)
    return libmagicdict.get(fname)
mitre/multiscanner
multiscanner/modules/MachineLearning/EndgameEmber.py
Python
mpl-2.0
2,656
0.001506
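A hedged sketch of exercising the module above by hand: it assumes ember and lightgbm are installed, a trained model exists at the configured path, that MultiScanner's substitution of libmagic output into REQUIRES can be emulated with a hand-built (results, metadata) tuple, and that the sample path is a placeholder for a real file.

# Hypothetical manual exercise of check()/scan(); paths are placeholders.
conf = dict(DEFAULTCONF, ENABLED=True)
if check(conf):  # loads the LightGBM model as a side effect
    # Emulate the framework rewriting REQUIRES[0] with libmagic output.
    REQUIRES[0] = ([('/tmp/sample.exe',
                     'PE32 executable (GUI) Intel 80386')],
                   {'Name': 'libmagic'})
    results, metadata = scan(['/tmp/sample.exe'], conf)
    print(results)  # [('/tmp/sample.exe', {'Prediction': <score>})]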