# vim: tabstop=4 shiftwidth=4 softtabstop=4

# Copyright 2013, Big Switch Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from openstack_dashboard import api
from openstack_dashboard.test import helpers as test

from neutronclient.v2_0 import client

neutronclient = client.Client


class LbaasApiTests(test.APITestCase):
    @test.create_stubs({neutronclient: ('create_vip',)})
    def test_vip_create(self):
        vip1 = self.api_vips.first()
        form_data = {'address': vip1['address'],
                     'name': vip1['name'],
                     'description': vip1['description'],
                     'subnet_id': vip1['subnet_id'],
                     'protocol_port': vip1['protocol_port'],
                     'protocol': vip1['protocol'],
                     'pool_id': vip1['pool_id'],
                     'session_persistence': vip1['session_persistence'],
                     'connection_limit': vip1['connection_limit'],
                     'admin_state_up': vip1['admin_state_up']
                     }
        vip = {'vip': self.api_vips.first()}
        neutronclient.create_vip({'vip': form_data}).AndReturn(vip)
        self.mox.ReplayAll()

        ret_val = api.lbaas.vip_create(self.request, **form_data)
        self.assertIsInstance(ret_val, api.lbaas.Vip)

    @test.create_stubs({neutronclient: ('create_vip',)})
    def test_vip_create_skip_address_if_empty(self):
        vip1 = self.api_vips.first()
        vipform_data = {'name': vip1['name'],
                        'description': vip1['description'],
                        'subnet_id': vip1['subnet_id'],
                        'protocol_port': vip1['protocol_port'],
                        'protocol': vip1['protocol'],
                        'pool_id': vip1['pool_id'],
                        'session_persistence': vip1['session_persistence'],
                        'connection_limit': vip1['connection_limit'],
                        'admin_state_up': vip1['admin_state_up']
                        }
        neutronclient.create_vip({'vip': vipform_data}).AndReturn(vipform_data)
        self.mox.ReplayAll()

        form_data = dict(vipform_data)
        form_data['address'] = ""
        ret_val = api.lbaas.vip_create(self.request, **form_data)
        self.assertIsInstance(ret_val, api.lbaas.Vip)

    @test.create_stubs({neutronclient: ('list_vips',)})
    def test_vip_list(self):
        vips = {'vips': [{'id': 'abcdef-c3eb-4fee-9763-12de3338041e',
                          'address': '10.0.0.100',
                          'name': 'vip1name',
                          'description': 'vip1description',
                          'subnet_id': '12381d38-c3eb-4fee-9763-12de3338041e',
                          'protocol_port': '80',
                          'protocol': 'HTTP',
                          'pool_id': '8913dde8-4915-4b90-8d3e-b95eeedb0d49',
                          'connection_limit': '10',
                          'admin_state_up': True
                          }, ]}
        neutronclient.list_vips().AndReturn(vips)
        self.mox.ReplayAll()

        ret_val = api.lbaas.vip_list(self.request)
        for v in ret_val:
            self.assertIsInstance(v, api.lbaas.Vip)
            self.assertTrue(v.id)

    @test.create_stubs({neutronclient: ('show_vip', 'show_pool'),
                        api.neutron: ('subnet_get', 'port_get')})
    def test_vip_get(self):
        vip = self.api_vips.first()
        neutronclient.show_vip(vip['id']).AndReturn({'vip': vip})
        api.neutron.subnet_get(self.request, vip['subnet_id']
                               ).AndReturn(self.subnets.first())
        api.neutron.port_get(self.request, vip['port_id']
                             ).AndReturn(self.ports.first())
        neutronclient.show_pool(vip['pool_id']
                                ).AndReturn({'pool': self.api_pools.first()})
        self.mox.ReplayAll()

        ret_val = api.lbaas.vip_get(self.request, vip['id'])
        self.assertIsInstance(ret_val, api.lbaas.Vip)
        self.assertIsInstance(ret_val.subnet, api.neutron.Subnet)
        self.assertEqual(vip['subnet_id'], ret_val.subnet.id)
        self.assertIsInstance(ret_val.port, api.neutron.Port)
        self.assertEqual(vip['port_id'], ret_val.port.id)
        self.assertIsInstance(ret_val.pool, api.lbaas.Pool)
        self.assertEqual(self.api_pools.first()['id'], ret_val.pool.id)

    @test.create_stubs({neutronclient: ('update_vip',)})
    def test_vip_update(self):
        form_data = {'address': '10.0.0.100',
                     'name': 'vip1name',
                     'description': 'vip1description',
                     'subnet_id': '12381d38-c3eb-4fee-9763-12de3338041e',
                     'protocol_port': '80',
                     'protocol': 'HTTP',
                     'pool_id': '8913dde8-4915-4b90-8d3e-b95eeedb0d49',
                     'connection_limit': '10',
                     'admin_state_up': True
                     }
        vip = {'vip': {'id': 'abcdef-c3eb-4fee-9763-12de3338041e',
                       'address': '10.0.0.100',
                       'name': 'vip1name',
                       'description': 'vip1description',
                       'subnet_id': '12381d38-c3eb-4fee-9763-12de3338041e',
                       'protocol_port': '80',
                       'protocol': 'HTTP',
                       'pool_id': '8913dde8-4915-4b90-8d3e-b95eeedb0d49',
                       'connection_limit': '10',
                       'admin_state_up': True
                       }}
        neutronclient.update_vip(vip['vip']['id'],
                                 form_data).AndReturn(vip)
        self.mox.ReplayAll()

        ret_val = api.lbaas.vip_update(self.request,
                                       vip['vip']['id'], **form_data)
        self.assertIsInstance(ret_val, api.lbaas.Vip)

    @test.create_stubs({neutronclient: ('create_pool',)})
    def test_pool_create(self):
        form_data = {'name': 'pool1name',
                     'description': 'pool1description',
                     'subnet_id': '12381d38-c3eb-4fee-9763-12de3338041e',
                     'protocol': 'HTTP',
                     'lb_method': 'ROUND_ROBIN',
                     'admin_state_up': True,
                     'provider': 'dummy'
                     }
        pool = {'pool': {'id': 'abcdef-c3eb-4fee-9763-12de3338041e',
                         'name': 'pool1name',
                         'description': 'pool1description',
                         'subnet_id': '12381d38-c3eb-4fee-9763-12de3338041e',
                         'protocol': 'HTTP',
                         'lb_method': 'ROUND_ROBIN',
                         'admin_state_up': True,
                         'provider': 'dummy'
                         }}
        neutronclient.create_pool({'pool': form_data}).AndReturn(pool)
        self.mox.ReplayAll()

        ret_val = api.lbaas.pool_create(self.request, **form_data)
        self.assertIsInstance(ret_val, api.lbaas.Pool)

    @test.create_stubs({neutronclient: ('list_pools', 'list_vips'),
                        api.neutron: ('subnet_list',)})
    def test_pool_list(self):
        pools = {'pools': self.api_pools.list()}
        subnets = self.subnets.list()
        vips = {'vips': self.api_vips.list()}

        neutronclient.list_pools().AndReturn(pools)
        api.neutron.subnet_list(self.request).AndReturn(subnets)
        neutronclient.list_vips().AndReturn(vips)
        self.mox.ReplayAll()

        ret_val = api.lbaas.pool_list(self.request)
        for v in ret_val:
            self.assertIsInstance(v, api.lbaas.Pool)
            self.assertTrue(v.id)

    @test.create_stubs({neutronclient: ('show_pool', 'show_vip',
                                        'list_members',
                                        'list_health_monitors',),
                        api.neutron: ('subnet_get',)})
    def test_pool_get(self):
        pool = self.pools.first()
        subnet = self.subnets.first()
        pool_dict = {'pool': self.api_pools.first()}
        vip_dict = {'vip': self.api_vips.first()}

        neutronclient.show_pool(pool.id).AndReturn(pool_dict)
        api.neutron.subnet_get(self.request, subnet.id).AndReturn(subnet)
        neutronclient.show_vip(pool.vip_id).AndReturn(vip_dict)
        neutronclient.list_members(pool_id=pool.id).AndReturn(
            {'members': self.members.list()})
        neutronclient.list_health_monitors(id=pool.health_monitors).AndReturn(
            {'health_monitors': [self.monitors.first()]})
        self.mox.ReplayAll()

        ret_val = api.lbaas.pool_get(self.request, pool.id)
        self.assertIsInstance(ret_val, api.lbaas.Pool)
        self.assertIsInstance(ret_val.vip, api.lbaas.Vip)
        self.assertEqual(ret_val.vip.id, vip_dict['vip']['id'])
        self.assertIsInstance(ret_val.subnet, api.neutron.Subnet)
        self.assertEqual(ret_val.subnet.id, subnet.id)
        self.assertEqual(2, len(ret_val.members))
        self.assertIsInstance(ret_val.members[0], api.lbaas.Member)
        self.assertEqual(1, len(ret_val.health_monitors))
        self.assertIsInstance(ret_val.health_monitors[0],
                              api.lbaas.PoolMonitor)

    @test.create_stubs({neutronclient: ('update_pool',)})
    def test_pool_update(self):
        form_data = {'name': 'pool1name',
                     'description': 'pool1description',
                     'subnet_id': '12381d38-c3eb-4fee-9763-12de3338041e',
                     'protocol': 'HTTPS',
                     'lb_method': 'LEAST_CONNECTION',
                     'admin_state_up': True
                     }
        pool = {'pool': {'id': 'abcdef-c3eb-4fee-9763-12de3338041e',
                         'name': 'pool1name',
                         'description': 'pool1description',
                         'subnet_id': '12381d38-c3eb-4fee-9763-12de3338041e',
                         'protocol': 'HTTPS',
                         'lb_method': 'LEAST_CONNECTION',
                         'admin_state_up': True
                         }}
        neutronclient.update_pool(pool['pool']['id'],
                                  form_data).AndReturn(pool)
        self.mox.ReplayAll()

        ret_val = api.lbaas.pool_update(self.request,
                                        pool['pool']['id'], **form_data)
        self.assertIsInstance(ret_val, api.lbaas.Pool)

    @test.create_stubs({neutronclient: ('create_health_monitor',)})
    def test_pool_health_monitor_create(self):
        form_data = {'type': 'PING',
                     'delay': '10',
                     'timeout': '10',
                     'max_retries': '10',
                     'admin_state_up': True
                     }
        monitor = {'health_monitor': {
            'id': 'abcdef-c3eb-4fee-9763-12de3338041e',
            'type': 'PING',
            'delay': '10',
            'timeout': '10',
            'max_retries': '10',
            'admin_state_up': True}}
        neutronclient.create_health_monitor({
            'health_monitor': form_data}).AndReturn(monitor)
        self.mox.ReplayAll()

        ret_val = api.lbaas.pool_health_monitor_create(
            self.request, **form_data)
        self.assertIsInstance(ret_val, api.lbaas.PoolMonitor)

    @test.create_stubs({neutronclient: ('list_health_monitors',)})
    def test_pool_health_monitor_list(self):
        monitors = {'health_monitors': [
            {'id': 'abcdef-c3eb-4fee-9763-12de3338041e',
             'type': 'PING',
             'delay': '10',
             'timeout': '10',
             'max_retries': '10',
             'http_method': 'GET',
             'url_path': '/monitor',
             'expected_codes': '200',
             'admin_state_up': True},
        ]}
        neutronclient.list_health_monitors().AndReturn(monitors)
        self.mox.ReplayAll()

        ret_val = api.lbaas.pool_health_monitor_list(self.request)
        for v in ret_val:
            self.assertIsInstance(v, api.lbaas.PoolMonitor)
            self.assertTrue(v.id)

    @test.create_stubs({neutronclient: ('show_health_monitor',
                                        'list_pools')})
    def test_pool_health_monitor_get(self):
        monitor = self.api_monitors.first()
        neutronclient.show_health_monitor(
            monitor['id']).AndReturn({'health_monitor': monitor})
        neutronclient.list_pools(id=[p['pool_id'] for p in monitor['pools']]
                                 ).AndReturn({'pools': self.api_pools.list()})
        self.mox.ReplayAll()

        ret_val = api.lbaas.pool_health_monitor_get(
            self.request, monitor['id'])
        self.assertIsInstance(ret_val, api.lbaas.PoolMonitor)
        self.assertEqual(2, len(ret_val.pools))
        self.assertIsInstance(ret_val.pools[0], api.lbaas.Pool)

    @test.create_stubs({neutronclient: ('create_member', )})
    def test_member_create(self):
        form_data = {'pool_id': 'abcdef-c3eb-4fee-9763-12de3338041e',
                     'address': '10.0.1.2',
                     'protocol_port': '80',
                     'weight': '10',
                     'admin_state_up': True
                     }
        member = {'member': {'id': 'abcdef-c3eb-4fee-9763-12de3338041e',
                             'pool_id': 'abcdef-c3eb-4fee-9763-12de3338041e',
                             'address': '10.0.1.2',
                             'protocol_port': '80',
                             'weight': '10',
                             'admin_state_up': True}}
        neutronclient.create_member({'member': form_data}).AndReturn(member)
        self.mox.ReplayAll()

        ret_val = api.lbaas.member_create(self.request, **form_data)
        self.assertIsInstance(ret_val, api.lbaas.Member)

    @test.create_stubs({neutronclient: ('list_members', 'list_pools')})
    def test_member_list(self):
        members = {'members': self.api_members.list()}
        pools = {'pools': self.api_pools.list()}

        neutronclient.list_members().AndReturn(members)
        neutronclient.list_pools().AndReturn(pools)
        self.mox.ReplayAll()

        ret_val = api.lbaas.member_list(self.request)
        for v in ret_val:
            self.assertIsInstance(v, api.lbaas.Member)
            self.assertTrue(v.id)

    @test.create_stubs({neutronclient: ('show_member', 'show_pool')})
    def test_member_get(self):
        member = self.members.first()
        member_dict = {'member': self.api_members.first()}
        pool_dict = {'pool': self.api_pools.first()}

        neutronclient.show_member(member.id).AndReturn(member_dict)
        neutronclient.show_pool(member.pool_id).AndReturn(pool_dict)
        self.mox.ReplayAll()

        ret_val = api.lbaas.member_get(self.request, member.id)
        self.assertIsInstance(ret_val, api.lbaas.Member)

    @test.create_stubs({neutronclient: ('update_member',)})
    def test_member_update(self):
        form_data = {'pool_id': 'abcdef-c3eb-4fee-9763-12de3338041e',
                     'address': '10.0.1.4',
                     'protocol_port': '80',
                     'weight': '10',
                     'admin_state_up': True
                     }
        member = {'member': {'id': 'abcdef-c3eb-4fee-9763-12de3338041e',
                             'pool_id': 'abcdef-c3eb-4fee-9763-12de3338041e',
                             'address': '10.0.1.2',
                             'protocol_port': '80',
                             'weight': '10',
                             'admin_state_up': True
                             }}
        neutronclient.update_member(member['member']['id'],
                                    form_data).AndReturn(member)
        self.mox.ReplayAll()

        ret_val = api.lbaas.member_update(self.request,
                                          member['member']['id'], **form_data)
        self.assertIsInstance(ret_val, api.lbaas.Member)
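These tests follow mox's record/replay protocol: each stubbed call is first recorded with its expected arguments and an AndReturn value, ReplayAll() flips the mocks into replay mode, and the code under test must then make exactly the recorded calls. A minimal standalone sketch of the pattern, assuming only the mox library itself (the Backend class is illustrative, not part of Horizon):

import mox


class Backend(object):  # illustrative stand-in for neutronclient
    def create_vip(self, body):
        raise RuntimeError("real backend must not be hit in tests")


m = mox.Mox()
backend = Backend()
m.StubOutWithMock(backend, 'create_vip')
backend.create_vip({'vip': {'name': 'v1'}}).AndReturn({'vip': {'id': '1'}})  # record phase
m.ReplayAll()  # switch to replay mode

assert backend.create_vip({'vip': {'name': 'v1'}}) == {'vip': {'id': '1'}}

m.VerifyAll()   # fails if any recorded call was never made
m.UnsetStubs()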
Source: tanglei528/horizon | openstack_dashboard/test/api_tests/lbaas_tests.py | Python | apache-2.0
# test-icmp6.py: count types of icmp6 packets
# Copyright (C) 2016, Nevil Brownlee, U Auckland | CAIDA | Wand

from plt_testing import *
from array import *

icmp_info = {}  # Empty dictionary

t = get_example_trace('icmp6-sample.pcap')

out_uri = 'pcapfile:icmp6-out.pcap'
of = plt.output_trace(out_uri)
of.start_output()

n = 0
# 1: 28535  2: 20  3: 89094  4: 4413  128: 46447  129: 36085
# 134: 1382  135: 148720  136: 16188

#t = get_rlt_example_file('icmp6.pcap')
# 1: 371  3: 32  4: 12  128: 37  129: 36  134: 5  135: 468  136: 39

for pkt in t:
    n += 1
    icmp6 = pkt.icmp6
    if not icmp6:
        continue
    it = icmp6.type
    if it in icmp_info:
        icmp_info[it] += 1
    else:
        icmp_info[it] = 1
    if icmp_info[it] <= 4:
        of.write_packet(pkt)

t.close()
of.close_output()

print "%d packets examined\n" % (n)
print "icmp6 types = ",
for type in sorted(icmp_info):
    print "%d: %d " % (type, icmp_info[type]),
print
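The membership-test-then-increment idiom above is exactly what collections.Counter packages up; a stdlib-only sketch of the same tally, with a plain list of ICMPv6 type codes standing in for the live plt trace (values illustrative):

from collections import Counter

observed_types = [135, 136, 128, 129, 135, 1]  # stand-in for pkt.icmp6.type values
counts = Counter(observed_types)               # type code -> packet count
for icmp_type in sorted(counts):
    print("%d: %d" % (icmp_type, counts[icmp_type]))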
Source: nevil-brownlee/pypy-libtrace | doc/examples/copy-icmp6.py | Python | gpl-3.0
# -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-12-28 02:21
from __future__ import unicode_literals

import datetime
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('app', '0006_auto_20161228_1016'),
    ]

    operations = [
        migrations.AlterField(
            model_name='hostrequest',
            name='lease_time',
            field=models.IntegerField(default=30),
        ),
        migrations.AlterField(
            model_name='idc',
            name='create_time',
            field=models.DateField(default=datetime.datetime(2016, 12, 28, 10, 21, 52, 541000),
                                   verbose_name='\u521b\u5efa\u65f6\u95f4'),
        ),
        migrations.AlterField(
            model_name='lease',
            name='lease_time',
            field=models.IntegerField(default=30),
        ),
    ]
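One detail worth flagging: create_time gets a frozen datetime.datetime(2016, 12, 28, ...) default, which is what makemigrations captures when a model default is evaluated at generation time (e.g. default=timezone.now() with parentheses), so every new row receives that one timestamp. A sketch of the conventional model-side fix, with an illustrative model class (only the field name comes from the migration above):

from django.db import models
from django.utils import timezone


class Idc(models.Model):  # illustrative; only the field comes from the migration above
    # Passing the callable itself (no parentheses) defers evaluation to row
    # creation time instead of freezing one timestamp into the migration.
    create_time = models.DateField(default=timezone.now,
                                   verbose_name='\u521b\u5efa\u65f6\u95f4')  # "creation time"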
Source: SnowRomance/CMDB | app/migrations/0007_auto_20161228_1021.py | Python | gpl-3.0
# ################################################################################
# ##
# ##  https://github.com/NetASM/NetASM-python
# ##
# ##  File:
# ##    optimize.py
# ##
# ##  Project:
# ##    NetASM: A Network Assembly Language for Programmable Dataplanes
# ##
# ##  Author:
# ##    Muhammad Shahbaz
# ##
# ##  Copyright notice:
# ##    Copyright (C) 2014 Princeton University
# ##    Network Operations and Internet Security Lab
# ##
# ##  Licence:
# ##    This file is a part of the NetASM development base package.
# ##
# ##    This file is free code: you can redistribute it and/or modify it under
# ##    the terms of the GNU Lesser General Public License version 2.1 as
# ##    published by the Free Software Foundation.
# ##
# ##    This package is distributed in the hope that it will be useful, but
# ##    WITHOUT ANY WARRANTY; without even the implied warranty of
# ##    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# ##    Lesser General Public License for more details.
# ##
# ##    You should have received a copy of the GNU Lesser General Public
# ##    License along with the NetASM source package. If not, see
# ##    http://www.gnu.org/licenses/.
# ################################################################################

__author__ = 'shahbaz'

from netasm.netasm.core.utilities.profile import time_usage
from netasm.netasm.core.syntax import InstructionCollection as I
from netasm.netasm.core.transformations import dead_code_elimination as dce
from netasm.netasm.core.transformations import redundant_code_elimination as rce
from netasm.netasm.core.transformations import add_code_motion as acm
from netasm.netasm.core.transformations import rmv_code_motion as rcm
# from netasm.netasm.core.transformations import rmv_code_insertion as rci


def _optimize_Code(code):
    code = acm.transform(code)
    code = rcm.transform(code)
    code = dce.transform(code)
    code = rce.transform(code)
    # code = rci.transform(code)

    return code


def optimize_Code(code):
    # Recurse into nested code blocks before optimizing this level.
    for instruction in code.instructions:
        if isinstance(instruction, I.CNC):
            codes = I.Codes()
            for _code in instruction.codes:
                codes.append(optimize_Code(_code))
            instruction.codes = codes
        elif isinstance(instruction, I.ATM):
            instruction.code = optimize_Code(instruction.code)
        elif isinstance(instruction, I.SEQ):
            instruction.code = optimize_Code(instruction.code)

    return _optimize_Code(code)


def optimize_Policy(policy):
    policy.code = optimize_Code(policy.code)
    return policy


@time_usage
def optimize_Policy__time_usage(policy):
    return optimize_Policy(policy)
Source: NetASM/NetASM-python | netasm/netasm/core/optimize.py | Python | gpl-2.0
from setuptools import setup

setup(name='KMOL 2016 project',
      version='1.0',
      description='OpenShift App',
      author='KMOL',
      author_email='course@mde.tw',
      url='https://www.python.org/community/sigs/current/distutils-sig',
      install_requires=['Flask>=0.10.1'],
      )
Source: smpss91341/2016springcd_aG8 | users/a/g8/setup.py | Python | agpl-3.0
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Getting Things GNOME! - a personal organizer for the GNOME desktop
# Copyright (c) 2008-2013 - Lionel Dricot & Bertrand Rousseau
#
# This program is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program. If not, see <http://www.gnu.org/licenses/>.
# -----------------------------------------------------------------------------

""" Tests for interrupting cooperative threads """

from threading import Thread, Event
import time
import unittest

from GTG.tools.interruptible import interruptible, _cancellation_point


class TestInterruptible(unittest.TestCase):
    """ Tests for interrupting cooperative threads """

    def test_interruptible_decorator(self):
        """ Tests for the @interruptible decorator. """
        self.quit_condition = False
        cancellation_point = lambda: _cancellation_point(
            lambda: self.quit_condition)
        self.thread_started = Event()

        @interruptible
        def never_ending(cancellation_point):
            self.thread_started.set()
            while True:
                time.sleep(0.1)
                cancellation_point()

        thread = Thread(target=never_ending, args=(cancellation_point, ))
        thread.start()
        self.thread_started.wait()

        self.quit_condition = True
        countdown = 10
        while thread.is_alive() and countdown > 0:
            time.sleep(0.1)
            countdown -= 1

        self.assertFalse(thread.is_alive())


def test_suite():
    return unittest.TestLoader().loadTestsFromTestCase(TestInterruptible)
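The test above exercises a cooperative-cancellation pattern: the worker polls a cancellation point at safe moments and unwinds when the quit flag flips. A minimal self-contained sketch of that idea, independent of GTG's actual interruptible implementation (the Interrupted exception name is an assumption):

import threading
import time


class Interrupted(Exception):
    """Raised inside the worker to unwind it cooperatively (illustrative name)."""


def make_cancellation_point(stop_event):
    # The worker calls this at safe points; it raises once a stop is requested.
    def cancellation_point():
        if stop_event.is_set():
            raise Interrupted()
    return cancellation_point


def worker(cancellation_point):
    try:
        while True:
            time.sleep(0.1)
            cancellation_point()  # safe point: may raise Interrupted
    except Interrupted:
        pass                      # clean, cooperative exit


stop = threading.Event()
t = threading.Thread(target=worker, args=(make_cancellation_point(stop),))
t.start()
stop.set()      # request cancellation
t.join(2.0)
assert not t.is_alive()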
Source: elianerpereira/gtg | GTG/tests/test_interruptible.py | Python | gpl-3.0
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

import django.utils.timezone
import model_utils.fields
from django.conf import settings
from django.db import migrations, models

import waldur_core.core.fields
import waldur_core.structure.models


class Migration(migrations.Migration):

    replaces = [('users', '0001_initial'),
                ('users', '0002_invitation_error_message'),
                ('users', '0003_invitation_civil_number'),
                ('users', '0004_migrate_to_new_permissions_model')]

    initial = True

    dependencies = [
        ('structure', '0001_squashed_0054'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Invitation',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
                ('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
                ('uuid', waldur_core.core.fields.UUIDField()),
                ('error_message', models.TextField(blank=True)),
                ('customer_role', waldur_core.structure.models.CustomerRole(blank=True, choices=[('owner', 'Owner'), ('support', 'Support')], max_length=30, null=True, verbose_name='organization role')),
                ('project_role', waldur_core.structure.models.ProjectRole(blank=True, choices=[('admin', 'Administrator'), ('manager', 'Manager'), ('support', 'Support')], max_length=30, null=True)),
                ('state', models.CharField(choices=[('accepted', 'Accepted'), ('canceled', 'Canceled'), ('pending', 'Pending'), ('expired', 'Expired')], default='pending', max_length=8)),
                ('link_template', models.CharField(help_text='The template must include {uuid} parameter e.g. http://example.com/invitation/{uuid}', max_length=255)),
                ('email', models.EmailField(help_text='Invitation link will be sent to this email. Note that user can accept invitation with different email.', max_length=254)),
                ('civil_number', models.CharField(blank=True, help_text='Civil number of invited user. If civil number is not defined any user can accept invitation.', max_length=50)),
                ('created_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='+', to=settings.AUTH_USER_MODEL)),
                ('customer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='invitations', to='structure.Customer', verbose_name='organization')),
                ('project', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='invitations', to='structure.Project')),
            ],
            options={
                'abstract': False,
            },
        ),
    ]
Source: opennode/nodeconductor | waldur_core/users/migrations/0001_squashed_0004.py | Python | mit
from rest_framework import generics

from .models import Proj
from .serializers import ProjSerializer


# Create your views here.
class ProjList(generics.ListCreateAPIView):
    """be report project list"""
    queryset = Proj.objects.all()
    serializer_class = ProjSerializer


class ProjDetail(generics.RetrieveUpdateDestroyAPIView):
    """be report project detail"""
    queryset = Proj.objects.all()
    serializer_class = ProjSerializer
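For these DRF generic views to serve requests they still need routes; a sketch of a matching urls.py, where the pattern strings and route names are assumptions (only the view classes come from the file above):

# urls.py (illustrative; pattern strings and names are assumptions)
from django.conf.urls import url

from . import views

urlpatterns = [
    url(r'^projs/$', views.ProjList.as_view(), name='proj-list'),
    url(r'^projs/(?P<pk>[0-9]+)/$', views.ProjDetail.as_view(), name='proj-detail'),
]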
Source: cmos3511/cmos_linux | python/op/op_site/proj_checker/views.py | Python | gpl-3.0
# vim:ts=4:et
# ##### BEGIN GPL LICENSE BLOCK #####
#
#  This program is free software; you can redistribute it and/or
#  modify it under the terms of the GNU General Public License
#  as published by the Free Software Foundation; either version 2
#  of the License, or (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  You should have received a copy of the GNU General Public License
#  along with this program; if not, write to the Free Software Foundation,
#  Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####

# <pep8 compliant>

from struct import unpack
import os.path
from math import pi, sqrt

import bpy
from bpy_extras.object_utils import object_data_add
from mathutils import Vector, Matrix, Quaternion

from .mu import MuEnum, Mu, MuColliderMesh, MuColliderSphere, MuColliderCapsule
from .mu import MuColliderBox, MuColliderWheel
from .shader import make_shader
from . import collider, properties


def create_uvs(mu, uvs, mesh, name):
    uvlay = mesh.uv_textures.new(name)
    uvloop = mesh.uv_layers[name]
    for i, uvl in enumerate(uvloop.data):
        v = mesh.loops[i].vertex_index
        uvl.uv = uvs[v]


def create_mesh(mu, mumesh, name):
    mesh = bpy.data.meshes.new(name)
    faces = []
    for sm in mumesh.submeshes:
        faces.extend(sm)
    mesh.from_pydata(mumesh.verts, [], faces)
    if mumesh.uvs:
        create_uvs(mu, mumesh.uvs, mesh, name + ".UV")
    if mumesh.uv2s:
        create_uvs(mu, mumesh.uv2s, mesh, name + ".UV2")
    return mesh


def create_mesh_object(name, mesh, transform):
    obj = bpy.data.objects.new(name, mesh)
    obj.rotation_mode = 'QUATERNION'
    if transform:
        obj.location = Vector(transform.localPosition)
        obj.rotation_quaternion = Quaternion(transform.localRotation)
        obj.scale = Vector(transform.localScale)
    else:
        obj.location = Vector((0, 0, 0))
        obj.rotation_quaternion = Quaternion((1, 0, 0, 0))
        obj.scale = Vector((1, 1, 1))
    bpy.context.scene.objects.link(obj)
    return obj


def copy_spring(dst, src):
    dst.spring = src.spring
    dst.damper = src.damper
    dst.targetPosition = src.targetPosition


def copy_friction(dst, src):
    dst.extremumSlip = src.extremumSlip
    dst.extremumValue = src.extremumValue
    dst.asymptoteSlip = src.asymptoteSlip
    dst.asymptoteValue = src.asymptoteValue
    dst.stiffness = src.stiffness


def create_light(mu, mulight, transform):
    ltype = ('SPOT', 'SUN', 'POINT', 'AREA')[mulight.type]
    light = bpy.data.lamps.new(transform.name, ltype)
    light.color = mulight.color[:3]
    light.distance = mulight.range
    light.energy = mulight.intensity
    if ltype == 'SPOT' and hasattr(mulight, "spotAngle"):
        light.spot_size = mulight.spotAngle * pi / 180
    obj = bpy.data.objects.new(transform.name, light)
    obj.rotation_mode = 'QUATERNION'
    obj.location = Vector(transform.localPosition)
    # Blender points spotlights along local -Z, unity along local +Z
    # which is Blender's +Y, so rotate 90 degrees around local X to
    # go from Unity to Blender
    rot = Quaternion((0.5**0.5, 0.5**0.5, 0, 0))
    obj.rotation_quaternion = rot * Quaternion(transform.localRotation)
    obj.scale = Vector(transform.localScale)
    properties.SetPropMask(obj.muproperties.cullingMask, mulight.cullingMask)
    bpy.context.scene.objects.link(obj)
    return obj


property_map = {
    "m_LocalPosition.x": ("location", 0, 1),
    "m_LocalPosition.y": ("location", 2, 1),
    "m_LocalPosition.z": ("location", 1, 1),
    "m_LocalRotation.x": ("rotation_quaternion", 1, -1),
    "m_LocalRotation.y": ("rotation_quaternion", 3, -1),
    "m_LocalRotation.z": ("rotation_quaternion", 2, -1),
    "m_LocalRotation.w": ("rotation_quaternion", 0, 1),
    "m_LocalScale.x": ("scale", 0, 1),
    "m_LocalScale.y": ("scale", 2, 1),
    "m_LocalScale.z": ("scale", 1, 1),
}


def create_fcurve(action, curve):
    try:
        dp, ind, mult = property_map[curve.property]
    except KeyError:
        print("%s: Unknown property: %s" % (curve.path, curve.property))
        return False
    fps = bpy.context.scene.render.fps
    fc = action.fcurves.new(data_path=dp, index=ind)
    fc.keyframe_points.add(len(curve.keys))
    for i, key in enumerate(curve.keys):
        x, y = key.time * fps, key.value * mult
        fc.keyframe_points[i].co = x, y
        fc.keyframe_points[i].handle_left_type = 'FREE'
        fc.keyframe_points[i].handle_right_type = 'FREE'
        if i > 0:
            dist = (key.time - curve.keys[i - 1].time) / 3
            dx, dy = dist * fps, key.tangent[0] * dist * mult
        else:
            dx, dy = 10, 0.0
        fc.keyframe_points[i].handle_left = x - dx, y - dy
        if i < len(curve.keys) - 1:
            dist = (curve.keys[i + 1].time - key.time) / 3
            dx, dy = dist * fps, key.tangent[1] * dist * mult
        else:
            dx, dy = 10, 0.0
        fc.keyframe_points[i].handle_right = x + dx, y + dy
    return True


def create_action(mu, path, clip):
    #print(clip.name)
    actions = {}
    for curve in clip.curves:
        if not curve.path:
            #FIXME need to look into this more as I'm not sure if the animation
            # is broken or if the property is somewhere weird
            continue
        name = ".".join([clip.name, curve.path])
        if name not in actions:
            mu_path = "/".join([path, curve.path])
            try:
                obj = mu.objects[mu_path]
            except KeyError:
                print("Unknown path: %s" % (mu_path))
                continue
            actions[name] = bpy.data.actions.new(name), obj
        act, obj = actions[name]
        if not create_fcurve(act, curve):
            continue
    for name in actions:
        act, obj = actions[name]
        if not obj.animation_data:
            obj.animation_data_create()
        track = obj.animation_data.nla_tracks.new()
        track.name = clip.name
        track.strips.new(act.name, 1.0, act)


def create_collider(mu, muobj):
    col = muobj.collider
    name = muobj.transform.name
    if type(col) == MuColliderMesh:
        name = name + ".collider"
        mesh = create_mesh(mu, col.mesh, name)
    elif type(col) == MuColliderSphere:
        mesh = collider.sphere(name, col.center, col.radius)
    elif type(col) == MuColliderCapsule:
        mesh = collider.capsule(name, col.center, col.radius, col.height,
                                col.direction)
    elif type(col) == MuColliderBox:
        mesh = collider.box(name, col.center, col.size)
    elif type(col) == MuColliderWheel:
        mesh = collider.wheel(name, col.center, col.radius)
    obj = create_mesh_object(name, mesh, None)
    obj.muproperties.isTrigger = False
    if type(col) != MuColliderWheel:
        obj.muproperties.isTrigger = col.isTrigger
    if type(col) == MuColliderMesh:
        obj.muproperties.collider = 'MU_COL_MESH'
    elif type(col) == MuColliderSphere:
        obj.muproperties.collider = 'MU_COL_SPHERE'
        obj.muproperties.radius = col.radius
        obj.muproperties.center = col.center
    elif type(col) == MuColliderCapsule:
        obj.muproperties.collider = 'MU_COL_CAPSULE'
        obj.muproperties.radius = col.radius
        obj.muproperties.height = col.height
        obj.muproperties.direction = properties.dir_map[col.direction]
        obj.muproperties.center = col.center
    elif type(col) == MuColliderBox:
        obj.muproperties.collider = 'MU_COL_BOX'
        obj.muproperties.size = col.size
        obj.muproperties.center = col.center
    elif type(col) == MuColliderWheel:
        obj.muproperties.collider = 'MU_COL_WHEEL'
        obj.muproperties.radius = col.radius
        obj.muproperties.suspensionDistance = col.suspensionDistance
        obj.muproperties.center = col.center
        copy_spring(obj.muproperties.suspensionSpring, col.suspensionSpring)
        copy_friction(obj.muproperties.forwardFriction, col.forwardFriction)
        copy_friction(obj.muproperties.sideFriction, col.sidewaysFriction)
    return obj


def create_object(mu, muobj, parent, create_colliders, parents):
    obj = None
    mesh = None
    if hasattr(muobj, "shared_mesh"):
        mesh = create_mesh(mu, muobj.shared_mesh, muobj.transform.name)
        for poly in mesh.polygons:
            poly.use_smooth = True
        obj = create_mesh_object(muobj.transform.name, mesh, muobj.transform)
    if hasattr(muobj, "renderer"):
        if mesh:
            mumat = mu.materials[muobj.renderer.materials[0]]
            mesh.materials.append(mumat.material)
    if not obj:
        if hasattr(muobj, "light"):
            obj = create_light(mu, muobj.light, muobj.transform)
    if not obj:
        obj = create_mesh_object(muobj.transform.name, None, muobj.transform)
    parents.append(muobj.transform.name)
    path = "/".join(parents)
    mu.objects[path] = obj
    if hasattr(muobj, "tag_and_layer"):
        obj.muproperties.tag = muobj.tag_and_layer.tag
        obj.muproperties.layer = muobj.tag_and_layer.layer
    if create_colliders and hasattr(muobj, "collider"):
        cobj = create_collider(mu, muobj)
        cobj.parent = obj
    obj.parent = parent
    for child in muobj.children:
        create_object(mu, child, obj, create_colliders, parents)
    if hasattr(muobj, "animation"):
        for clip in muobj.animation.clips:
            create_action(mu, path, clip)
    parents.remove(muobj.transform.name)
    return obj


def convert_bump(pixels, width, height):
    outp = list(pixels)
    for y in range(1, height - 1):
        for x in range(1, width - 1):
            index = (y * width + x) * 4
            p = pixels[index:index + 4]
            nx = (p[3] - 128) / 127.
            nz = (p[2] - 128) / 127.
            #n = [p[3],p[2],int(sqrt(1-nx**2-nz**2)*127 + 128),255]
            n = [p[3], p[2], 255, 255]
            outp[index:index + 4] = n
    return outp


def load_mbm(mbmpath):
    mbmfile = open(mbmpath, "rb")
    header = mbmfile.read(20)
    magic, width, height, bump, bpp = unpack("<5i", header)
    if magic != 0x50534b03:  # "\x03KSP" as little endian
        raise
    if bpp == 32:
        pixels = mbmfile.read(width * height * 4)
    elif bpp == 24:
        pixels = [0, 0, 0, 255] * width * height
        for i in range(width * height):
            p = mbmfile.read(3)
            l = i * 4
            pixels[l:l + 3] = list(p)
    else:
        raise
    if bump:
        pixels = convert_bump(pixels, width, height)
    return width, height, pixels


def load_image(name, path):
    if name[-4:].lower() in [".dds", ".png", ".tga"]:
        bpy.data.images.load(os.path.join(path, name))
    elif name[-4:].lower() == ".mbm":
        w, h, pixels = load_mbm(os.path.join(path, name))
        img = bpy.data.images.new(name, w, h)
        img.pixels[:] = map(lambda x: x / 255.0, pixels)
        img.pack(True)


def create_textures(mu, path):
    extensions = [".dds", ".mbm", ".tga", ".png"]
    #texture info is in the top level object
    for tex in mu.textures:
        base, ext = os.path.splitext(tex.name)
        ind = 0
        if ext in extensions:
            ind = extensions.index(ext)
        lst = extensions[ind:] + extensions[:ind]
        for e in lst:
            try:
                name = base + e
                load_image(name, path)
                tx = bpy.data.textures.new(tex.name, 'IMAGE')
                tx.use_preview_alpha = True
                tx.image = bpy.data.images[name]
                break
            except FileNotFoundError:
                continue
            except RuntimeError:
                continue
        pass


def add_texture(mu, mat, mattex):
    i, s, o = mattex.index, mattex.scale, mattex.offset
    mat.texture_slots.add()
    ts = mat.texture_slots[0]
    ts.texture = bpy.data.textures[mu.textures[i].name]
    ts.use_map_alpha = True
    ts.texture_coords = 'UV'
    ts.scale = s + (1,)
    ts.offset = o + (0,)


def create_materials(mu):
    #material info is in the top level object
    for mumat in mu.materials:
        mumat.material = make_shader(mumat, mu)


def import_mu(self, context, filepath, create_colliders):
    operator = self
    undo = bpy.context.user_preferences.edit.use_global_undo
    bpy.context.user_preferences.edit.use_global_undo = False

    for obj in bpy.context.scene.objects:
        obj.select = False

    mu = Mu()
    if not mu.read(filepath):
        bpy.context.user_preferences.edit.use_global_undo = undo
        operator.report({'ERROR'},
                        "Unrecognized format: %s %d" % (mu.magic, mu.version))
        return {'CANCELLED'}

    create_textures(mu, os.path.dirname(filepath))
    create_materials(mu)
    mu.objects = {}
    obj = create_object(mu, mu.obj, None, create_colliders, [])
    bpy.context.scene.objects.active = obj
    obj.select = True

    bpy.context.user_preferences.edit.use_global_undo = undo
    return {'FINISHED'}
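load_mbm documents the on-disk MBM layout implicitly through unpack("<5i", header): five little-endian int32 fields (magic, width, height, bump flag, bits per pixel) followed by raw pixel data. A small sketch that fabricates a tiny valid file the same way, handy for exercising the reader (the file name and pixel values are arbitrary):

from struct import pack

# 2x2 RGBA image: 20-byte header (magic, width, height, bump, bpp) + pixels.
header = pack("<5i", 0x50534b03, 2, 2, 0, 32)
pixels = bytes([255, 0, 0, 255] * 4)   # four opaque red pixels
with open("tiny.mbm", "wb") as f:      # arbitrary test filename
    f.write(header + pixels)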
Source: zlsa/io_object_mu | import_mu.py | Python | gpl-2.0
# -*- coding: utf-8 -*-

import pytest
from model_mommy import mommy

from conftest import create_user
from core.models import Class, Course


@pytest.mark.django_db
def test_lesson(admin_client):
    lesson = mommy.make('Lesson', slug='lesson', status='published')
    response = admin_client.get('/course/' + lesson.course.slug + '/lesson/' + lesson.slug + '/')
    assert response.status_code == 200
    assert lesson.name.encode('utf-8') in response.content


def assign_professor_to_course(course, existing_professor=None, new_professor_username=None, role=None):
    """
    :param existing_professor: Existing TimtecUser object
    :param new_professor_username: Username for a non-existing professor; if not defined, one will be created
    :param course: The course to assign the professor to
    :param role: Role to assign to the professor
    :return: The created/existing professor
    """
    if existing_professor is not None:
        professor = existing_professor
    else:
        professor = create_user(new_professor_username)
    mommy.make('CourseProfessor', user=professor, course=course, role=role)
    return professor


@pytest.mark.django_db
def test_assistant_professor_cannot_change_class_professor(client):
    course = mommy.make('Course', slug='dbsql', name='Test course name')
    coordinator_professor = assign_professor_to_course(course, new_professor_username='coordinator_professor', role='coordinator')
    assistant_professor = assign_professor_to_course(course, new_professor_username='assistant_professor', role='assistant')
    another_assistant = assign_professor_to_course(course, new_professor_username='another_assistant', role='assistant')

    klass = mommy.make('Class', name='Test class name', course=course, assistant=coordinator_professor)

    client.login(username=assistant_professor.username, password='password')
    response = client.post('/class/' + str(klass.id) + '/',
                           {'name': 'A class', 'assistant': another_assistant.id})
    assert response.status_code == 403


@pytest.mark.django_db
def test_coordinator_professor_can_change_class_professor(client):
    course = mommy.make('Course', slug='dbsql', name='Test course name')
    coordinator_professor = assign_professor_to_course(course, new_professor_username='coordinator_professor', role='coordinator')
    assistant_professor = assign_professor_to_course(course, new_professor_username='assistant_professor', role='assistant')
    another_assistant = assign_professor_to_course(course, new_professor_username='another_assistant', role='assistant')

    klass = mommy.make('Class', name='A class', course=course, assistant=assistant_professor)

    client.login(username=coordinator_professor.username, password='password')
    response = client.post('/class/' + str(klass.id) + '/',
                           {'name': 'A class', 'assistant': another_assistant.id})
    # The page redirects elsewhere on success
    assert response.status_code == 302
    changed_class = Class.objects.get(id=klass.id)
    assert changed_class.assistant == another_assistant


@pytest.mark.django_db
def test_assistant_professor_can_change_other_data_than_professor_on_its_own_class(client):
    course = mommy.make('Course', slug='dbsql', name='Another course')
    assign_professor_to_course(course, new_professor_username='coordinator_professor', role='coordinator')
    assistant_professor = assign_professor_to_course(course, new_professor_username='assistant_professor', role='assistant')

    klass = mommy.make('Class', name='Old class name', course=course, assistant=assistant_professor)

    client.login(username=assistant_professor.username, password='password')
    response = client.post('/class/' + str(klass.id) + '/',
                           {'name': 'New class name',
                            'assistant': assistant_professor.id})
    # The page redirects elsewhere on success
    assert response.status_code == 302
    changed_class = Class.objects.get(id=klass.id)
    assert changed_class.name == 'New class name'


@pytest.mark.django_db
def test_get_courses_user_has_role(client):
    course = mommy.make('Course', slug='dbsql', name='A course')
    another_course = mommy.make('Course', slug='mysql', name='Another course')
    course_whose_professor_coordinate = mommy.make('Course', slug='coordinatedcourse', name='Course whose professor coordinate')
    another_course_whose_professor_coordinate = mommy.make('Course', slug='anothercoordinatedcourse', name='Another course whose professor coordinate')

    professor1 = assign_professor_to_course(course, new_professor_username='professor1', role='assistant')
    assign_professor_to_course(another_course, existing_professor=professor1, role='assistant')
    assign_professor_to_course(course_whose_professor_coordinate, existing_professor=professor1, role='coordinator')
    assign_professor_to_course(another_course_whose_professor_coordinate, existing_professor=professor1, role='coordinator')

    client.login(username=professor1.username, password='password')
    response = client.get('/my-courses/')

    assert response.status_code == 200

    courses_user_assist = response.context[-1]['courses_user_assist']
    assert courses_user_assist

    courses_user_coordinate = response.context[-1]['courses_user_coordinate']
    assert courses_user_coordinate


@pytest.mark.django_db
def test_cannot_remove_courses_default_class(admin_client):
    course = mommy.make('Course', slug='mysql', name='A course')
    klass = course.default_class

    response = admin_client.post('/class/' + str(klass.id) + '/delete/')
    assert response.status_code == 403
    assert Class.objects.filter(id=klass.id).exists()
    assert Course.objects.filter(id=course.id).exists()


@pytest.mark.django_db
def test_course_average_lessons_users_progress_should_return_zero_with_no_students_on_course():
    course = mommy.make('Course', slug='dbsql', name='A course')
    lesson1 = mommy.make('Lesson', course=course, slug='lesson1')
    mommy.make('Lesson', course=course, slug='lesson2')
    mommy.make('Unit', lesson=lesson1, title='Title 1')

    progress_list = course.avg_lessons_users_progress()
    assert progress_list[0]['slug'] == 'lesson1'
    assert progress_list[0]['progress'] == 0
    assert progress_list[1]['slug'] == 'lesson2'
    assert progress_list[1]['progress'] == 0


@pytest.mark.django_db
def test_user_courses_cannot_show_assistant_and_coordinator_tabs_for_students(client):
    student = create_user('student')
    client.login(username=student.username, password='password')

    response = client.get('/my-courses/')
    assert 'href="#course-as-teacher"' not in response.content
    assert 'href="#course-as-coordinator"' not in response.content


@pytest.mark.django_db
def test_user_courses_must_show_assistant_tab_for_assistant(client):
    course = mommy.make('Course', slug='dbsql', name='A course')
    professor = assign_professor_to_course(course, new_professor_username='assistant_professor', role='assistant')

    client.login(username=professor.username, password='password')
    response = client.get('/my-courses/')
    assert 'href="#course-as-teacher"' in response.content
    assert 'href="#course-as-coordinator"' not in response.content


@pytest.mark.django_db
def test_user_courses_must_show_coordinator_tab_for_coordinator(client):
    course = mommy.make('Course', slug='dbsql', name='A course')
    professor = assign_professor_to_course(course, new_professor_username='coordinator_professor', role='coordinator')

    client.login(username=professor.username, password='password')
    response = client.get('/my-courses/')
    assert 'href="#course-as-teacher"' not in response.content
    assert 'href="#course-as-coordinator"' in response.content


@pytest.mark.django_db
def test_user_courses_must_show_assistant_and_coordinator_tabs_for_assistant_and_coordinator_professor(client):
    course_assisted = mommy.make('Course', slug='dbsql', name='Assisted course')
    professor = assign_professor_to_course(course_assisted, new_professor_username='professor', role='assistant')
    course_coordinated = mommy.make('Course', slug='coordinatedcourse', name='Coordinated course')
    assign_professor_to_course(course_coordinated, existing_professor=professor, role='coordinator')

    client.login(username=professor.username, password='password')
    response = client.get('/my-courses/')
    assert 'href="#course-as-teacher"' in response.content
    assert 'href="#course-as-coordinator"' in response.content


@pytest.mark.django_db
def test_course_professor_get_bio_or_pic_should_be_user_bio__or_pic_when_not_defined(admin_client):
    course = mommy.make('Course', slug='dbsql', name='A course')
    professor = create_user('professor')
    professor.biography = 'Professor Biography'
    # professor.picture = 'Some valid image'
    course_professor = mommy.make('CourseProfessor', user=professor, course=course, role='assistant')
    # course_professor = mommy.make('CourseProfessor', user=professor, course=course, picture=?, role='assistant')
    assert course_professor.get_biography() == 'Professor Biography'
    # assert course_professor.get_picture() == 'Same image as above'


@pytest.mark.django_db
def test_course_professor_get_bio_or_pic_should_be_course_professor_bio_or_pic_when_defined(admin_client):
    course = mommy.make('Course', slug='dbsql', name='A course')
    professor = create_user('professor')
    professor.biography = 'Professor Biography'
    # professor.picture = 'Some valid image'
    course_professor = mommy.make('CourseProfessor', user=professor, course=course, role='assistant')
    course_professor.biography = 'Course professor Biography'
    # course_professor.picture = 'Another valid image'
    assert course_professor.get_biography() == 'Course professor Biography'
    # assert course_professor.get_picture() == 'Course professor Picture'


@pytest.mark.django_db
def test_only_admin_or_coordinator_can_edit_course(client, admin_client):
    course = mommy.make('Course', slug='dbsql', name='A course', abstract='asdf')
    professor = create_user('professor')

    client.login(username=professor.username, password='password')
    response = client.post('/api/course/' + str(course.id),
                           {'id': str(course.id), 'slug': course.slug, 'abstract': 'A abstract'})
    assert response.status_code == 403
    assert course.abstract == 'asdf'

    mommy.make('CourseProfessor', user=professor, course=course, role='coordinator')
    response = client.post('/api/course/' + str(course.id),
                           {'id': str(course.id), 'slug': course.slug, 'abstract': 'A abstract'})
    changed_course = Course.objects.get(id=course.id)
    assert response.status_code == 200
    assert changed_course.abstract == 'A abstract'

    response = admin_client.post('/api/course/' + str(course.id),
                                 {'id': str(course.id), 'slug': course.slug, 'abstract': 'Another abstract'})
    changed_course = Course.objects.get(id=course.id)
    assert response.status_code == 200
    assert changed_course.abstract == 'Another abstract'


@pytest.mark.django_db
def test_only_admin_or_coordinator_can_edit_courseprofessors(client, admin_client):
    from core.models import CourseProfessor
    import json

    course = mommy.make('Course', slug='dbsql', name='A course', abstract='asdf')
    professor = create_user('professor')
    course_professor = mommy.make('CourseProfessor', course=course, biography='asdf')

    client.login(username=professor.username, password='password')
    response = client.put('/api/course_professor/' + str(course_professor.id),
                          {'id': str(course_professor.id), 'biography': 'A biography'},
                          content_type='application/json;charset=UTF-8')
    assert response.status_code == 403
    assert course_professor.biography == 'asdf'

    # set user as coordinator
    mommy.make('CourseProfessor', user=professor, course=course, role='coordinator')
    response = client.put('/api/course_professor/' + str(course_professor.id),
                          json.dumps({'id': str(course_professor.id), 'course': course.id, 'biography': 'A biography'}),
                          content_type='application/json;charset=UTF-8')
    changed_course_professor = CourseProfessor.objects.get(id=course_professor.id)
    assert response.status_code == 200
    assert changed_course_professor.biography == 'A biography'

    response = admin_client.put('/api/course_professor/' + str(course_professor.id),
                                json.dumps({'id': str(course_professor.id), 'course': course.id, 'biography': 'Another biography as admin'}),
                                content_type='application/json;charset=UTF-8')
    changed_course_professor = CourseProfessor.objects.get(id=course_professor.id)
    assert response.status_code == 200
    assert changed_course_professor.biography == 'Another biography as admin'
Source: mupi/tecsaladeaula | core/tests/test_views.py | Python | agpl-3.0
# encoding: utf-8
# module apt_pkg
# from /usr/lib/python3/dist-packages/apt_pkg.cpython-34m-x86_64-linux-gnu.so
# by generator 1.135
"""
Classes and functions wrapping the apt-pkg library.

The apt_pkg module provides several classes and functions for
accessing the functionality provided by the apt-pkg library.

Typical uses might include reading APT index files and configuration
files and installing or removing packages.
"""

# no imports

from .object import object


class Cdrom(object):
    """
    Cdrom()

    Cdrom objects can be used to identify Debian installation media
    and to add them to /etc/apt/sources.list.
    """
    def add(self, progress):  # real signature unknown; restored from __doc__
        """
        add(progress: apt_pkg.CdromProgress) -> bool

        Add the given CD-ROM to the sources.list. Return True on success;
        raise an error on failure or return False.
        """
        return False

    def ident(self, progress):  # real signature unknown; restored from __doc__
        """
        ident(progress: apt_pkg.CdromProgress) -> str

        Try to identify the CD-ROM and if successful return the hexadecimal
        CDROM-ID (and a integer version suffix separated by -) as a
        string. Otherwise, return None or raise an error.

        The ID is created by hashing all file and directory names on the
        CD-ROM and appending the version.
        """
        return ""

    def __init__(self):  # real signature unknown; restored from __doc__
        pass

    @staticmethod  # known case of __new__
    def __new__(*args, **kwargs):  # real signature unknown
        """ Create and return a new object.  See help(type) for accurate signature. """
        pass
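This is an IDE stub generated from the compiled extension, so the method bodies are placeholders. Against the real python-apt, the class is used roughly as follows; a sketch, assuming the stock apt.progress.base.CdromProgress helper is available:

import apt_pkg
import apt.progress.base

apt_pkg.init()  # initialise apt's configuration and system objects
cdrom = apt_pkg.Cdrom()
ok = cdrom.add(apt.progress.base.CdromProgress())  # scans the inserted disc
print(ok)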
Source: ProfessorX/Config | .PyCharm30/system/python_stubs/-1247971765/apt_pkg/Cdrom.py | Python | gpl-2.0
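The last file implements a three-hidden-layer correlational network (a deep CorrNet variant). Its train_common method below assembles, in the code's own names, single-view reconstruction losses L1 and L2 (decoding both views from each view's own encoding), a joint-encoding reconstruction loss L3, and a correlation term L4; for the default mtype="1111" the minimised objective is

\[
\mathcal{L} \;=\; L_1 + L_2 + L_3 \;-\; \lambda \sum_{j}
\frac{\sum_i \tilde h^{L}_{ij}\,\tilde h^{R}_{ij}}
     {\sqrt{\sum_i (\tilde h^{L}_{ij})^2 + \varepsilon}\;
      \sqrt{\sum_i (\tilde h^{R}_{ij})^2 + \varepsilon}}
\]

where \(\tilde h^{L}\) and \(\tilde h^{R}\) are the batch-centred top-layer codes (yy12 and yy22 in the code), \(i\) ranges over the batch, \(j\) over hidden units, and \(\varepsilon = 10^{-8}\) guards the square roots.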
__author__ = 'Sarath'

from pyNN import *
import time
from pyNN.optimization.optimization import *
from pyNN.util.Initializer import *
import pickle


class DeepCorrNet2(object):

    def init(self, numpy_rng, theano_rng=None, l_rate=0.01, optimization="sgd",
             tied=False, n_visible_left=None, n_visible_right=None, n_hidden=None,
             n_hidden2=None, n_hidden3=None, lamda=5,
             W_left=None, W_right=None, b_left=None, b_right=None,
             W_left_prime=None, W_right_prime=None, b_prime_left=None, b_prime_right=None,
             W_left2=None, W_right2=None, b_left2=None, b_right2=None,
             W_left_prime2=None, W_right_prime2=None, b_prime_left2=None, b_prime_right2=None,
             W_left3=None, W_right3=None, b3=None,
             W_left_prime3=None, W_right_prime3=None, b_prime_left3=None, b_prime_right3=None,
             input_left=None, input_right=None, hidden_activation="sigmoid",
             output_activation="sigmoid", loss_fn="squarrederror", op_folder=None):

        self.numpy_rng = numpy_rng
        if not theano_rng:
            theano_rng = RandomStreams(numpy_rng.randint(2 ** 30))
        self.theano_rng = theano_rng

        self.optimization = optimization
        self.l_rate = l_rate

        self.optimizer = get_optimizer(self.optimization, self.l_rate)
        self.Initializer = Initializer(self.numpy_rng)

        self.n_visible_left = n_visible_left
        self.n_visible_right = n_visible_right
        self.n_hidden = n_hidden
        self.n_hidden2 = n_hidden2
        self.n_hidden3 = n_hidden3
        self.lamda = lamda
        self.hidden_activation = hidden_activation
        self.output_activation = output_activation
        self.loss_fn = loss_fn
        self.tied = tied
        self.op_folder = op_folder

        self.W_left = self.Initializer.fan_based_sigmoid("W_left", W_left, n_visible_left, n_hidden)
        self.optimizer.register_variable("W_left", n_visible_left, n_hidden)
        self.W_right = self.Initializer.fan_based_sigmoid("W_right", W_right, n_visible_right, n_hidden)
        self.optimizer.register_variable("W_right", n_visible_right, n_hidden)
        self.W_left2 = self.Initializer.fan_based_sigmoid("W_left2", W_left2, n_hidden, n_hidden2)
        self.optimizer.register_variable("W_left2", n_hidden, n_hidden2)
        self.W_right2 = self.Initializer.fan_based_sigmoid("W_right2", W_right2, n_hidden, n_hidden2)
        self.optimizer.register_variable("W_right2", n_hidden, n_hidden2)
        self.W_left3 = self.Initializer.fan_based_sigmoid("W_left3", W_left3, n_hidden2, n_hidden3)
        self.optimizer.register_variable("W_left3", n_hidden2, n_hidden3)
        self.W_right3 = self.Initializer.fan_based_sigmoid("W_right3", W_right3, n_hidden2, n_hidden3)
        self.optimizer.register_variable("W_right3", n_hidden2, n_hidden3)

        if not tied:
            self.W_left_prime = self.Initializer.fan_based_sigmoid("W_left_prime", W_left_prime, n_hidden, n_visible_left)
            self.optimizer.register_variable("W_left_prime", n_hidden, n_visible_left)
            self.W_right_prime = self.Initializer.fan_based_sigmoid("W_right_prime", W_right_prime, n_hidden, n_visible_right)
            self.optimizer.register_variable("W_right_prime", n_hidden, n_visible_right)
            self.W_left_prime2 = self.Initializer.fan_based_sigmoid("W_left_prime2", W_left_prime2, n_hidden2, n_hidden)
            self.optimizer.register_variable("W_left_prime2", n_hidden2, n_hidden)
            self.W_right_prime2 = self.Initializer.fan_based_sigmoid("W_right_prime2", W_right_prime2, n_hidden2, n_hidden)
            self.optimizer.register_variable("W_right_prime2", n_hidden2, n_hidden)
            self.W_left_prime3 = self.Initializer.fan_based_sigmoid("W_left_prime3", W_left_prime3, n_hidden3, n_hidden2)
            self.optimizer.register_variable("W_left_prime3", n_hidden3, n_hidden2)
            self.W_right_prime3 = self.Initializer.fan_based_sigmoid("W_right_prime3", W_right_prime3, n_hidden3, n_hidden2)
            self.optimizer.register_variable("W_right_prime3", n_hidden3, n_hidden2)
        else:
            self.W_left_prime = self.W_left.T
            self.W_right_prime = self.W_right.T
            self.W_left_prime2 = self.W_left2.T
            self.W_right_prime2 = self.W_right2.T
            self.W_left_prime3 = self.W_left3.T
            self.W_right_prime3 = self.W_right3.T

        self.b_left = self.Initializer.zero_vector("b_left", b_left, n_hidden)
        self.optimizer.register_variable("b_left", 1, n_hidden)
        self.b_right = self.Initializer.zero_vector("b_right", b_right, n_hidden)
        self.optimizer.register_variable("b_right", 1, n_hidden)
        self.b_prime_left = self.Initializer.zero_vector("b_prime_left", b_prime_left, n_visible_left)
        self.optimizer.register_variable("b_prime_left", 1, n_visible_left)
        self.b_prime_right = self.Initializer.zero_vector("b_prime_right", b_prime_right, n_visible_right)
        self.optimizer.register_variable("b_prime_right", 1, n_visible_right)
        self.b_left2 = self.Initializer.zero_vector("b_left2", b_left2, n_hidden2)
        self.optimizer.register_variable("b_left2", 1, n_hidden2)
        self.b_right2 = self.Initializer.zero_vector("b_right2", b_right2, n_hidden2)
        self.optimizer.register_variable("b_right2", 1, n_hidden2)
        self.b_prime_left2 = self.Initializer.zero_vector("b_prime_left2", b_prime_left2, n_hidden)
        self.optimizer.register_variable("b_prime_left2", 1, n_hidden)
        self.b_prime_right2 = self.Initializer.zero_vector("b_prime_right2", b_prime_right2, n_hidden)
        self.optimizer.register_variable("b_prime_right2", 1, n_hidden)
        self.b3 = self.Initializer.zero_vector("b3", b3, n_hidden3)
        self.optimizer.register_variable("b3", 1, n_hidden3)
        self.b_prime_left3 = self.Initializer.zero_vector("b_prime_left3", b_prime_left3, n_hidden2)
        self.optimizer.register_variable("b_prime_left3", 1, n_hidden2)
        self.b_prime_right3 = self.Initializer.zero_vector("b_prime_right3", b_prime_right3, n_hidden2)
        self.optimizer.register_variable("b_prime_right3", 1, n_hidden2)

        if input_left is None:
            self.x_left = T.matrix(name='x_left')
        else:
            self.x_left = input_left

        if input_right is None:
            self.x_right = T.matrix(name='x_right')
        else:
            self.x_right = input_right

        if tied:
            self.params = [self.W_left, self.W_right, self.b_left, self.b_right,
                           self.b_prime_left, self.b_prime_right,
                           self.W_left2, self.W_right2, self.b_left2, self.b_right2,
                           self.b_prime_left2, self.b_prime_right2,
                           self.W_left3, self.W_right3, self.b3,
                           self.b_prime_left3, self.b_prime_right3]
            self.param_names = ["W_left", "W_right", "b_left", "b_right",
                                "b_prime_left", "b_prime_right",
                                "W_left2", "W_right2", "b_left2", "b_right2",
                                "b_prime_left2", "b_prime_right2",
                                "W_left3", "W_right3", "b3",
                                "b_prime_left3", "b_prime_right3"]
        else:
            self.params = [self.W_left, self.W_right, self.b_left, self.b_right,
                           self.b_prime_left, self.b_prime_right,
                           self.W_left_prime, self.W_right_prime,
                           self.W_left2, self.W_right2, self.b_left2, self.b_right2,
                           self.b_prime_left2, self.b_prime_right2,
                           self.W_left_prime2, self.W_right_prime2,
                           self.W_left3, self.W_right3, self.b3,
                           self.b_prime_left3, self.b_prime_right3,
                           self.W_left_prime3, self.W_right_prime3]
            self.param_names = ["W_left", "W_right", "b_left", "b_right",
                                "b_prime_left", "b_prime_right",
                                "W_left_prime", "W_right_prime",
                                "W_left2", "W_right2", "b_left2", "b_right2",
                                "b_prime_left2", "b_prime_right2",
                                "W_left_prime2", "W_right_prime2",
                                "W_left3", "W_right3", "b3",
                                "b_prime_left3", "b_prime_right3",
                                "W_left_prime3", "W_right_prime3"]

        self.proj_from_left = theano.function([self.x_left], self.project_from_left())
        self.proj_from_right = theano.function([self.x_right], self.project_from_right())
        self.recon_from_left = theano.function([self.x_left], self.reconstruct_from_left())
        self.recon_from_right = theano.function([self.x_right], self.reconstruct_from_right())
        self.save_params()

    def train_common(self, mtype="1111"):
        # L1: encode the left view through all three hidden layers, then
        # decode both views from that encoding.
        y1_pre = T.dot(self.x_left, self.W_left) + self.b_left
        y1 = activation(y1_pre, self.hidden_activation)
        yy1_pre = T.dot(y1, self.W_left2) + self.b_left2
        yy1 = activation(yy1_pre, self.hidden_activation)
        yy12_pre = T.dot(yy1, self.W_left3) + self.b3
        yy12 = activation(yy12_pre, self.hidden_activation)

        z10_left_pre = T.dot(yy12, self.W_left_prime3) + self.b_prime_left3
        z10_right_pre = T.dot(yy12, self.W_right_prime3) + self.b_prime_right3
        z10_left = activation(z10_left_pre, self.output_activation)
        z10_right = activation(z10_right_pre, self.output_activation)
        z1_left_pre = T.dot(z10_left, self.W_left_prime2) + self.b_prime_left2
        z1_right_pre = T.dot(z10_right, self.W_right_prime2) + self.b_prime_right2
        z1_left = activation(z1_left_pre, self.output_activation)
        z1_right = activation(z1_right_pre, self.output_activation)
        zz1_left_pre = T.dot(z1_left, self.W_left_prime) + self.b_prime_left
        zz1_right_pre = T.dot(z1_right, self.W_right_prime) + self.b_prime_right
        zz1_left = activation(zz1_left_pre, self.output_activation)
        zz1_right = activation(zz1_right_pre, self.output_activation)

        L1 = loss(zz1_left, self.x_left, self.loss_fn) + loss(zz1_right, self.x_right, self.loss_fn)

        # L2: same, starting from the right view.
        y2_pre = T.dot(self.x_right, self.W_right) + self.b_right
        y2 = activation(y2_pre, self.hidden_activation)
        yy2_pre = T.dot(y2, self.W_right2) + self.b_right2
        yy2 = activation(yy2_pre, self.hidden_activation)
        yy22_pre = T.dot(yy2, self.W_right3) + self.b3
        yy22 = activation(yy22_pre, self.hidden_activation)

        z20_left_pre = T.dot(yy22, self.W_left_prime3) + self.b_prime_left3
        z20_right_pre = T.dot(yy22, self.W_right_prime3) + self.b_prime_right3
        z20_left = activation(z20_left_pre, self.output_activation)
        z20_right = activation(z20_right_pre, self.output_activation)
        z2_left_pre = T.dot(z20_left, self.W_left_prime2) + self.b_prime_left2
        z2_right_pre = T.dot(z20_right, self.W_right_prime2) + self.b_prime_right2
        z2_left = activation(z2_left_pre, self.output_activation)
        z2_right = activation(z2_right_pre, self.output_activation)
        zz2_left_pre = T.dot(z2_left, self.W_left_prime) + self.b_prime_left
        zz2_right_pre = T.dot(z2_right, self.W_right_prime) + self.b_prime_right
        zz2_left = activation(zz2_left_pre, self.output_activation)
        zz2_right = activation(zz2_right_pre, self.output_activation)

        L2 = loss(zz2_left, self.x_left, self.loss_fn) + loss(zz2_right, self.x_right, self.loss_fn)

        # L3: encode both views jointly at the top layer, decode both views.
        y3left_pre = T.dot(self.x_left, self.W_left) + self.b_left
        y3right_pre = T.dot(self.x_right, self.W_right) + self.b_right
        y3left = activation(y3left_pre, self.hidden_activation)
        y3right = activation(y3right_pre, self.hidden_activation)
        yy3left_pre = T.dot(y3left, self.W_left2) + self.b_left2
        yy3right_pre = T.dot(y3right, self.W_right2) + self.b_right2
        yy3left = activation(yy3left_pre, self.hidden_activation)
        yy3right = activation(yy3right_pre, self.hidden_activation)
        y3_pre = T.dot(yy3left, self.W_left3) + T.dot(yy3right, self.W_right3) + self.b3
        y3 = activation(y3_pre, self.hidden_activation)

        z30_left_pre = T.dot(y3, self.W_left_prime3) + self.b_prime_left3
        z30_right_pre = T.dot(y3, self.W_right_prime3) + self.b_prime_right3
        z30_left = activation(z30_left_pre, self.output_activation)
        z30_right = activation(z30_right_pre, self.output_activation)
        z3_left_pre = T.dot(z30_left, self.W_left_prime2) + self.b_prime_left2
        z3_right_pre = T.dot(z30_right, self.W_right_prime2) + self.b_prime_right2
        z3_left = activation(z3_left_pre, self.output_activation)
        z3_right = activation(z3_right_pre, self.output_activation)
        zz3_left_pre = T.dot(z3_left, self.W_left_prime) + self.b_prime_left
        zz3_right_pre = T.dot(z3_right, self.W_right_prime) + self.b_prime_right
        zz3_left = activation(zz3_left_pre, self.output_activation)
        zz3_right = activation(zz3_right_pre, self.output_activation)

        L3 = loss(zz3_left, self.x_left, self.loss_fn) + loss(zz3_right, self.x_right, self.loss_fn)

        # L4: per-dimension correlation between the two top-layer codes.
        y1_mean = T.mean(yy12, axis=0)
        y1_centered = yy12 - y1_mean
        y2_mean = T.mean(yy22, axis=0)
        y2_centered = yy22 - y2_mean
        corr_nr = T.sum(y1_centered * y2_centered, axis=0)
        corr_dr1 = T.sqrt(T.sum(y1_centered * y1_centered, axis=0) + 1e-8)
        corr_dr2 = T.sqrt(T.sum(y2_centered * y2_centered, axis=0) + 1e-8)
        corr_dr = corr_dr1 * corr_dr2
        corr = corr_nr / corr_dr
        L4 = T.sum(corr) * self.lamda

        if mtype == "1111":
            print "1111"
            L = L1 + L2 + L3 - L4
        elif mtype == "1110":
            print "1110"
            L = L1 + L2 + L3
        elif mtype == "1101":
            print "1101"
            L = L1 + L2 - L4
        elif mtype == "0011":
            print "0011"
            L = L3 - L4
        elif mtype == "1100":
            print "1100"
            L = L1 + L2
        elif mtype == "0010":
            print "0010"
            L = L3

        cost = T.mean(L)

        gradients = T.grad(cost, self.params)
        updates = []
        for p, g, n in zip(self.params, gradients, self.param_names):
            gr, upd = self.optimizer.get_grad_update(n, g)
            updates.append((p, p + gr))
            updates.extend(upd)

        return cost, updates

    def project_from_left(self):
        y1_pre = T.dot(self.x_left, self.W_left) + self.b_left
        y1 = activation(y1_pre, self.hidden_activation)
        yy1_pre = T.dot(y1, self.W_left2) + self.b_left2
        yy1 = activation(yy1_pre, self.hidden_activation)
        yy12_pre = T.dot(yy1, self.W_left3) + self.b3
        yy12 = activation(yy12_pre, self.hidden_activation)
        return yy12

    def project_from_right(self):
        y2_pre = T.dot(self.x_right, self.W_right) + self.b_right
        y2 = activation(y2_pre, self.hidden_activation)
        yy2_pre = T.dot(y2, self.W_right2) + self.b_right2
        yy2 = activation(yy2_pre, self.hidden_activation)
        yy22_pre = T.dot(yy2, self.W_right3) + self.b3
        yy22 = activation(yy22_pre, self.hidden_activation)
        return yy22

    def reconstruct_from_left(self):
        y1_pre = T.dot(self.x_left, self.W_left) + self.b_left
        y1 = activation(y1_pre, self.hidden_activation)
        yy1_pre = T.dot(y1, self.W_left2) + self.b_left2
        yy1 = activation(yy1_pre, self.hidden_activation)
        yy12_pre = T.dot(yy1, self.W_left3) + self.b3
        yy12 = activation(yy12_pre, self.hidden_activation)

        z10_left_pre = T.dot(yy12, self.W_left_prime3) + self.b_prime_left3
        z10_right_pre = T.dot(yy12, self.W_right_prime3) + self.b_prime_right3
        z10_left = activation(z10_left_pre, self.output_activation)
        z10_right = activation(z10_right_pre, self.output_activation)
        z1_left_pre = T.dot(z10_left, self.W_left_prime2) + self.b_prime_left2
        z1_right_pre = T.dot(z10_right, self.W_right_prime2) + self.b_prime_right2
        z1_left = activation(z1_left_pre, self.output_activation)
        z1_right = activation(z1_right_pre, self.output_activation)
        zz1_left_pre = T.dot(z1_left, self.W_left_prime) + self.b_prime_left
        zz1_right_pre = T.dot(z1_right, self.W_right_prime) + self.b_prime_right
        zz1_left = activation(zz1_left_pre, self.output_activation)
        zz1_right = activation(zz1_right_pre, self.output_activation)
        return zz1_left, zz1_right

    def reconstruct_from_right(self):
        y2_pre = T.dot(self.x_right, self.W_right) + self.b_right
        y2 = activation(y2_pre, self.hidden_activation)
        yy2_pre = T.dot(y2, self.W_right2) + self.b_right2
        yy2 = activation(yy2_pre, self.hidden_activation)
        yy22_pre = T.dot(yy2, self.W_right3) + self.b3
        yy22 = activation(yy22_pre, self.hidden_activation)

        z20_left_pre = T.dot(yy22, self.W_left_prime3) + self.b_prime_left3
        z20_right_pre = T.dot(yy22, self.W_right_prime3) + self.b_prime_right3
        z20_left = activation(z20_left_pre, self.output_activation)
        z20_right = activation(z20_right_pre, self.output_activation)
        z2_left_pre = T.dot(z20_left, self.W_left_prime2) + self.b_prime_left2
        z2_right_pre = T.dot(z20_right, self.W_right_prime2) + self.b_prime_right2
        z2_left = activation(z2_left_pre, self.output_activation)
        z2_right = activation(z2_right_pre, self.output_activation)
        zz2_left_pre = T.dot(z2_left, self.W_left_prime) + self.b_prime_left
        zz2_right_pre = T.dot(z2_right, self.W_right_prime) + self.b_prime_right
        zz2_left = activation(zz2_left_pre, self.output_activation)
        zz2_right = activation(zz2_right_pre, self.output_activation)
        return zz2_left, zz2_right

    def get_lr_rate(self):
        return self.optimizer.get_l_rate()

    def set_lr_rate(self, new_lr):
        self.optimizer.set_l_rate(new_lr)

    def save_matrices(self):
        for p, nm in zip(self.params, self.param_names):
            numpy.save(self.op_folder + nm, p.get_value(borrow=True))

    def save_params(self):
        params = {}
        params["optimization"] = self.optimization
        params["l_rate"] = self.l_rate
        params["n_visible_left"] = self.n_visible_left
        params["n_visible_right"] = self.n_visible_right
        params["n_hidden"] = self.n_hidden
        params["n_hidden2"] = self.n_hidden2
        params["n_hidden3"] = self.n_hidden3
        params["lamda"] = self.lamda
        params["hidden_activation"] = self.hidden_activation
        params["output_activation"] = self.output_activation
        params["loss_fn"] = self.loss_fn
        params["tied"] = self.tied
        params["numpy_rng"] = self.numpy_rng
        params["theano_rng"] = self.theano_rng
        pickle.dump(params, open(self.op_folder + "params.pck", "wb"), -1)

    def load(self, folder, input_left=None, input_right=None):
        plist = pickle.load(open(folder + "params.pck", "rb"))
        print plist["n_hidden"]
        print type(plist["n_hidden"])

        self.init(plist["numpy_rng"], theano_rng=plist["theano_rng"],
                  l_rate=plist["l_rate"], optimization=plist["optimization"],
                  tied=plist["tied"], n_visible_left=plist["n_visible_left"],
                  n_visible_right=plist["n_visible_right"], n_hidden=plist["n_hidden"],
                  n_hidden2=plist["n_hidden2"], n_hidden3=plist["n_hidden3"],
                  lamda=plist["lamda"],
                  W_left=folder + "W_left", W_right=folder + "W_right",
                  b_left=folder + "b_left", b_right=folder + "b_right",
                  W_left_prime=folder + "W_left_prime", W_right_prime=folder + "W_right_prime",
                  b_prime_left=folder + "b_prime_left", b_prime_right=folder + "b_prime_right",
                  W_left2=folder + "W_left2", W_right2=folder + "W_right2",
                  b_left2=folder + "b_left2", b_right2=folder + "b_right2",
                  W_left_prime2=folder + "W_left_prime2", W_right_prime2=folder + "W_right_prime2",
                  b_prime_left2=folder + "b_prime_left2", b_prime_right2=folder + "b_prime_right2",
                  W_left3=folder + "W_left3", W_right3=folder + "W_right3", b3=folder + "b3",
                  W_left_prime3=folder + "W_left_prime3", W_right_prime3=folder + "W_right_prime3",
                  b_prime_left3=folder + "b_prime_left3", b_prime_right3=folder + "b_prime_right3",
                  input_left=input_left, input_right=input_right,
                  hidden_activation=plist["hidden_activation"],
                  output_activation=plist["output_activation"],
                  loss_fn=plist["loss_fn"], op_folder=folder)


def trainCorrNet(src_folder, sct_folder, tgt_folder, batch_size=20, training_epochs=40,
                 use_valid=False, l_rate=0.01, optimization="sgd", tied=False,
                 n_visible_left=None, n_visible_right=None, n_hidden=None, n_hidden2=None,
                 n_hidden3=None, lamda=5, W_left=None, W_right=None, b_left=None, b_right=None,
                 W_left_prime=None, W_right_prime=None, b_prime_left=None,
b_prime_right=None, W_left2=None, W_right2=None, b_left2=None, b_right2=None, W_left_prime2=None, W_right_prime2=None, b_prime_left2=None, b_prime_right2=None, W_left3=None, W_right3=None, b3=None, W_left_prime3=None, W_right_prime3=None, b_prime_left3=None, b_prime_right3=None, hidden_activation="sigmoid", output_activation="sigmoid", loss_fn = "squarrederror"): index = T.lscalar() x_left = T.matrix('x_left') x_right = T.matrix('x_right') rng = numpy.random.RandomState(123) theano_rng = RandomStreams(rng.randint(2 ** 30)) model = DeepCorrNet2() model.init(numpy_rng=rng, theano_rng=theano_rng, l_rate=l_rate, optimization=optimization, tied=tied, n_visible_left=n_visible_left, n_visible_right=n_visible_right, n_hidden=n_hidden,n_hidden2=n_hidden2, n_hidden3=n_hidden3, lamda=lamda, W_left=W_left, W_right=W_right, b_left=b_left, b_right=b_right, W_left_prime=W_left_prime, W_right_prime=W_right_prime, b_prime_left=b_prime_left, b_prime_right=b_prime_right, W_left2=W_left2, W_right2=W_right2, b_left2=b_left2, b_right2=b_right2, W_left_prime2=W_left_prime2, W_right_prime2=W_right_prime2, b_prime_left2=b_prime_left2, b_prime_right2=b_prime_right2, W_left3=W_left3, W_right3=W_right3, b3=b3, W_left_prime3=W_left_prime3, W_right_prime3=W_right_prime3, b_prime_left3=b_prime_left3, b_prime_right3=b_prime_right3, input_left=x_left, input_right=x_right, hidden_activation=hidden_activation, output_activation=output_activation, loss_fn =loss_fn, op_folder=tgt_folder) #model.load(tgt_folder,x_left,x_right) start_time = time.clock() train_set_x_left = theano.shared(numpy.asarray(numpy.zeros((1000,n_visible_left)), dtype=theano.config.floatX), borrow=True) train_set_x_right = theano.shared(numpy.asarray(numpy.zeros((1000,n_visible_right)), dtype=theano.config.floatX), borrow=True) common_cost, common_updates = model.train_common("1111") mtrain_common = theano.function([index], common_cost,updates=common_updates,givens=[(x_left, train_set_x_left[index * batch_size:(index + 1) * batch_size]),(x_right, train_set_x_right[index * batch_size:(index + 1) * batch_size])]) """left_cost, left_updates = model.train_left() mtrain_left = theano.function([index], left_cost,updates=left_updates,givens=[(x_left, train_set_x_left[index * batch_size:(index + 1) * batch_size])]) right_cost, right_updates = model.train_right() mtrain_right = theano.function([index], right_cost,updates=right_updates,givens=[(x_right, train_set_x_right[index * batch_size:(index + 1) * batch_size])])""" diff = 0 flag = 1 detfile = open(tgt_folder+"details.txt","w") detfile.close() oldtc = float("inf") for epoch in xrange(training_epochs): print "in epoch ", epoch c = [] ipfile = open(src_folder+"train/ip.txt","r") for line in ipfile: next = line.strip().split(",") if(next[0]=="xy"): if(next[1]=="dense"): denseTheanoloader(next[2]+"_left",train_set_x_left,"float32") denseTheanoloader(next[2]+"_right",train_set_x_right, "float32") else: sparseTheanoloader(next[2]+"_left",train_set_x_left,"float32",1000,n_visible_left) sparseTheanoloader(next[2]+"_right",train_set_x_right, "float32", 1000, n_visible_right) for batch_index in range(0,int(next[3])/batch_size): c.append(mtrain_common(batch_index)) if(flag==1): flag = 0 diff = numpy.mean(c) di = diff else: di = numpy.mean(c) - diff diff = numpy.mean(c) print 'Difference between 2 epochs is ', di print 'Training epoch %d, cost ' % epoch, diff ipfile.close() detfile = open(tgt_folder+"details.txt","a") detfile.write("train\t"+str(diff)+"\n") detfile.close() # save the parameters for every 5 epochs 
if((epoch+1)%5==0): model.save_matrices() end_time = time.clock() training_time = (end_time - start_time) print ' code ran for %.2fm' % (training_time / 60.) model.save_matrices()
apsarath/pyNN
model/crl/deepcorrnet2.py
Python
apache-2.0
24,479
0.007394
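For reference, the correlation term L4 that train_common() subtracts from the reconstruction losses can be sanity-checked outside Theano. The sketch below re-states it in plain NumPy under names of our own choosing (correlation_loss, the toy inputs); it is an illustration, not part of the repository:

import numpy

def correlation_loss(view1, view2, eps=1e-8):
    # Per-dimension Pearson correlation between two (n_samples, n_hidden3)
    # projections, summed over dimensions -- mirrors corr_nr / corr_dr above.
    v1 = view1 - view1.mean(axis=0)
    v2 = view2 - view2.mean(axis=0)
    nr = (v1 * v2).sum(axis=0)
    dr = numpy.sqrt((v1 * v1).sum(axis=0) + eps) * numpy.sqrt((v2 * v2).sum(axis=0) + eps)
    return (nr / dr).sum()

rng = numpy.random.RandomState(0)
left = rng.randn(100, 8)
right = left + 0.1 * rng.randn(100, 8)
print(correlation_loss(left, right))  # close to 8.0 for strongly correlated views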
# GUI Application automation and testing library # Copyright (C) 2006-2018 Mark Mc Mahon and Contributors # https://github.com/pywinauto/pywinauto/graphs/contributors # http://pywinauto.readthedocs.io/en/latest/credits.html # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # * Redistributions of source code must retain the above copyright notice, this # list of conditions and the following disclaimer. # # * Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # * Neither the name of pywinauto nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE # FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL # DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """Miscellaneous Control properties Test **What is checked** This checks various values related to a control in windows. The values tested are class_name The class type of the control style The Style of the control (GetWindowLong) exstyle The Extended Style of the control (GetWindowLong) help_id The Help ID of the control (GetWindowLong) control_id The Control ID of the control (GetWindowLong) user_data The User Data of the control (GetWindowLong) Visibility Whether the control is visible or not **How is it checked** After retrieving the information for the control we compare it to the same information from the reference control. **When is a bug reported** If the information does not match then a bug is reported. **Bug Extra Information** The bug contains the following extra information Name Description ValueType What value is incorrect (see above), String Ref The reference value converted to a string, String Loc The localised value converted to a string, String **Is Reference dialog needed** This test will not run if the reference controls are not available. **False positive bug reports** Some values can change easily without any bug being caused, for example User Data is actually meant for programmers to store information for the control and this can change every time the software is run. 
**Test Identifier** The identifier for this test/bug is "MiscValues"
"""

testname = "MiscValues"

import six


def MiscValuesTest(windows):
    """Return the bugs from checking miscellaneous values of a control"""
    bugs = []
    for win in windows:
        if not win.ref:
            continue

        diffs = {}

        if win.class_name() != win.ref.class_name():
            diffs[u"class_name"] = (win.class_name(), win.ref.class_name())

        if win.style() != win.ref.style():
            diffs[u"style"] = (win.style(), win.ref.style())

        if win.exstyle() != win.ref.exstyle():
            diffs[u"exstyle"] = (win.exstyle(), win.ref.exstyle())

        if win.context_help_id() != win.ref.context_help_id():
            diffs[u"help_id"] = (win.context_help_id(), win.ref.context_help_id())

        if win.control_id() != win.ref.control_id():
            diffs[u"control_id"] = (win.control_id(), win.ref.control_id())

        if win.is_visible() != win.ref.is_visible():
            diffs[u"Visibility"] = (win.is_visible(), win.ref.is_visible())

        if win.user_data() != win.ref.user_data():
            diffs[u"user_data"] = (win.user_data(), win.ref.user_data())

        for diff, vals in diffs.items():
            bugs.append((
                [win, ],
                {
                    "ValueType": diff,
                    "Ref": six.text_type(vals[1]),
                    "Loc": six.text_type(vals[0]),
                },
                testname,
                0,)
            )
    return bugs
vasily-v-ryabov/pywinauto
pywinauto/tests/miscvalues.py
Python
bsd-3-clause
4,700
0.001064
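A minimal sketch of what MiscValuesTest returns, using throwaway stand-ins for pywinauto control wrappers (FakeCtrl and its attribute values are ours; real callers pass wrapped controls whose .ref points at the matching reference control, and six is already imported by the module above):

class FakeCtrl:
    # Each queried property (class_name, style, ...) is served from a dict.
    def __init__(self, ref=None, **vals):
        self.ref = ref
        self._vals = vals
    def __getattr__(self, name):
        return lambda: self._vals.get(name)

ref = FakeCtrl(class_name="Button", style=0, exstyle=0, context_help_id=0,
               control_id=1, is_visible=True, user_data=0)
loc = FakeCtrl(ref=ref, class_name="Button", style=0, exstyle=0, context_help_id=0,
               control_id=2, is_visible=True, user_data=0)  # only control_id differs

for ctrls, info, name, severity in MiscValuesTest([loc]):
    print(name, info["ValueType"], "ref:", info["Ref"], "loc:", info["Loc"])
    # -> MiscValues control_id ref: 1 loc: 2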
""" Makes possible reporter classes, which are triggered on particular events and may provide information to the user, may do something else such as checkpointing, or may do both. """ from __future__ import division, print_function import time from neat.math_util import mean, stdev from neat.six_util import itervalues, iterkeys # TODO: Add a curses-based reporter. class ReporterSet(object): """ Keeps track of the set of reporters and gives methods to dispatch them at appropriate points. """ def __init__(self): self.reporters = [] def add(self, reporter): self.reporters.append(reporter) def remove(self, reporter): self.reporters.remove(reporter) def start_generation(self, gen): for r in self.reporters: r.start_generation(gen) def end_generation(self, config, population, species_set): for r in self.reporters: r.end_generation(config, population, species_set) def post_evaluate(self, config, population, species, best_genome): for r in self.reporters: r.post_evaluate(config, population, species, best_genome) def post_reproduction(self, config, population, species): for r in self.reporters: r.post_reproduction(config, population, species) def complete_extinction(self): for r in self.reporters: r.complete_extinction() def found_solution(self, config, generation, best): for r in self.reporters: r.found_solution(config, generation, best) def species_stagnant(self, sid, species): for r in self.reporters: r.species_stagnant(sid, species) def info(self, msg): for r in self.reporters: r.info(msg) class BaseReporter(object): """Definition of the reporter interface expected by ReporterSet.""" def start_generation(self, generation): pass def end_generation(self, config, population, species_set): pass def post_evaluate(self, config, population, species, best_genome): pass def post_reproduction(self, config, population, species): pass def complete_extinction(self): pass def found_solution(self, config, generation, best): pass def species_stagnant(self, sid, species): pass def info(self, msg): pass class StdOutReporter(BaseReporter): """Uses `print` to output information about the run; an example reporter class.""" def __init__(self, show_species_detail): self.show_species_detail = show_species_detail self.generation = None self.generation_start_time = None self.generation_times = [] self.num_extinctions = 0 def start_generation(self, generation): self.generation = generation print('\n ****** Running generation {0} ****** \n'.format(generation)) self.generation_start_time = time.time() def end_generation(self, config, population, species_set): ng = len(population) ns = len(species_set.species) if self.show_species_detail: print('Population of {0:d} members in {1:d} species:'.format(ng, ns)) sids = list(iterkeys(species_set.species)) sids.sort() print(" ID age size fitness adj fit stag") print(" ==== === ==== ======= ======= ====") for sid in sids: s = species_set.species[sid] a = self.generation - s.created n = len(s.members) f = "--" if s.fitness is None else "{:.1f}".format(s.fitness) af = "--" if s.adjusted_fitness is None else "{:.3f}".format(s.adjusted_fitness) st = self.generation - s.last_improved print( " {: >4} {: >3} {: >4} {: >7} {: >7} {: >4}".format(sid, a, n, f, af, st)) else: print('Population of {0:d} members in {1:d} species'.format(ng, ns)) elapsed = time.time() - self.generation_start_time self.generation_times.append(elapsed) self.generation_times = self.generation_times[-10:] average = sum(self.generation_times) / len(self.generation_times) print('Total extinctions: 
{0:d}'.format(self.num_extinctions)) if len(self.generation_times) > 1: print("Generation time: {0:.3f} sec ({1:.3f} average)".format(elapsed, average)) else: print("Generation time: {0:.3f} sec".format(elapsed)) def post_evaluate(self, config, population, species, best_genome): # pylint: disable=no-self-use fitnesses = [c.fitness for c in itervalues(population)] fit_mean = mean(fitnesses) fit_std = stdev(fitnesses) best_species_id = species.get_species_id(best_genome.key) print('Population\'s average fitness: {0:3.5f} stdev: {1:3.5f}'.format(fit_mean, fit_std)) print( 'Best fitness: {0:3.5f} - size: {1!r} - species {2} - id {3}'.format(best_genome.fitness, best_genome.size(), best_species_id, best_genome.key)) def complete_extinction(self): self.num_extinctions += 1 print('All species extinct.') def found_solution(self, config, generation, best): print('\nBest individual in generation {0} meets fitness threshold - complexity: {1!r}'.format( self.generation, best.size())) def species_stagnant(self, sid, species): if self.show_species_detail: print("\nSpecies {0} with {1} members is stagnated: removing it".format(sid, len(species.members))) def info(self, msg): print(msg)
drallensmith/neat-python
neat/reporting.py
Python
bsd-3-clause
5,924
0.002363
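Typical wiring for these reporters in a neat-python run, assuming a neat.Config instance and a genome-evaluation function eval_genomes defined elsewhere:

import neat

p = neat.Population(config)
p.add_reporter(neat.StdOutReporter(show_species_detail=True))
p.add_reporter(neat.StatisticsReporter())  # optional: aggregates fitness statistics
winner = p.run(eval_genomes, 300)          # StdOutReporter prints once per generation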
import urllib, urllib2, sys, httplib

url = "/MELA/REST_WS"

#HOST_IP="128.130.172.191:8180"
newName="EventProcessingTopology_STRATEGY_MELA_COST_RECOMMENDATION_EFFICIENCY_Larger_VMs"
HOST_IP="localhost:8480"

if __name__=='__main__':
    connection = httplib.HTTPConnection(HOST_IP)
    description_file = open("./20hstruct_LAST_ADDED_LARGER_VMS.xml", "r")
    body_content = description_file.read()
    headers={
        'Content-Type':'application/xml; charset=utf-8',
        'Accept':'application/json, multipart/related'
    }
    connection.request('PUT', url+'/service/emulate/'+newName, body=body_content, headers=headers,)
    result = connection.getresponse()
    print result.read()
tuwiendsg/MELA
MELA-Extensions/MELA-ComplexCostEvaluationService/tests/mela-clients/emulateCost.py
Python
apache-2.0
719
0.044506
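For comparison, the same PUT written against the requests package (a sketch; host, path and payload simply mirror the Python 2 script above):

import requests

name = "EventProcessingTopology_STRATEGY_MELA_COST_RECOMMENDATION_EFFICIENCY_Larger_VMs"
with open("./20hstruct_LAST_ADDED_LARGER_VMS.xml", "rb") as f:
    resp = requests.put(
        "http://localhost:8480/MELA/REST_WS/service/emulate/" + name,
        data=f.read(),
        headers={
            "Content-Type": "application/xml; charset=utf-8",
            "Accept": "application/json, multipart/related",
        },
    )
print(resp.text)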
# Copyright 2017 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================= """Operations for TPUs.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import platform from tensorflow.python.framework import dtypes from tensorflow.python.framework import ops if platform.system() != "Windows": # pylint: disable=wildcard-import,unused-import,g-import-not-at-top from tensorflow.contrib.tpu.ops import gen_tpu_ops from tensorflow.contrib.tpu.ops.gen_tpu_ops import * from tensorflow.contrib.util import loader from tensorflow.python.platform import resource_loader # pylint: enable=wildcard-import,unused-import,g-import-not-at-top _tpu_ops = loader.load_op_library( resource_loader.get_path_to_datafile("_tpu_ops.so")) @ops.RegisterGradient("CrossReplicaSum") def _cross_replica_sum_grad(op, grad): del op # Unused # The gradient of a cross replica sum is also a cross-replica sum. return gen_tpu_ops.cross_replica_sum(grad) # This extra type checking exists to give a more helpful error message in # the common case that uint8 and int64 values are infed. Remove when both # types are supported. _SUPPORTED_INFEED_DTYPES = set([ dtypes.bool, dtypes.int32, dtypes.bfloat16, dtypes.float32 ]) def infeed_dequeue(dtype, shape, name=None): """A placeholder op for a value that will be fed into the computation. Args: dtype: A `tf.DType`. The type of elements in the tensor. shape: A `tf.TensorShape` or list of `ints`. The shape of the tensor. name: A name for the operation (optional). Returns: A `Tensor` of type `dtype`. A tensor that will be provided using the infeed mechanism. Raises: TypeError: If 'dtype` is not a supported infeed type. """ if dtype not in _SUPPORTED_INFEED_DTYPES: raise TypeError( "{} is not a supported TPU infeed type. Supported types are: " "{}".format(dtype, list(_SUPPORTED_INFEED_DTYPES))) return gen_tpu_ops.infeed_dequeue(dtype, shape, name=name) # pylint: disable=redefined-outer-name def infeed_dequeue_tuple(dtypes, shapes, name=None): """A placeholder op for values fed into the TPU simultaneously as a tuple. Args: dtypes: A list of `tf.DType`s that has length `>= 1`. The element types of each element in `outputs`. shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`). The shapes of each tensor in `outputs`. name: A name for the operation (optional). Returns: A list of `Tensor` objects of type `dtypes`. A list of tensors that will be provided using the infeed mechanism. Raises: TypeError: If a type in 'dtypes` is not a supported infeed type. """ for dtype in dtypes: if dtype not in _SUPPORTED_INFEED_DTYPES: raise TypeError( "{} is not a supported TPU infeed type. 
Supported types are: " "{}".format(dtype, list(_SUPPORTED_INFEED_DTYPES))) return gen_tpu_ops.infeed_dequeue_tuple(dtypes, shapes, name=name) # pylint: enable=redefined-outer-name else: # We have already built the appropriate libraries into the binary via CMake # if we have built contrib, so we don't need this pass
rabipanda/tensorflow
tensorflow/contrib/tpu/python/ops/tpu_ops.py
Python
apache-2.0
3,909
0.005628
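The dtype guard in infeed_dequeue / infeed_dequeue_tuple can be exercised without TPU hardware; the following is a runnable re-statement of that check (the helper name check_infeed_dtype is ours):

from tensorflow.python.framework import dtypes

_SUPPORTED_INFEED_DTYPES = {dtypes.bool, dtypes.int32, dtypes.bfloat16, dtypes.float32}

def check_infeed_dtype(dtype):
    # The same membership test infeed_dequeue performs before building the op.
    if dtype not in _SUPPORTED_INFEED_DTYPES:
        raise TypeError("{} is not a supported TPU infeed type. Supported types are: "
                        "{}".format(dtype, list(_SUPPORTED_INFEED_DTYPES)))

check_infeed_dtype(dtypes.float32)    # fine
try:
    check_infeed_dtype(dtypes.uint8)  # the common mistake the source comment mentions
except TypeError as e:
    print(e)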
""" Using dates with timeseries models """ import statsmodels.api as sm import numpy as np import pandas # Getting started # --------------- data = sm.datasets.sunspots.load() # Right now an annual date series must be datetimes at the end of the year. from datetime import datetime dates = sm.tsa.datetools.dates_from_range('1700', length=len(data.endog)) # Using Pandas # ------------ # Make a pandas TimeSeries or DataFrame endog = pandas.TimeSeries(data.endog, index=dates) # and instantiate the model ar_model = sm.tsa.AR(endog, freq='A') pandas_ar_res = ar_model.fit(maxlag=9, method='mle', disp=-1) # Let's do some out-of-sample prediction pred = pandas_ar_res.predict(start='2005', end='2015') print pred # Using explicit dates # -------------------- ar_model = sm.tsa.AR(data.endog, dates=dates, freq='A') ar_res = ar_model.fit(maxlag=9, method='mle', disp=-1) pred = ar_res.predict(start='2005', end='2015') print pred # This just returns a regular array, but since the model has date information # attached, you can get the prediction dates in a roundabout way. print ar_res._data.predict_dates # This attribute only exists if predict has been called. It holds the dates # associated with the last call to predict. #..TODO: should this be attached to the results instance?
pprett/statsmodels
examples/tsa/ex_dates.py
Python
bsd-3-clause
1,296
0.001543
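Note that pandas.TimeSeries has since been folded into pandas.Series; on a current pandas the equivalent construction would be (a sketch, reusing data and dates from the example above):

import pandas as pd

endog = pd.Series(data.endog, index=pd.DatetimeIndex(dates))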
# -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2016, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import unicode_literals

from django.utils.translation import ugettext_lazy as _

from shuup.admin.utils.picotable import Column, TextFilter
from shuup.admin.utils.views import PicotableListView
from shuup.core.models import Manufacturer


class ManufacturerListView(PicotableListView):
    model = Manufacturer
    default_columns = [
        Column(
            "name",
            _(u"Name"),
            sort_field="name",
            display="name",
            filter_config=TextFilter(
                filter_field="name",
                placeholder=_("Filter by name...")
            )
        ),
    ]
shawnadelic/shuup
shuup/admin/modules/manufacturers/views/list.py
Python
agpl-3.0
872
0
import os

from app import config

"""
Revision ID: 0133_set_services_sms_prefix
Revises: 0132_add_sms_prefix_setting
Create Date: 2017-11-03 15:55:35.657488
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

revision = '0133_set_services_sms_prefix'
down_revision = '0132_add_sms_prefix_setting'

config = config.configs[os.environ['NOTIFY_ENVIRONMENT']]
default_sms_sender = config.FROM_NUMBER


def upgrade():
    op.execute("""
        update services
        set prefix_sms = True
        where id in (
            select service_id
            from service_sms_senders
            where is_default = True
            and sms_sender = '{}'
        )
    """.format(default_sms_sender))

    op.execute("""
        update services
        set prefix_sms = False
        where id in (
            select service_id
            from service_sms_senders
            where is_default = True
            and sms_sender != '{}'
        )
    """.format(default_sms_sender))


def downgrade():
    op.execute("""
        UPDATE services set prefix_sms = null
    """)
alphagov/notifications-api
migrations/versions/0133_set_services_sms_prefix.py
Python
mit
1,045
0
from django.utils.translation import ugettext_lazy as _


class PersonGender():
    NOT_SPECIFIED = 0
    MALE = 1
    FEMALE = 2

    GENDER_OPTIONS = (
        (NOT_SPECIFIED, _('Not specified')),
        (MALE, _('Male')),
        (FEMALE, _('Female')),
    )


class PersonReligion():
    NOT_SPECIFIED = 0
    CHRISTIANITY = 1
    ISLAM = 2
    HINDUISM = 3
    BUDDHISM = 4

    RELIGION_OPTIONS = (
        (NOT_SPECIFIED, _('Not specified')),
        (CHRISTIANITY, _('Christianity')),
        (ISLAM, _('Islam')),
        (HINDUISM, _('Hinduism')),
        (BUDDHISM, _('Buddhism')),
    )
BontaVlad/ExpirationDate
expirationDate/persons/constants.py
Python
mit
599
0
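These option tuples are shaped for Django's choices machinery; a sketch of how a model might consume them (the Person model here is hypothetical, not part of the repository):

from django.db import models

class Person(models.Model):
    gender = models.PositiveSmallIntegerField(
        choices=PersonGender.GENDER_OPTIONS,
        default=PersonGender.NOT_SPECIFIED,
    )
    religion = models.PositiveSmallIntegerField(
        choices=PersonReligion.RELIGION_OPTIONS,
        default=PersonReligion.NOT_SPECIFIED,
    )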
# -*- coding: utf-8 -*-
"""
=============================
Chemtrails
=============================

The past trajectory of an animated plot can be visualized with the chemtrails
argument. This displays a low opacity version of the trace behind the current
points being plotted. This can be used in conjunction with the precog argument
to plot a low-opacity trace of the entire timeseries.
"""

# Code source: Andrew Heusser
# License: MIT

# import
import hypertools as hyp

# load example data
geo = hyp.load('weights_avg')

# plot
geo.plot(animate=True, chemtrails=True)
ContextLab/hypertools
examples/chemtrails.py
Python
mit
575
0
#
# test_codecmaps_tw.py
#   Codec mapping tests for ROC encodings
#

from test import support
from test import multibytecodec_support
import unittest

class TestBIG5Map(multibytecodec_support.TestBase_Mapping,
                  unittest.TestCase):
    encoding = 'big5'
    mapfileurl = 'http://www.unicode.org/Public/MAPPINGS/OBSOLETE/' \
                 'EASTASIA/OTHER/BIG5.TXT'

class TestCP950Map(multibytecodec_support.TestBase_Mapping,
                   unittest.TestCase):
    encoding = 'cp950'
    mapfileurl = 'http://www.unicode.org/Public/MAPPINGS/VENDORS/MICSFT/' \
                 'WINDOWS/CP950.TXT'
    pass_enctest = [
        (b'\xa2\xcc', '\u5341'),
        (b'\xa2\xce', '\u5345'),
    ]
    codectests = (
        (b"\xFFxy", "replace", "\ufffdxy"),
    )

if __name__ == "__main__":
    unittest.main()
jiangzhuo/kbengine
kbe/src/lib/python/Lib/test/test_codecmaps_tw.py
Python
lgpl-3.0
831
0.00361
import unittest
import singer.statediff as statediff
from singer.statediff import Add, Remove, Change


class TestPaths(unittest.TestCase):

    def test_simple_dict(self):
        self.assertEqual(
            [(('a',), 1), (('b',), 2)],
            statediff.paths({'a': 1, 'b': 2}))

    def test_nested_dict(self):
        self.assertEqual(
            [(('a', 'b'), 1), (('a', 'c'), 2), (('d', 'e'), 3)],
            statediff.paths(
                {
                    'a': {
                        'b': 1,
                        'c': 2
                    },
                    'd': {
                        'e': 3
                    }
                }
            )
        )

    def test_simple_array(self):
        self.assertEqual(
            [((0,), 'blue'), ((1,), 'green')],
            statediff.paths(
                ['blue', 'green']))

    def test_nested_array(self):
        self.assertEqual(
            [((0, 0), 'blue'), ((0, 1), 'red'), ((1, 0), 'green')],
            statediff.paths([['blue', 'red'], ['green']]))

    def test_arrays_in_dicts(self):
        self.assertEqual(
            [(('a', 0), 'blue'), (('a', 1), 'red'), (('b', 0), 'green')],
            statediff.paths(
                {
                    'a': ['blue', 'red'],
                    'b': ['green']
                }
            )
        )

    def test_none(self):
        self.assertEqual([], statediff.paths(None))


class TestDiff(unittest.TestCase):

    def test_add(self):
        self.assertEqual(
            [Add(('a',), 1), Add(('b',), 2)],
            statediff.diff({}, {'a': 1, 'b': 2}))

    def test_remove(self):
        self.assertEqual(
            [Remove(('a',), 1), Remove(('b',), 2)],
            statediff.diff({'a': 1, 'b': 2}, {}))

    def test_change(self):
        self.assertEqual(
            [Change(('a',), 1, 100), Change(('b',), 2, 200)],
            statediff.diff({'a': 1, 'b': 2}, {'a': 100, 'b': 200}))

    def test_null_input_for_old(self):
        self.assertEqual(
            [Add(('a',), 1)],
            statediff.diff(None, {'a': 1}))

    def test_null_input_for_new(self):
        self.assertEqual(
            [Remove(('a',), 1)],
            statediff.diff({'a': 1}, None))

    def test_null_input_for_both(self):
        self.assertEqual([], statediff.diff(None, None))

    def test_null_at_leaf(self):
        self.assertEqual(
            [Change(('a',), 1, None), Change(('b',), None, 2)],
            statediff.diff({'a': 1, 'b': None}, {'a': None, 'b': 2}))
singer-io/singer-python
tests/test_statediff.py
Python
apache-2.0
2,807
0.002494
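The same API in interactive form (a sketch; the exact ordering and reprs of the returned tuples depend on the singer-python version installed):

import singer.statediff as statediff

old = {"bookmarks": {"users": {"page": 1}}}
new = {"bookmarks": {"users": {"page": 2}, "orders": {"page": 1}}}
for change in statediff.diff(old, new):
    print(change)  # one Change for users.page, one Add for orders.page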
import six, json from .logclient_operator import list_more from .logexception import LogException from .pluralize import pluralize from .common_response import * DEFAULT_MAX_LIST_PAGING_SIZE = 500 def create_entity(entity_name, root_resource=None): def fn(self, project, detail): """ Create {entity_title}. Unsuccessful opertaion will cause an LogException. :type project: string :param project: project name :type detail: dict/string :param detail: json string :return: CreateEntityResponse :raise: LogException """ params = {} resource_path = (root_resource and root_resource.lstrip('/')) or "/" + pluralize(entity_name) headers = {"x-log-bodyrawsize": '0', "Content-Type": "application/json"} if hasattr(detail, 'to_json'): detail = detail.to_json() body_str = six.b(json.dumps(detail)) elif isinstance(detail, six.binary_type): body_str = detail elif isinstance(detail, six.text_type): body_str = detail.encode('utf8') else: body_str = six.b(json.dumps(detail)) (resp, header) = self._send("POST", project, body_str, resource_path, params, headers) return GetEntityResponse(header, resp) fn.__name__ = 'create_' + entity_name fn.__doc__ = fn.__doc__.format(entity_title=entity_name.title()) return fn def get_entity(entity_name, root_resource=None): def fn(self, project, entity): """Get {entity_title}. Unsuccessful opertaion will cause an LogException. :type project: string :param project: project name :type entity: string :param entity: {entity_name} name :return: GetEntityResponse :raise: LogException """ headers = dict() params = dict() resource_path = ((root_resource and root_resource.rstrip('/')) or ('/' + pluralize(entity_name) + '/')) + entity (resp, header) = self._send("GET", project, None, resource_path, params, headers) return GetEntityResponse(header, resp) fn.__name__ = 'get_' + entity_name fn.__doc__ = fn.__doc__.format(entity_name=entity_name, entity_title=entity_name.title()) return fn def delete_entity(entity_name, root_resource=None): def fn(self, project, entity): """Delete {entity_title}. Unsuccessful opertaion will cause an LogException. :type project: string :param project: project name :type entity: string :param entity: {entity_name} name :return: DeleteEntityResponse :raise: LogException """ headers = {} params = {} resource_path = ((root_resource and root_resource.rstrip('/')) or ('/' + pluralize(entity_name) + '/')) + entity (resp, header) = self._send("DELETE", project, None, resource_path, params, headers) return DeleteEntityResponse(header, resp) fn.__name__ = 'delete_' + entity_name fn.__doc__ = fn.__doc__.format(entity_name=entity_name, entity_title=entity_name.title()) return fn def list_entity(entity_name, root_resource=None, max_batch_size=DEFAULT_MAX_LIST_PAGING_SIZE): def fn(self, project, offset=0, size=100): """ list the {entity_title}, get first 100 items by default Unsuccessful opertaion will cause an LogException. 
:type project: string :param project: the Project name :type offset: int :param offset: the offset of all the matched names :type size: int :param size: the max return names count, -1 means all :return: ListLogStoreResponse :raise: LogException """ # need to use extended method to get more if int(size) == -1 or int(size) > max_batch_size: return list_more(fn, int(offset), int(size), max_batch_size, project) headers = {} params = {} resource_path = (root_resource and root_resource.lstrip('/')) or "/" + pluralize(entity_name) params['offset'] = str(offset) params['size'] = str(size) (resp, header) = self._send("GET", project, None, resource_path, params, headers) return ListEntityResponse(header, resp, resource_name=resource_path.strip('/')) fn.__name__ = 'list_' + entity_name fn.__doc__ = fn.__doc__.format(entity_title=entity_name.title()) return fn def update_entity(entity_name, name_field=None, root_resource=None): def fn(self, project, detail): """ Update {entity_title}. Unsuccessful opertaion will cause an LogException. :type project: string :param project: project name :type detail: dict/string :param detail: json string :return: UpdateEntityResponse :raise: LogException """ params = {} headers = {} # parse entity value entity = None if hasattr(detail, 'to_json'): detail = detail.to_json() body_str = six.b(json.dumps(detail)) entity = detail.get(name_field or 'name', '') elif isinstance(detail, six.binary_type): body_str = detail elif isinstance(detail, six.text_type): body_str = detail.encode('utf8') else: body_str = six.b(json.dumps(detail)) entity = detail.get(name_field or 'name', '') if entity is None: entity = json.loads(body_str).get(name_field, '') assert entity, LogException('InvalidParameter', 'unknown entity name "{0}" in "{1}"'.format(name_field, detail)) resource_path = ((root_resource and root_resource.rstrip('/')) or ('/' + pluralize(entity_name) + '/')) + entity headers['Content-Type'] = 'application/json' headers['x-log-bodyrawsize'] = str(len(body_str)) (resp, headers) = self._send("PUT", project, body_str, resource_path, params, headers) return UpdateEntityResponse(headers, resp) fn.__name__ = 'update_' + entity_name fn.__doc__ = fn.__doc__.format(entity_title=entity_name.title()) return fn def make_lcrud_methods(obj, entity_name, name_field=None, root_resource=None): setattr(obj, 'list_' + entity_name, list_entity(entity_name, root_resource=root_resource)) setattr(obj, 'get_' + entity_name, get_entity(entity_name, root_resource=root_resource)) setattr(obj, 'delete_' + entity_name, delete_entity(entity_name, root_resource=root_resource)) setattr(obj, 'update_' + entity_name, update_entity(entity_name, root_resource=root_resource, name_field=name_field)) setattr(obj, 'create_' + entity_name, create_entity(entity_name, root_resource=root_resource))
wjo1212/aliyun-log-python-sdk
aliyun/log/logclient_core.py
Python
mit
6,959
0.003592
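How make_lcrud_methods is meant to be used, sketched against a stub client (the "dashboard" entity and dashboardName field are illustrative, and a real client implements _send as a signed HTTP call; this assumes the module above is importable):

class StubLogClient(object):
    def _send(self, method, project, body, resource, params, headers):
        raise NotImplementedError("real clients perform the signed HTTP request here")

make_lcrud_methods(StubLogClient, "dashboard", name_field="dashboardName")

client = StubLogClient()
print(hasattr(client, "list_dashboard"),
      hasattr(client, "create_dashboard"),
      hasattr(client, "update_dashboard"))  # True True True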
from .modutil import ( submodules, submodule_tree, submodule_leaf_tree, ) __all__ = [ "submodules", "submodule_tree", "submodule_leaf_tree", ]
sejust/pykit
modutil/__init__.py
Python
mit
168
0
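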
import uuid import requests import logging import logging.handlers import flask_restful from functools import wraps from flask import request from flask_cors import CORS from flask_restful import Resource, reqparse from sqlalchemy.orm.relationships import RelationshipProperty from .. import db, app from ..models import Input restapi = flask_restful.Api(app) cors = CORS(app) QUERY_PARSER = reqparse.RequestParser() QUERY_PARSER.add_argument("filter", action="append", help="Filter") QUERY_PARSER.add_argument("order", help="Ordering", default="id") QUERY_PARSER.add_argument("page", type=int, default=1, help="Page #") QUERY_PARSER.add_argument("count", type=int, default=1000, help="Items per page") # All defalut time range arguments RANGE_PARSER = reqparse.RequestParser() RANGE_PARSER.add_argument("start", type=int, default=0) RANGE_PARSER.add_argument("end", type=int, default=0) INPUT_PARSER = reqparse.RequestParser() INPUT_PARSER.add_argument("name", location="args", required=True) PACKAGE = '' if "ERSA_REPORTING_PACKAGE" in app.config: PACKAGE = app.config["ERSA_REPORTING_PACKAGE"] AUTH_TOKEN = None if "ERSA_AUTH_TOKEN" in app.config: AUTH_TOKEN = app.config["ERSA_AUTH_TOKEN"] if AUTH_TOKEN is not None: AUTH_TOKEN = AUTH_TOKEN.lower() UUID_NAMESPACE = uuid.UUID("aeb7cf1c-a842-4592-82e9-55d2dad00150") if "LOG_DIR" in app.config: LOG_DIR = app.config["LOG_DIR"] else: LOG_DIR = "." if "LOG_LEVEL" in app.config: LOG_LEVEL = getattr(logging, app.config["LOG_LEVEL"].upper(), logging.DEBUG) else: LOG_LEVEL = logging.DEBUG if "LOG_SIZE" in app.config: LOG_SIZE = app.config["LOG_SIZE"] else: LOG_SIZE = 30000000 LOG_FORMAT = '%(asctime)s %(levelname)s %(module)s %(filename)s %(lineno)d: %(message)s' SAN_MS_DATE = '%Y-%m-%d %H:%M:%S' LOG_FORMATTER = logging.Formatter(LOG_FORMAT, SAN_MS_DATE) top_logger = logging.getLogger(__name__) # Logger is created by the calling module with the calling module's name as log name # All other modules use this log def create_logger(module_name): log_name = "%s/%s.log" % (LOG_DIR, module_name) file_handler = logging.handlers.RotatingFileHandler(log_name, maxBytes=LOG_SIZE) file_handler.setFormatter(LOG_FORMATTER) logger = logging.getLogger(__name__) logger.addHandler(file_handler) logger.setLevel(LOG_LEVEL) return logger def identifier(content): """A generator for consistent IDs.""" return str(uuid.uuid5(UUID_NAMESPACE, str(content))) def is_uuid(id): """Verify if a string is an UUID""" try: v = uuid.UUID(id) except ValueError: v = None return isinstance(v, uuid.UUID) def github(deps): """ Format GitHub dependencies. 
For example: deps = [ ("eresearchsa/flask-util", "ersa-flask-util", "0.4"), ("foo/bar", "my-package-name", "3.141") ] """ return ["https://github.com/%s/archive/v%s.tar.gz#egg=%s-%s" % (dep[0], dep[2], dep[1], dep[2]) for dep in deps] def get_or_create(model, **kwargs): """Fetch object if returned by filter query, else create new.""" item = get(model, **kwargs) if not item: item = model(**kwargs) db.session.add(item) return item def get(model, **kwargs): """Fetch object by query parameters.""" return db.session.query(model).filter_by(**kwargs).first() def commit(): """Commit session.""" db.session.commit() def rollback(): """Rollback session.""" db.session.rollback() def add(item): """Add object.""" db.session.add(item) def delete(item): """Delete object.""" db.session.delete(item) def fetch(model, key): """Fetch by ID.""" return db.session.query(model).get(key) def flush(): """Flush session.""" db.session.flush() def constant_time_compare(val1, val2): """ Borrowed from Django! Returns True if the two strings are equal, False otherwise. The time taken is independent of the number of characters that match. For the sake of simplicity, this function executes in constant time only when the two strings have the same length. It short-circuits when they have different lengths. Since Django only uses it to compare hashes of known expected length, this is acceptable. """ if len(val1) != len(val2): return False result = 0 for x, y in zip(val1, val2): result |= ord(x) ^ ord(y) return result == 0 def require_auth(func): """ Authenticate via the external reporting-auth service. For dev/test purposes: if ERSA_AUTH_TOKEN environment variable exists, check against that instead. """ @wraps(func) def decorated(*args, **kwargs): """Check the header.""" success = False try: token = str(uuid.UUID(request.headers.get("x-ersa-auth-token", ""))).lower() except: # noqa: E722 return "", 403 if AUTH_TOKEN is not None: if constant_time_compare(token, AUTH_TOKEN): success = True else: auth_response = requests.get( "https://reporting.ersa.edu.au/auth?secret=%s" % token) if auth_response.status_code == 200: auth_data = auth_response.json() for endpoint in auth_data["endpoints"]: if endpoint["name"] == PACKAGE: success = True break if success: return func(*args, **kwargs) else: return "", 403 return decorated def dynamic_query(model, query, expression): """ Construct query based on: attribute.operation.expression For example: foo.eq.42 """ key, op, value = expression.split(".", 2) column = getattr(model, key, None) if isinstance(column.property, RelationshipProperty): column = getattr(model, key + "_id", None) if op == "in": query_filter = column.in_(value.split(",")) else: attr = None for candidate in ["%s", "%s_", "__%s__"]: if hasattr(column, candidate % op): attr = candidate % op break if value == "null": value = None query_filter = getattr(column, attr)(value) return query.filter(query_filter) def name_or_id(model, name): """Return an _id attribute if one exists.""" name_id = name + "_id" if hasattr(model, name_id): return getattr(model, name_id) elif hasattr(model, name): return getattr(model, name) else: return None def do_query(model): """Perform a query with request-specified filtering and ordering.""" args = QUERY_PARSER.parse_args() query = model.query # filter if args["filter"]: for query_filter in args["filter"]: query = dynamic_query(model, query, query_filter) # order order = [] for order_spec in args["order"].split(","): if not order_spec.startswith("-"): order.append(name_or_id(model, order_spec)) else: 
order.append(name_or_id(model, order_spec[1:]).desc()) query = query.order_by(*order) # execute return query.paginate(args["page"], per_page=args["count"], error_out=False).items def instance_method(model, method, id, default=[], **kwargs): """Get an instance by an id and call the given method of the instance""" if not (is_uuid(id) and hasattr(model, method)): return default rslt = default instance = model.query.get(id) if instance: imethod = getattr(instance, method) rslt = imethod(**kwargs) return rslt class QueryResource(Resource): """Generic Query""" def get_raw(self): """Query""" try: top_logger.debug("Query: %s" % self.query_class.query) return do_query(self.query_class) except Exception as e: top_logger.error("Query %s failed. Detail: %s" % (self.query_class.query, str(e))) return [] @require_auth def get(self): """Query""" return [item.json() for item in self.get_raw()] @require_auth def post(self): return self.get() class RangeQuery(Resource): """Query with time range in arguments for filtering. This is the base of the queries on collections. It parses two optional arguments in request: start and end, both are timestamps. QueryResource inherits it can modify the parser to make them required and add more arguments. Typical use of it is to get a collection filtered by snapshot between start and end timestamps. In the return all linked objects are populated with necessary attributes either in a list or an object (dict). Use QueryResource for filtering snapshots or other simple collections. """ default = [] arg_parser = RANGE_PARSER @require_auth def get(self, **kwargs): """Get method""" kwargs.update(self.arg_parser.parse_args()) try: return self._get(**kwargs) except Exception as e: top_logger.error("Query of summary failed. Detail: %s" % str(e)) return self.default def record_input(): """Record the name of an ingestion.""" args = INPUT_PARSER.parse_args() add(Input(name=args["name"])) class BaseIngestResource(Resource): """Base Ingestion""" @require_auth def put(self): record_input() return self.ingest() class InputResource(QueryResource): """Input""" query_class = Input @require_auth def put(self): """Record a processed input.""" record_input() commit() return "", 204 class PingResource(Resource): """Basic liveness test.""" def get(self): """Hello?""" return "pong" def configure(resources): restapi.add_resource(PingResource, "/ping") restapi.add_resource(InputResource, "/input") for (endpoint, cls) in resources.items(): restapi.add_resource(cls, endpoint)
eResearchSA/reporting-unified
unified/apis/__init__.py
Python
apache-2.0
10,242
0.000683
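The filter grammar accepted by dynamic_query is attribute.operation.value, so a request might carry several filters plus ordering, e.g. ?filter=owner.eq.alice&filter=size.ge.100&order=-size&count=50 (the owner and size fields are hypothetical). The decomposition itself is just:

expression = "size.ge.100"
key, op, value = expression.split(".", 2)  # maxsplit=2 keeps any dots in the value
print(key, op, value)                      # size ge 100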
# Copyright (c) 2015 Ultimaker B.V.
# Uranium is released under the terms of the LGPLv3 or higher.


##  The point of a SceneNodeDecorator is that it can be added to a SceneNode, where it then provides decorations
#   Decorations are functions of a SceneNodeDecorator that can be called (except for functions already defined
#   in SceneNodeDecorator).
#   \sa SceneNode
class SceneNodeDecorator:
    def __init__(self, node = None):
        super().__init__()
        self._node = node

    def setNode(self, node):
        self._node = node

    def getNode(self):
        return self._node

    ##  Clear all data associated with this decorator. This will be called before the decorator is removed
    def clear(self):
        pass

    def __deepcopy__(self, memo):
        raise NotImplementedError("Subclass {0} of SceneNodeDecorator should implement their own __deepcopy__() method.".format(str(self)))
thopiekar/Uranium
UM/Scene/SceneNodeDecorator.py
Python
lgpl-3.0
934
0.009636
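Since __deepcopy__ deliberately raises, every concrete decorator must override it; a minimal sketch of such a subclass (ColorDecorator and its attribute are ours, not part of Uranium):

from copy import deepcopy

class ColorDecorator(SceneNodeDecorator):
    def __init__(self, color, node=None):
        super().__init__(node)
        self._color = color

    def getColor(self):
        # Exposed as a "decoration" callable on the owning SceneNode.
        return self._color

    def __deepcopy__(self, memo):
        return ColorDecorator(deepcopy(self._color, memo))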
import json from copy import deepcopy from datetime import datetime from django.core.files.storage import default_storage as storage from django.urls import reverse from unittest import mock import waffle from pyquery import PyQuery as pq from olympia import amo from olympia.addons.models import Addon, AddonUser from olympia.amo.tests.test_helpers import get_addon_file from olympia.amo.tests import addon_factory, TestCase, user_factory from olympia.devhub.tests.test_tasks import ValidatorTestCase from olympia.files.models import File, FileUpload, FileValidation from olympia.files.tests.test_models import UploadMixin from olympia.files.utils import check_xpi_info, parse_addon from olympia.reviewers.templatetags.code_manager import code_manager_url from olympia.users.models import UserProfile class TestUploadValidation(ValidatorTestCase, UploadMixin, TestCase): fixtures = ['base/users'] def setUp(self): super().setUp() self.user = UserProfile.objects.get(email='regular@mozilla.com') assert self.client.login(email=self.user.email) self.validation = { 'errors': 1, 'detected_type': 'extension', 'success': False, 'warnings': 0, 'message_tree': { 'testcases_targetapplication': { '__warnings': 0, '__errors': 1, '__messages': [], '__infos': 0, 'test_targetedapplications': { '__warnings': 0, '__errors': 1, '__messages': [], '__infos': 0, 'invalid_min_version': { '__warnings': 0, '__errors': 1, '__messages': ['d67edb08018411e09b13c42c0301fe38'], '__infos': 0, }, }, } }, 'infos': 0, 'messages': [ { 'uid': 'd67edb08018411e09b13c42c0301fe38', 'tier': 1, 'id': [ 'testcases_targetapplication', 'test_targetedapplications', 'invalid_min_version', ], 'file': 'install.rdf', 'message': 'The value of <em:id> is invalid. See ' '<a href="https://mozilla.org">mozilla.org</a> ' 'for more information', 'context': ['<em:description>...', '<foo/>'], 'type': 'error', 'line': 0, 'description': [ '<iframe>', 'Version "3.0b3" isn\'t compatible with ' '{ec8030f7-c20a-464f-9b0e-13a3a9e97384}.', ], 'signing_help': ['<script>&amp;'], } ], 'rejected': False, } def test_only_safe_html_in_messages(self): upload = self.get_upload( abspath=get_addon_file('invalid_webextension.xpi'), user=self.user, with_validation=True, validation=json.dumps(self.validation), ) response = self.client.get( reverse('devhub.upload_detail', args=[upload.uuid.hex, 'json']) ) assert response.status_code == 200 data = json.loads(response.content) msg = data['validation']['messages'][0] assert msg['message'] == ( 'The value of &lt;em:id&gt; is invalid. ' 'See <a href="https://mozilla.org" rel="nofollow">mozilla.org</a> ' 'for more information' ) assert msg['description'][0] == '&lt;iframe&gt;' assert msg['context'] == (['<em:description>...', '<foo/>']) def test_date_on_upload(self): upload = self.get_upload( abspath=get_addon_file('invalid_webextension.xpi'), user=self.user, with_validation=True, validation=json.dumps(self.validation), ) upload.update(created=datetime.fromisoformat('2010-12-06 14:04:46')) response = self.client.get( reverse('devhub.upload_detail', args=[upload.uuid.hex]) ) assert response.status_code == 200 doc = pq(response.content) assert doc('td').text() == 'Dec. 
6, 2010' def test_upload_processed_validation_error(self): addon_file = open(get_addon_file('invalid_webextension.xpi'), 'rb') response = self.client.post( reverse('devhub.upload'), {'name': 'addon.xpi', 'upload': addon_file} ) uuid = response.url.split('/')[-2] upload = FileUpload.objects.get(uuid=uuid) assert upload.processed_validation['errors'] == 1 assert upload.processed_validation['messages'][0]['id'] == [ 'validator', 'unexpected_exception', ] def test_login_required(self): upload = self.get_upload( abspath=get_addon_file('invalid_webextension.xpi'), user=self.user, with_validation=True, validation=json.dumps(self.validation), ) url = reverse('devhub.upload_detail', args=[upload.uuid.hex]) assert self.client.head(url).status_code == 200 self.client.logout() assert self.client.head(url).status_code == 302 class TestUploadErrors(UploadMixin, TestCase): fixtures = ('base/addon_3615', 'base/users') def setUp(self): super().setUp() self.user = UserProfile.objects.get(email='regular@mozilla.com') self.client.login(email=self.user.email) @mock.patch.object(waffle, 'flag_is_active') def test_dupe_uuid(self, flag_is_active): flag_is_active.return_value = True addon = Addon.objects.get(pk=3615) data = parse_addon(self.get_upload('webextension.xpi'), user=self.user) addon.update(guid=data['guid']) dupe_xpi = self.get_upload('webextension.xpi', user=self.user) res = self.client.get( reverse('devhub.upload_detail', args=[dupe_xpi.uuid, 'json']) ) assert res.status_code == 400, res.content data = json.loads(res.content) assert data['validation']['messages'] == ( [ { 'tier': 1, 'message': 'Duplicate add-on ID found.', 'type': 'error', 'fatal': True, } ] ) assert data['validation']['ending_tier'] == 1 def test_long_uuid(self): """An add-on uuid may be more than 64 chars, see bug 1203915.""" long_guid = ( 'this_guid_is_longer_than_the_limit_of_64_chars_see_' 'bug_1201176_but_should_not_fail_see_bug_1203915@xpi' ) xpi_info = check_xpi_info({'guid': long_guid, 'version': '1.0'}) assert xpi_info['guid'] == long_guid def test_mv3_error_added(self): validation = deepcopy(amo.VALIDATOR_SKELETON_EXCEPTION_WEBEXT) validation['metadata']['manifestVersion'] = 3 xpi = self.get_upload( 'webextension_mv3.xpi', with_validation=True, validation=json.dumps(validation), user=self.user, ) res = self.client.get(reverse('devhub.upload_detail', args=[xpi.uuid, 'json'])) assert b'https://blog.mozilla.org/addons/2021/05/27/manifest-v3-update/' in ( res.content ) class TestFileValidation(TestCase): fixtures = ['base/users', 'devhub/addon-validation-1'] def setUp(self): super().setUp() assert self.client.login(email='del@icio.us') self.user = UserProfile.objects.get(email='del@icio.us') self.file_validation = FileValidation.objects.get(pk=1) self.file = self.file_validation.file with storage.open(self.file.file_path, 'wb') as f: f.write(b'<pretend this is an xpi>\n') self.addon = self.file.version.addon args = [self.addon.slug, self.file.id] self.url = reverse('devhub.file_validation', args=args) self.json_url = reverse('devhub.json_file_validation', args=args) def test_version_list(self): response = self.client.get(self.addon.get_dev_url('versions')) assert response.status_code == 200 link = pq(response.content)('td.file-validation a') assert link.text() == '0 errors, 0 warnings' assert link.attr('href') == self.url def test_results_page(self): response = self.client.get(self.url, follow=True) assert response.status_code == 200 assert response.context['addon'] == self.addon doc = pq(response.content) assert not 
doc('#site-nav').hasClass('app-nav'), 'Expected add-ons devhub nav' assert doc('header h2').text() == ( 'Validation Results for testaddon-20101217.xpi' ) assert doc('#addon-validator-suite').attr('data-validateurl') == (self.json_url) def test_only_dev_can_see_results(self): self.client.logout() assert self.client.login(email='regular@mozilla.com') assert self.client.head(self.url, follow=False).status_code == 403 def test_only_dev_can_see_json_results(self): self.client.logout() assert self.client.login(email='regular@mozilla.com') assert self.client.head(self.json_url, follow=False).status_code == 403 def test_reviewer_can_see_results(self): self.client.logout() assert self.client.login(email='reviewer@mozilla.com') assert self.client.head(self.url, follow=False).status_code == 200 def test_reviewer_can_see_json_results(self): self.client.logout() assert self.client.login(email='reviewer@mozilla.com') assert self.client.head(self.json_url, follow=False).status_code == 200 def test_reviewer_tools_view_can_see_results(self): self.client.logout() assert self.client.login(email='regular@mozilla.com') self.grant_permission( UserProfile.objects.get(email='regular@mozilla.com'), 'ReviewerTools:View' ) assert self.client.head(self.url, follow=False).status_code == 200 def test_reviewer_tools_view_can_see_json_results(self): self.client.logout() assert self.client.login(email='regular@mozilla.com') self.grant_permission( UserProfile.objects.get(email='regular@mozilla.com'), 'ReviewerTools:View' ) assert self.client.head(self.json_url, follow=False).status_code == 200 def test_reviewer_tools_view_can_see_json_results_incomplete_addon(self): self.addon.update(status=amo.STATUS_NULL) assert self.addon.should_redirect_to_submit_flow() self.client.logout() assert self.client.login(email='regular@mozilla.com') self.grant_permission( UserProfile.objects.get(email='regular@mozilla.com'), 'ReviewerTools:View' ) assert self.client.head(self.json_url, follow=False).status_code == 200 def test_admin_can_see_json_results_incomplete_addon(self): self.addon.update(status=amo.STATUS_NULL) assert self.addon.should_redirect_to_submit_flow() self.client.logout() assert self.client.login(email='admin@mozilla.com') assert self.client.head(self.json_url, follow=False).status_code == 200 def test_reviewer_cannot_see_files_not_validated(self): file_not_validated = File.objects.get(pk=100400) json_url = reverse( 'devhub.json_file_validation', args=[self.addon.slug, file_not_validated.id] ) self.client.logout() assert self.client.login(email='reviewer@mozilla.com') assert self.client.head(json_url, follow=False).status_code == 404 def test_developer_cant_see_results_from_other_addon(self): other_addon = addon_factory(users=[self.user]) url = reverse('devhub.file_validation', args=[other_addon.slug, self.file.id]) assert self.client.get(url, follow=False).status_code == 404 def test_developer_cant_see_json_results_from_other_addon(self): other_addon = addon_factory(users=[self.user]) url = reverse( 'devhub.json_file_validation', args=[other_addon.slug, self.file.id] ) assert self.client.get(url, follow=False).status_code == 404 def test_developer_cant_see_json_results_from_deleted_addon(self): self.addon.delete() url = reverse('devhub.json_file_validation', args=[self.addon.pk, self.file.id]) assert self.client.get(url, follow=False).status_code == 404 def test_only_safe_html_in_messages(self): response = self.client.post(self.json_url, follow=False) assert response.status_code == 200 data = json.loads(response.content) 
msg = data['validation']['messages'][0] assert msg['message'] == ( 'The value of &lt;em:id&gt; is invalid. ' 'See <a href="https://mozilla.org" rel="nofollow">mozilla.org</a> ' 'for more information' ) assert msg['description'][0] == '&lt;iframe&gt;' assert msg['context'] == (['<em:description>...', '<foo/>']) def test_linkify_validation_messages(self): self.file_validation.update( validation=json.dumps( { 'errors': 0, 'success': True, 'warnings': 1, 'notices': 0, 'message_tree': {}, 'messages': [ { 'context': ['<code>', None], 'description': [ 'Something something, see https://bugzilla.mozilla.org/' ], 'column': 0, 'line': 1, 'file': 'chrome/content/down.html', 'tier': 2, 'message': 'Some warning', 'type': 'warning', 'id': [], 'uid': 'bb9948b604b111e09dfdc42c0301fe38', } ], 'metadata': {}, } ) ) response = self.client.get(self.json_url, follow=False) assert response.status_code == 200 data = json.loads(response.content) doc = pq(data['validation']['messages'][0]['description'][0]) link = doc('a')[0] assert link.text == 'https://bugzilla.mozilla.org/' assert link.attrib['href'] == 'https://bugzilla.mozilla.org/' def test_file_url(self): file_url = code_manager_url( 'browse', addon_id=self.addon.pk, version_id=self.file.version.pk ) response = self.client.get(self.url, follow=False) doc = pq(response.content) assert doc('#addon-validator-suite').attr['data-file-url'] == file_url def test_reviewers_can_see_json_results_for_deleted_addon(self): self.client.logout() assert self.client.login(email='reviewer@mozilla.com') self.grant_permission( UserProfile.objects.get(email='reviewer@mozilla.com'), 'Addons:ReviewUnlisted', ) self.addon.delete() args = [self.addon.pk, self.file.id] json_url = reverse('devhub.json_file_validation', args=args) assert self.client.head(json_url, follow=False).status_code == 200 def test_unlisted_viewers_can_see_json_results_for_deleted_addon(self): unlisted_viewer = user_factory(email='unlisted_viewer@mozilla.com') self.grant_permission(unlisted_viewer, 'ReviewerTools:ViewUnlisted') self.client.logout() self.client.login(email='unlisted_viewer@mozilla.com') self.addon.versions.all()[0].update(channel=amo.RELEASE_CHANNEL_UNLISTED) self.addon.delete() args = [self.addon.pk, self.file.id] json_url = reverse('devhub.json_file_validation', args=args) assert self.client.head(json_url, follow=False).status_code == 200 class TestValidateAddon(TestCase): fixtures = ['base/users'] def setUp(self): super().setUp() assert self.client.login(email='regular@mozilla.com') def test_login_required(self): self.client.logout() response = self.client.get(reverse('devhub.validate_addon')) assert response.status_code == 302 def test_context_and_content(self): response = self.client.get(reverse('devhub.validate_addon')) assert response.status_code == 200 assert b'this tool only works with legacy' not in response.content doc = pq(response.content) assert doc('#upload-addon').attr('data-upload-url') == ( reverse('devhub.standalone_upload') ) assert doc('#upload-addon').attr('data-upload-url-listed') == ( reverse('devhub.standalone_upload') ) assert doc('#upload-addon').attr('data-upload-url-unlisted') == ( reverse('devhub.standalone_upload_unlisted') ) @mock.patch('olympia.devhub.tasks.run_addons_linter') def test_filename_not_uuidfied(self, validate_mock): validate_mock.return_value = json.dumps(amo.VALIDATOR_SKELETON_RESULTS) url = reverse('devhub.upload') fpath = 'src/olympia/files/fixtures/files/webextension_no_id.xpi' with open(fpath, 'rb') as file_: self.client.post(url, {'upload': 
file_}) upload = FileUpload.objects.get() response = self.client.get( reverse('devhub.upload_detail', args=(upload.uuid.hex,)) ) assert b'Validation Results for webextension_no_id' in response.content @mock.patch('olympia.devhub.tasks.run_addons_linter') def test_upload_listed_addon(self, validate_mock): """Listed addons are not validated as "self-hosted" addons.""" validate_mock.return_value = json.dumps(amo.VALIDATOR_SKELETON_RESULTS) url = reverse('devhub.upload') fpath = 'src/olympia/files/fixtures/files/webextension_no_id.xpi' with open(fpath, 'rb') as file_: self.client.post(url, {'upload': file_}) assert validate_mock.call_args[1]['channel'] == amo.RELEASE_CHANNEL_LISTED # No automated signing for listed add-ons. assert FileUpload.objects.get().automated_signing is False @mock.patch('olympia.devhub.tasks.run_addons_linter') def test_upload_unlisted_addon(self, validate_mock): """Unlisted addons are validated as "self-hosted" addons.""" validate_mock.return_value = json.dumps(amo.VALIDATOR_SKELETON_RESULTS) url = reverse('devhub.upload_unlisted') fpath = 'src/olympia/files/fixtures/files/webextension_no_id.xpi' with open(fpath, 'rb') as file_: self.client.post(url, {'upload': file_}) assert validate_mock.call_args[1]['channel'] == amo.RELEASE_CHANNEL_UNLISTED # Automated signing enabled for unlisted add-ons. assert FileUpload.objects.get().automated_signing is True class TestUploadURLs(TestCase): fixtures = ('base/users',) def setUp(self): super().setUp() user = UserProfile.objects.get(email='regular@mozilla.com') self.client.login(email='regular@mozilla.com') self.addon = Addon.objects.create( guid='thing@stuff', slug='thing-stuff', status=amo.STATUS_APPROVED ) AddonUser.objects.create(addon=self.addon, user=user) self.run_addons_linter = self.patch('olympia.devhub.tasks.run_addons_linter') self.run_addons_linter.return_value = json.dumps(amo.VALIDATOR_SKELETON_RESULTS) self.parse_addon = self.patch('olympia.devhub.utils.parse_addon') self.parse_addon.return_value = { 'guid': self.addon.guid, 'version': '1.0', } def patch(self, *args, **kw): patcher = mock.patch(*args, **kw) self.addCleanup(patcher.stop) return patcher.start() def expect_validation(self, listed, automated_signing): call_keywords = self.run_addons_linter.call_args[1] channel = amo.RELEASE_CHANNEL_LISTED if listed else amo.RELEASE_CHANNEL_UNLISTED assert call_keywords['channel'] == channel assert self.file_upload.automated_signing == automated_signing def upload(self, view, **kw): """Send an upload request to the given view, and save the FileUpload object to self.file_upload.""" FileUpload.objects.all().delete() self.run_addons_linter.reset_mock() fpath = 'src/olympia/files/fixtures/files/webextension_validation_error.zip' with open(fpath, 'rb') as file_: resp = self.client.post(reverse(view, kwargs=kw), {'upload': file_}) assert resp.status_code == 302 self.file_upload = FileUpload.objects.get() def upload_addon(self, status=amo.STATUS_APPROVED, listed=True): """Update the test add-on with the given flags and send an upload request for it.""" self.change_channel_for_addon(self.addon, listed=listed) self.addon.update(status=status) channel_text = 'listed' if listed else 'unlisted' return self.upload( 'devhub.upload_for_version', channel=channel_text, addon_id=self.addon.slug ) def test_upload_standalone(self): """Test that the standalone upload URLs result in file uploads with the correct flags.""" self.upload('devhub.standalone_upload') self.expect_validation(listed=True, automated_signing=False) 
        self.upload('devhub.standalone_upload_unlisted')
        self.expect_validation(listed=False, automated_signing=True)

    def test_upload_submit(self):
        """Test that the add-on creation upload URLs result in file uploads
        with the correct flags."""
        self.upload('devhub.upload')
        self.expect_validation(listed=True, automated_signing=False)

        self.upload('devhub.upload_unlisted')
        self.expect_validation(listed=False, automated_signing=True)

    def test_upload_addon_version(self):
        """Test that the add-on update upload URLs result in file uploads
        with the correct flags."""
        for status in amo.VALID_ADDON_STATUSES:
            self.upload_addon(listed=True, status=status)
            self.expect_validation(listed=True, automated_signing=False)
        self.upload_addon(listed=False, status=amo.STATUS_APPROVED)
        self.expect_validation(listed=False, automated_signing=True)
mozilla/addons-server
src/olympia/devhub/tests/test_views_validation.py
Python
bsd-3-clause
22,989
0.000957
""" Visualize Genetic Algorithm to find the shortest path for travel sales problem. Visit my tutorial website for more: https://morvanzhou.github.io/tutorials/ """ import matplotlib.pyplot as plt import numpy as np START_POINT = list(input("請輸入起始點")) PASS_POINT = input("輸入要經過的點") PASS_POINT = PASS_POINT.split(" ") # START_POINT = [17] # PASS_POINT = [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16] START_POINT.extend(PASS_POINT) N_CITIES = len(START_POINT) # DNA size CROSS_RATE = 0.1 MUTATE_RATE = 0.01 POP_SIZE = 50000 N_GENERATIONS = 5000 class GA(object): def __init__(self, DNA_size, cross_rate, mutation_rate, pop_size,start_point ): self.DNA_size = DNA_size self.cross_rate = cross_rate self.mutate_rate = mutation_rate self.pop_size = pop_size self.start_point = start_point.pop(0) self.pass_point = start_point self.pop = np.vstack([np.hstack(( self.start_point,np.random.permutation(start_point))).astype(np.int64) for _ in range(pop_size)]) def translateDNA(self, DNA, city_position): # get cities' coord in order line_x = np.empty_like(DNA, dtype=np.float64) line_y = np.empty_like(DNA, dtype=np.float64) for i, d in enumerate(DNA): city_coord = city_position[d] line_x[i, :] = city_coord[:, 0] line_y[i, :] = city_coord[:, 1] return line_x, line_y def get_fitness(self, line_x, line_y): total_distance = np.empty((line_x.shape[0],), dtype=np.float64) for i, (xs, ys) in enumerate(zip(line_x, line_y)): total_distance[i] = np.sum(np.sqrt(np.square(np.diff(xs)) + np.square(np.diff(ys)))) fitness = np.exp(self.DNA_size * 2 / total_distance) return fitness, total_distance def select(self, fitness): idx = np.random.choice(np.arange(self.pop_size), size=self.pop_size, replace=True, p=fitness / fitness.sum()) return self.pop[idx] def crossover(self, parent, pop): if np.random.rand() < self.cross_rate: i_ = np.random.randint(0, self.pop_size, size=1) # select another individual from pop cross_points = np.hstack((False, np.random.randint(0, 2, self.DNA_size).astype(np.bool))) # choose crossover points keep_city = parent[~cross_points] # find the city number swap_city = np.setdiff1d(pop[i_, :], keep_city) parent[:] = np.concatenate((keep_city, swap_city)) return parent def mutate(self, child): for point in range(1,self.DNA_size): if np.random.rand() < self.mutate_rate: swap_point = np.random.randint(1, self.DNA_size) swapA, swapB = child[point], child[swap_point] child[point], child[swap_point] = swapB, swapA return child def evolve(self, fitness): pop = self.select(fitness) pop_copy = pop.copy() for parent in pop: # for every parent child = self.crossover(parent, pop_copy) child = self.mutate(child) parent[:] = child self.pop = pop class TravelSalesPerson(object): def __init__(self, n_cities): self.city_position = np.array([[ 0.36774816, 0.48556132], [0.36641813, 0.0957464], [ 0.51269409, 0.60941519], [0.00644122, 0.55532349], [0.6503008 , 0.35550922], [ 0.15919575, 0.72421738], [0.01457005 , 0.76355109], [ 0.08077499, 0.1413901], [0.45753614, 0.04607823], [0.39487359, 0.55118165], [0.75402671 , 0.40564417], [0.61979506, 0.91658641], [ 0.08871058 , 0.4], [0.03156203, 0.05129652], [0.13118489, 0.80425415], [0.96021151, 0.69831614], [0.6, 0.6], [0.82265218, 0.81566013], [0.326443 , 0.98269306], [0.2298539,0.27029802] ]) # self.city_position = np.random.rand(n_cities, 2) plt.ion() def plotting(self, lx, ly, total_d): plt.cla() color = np.linspace(0,1,20) plt.scatter(self.city_position[:, 0].T, self.city_position[:, 1].T, s=100, c=color) plt.plot(lx.T, ly.T, 'k--',) plt.text(-0.05, -0.05, "Total distance=%.2f" % 
total_d, fontdict={'size': 20, 'color': 'red'}) plt.xlim((-0.1, 1.1)) plt.ylim((-0.1, 1.1)) plt.pause(0.0000000000000000000000000000001) ga = GA(DNA_size=len(PASS_POINT), cross_rate=CROSS_RATE, mutation_rate=MUTATE_RATE, pop_size=POP_SIZE,start_point=START_POINT) env = TravelSalesPerson(N_CITIES) for generation in range(N_GENERATIONS): lx, ly = ga.translateDNA(ga.pop, env.city_position) fitness, total_distance = ga.get_fitness(lx, ly) ga.evolve(fitness) best_idx = np.argmax(fitness) l_pop = ga.pop.tolist() xxx = l_pop[best_idx] print(xxx) print('Gen:', generation, '| best fit: %.2f' % fitness[best_idx], ) env.plotting(lx[best_idx], ly[best_idx], total_distance[best_idx]) plt.ioff() plt.show()
zhu913104/KMdriod
gapathplanning.py
Python
mit
5,686
0.006184
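The crossover in the GA file above can never produce an invalid tour: the child keeps the cities selected from one parent and refills the remaining positions with np.setdiff1d, which returns exactly the missing cities (in sorted order, since setdiff1d sorts its result). A self-contained sketch of that keep-and-refill step; the city numbers here are illustrative and not taken from the file:

import numpy as np

np.random.seed(0)

parent = np.array([17, 3, 1, 4, 2])    # index 0 is the fixed start city
partner = np.array([17, 2, 4, 1, 3])

# Start city is always kept; other positions are kept or refilled at random.
cross_points = np.hstack((False, np.random.randint(0, 2, 4).astype(bool)))
keep_city = parent[~cross_points]             # cities kept from this parent
swap_city = np.setdiff1d(partner, keep_city)  # missing cities, sorted
child = np.concatenate((keep_city, swap_city))

assert sorted(child.tolist()) == sorted(parent.tolist())  # still a valid tour
print(child)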
# -*- coding: utf-8 -*- # Form implementation generated from reading ui file '/Users/chernomirdinmacuvele/Documents/workspace/PscArt2.0.X/UserInt/ui_codificadores_POT.ui' # # Created by: PyQt5 UI code generator 5.8.2 # # WARNING! All changes made in this file will be lost! from PyQt5 import QtCore, QtGui, QtWidgets class Ui_Form(object): def setupUi(self, Form): Form.setObjectName("Form") Form.resize(306, 332) self.gridLayout = QtWidgets.QGridLayout(Form) self.gridLayout.setObjectName("gridLayout") self.label = QtWidgets.QLabel(Form) self.label.setObjectName("label") self.gridLayout.addWidget(self.label, 0, 0, 1, 1) self.LECodigo = QtWidgets.QLineEdit(Form) self.LECodigo.setMaxLength(3) self.LECodigo.setObjectName("LECodigo") self.gridLayout.addWidget(self.LECodigo, 0, 1, 1, 1) self.label_3 = QtWidgets.QLabel(Form) self.label_3.setObjectName("label_3") self.gridLayout.addWidget(self.label_3, 1, 0, 1, 1) self.LENome = QtWidgets.QLineEdit(Form) self.LENome.setMaxLength(15) self.LENome.setObjectName("LENome") self.gridLayout.addWidget(self.LENome, 1, 1, 1, 1) self.label_4 = QtWidgets.QLabel(Form) self.label_4.setObjectName("label_4") self.gridLayout.addWidget(self.label_4, 2, 0, 1, 1) self.PTEDescricao = QtWidgets.QPlainTextEdit(Form) self.PTEDescricao.setObjectName("PTEDescricao") self.gridLayout.addWidget(self.PTEDescricao, 2, 1, 1, 1) self.label_5 = QtWidgets.QLabel(Form) self.label_5.setObjectName("label_5") self.gridLayout.addWidget(self.label_5, 3, 0, 1, 1) self.PTEComentarios = QtWidgets.QPlainTextEdit(Form) self.PTEComentarios.setObjectName("PTEComentarios") self.gridLayout.addWidget(self.PTEComentarios, 3, 1, 1, 1) self.CHBActivo = QtWidgets.QCheckBox(Form) self.CHBActivo.setObjectName("CHBActivo") self.gridLayout.addWidget(self.CHBActivo, 4, 1, 1, 1) self.splitter = QtWidgets.QSplitter(Form) self.splitter.setOrientation(QtCore.Qt.Horizontal) self.splitter.setObjectName("splitter") self.PBGuardar = QtWidgets.QPushButton(self.splitter) self.PBGuardar.setText("") icon = QtGui.QIcon() icon.addPixmap(QtGui.QPixmap(":/newPrefix/Icons/002-save.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off) self.PBGuardar.setIcon(icon) self.PBGuardar.setObjectName("PBGuardar") self.PBCancelar = QtWidgets.QPushButton(self.splitter) self.PBCancelar.setText("") icon1 = QtGui.QIcon() icon1.addPixmap(QtGui.QPixmap(":/newPrefix/Icons/003-error.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off) self.PBCancelar.setIcon(icon1) self.PBCancelar.setObjectName("PBCancelar") self.gridLayout.addWidget(self.splitter, 5, 0, 1, 2) self.retranslateUi(Form) QtCore.QMetaObject.connectSlotsByName(Form) def retranslateUi(self, Form): _translate = QtCore.QCoreApplication.translate Form.setWindowTitle(_translate("Form", "Codificador")) self.label.setText(_translate("Form", "Codigo:")) self.LECodigo.setPlaceholderText(_translate("Form", "Ex:AAA")) self.label_3.setText(_translate("Form", "Nome:")) self.LENome.setPlaceholderText(_translate("Form", "Ex:Qualquer Coisa")) self.label_4.setText(_translate("Form", "Descricao:")) self.PTEDescricao.setPlaceholderText(_translate("Form", "Ex:O que faz…")) self.label_5.setText(_translate("Form", "Comentarios:")) self.PTEComentarios.setPlaceholderText(_translate("Form", "Ex:Nota, Obs…")) self.CHBActivo.setText(_translate("Form", "Activo")) import icons_rc
InUrSys/PescArt2.0
GeneratedFiles/ui_codificadores_POT.py
Python
gpl-3.0
3,813
0.0021
import logging import re import socket import binascii import sys import os import time import gevent import subprocess import atexit from Config import config from Crypt import CryptRsa from Site import SiteManager from lib.PySocks import socks from gevent.coros import RLock from util import helper from Debug import Debug class TorManager: def __init__(self, fileserver_ip=None, fileserver_port=None): self.privatekeys = {} # Onion: Privatekey self.site_onions = {} # Site address: Onion self.tor_exe = "tools/tor/tor.exe" self.tor_process = None self.log = logging.getLogger("TorManager") self.start_onions = None self.conn = None self.lock = RLock() if config.tor == "disable": self.enabled = False self.start_onions = False self.status = "Disabled" else: self.enabled = True self.status = "Waiting" if fileserver_port: self.fileserver_port = fileserver_port else: self.fileserver_port = config.fileserver_port self.ip, self.port = config.tor_controller.split(":") self.port = int(self.port) self.proxy_ip, self.proxy_port = config.tor_proxy.split(":") self.proxy_port = int(self.proxy_port) # Test proxy port if config.tor != "disable": try: assert self.connect(), "No connection" self.log.debug("Tor proxy port %s check ok" % config.tor_proxy) except Exception, err: self.log.debug("Tor proxy port %s check error: %s" % (config.tor_proxy, err)) self.enabled = False # Change to self-bundled Tor ports from lib.PySocks import socks self.port = 49051 self.proxy_port = 49050 socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, "127.0.0.1", self.proxy_port) if os.path.isfile(self.tor_exe): # Already, downloaded: sync mode self.startTor() else: # Not downloaded yet: Async mode gevent.spawn(self.startTor) def startTor(self): if sys.platform.startswith("win"): try: if not os.path.isfile(self.tor_exe): self.downloadTor() self.log.info("Starting Tor client %s..." % self.tor_exe) tor_dir = os.path.dirname(self.tor_exe) self.tor_process = subprocess.Popen(r"%s -f torrc" % self.tor_exe, cwd=tor_dir, close_fds=True) for wait in range(1,10): # Wait for startup time.sleep(wait * 0.5) self.enabled = True if self.connect(): break # Terminate on exit atexit.register(self.stopTor) except Exception, err: self.log.error("Error starting Tor client: %s" % Debug.formatException(err)) self.enabled = False return False def stopTor(self): self.log.debug("Stopping...") self.tor_process.terminate() def downloadTor(self): self.log.info("Downloading Tor...") # Check Tor webpage for link download_page = helper.httpRequest("https://www.torproject.org/download/download.html").read() download_url = re.search('href="(.*?tor.*?win32.*?zip)"', download_page).group(1) if not download_url.startswith("http"): download_url = "https://www.torproject.org/download/" + download_url # Download Tor client self.log.info("Downloading %s" % download_url) data = helper.httpRequest(download_url, as_file=True) data_size = data.tell() # Handle redirect if data_size < 1024 and "The document has moved" in data.getvalue(): download_url = re.search('href="(.*?tor.*?win32.*?zip)"', data.getvalue()).group(1) data = helper.httpRequest(download_url, as_file=True) data_size = data.tell() if data_size > 1024: import zipfile zip = zipfile.ZipFile(data) self.log.info("Unpacking Tor") for inner_path in zip.namelist(): if ".." 
in inner_path: continue dest_path = inner_path dest_path = re.sub("^Data/Tor/", "tools/tor/data/", dest_path) dest_path = re.sub("^Data/", "tools/tor/data/", dest_path) dest_path = re.sub("^Tor/", "tools/tor/", dest_path) dest_dir = os.path.dirname(dest_path) if dest_dir and not os.path.isdir(dest_dir): os.makedirs(dest_dir) if dest_dir != dest_path.strip("/"): data = zip.read(inner_path) if not os.path.isfile(dest_path): open(dest_path, 'wb').write(data) else: self.log.error("Bad response from server: %s" % data.getvalue()) return False def connect(self): if not self.enabled: return False self.site_onions = {} self.privatekeys = {} if "socket_noproxy" in dir(socket): # Socket proxy-patched, use non-proxy one conn = socket.socket_noproxy(socket.AF_INET, socket.SOCK_STREAM) else: conn = socket.socket(socket.AF_INET, socket.SOCK_STREAM) self.log.debug("Connecting to %s:%s" % (self.ip, self.port)) try: with self.lock: conn.connect((self.ip, self.port)) res_protocol = self.send("PROTOCOLINFO", conn) version = re.search('Tor="([0-9\.]+)"', res_protocol).group(1) # Version 0.2.7.5 required because ADD_ONION support assert int(version.replace(".", "0")) >= 20705, "Tor version >=0.2.7.5 required" # Auth cookie file cookie_match = re.search('COOKIEFILE="(.*?)"', res_protocol) if cookie_match: cookie_file = cookie_match.group(1) auth_hex = binascii.b2a_hex(open(cookie_file, "rb").read()) res_auth = self.send("AUTHENTICATE %s" % auth_hex, conn) else: res_auth = self.send("AUTHENTICATE", conn) assert "250 OK" in res_auth, "Authenticate error %s" % res_auth self.status = "Connected (%s)" % res_auth self.conn = conn except Exception, err: self.conn = None self.status = "Error (%s)" % err self.log.error("Tor controller connect error: %s" % err) self.enabled = False return self.conn def disconnect(self): self.conn.close() self.conn = None def startOnions(self): self.log.debug("Start onions") self.start_onions = True # Get new exit node ip def resetCircuits(self): res = self.request("SIGNAL NEWNYM") if "250 OK" not in res: self.status = "Reset circuits error (%s)" % res self.log.error("Tor reset circuits error: %s" % res) def addOnion(self): res = self.request("ADD_ONION NEW:RSA1024 port=%s" % self.fileserver_port) match = re.search("ServiceID=([A-Za-z0-9]+).*PrivateKey=RSA1024:(.*?)[\r\n]", res, re.DOTALL) if match: onion_address, onion_privatekey = match.groups() self.privatekeys[onion_address] = onion_privatekey self.status = "OK (%s onion running)" % len(self.privatekeys) SiteManager.peer_blacklist.append((onion_address + ".onion", self.fileserver_port)) return onion_address else: self.status = "AddOnion error (%s)" % res self.log.error("Tor addOnion error: %s" % res) return False def delOnion(self, address): res = self.request("DEL_ONION %s" % address) if "250 OK" in res: del self.privatekeys[address] self.status = "OK (%s onion running)" % len(self.privatekeys) return True else: self.status = "DelOnion error (%s)" % res self.log.error("Tor delOnion error: %s" % res) self.disconnect() return False def request(self, cmd): with self.lock: if not self.enabled: return False if not self.conn: if not self.connect(): return "" return self.send(cmd) def send(self, cmd, conn=None): if not conn: conn = self.conn self.log.debug("> %s" % cmd) conn.send("%s\r\n" % cmd) back = conn.recv(1024 * 64) self.log.debug("< %s" % back.strip()) return back def getPrivatekey(self, address): return self.privatekeys[address] def getPublickey(self, address): return CryptRsa.privatekeyToPublickey(self.privatekeys[address]) def 
getOnion(self, site_address): with self.lock: if not self.enabled: return None if self.start_onions: # Different onion for every site onion = self.site_onions.get(site_address) else: # Same onion for every site onion = self.site_onions.get("global") site_address = "global" if not onion: self.site_onions[site_address] = self.addOnion() onion = self.site_onions[site_address] self.log.debug("Created new hidden service for %s: %s" % (site_address, onion)) return onion def createSocket(self, onion, port): if not self.enabled: return False self.log.debug("Creating new socket to %s:%s" % (onion, port)) if config.tor == "always": # Every socket is proxied by default sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) sock.connect((onion, int(port))) else: sock = socks.socksocket() sock.set_proxy(socks.SOCKS5, self.proxy_ip, self.proxy_port) sock.connect((onion, int(port))) return sock
bashrc/zeronet-debian
src/src/Tor/TorManager.py
Python
gpl-2.0
10,295
0.001651
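TorManager.addOnion() extracts the hidden-service ID and private key from the controller's ADD_ONION reply with a single regex. A small sketch of that parse; the reply text below is made up for illustration and not captured from a real Tor controller, though real replies follow the same key=value shape:

import re

# Hypothetical controller reply (illustrative only).
res = ("250-ServiceID=abcdef0123456789\r\n"
       "250-PrivateKey=RSA1024:MIICXAIBAAKBgQC\r\n"
       "250 OK\r\n")

match = re.search("ServiceID=([A-Za-z0-9]+).*PrivateKey=RSA1024:(.*?)[\r\n]",
                  res, re.DOTALL)
if match:
    onion_address, onion_privatekey = match.groups()
    print(onion_address + ".onion")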
from django.contrib import admin

from .models import (
    Group,
    GroupMembership,
)


class GroupAdmin(admin.ModelAdmin):
    list_display = ('name', 'group_type', 'active', 'sequence')
    list_filter = ('active',)
    search_fields = ('name',)


class GroupMembershipAdmin(admin.ModelAdmin):
    list_display = ('group', 'user', 'added', 'sequence')


admin_list = [
    (Group, GroupAdmin),
    (GroupMembership, GroupMembershipAdmin),
]

# Register each (model, admin) pair; a plain loop avoids building a
# throwaway list purely for its side effects.
for model, model_admin in admin_list:
    admin.site.register(model, model_admin)
ubiquitypress/rua
src/manager/admin.py
Python
gpl-2.0
496
0
# -*- coding: utf-8 -*- { 'name': 'SaaS Portal Asynchronous database creation', 'version': '1.0.0', 'author': 'IT-Projects LLC', "support": "apps@it-projects.info", 'website': "https://it-projects.info", 'license': 'GPL-3', 'category': 'SaaS', 'depends': [ 'base', 'saas_portal', 'connector', ], 'installable': False, 'application': False, 'data': [ 'views/wizard.xml', ], }
thinkopensolutions/odoo-saas-tools
saas_portal_async/__openerp__.py
Python
lgpl-3.0
459
0
def check_resource_count(expected_count): test.assertEqual(expected_count, len(reality.all_resources())) example_template = Template({ 'A': RsrcDef({}, []), 'B': RsrcDef({'a': '4alpha'}, ['A']), 'C': RsrcDef({'a': 'foo'}, ['B']), 'D': RsrcDef({'a': 'bar'}, ['C']), }) engine.create_stack('foo', example_template) engine.noop(1) example_template2 = Template({ 'A': RsrcDef({}, []), 'B': RsrcDef({'a': '4alpha'}, ['A']), 'C': RsrcDef({'a': 'blarg'}, ['B']), 'D': RsrcDef({'a': 'wibble'}, ['C']), }) engine.update_stack('foo', example_template2) engine.call(check_resource_count, 2) engine.noop(11) engine.call(verify, example_template2)
zaneb/heat-convergence-prototype
scenarios/update_interrupt_create.py
Python
apache-2.0
673
0.001486
#!/usr/bin/env python3

import argparse
import struct
import sys

SPC_START_OFFSET = 0x100
SPC_RAM_SIZE = 0x10000
INST_TBL = 0x6C00
INST_ENTRY_LEN = 0x6
SAMPLE_TBL = 0x6D00
SAMPLE_ENTRY_LEN = 0x4
SAMPLE_MAX_ID = 0x4F  # completely arbitrary limit


class InstrEntry (object):
    srcn = None
    adsr = None
    gain = None
    pitch_adj = None

    @classmethod
    def decode (cls, entry):
        u = struct.unpack("<BHBH", entry)
        return cls(srcn=u[0], adsr=u[1], gain=u[2], pitch_adj=u[3])

    def __init__ (self, **kwargs):
        self.__dict__.update(kwargs)

    def encode (self):
        return struct.pack("<BHBH", self.srcn, self.adsr, self.gain, self.pitch_adj)

    def __str__ (self):
        m = "InstrEntry<srcn={0:02X} adsr={1:04X} gain={2:02X} pitch_adj={3:04X}>"
        return m.format(self.srcn, self.adsr, self.gain, self.pitch_adj)


def parse_fp (f):
    f.read(SPC_START_OFFSET)  # skip the SPC header; only the RAM image matters
    ram = f.read(SPC_RAM_SIZE)
    signatures = []
    ptr = INST_TBL
    for inst in range(0x2a):
        entry = InstrEntry.decode(ram[ptr:ptr+INST_ENTRY_LEN])
        ptr += INST_ENTRY_LEN
        if (0 <= entry.srcn <= SAMPLE_MAX_ID):
            signatures.append(entry)
    return signatures


def dump_signature (sig_ary, fn=print):
    for i, v in enumerate(sig_ary):
        fn("{0:2X}: {1}".format(i, str(v)))
    return


def main (args, prog='samplecheck'):
    p = argparse.ArgumentParser(prog=prog)
    p.add_argument("SPC", help="The SPC file to fingerprint")
    # parse the argv slice passed in, not sys.argv implicitly
    args = p.parse_args(args)
    with open(args.SPC, "rb") as f:
        dump_signature(parse_fp(f))


if __name__ == "__main__":
    main(sys.argv[1:], sys.argv[0])
softglow/samplecheck
samplecheck.py
Python
gpl-3.0
1,713
0.008757
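Each instrument table entry in the file above is six bytes, unpacked little-endian as "<BHBH": an srcn byte, an ADSR word, a gain byte, and a pitch-adjustment word. A quick round-trip of that layout using nothing but struct; the field values are made up for illustration:

import struct

raw = struct.pack("<BHBH", 0x12, 0x8FE0, 0x7F, 0x0400)  # illustrative values
assert len(raw) == 6  # matches INST_ENTRY_LEN

srcn, adsr, gain, pitch_adj = struct.unpack("<BHBH", raw)
print("srcn={0:02X} adsr={1:04X} gain={2:02X} pitch_adj={3:04X}".format(
    srcn, adsr, gain, pitch_adj))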
# Generated by Django 3.0.4 on 2020-11-05 22:20 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('wagtaildocs', '0010_document_file_hash'), ('pages', '0005_auto_20201105_1414'), ] operations = [ migrations.AlterField( model_name='llphpage', name='book_cover', field=models.ForeignKey(blank=True, help_text='The book cover to be shown on the website.', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtaildocs.Document'), ), ]
openstax/openstax-cms
pages/migrations/0006_auto_20201105_1620.py
Python
agpl-3.0
631
0.001585
from ipv8.requestcache import RandomNumberCache from tribler_core.utilities.unicode import hexlify class SearchRequestCache(RandomNumberCache): """ This request cache keeps track of all outstanding search requests within the GigaChannelCommunity. """ def __init__(self, request_cache, uuid, peers): super(SearchRequestCache, self).__init__(request_cache, u"remote-search-request") self.request_cache = request_cache self.requested_peers = {hexlify(peer.mid): False for peer in peers} self.uuid = uuid @property def timeout_delay(self): return 30.0 def on_timeout(self): pass def process_peer_response(self, peer): """ Returns whether to process this response from the given peer in the community. If the peer response has already been processed then it is skipped. Moreover, if all the responses from the expected peers are received, the request is removed from the request cache. :param peer: Peer :return: True if peer has not been processed before, else False """ mid = hexlify(peer.mid) if mid in self.requested_peers and not self.requested_peers[mid]: self.requested_peers[mid] = True # Check if all expected responses are received if all(self.requested_peers.values()): self.remove_request() return True return False def remove_request(self): if self.request_cache.has(self.prefix, self.number): try: self.request_cache.pop(self.prefix, self.number) except KeyError: pass
hbiyik/tribler
src/tribler-core/tribler_core/modules/metadata_store/community/request.py
Python
lgpl-3.0
1,680
0.002381
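The cache's bookkeeping in process_peer_response reduces to a map from peer ID to a seen flag: a response is processed only on its first sighting, and the request is finished once every flag is set. The same logic in isolation, as a minimal sketch with placeholder peer IDs:

class ResponseTracker:
    """Track which expected peers have answered; each peer counts once."""

    def __init__(self, peer_ids):
        self.seen = {pid: False for pid in peer_ids}

    def process(self, peer_id):
        # Returns (should_process, all_done).
        if peer_id in self.seen and not self.seen[peer_id]:
            self.seen[peer_id] = True
            return True, all(self.seen.values())
        return False, all(self.seen.values())


tracker = ResponseTracker(["aa01", "bb02"])
print(tracker.process("aa01"))  # (True, False)  first response, not done yet
print(tracker.process("aa01"))  # (False, False) duplicate is ignored
print(tracker.process("bb02"))  # (True, True)   all expected peers answered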
# Copyright 2016 Euclidean Technologies Management LLC All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import os
import sys

import numpy as np
import tensorflow as tf

from deep_nn_model import DeepNNModel
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops


class DeepMlpModel(DeepNNModel):
    """
    A Deep MLP Model that supports a multi-class output with an arbitrary
    number of fixed-width hidden layers.
    """

    def __init__(self, num_layers, num_inputs, num_hidden, num_outputs,
                 num_unrollings, max_grad_norm=5.0, hidden_dropout=True,
                 input_dropout=False, skip_connections=False,
                 embedding_size=0, optimizer='gd'):
        """
        Initialize the model
        Args:
          num_layers: number of hidden layers
          num_inputs: number of input units. This should be less than or
            equal to the width of the feature data in the data file
          num_hidden: number of hidden units in each hidden layer
          num_unrollings: the size of the time window processed in each
            step (see step() function below)
          max_grad_norm: max gradient norm for gradient clipping
          input_dropout: whether to perform dropout on the input layer
        The batch size is supplied at run time through a placeholder rather
        than as a constructor argument.
        """
        self._num_unrollings = num_unrollings
        self._num_inputs = num_inputs
        total_input_size = num_unrollings * num_inputs

        batch_size = self._batch_size = tf.placeholder(tf.int32, shape=[])
        self._seq_lengths = tf.placeholder(tf.int64, shape=[None])
        self._keep_prob = tf.placeholder(tf.float32, shape=[])

        self._inputs = list()
        self._targets = list()
        self._train_mask = list()  # Weights for loss functions per example
        self._valid_mask = list()  # Weights for loss functions per example

        for _ in range(num_unrollings):
            self._inputs.append(tf.placeholder(tf.float32, shape=[None, num_inputs]))
            self._targets.append(tf.placeholder(tf.float32, shape=[None, num_outputs]))
            self._train_mask.append(tf.placeholder(tf.float32, shape=[None]))
            self._valid_mask.append(tf.placeholder(tf.float32, shape=[None]))

        inputs = tf.reverse_sequence(tf.concat(self._inputs, 1),
                                     self._seq_lengths * num_inputs,
                                     seq_axis=1, batch_axis=0)

        if input_dropout is True:
            inputs = self._input_dropout(inputs)

        num_prev = total_input_size
        outputs = inputs

        if embedding_size > 0:
            time_weights = tf.get_variable("t_weights", [num_unrollings, embedding_size, 1])
            feature_weights = tf.get_variable("f_weights", [1, embedding_size, num_inputs])
            embedding_weights = tf.reshape(time_weights * feature_weights,
                                           [num_unrollings * num_inputs, embedding_size])
            biases = tf.get_variable("embedding_biases", [embedding_size])
            outputs = tf.nn.relu(tf.nn.xw_plus_b(inputs, embedding_weights, biases))
            num_prev = embedding_size

        for i in range(num_layers):
            weights = tf.get_variable("hidden_w_%d" % i, [num_prev, num_hidden])
            biases = tf.get_variable("hidden_b_%d" % i, [num_hidden])
            outputs = tf.nn.relu(tf.nn.xw_plus_b(outputs, weights, biases))
            if hidden_dropout is True:
outputs = tf.nn.dropout(outputs, self._keep_prob) num_prev = num_hidden if skip_connections is True: num_prev = num_inputs+num_prev skip_inputs = tf.slice(inputs, [0, 0], [batch_size, num_inputs] ) outputs = tf.concat( [ skip_inputs, outputs], 1) softmax_b = tf.get_variable("softmax_b", [num_outputs]) softmax_w = tf.get_variable("softmax_w", [num_prev, num_outputs]) logits = tf.nn.xw_plus_b(outputs, softmax_w, softmax_b) targets = tf.unstack(tf.reverse_sequence(tf.reshape( tf.concat(self._targets, 1),[batch_size,num_unrollings,num_outputs] ), self._seq_lengths,seq_axis=1,batch_axis=0),axis=1)[0] agg_loss = tf.nn.softmax_cross_entropy_with_logits(labels=targets,logits=logits) train_mask = tf.unstack(tf.reverse_sequence(tf.transpose( tf.reshape( tf.concat(self._train_mask, 0 ), [num_unrollings, batch_size] ) ), self._seq_lengths,seq_axis=1,batch_axis=0),axis=1)[0] valid_mask = tf.unstack(tf.reverse_sequence(tf.transpose( tf.reshape( tf.concat(self._valid_mask, 0), [num_unrollings, batch_size] ) ), self._seq_lengths,seq_axis=1,batch_axis=0),axis=1)[0] train_loss = tf.multiply(agg_loss, train_mask) valid_loss = tf.multiply(agg_loss, valid_mask) self._loss = self._train_loss = train_loss self._valid_loss = valid_loss self._train_evals = tf.reduce_sum( train_mask ) self._valid_evals = tf.reduce_sum( valid_mask ) self._train_cst = tf.reduce_sum( train_loss ) self._valid_cst = tf.reduce_sum( valid_loss ) self._predictions = tf.nn.softmax(logits) self._class_predictions = tf.one_hot(tf.argmax(self._predictions,1), num_outputs, axis=-1) accy = tf.multiply(self._class_predictions, targets) train_accy = tf.multiply(accy,tf.reshape(train_mask, shape=[batch_size,1])) valid_accy = tf.multiply(accy,tf.reshape(valid_mask, shape=[batch_size,1])) self._train_accy = tf.reduce_sum( train_accy ) self._valid_accy = tf.reduce_sum( valid_accy ) self._cost = self._train_cst self._accy = self._train_accy self._evals = self._train_evals self._batch_cst = self._train_cst / (self._train_evals + 1.0) # here is the learning part of the graph tvars = tf.trainable_variables() grads = tf.gradients(self._batch_cst,tvars) if (max_grad_norm > 0): grads, _ = tf.clip_by_global_norm(grads,max_grad_norm) self._lr = tf.Variable(0.0, trainable=False) optim = None if optimizer == 'gd': optim = tf.train.GradientDescentOptimizer(self._lr) elif optimizer == 'adagrad': optim = tf.train.AdagradOptimizer(self._lr) elif optimizer == 'adam': optim = tf.train.AdamOptimizer(self._lr) elif optimizer == 'mo': optim = tf.train.MomentumOptimizer(self._lr) else: raise RuntimeError("Unknown optimizer = %s"%optimizer) self._train_op = optim.apply_gradients(zip(grads, tvars)) def _input_dropout(self,inputs): # This implementation of dropout dropouts an entire feature along the time dim random_tensor = self._keep_prob random_tensor += random_ops.random_uniform([self._batch_size,self._num_inputs], dtype=inputs.dtype) random_tensor = tf.tile(random_tensor,[1,self._num_unrollings]) binary_tensor = math_ops.floor(random_tensor) ret = math_ops.div(inputs, self._keep_prob) * binary_tensor ret.set_shape(inputs.get_shape()) return ret
euclidjda/dnn-quant
scripts/deep_mlp_model.py
Python
apache-2.0
7,953
0.020118
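The _input_dropout trick in the model above draws one keep/drop decision per (example, feature) pair and tiles it across the unrolled time steps, so a dropped feature disappears from the whole window rather than from individual steps; surviving values are scaled by 1/keep_prob as in standard inverted dropout. A NumPy sketch of the same masking, with arbitrary shapes and seed:

import numpy as np

rng = np.random.default_rng(1)
batch, num_inputs, num_unrollings, keep_prob = 2, 3, 4, 0.5

inputs = rng.normal(size=(batch, num_unrollings * num_inputs))

# One Bernoulli(keep_prob) draw per (example, feature) ...
mask = np.floor(keep_prob + rng.uniform(size=(batch, num_inputs)))
# ... tiled across time, so a feature is kept or dropped for the whole window.
mask = np.tile(mask, (1, num_unrollings))

dropped = inputs / keep_prob * mask  # inverted dropout scaling
print(dropped)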
# Copyright (c) 2015-2018 Cisco Systems, Inc. # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to # deal in the Software without restriction, including without limitation the # rights to use, copy, modify, merge, publish, distribute, sublicense, and/or # sell copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER # DEALINGS IN THE SOFTWARE. from molecule.model import schema_v2 def test_base_config(_config): assert {} == schema_v2.validate(_config)
metacloud/molecule
test/unit/model/v2/test_schema.py
Python
mit
1,235
0
from form_views import *
jittat/ku-eng-direct-admission
application/views/__init__.py
Python
agpl-3.0
25
0
import TCP
import Steering
import Motor
import time

""" To test without actually driving the motors: add
print "right speed: " + str(right_speed)
print "left speed: " + str(left_speed)
under
def receive_message(self, type, message):
    if (type == "Gyro" and self.stop == False):
and comment out
self.motors.set_right_speed(right_speed)
self.motors.set_left_speed(left_speed)
"""

autoTTCommunication = TCP.AutoTTCommunication(12345)
trip_meter = Motor.TripMeter()
motors = Motor.Motor(trip_meter)
steering = Steering.SteeringWithIOSGyro(motors)
autoTTCommunication.gyro_recv = steering
autoTTCommunication.stop_cont_recv = steering

autoTTCommunication.send_message("Gyro", "0.2")
time.sleep(2)
autoTTCommunication.send_message("Gyro", "0.2")
autoTTCommunication.send_message("Gyro", "0.2")

while True:
    time.sleep(5)
task123/AutoTT
scriptsForTesting/steeringTest.py
Python
mit
823
0.001215
# coding: utf-8 # Copyright (c) Pymatgen Development Team. # Distributed under the terms of the MIT License. """ This module implements more advanced transformations. """ import logging import math import warnings from fractions import Fraction from itertools import groupby, product from math import gcd from string import ascii_lowercase from typing import Dict, Optional import numpy as np from monty.dev import requires from monty.fractions import lcm from monty.json import MSONable from pymatgen.analysis.adsorption import AdsorbateSiteFinder from pymatgen.analysis.bond_valence import BVAnalyzer from pymatgen.analysis.energy_models import SymmetryModel from pymatgen.analysis.ewald import EwaldSummation from pymatgen.analysis.gb.grain import GrainBoundaryGenerator from pymatgen.analysis.local_env import MinimumDistanceNN from pymatgen.analysis.structure_matcher import SpinComparator, StructureMatcher from pymatgen.analysis.structure_prediction.substitution_probability import ( SubstitutionPredictor, ) from pymatgen.command_line.enumlib_caller import EnumError, EnumlibAdaptor from pymatgen.command_line.mcsqs_caller import run_mcsqs from pymatgen.core.periodic_table import DummySpecies, Element, Species, get_el_sp from pymatgen.core.structure import Structure from pymatgen.core.surface import SlabGenerator from pymatgen.electronic_structure.core import Spin from pymatgen.io.ase import AseAtomsAdaptor from pymatgen.symmetry.analyzer import SpacegroupAnalyzer from pymatgen.transformations.standard_transformations import ( OrderDisorderedStructureTransformation, SubstitutionTransformation, SupercellTransformation, ) from pymatgen.transformations.transformation_abc import AbstractTransformation try: import hiphive # type: ignore except ImportError: hiphive = None __author__ = "Shyue Ping Ong, Stephen Dacek, Anubhav Jain, Matthew Horton, " "Alex Ganose" logger = logging.getLogger(__name__) class ChargeBalanceTransformation(AbstractTransformation): """ This is a transformation that disorders a structure to make it charge balanced, given an oxidation state-decorated structure. """ def __init__(self, charge_balance_sp): """ Args: charge_balance_sp: specie to add or remove. Currently only removal is supported """ self.charge_balance_sp = str(charge_balance_sp) def apply_transformation(self, structure): """ Applies the transformation. Args: structure: Input Structure Returns: Charge balanced structure. """ charge = structure.charge specie = get_el_sp(self.charge_balance_sp) num_to_remove = charge / specie.oxi_state num_in_structure = structure.composition[specie] removal_fraction = num_to_remove / num_in_structure if removal_fraction < 0: raise ValueError("addition of specie not yet supported by " "ChargeBalanceTransformation") trans = SubstitutionTransformation({self.charge_balance_sp: {self.charge_balance_sp: 1 - removal_fraction}}) return trans.apply_transformation(structure) def __str__(self): return "Charge Balance Transformation : " + "Species to remove = {}".format(str(self.charge_balance_sp)) def __repr__(self): return self.__str__() @property def inverse(self): """Returns: None""" return None @property def is_one_to_many(self): """Returns: False""" return False class SuperTransformation(AbstractTransformation): """ This is a transformation that is inherently one-to-many. It is constructed from a list of transformations and returns one structure for each transformation. The primary use for this class is extending a transmuter object. 
""" def __init__(self, transformations, nstructures_per_trans=1): """ Args: transformations ([transformations]): List of transformations to apply to a structure. One transformation is applied to each output structure. nstructures_per_trans (int): If the transformations are one-to-many and, nstructures_per_trans structures from each transformation are added to the full list. Defaults to 1, i.e., only best structure. """ self._transformations = transformations self.nstructures_per_trans = nstructures_per_trans def apply_transformation(self, structure, return_ranked_list=False): """ Applies the transformation. Args: structure: Input Structure return_ranked_list: Number of structures to return. Returns: Structures with all transformations applied. """ if not return_ranked_list: raise ValueError("SuperTransformation has no single best structure" " output. Must use return_ranked_list") structures = [] for t in self._transformations: if t.is_one_to_many: for d in t.apply_transformation(structure, return_ranked_list=self.nstructures_per_trans): d["transformation"] = t structures.append(d) else: structures.append( { "transformation": t, "structure": t.apply_transformation(structure), } ) return structures def __str__(self): return "Super Transformation : Transformations = " + "{}".format( " ".join([str(t) for t in self._transformations]) ) def __repr__(self): return self.__str__() @property def inverse(self): """Returns: None""" return None @property def is_one_to_many(self): """Returns: True""" return True class MultipleSubstitutionTransformation: """ Performs multiple substitutions on a structure. For example, can do a fractional replacement of Ge in LiGePS with a list of species, creating one structure for each substitution. Ordering is done using a dummy element so only one ordering must be done per substitution oxidation state. Charge balancing of the structure is optionally performed. .. note:: There are no checks to make sure that removal fractions are possible and rounding may occur. Currently charge balancing only works for removal of species. """ def __init__( self, sp_to_replace, r_fraction, substitution_dict, charge_balance_species=None, order=True, ): """ Performs multiple fractional substitutions on a transmuter. Args: sp_to_replace: species to be replaced r_fraction: fraction of that specie to replace substitution_dict: dictionary of the format {2: ["Mg", "Ti", "V", "As", "Cr", "Ta", "N", "Nb"], 3: ["Ru", "Fe", "Co", "Ce", "As", "Cr", "Ta", "N", "Nb"], 4: ["Ru", "V", "Cr", "Ta", "N", "Nb"], 5: ["Ru", "W", "Mn"] } The number is the charge used for each of the list of elements (an element can be present in multiple lists) charge_balance_species: If specified, will balance the charge on the structure using that specie. """ self.sp_to_replace = sp_to_replace self.r_fraction = r_fraction self.substitution_dict = substitution_dict self.charge_balance_species = charge_balance_species self.order = order def apply_transformation(self, structure, return_ranked_list=False): """ Applies the transformation. Args: structure: Input Structure return_ranked_list: Number of structures to return. Returns: Structures with all substitutions applied. """ if not return_ranked_list: raise ValueError( "MultipleSubstitutionTransformation has no single" " best structure output. Must use" " return_ranked_list." 
            )
        outputs = []
        for charge, el_list in self.substitution_dict.items():
            mapping = {}
            if charge > 0:
                sign = "+"
            else:
                sign = "-"
            dummy_sp = "X{}{}".format(str(charge), sign)
            mapping[self.sp_to_replace] = {
                self.sp_to_replace: 1 - self.r_fraction,
                dummy_sp: self.r_fraction,
            }
            trans = SubstitutionTransformation(mapping)
            dummy_structure = trans.apply_transformation(structure)
            if self.charge_balance_species is not None:
                cbt = ChargeBalanceTransformation(self.charge_balance_species)
                dummy_structure = cbt.apply_transformation(dummy_structure)
            if self.order:
                trans = OrderDisorderedStructureTransformation()
                dummy_structure = trans.apply_transformation(dummy_structure)

            for el in el_list:
                if charge > 0:
                    sign = "+"
                else:
                    sign = "-"
                st = SubstitutionTransformation({"X{}+".format(str(charge)): "{}{}{}".format(el, charge, sign)})
                new_structure = st.apply_transformation(dummy_structure)
                outputs.append({"structure": new_structure})
        return outputs

    def __str__(self):
        return "Multiple Substitution Transformation : Substitution on " + "{}".format(self.sp_to_replace)

    def __repr__(self):
        return self.__str__()

    @property
    def inverse(self):
        """Returns: None"""
        return None

    @property
    def is_one_to_many(self):
        """Returns: True"""
        return True


class EnumerateStructureTransformation(AbstractTransformation):
    """
    Order a disordered structure using enumlib. For complete orderings, this
    generally produces fewer structures than the OrderDisorderedStructure
    transformation, and at a much faster speed.
    """

    def __init__(
        self,
        min_cell_size=1,
        max_cell_size=1,
        symm_prec=0.1,
        refine_structure=False,
        enum_precision_parameter=0.001,
        check_ordered_symmetry=True,
        max_disordered_sites=None,
        sort_criteria="ewald",
        timeout=None,
    ):
        """
        Args:
            min_cell_size: The minimum cell size wanted. Must be an int.
                Defaults to 1.
            max_cell_size: The maximum cell size wanted. Must be an int.
                Defaults to 1.
            symm_prec: Tolerance to use for symmetry.
            refine_structure: This parameter has the same meaning as in
                enumlib_caller. If you are starting from a structure that has
                been relaxed via some electronic structure code, it is usually
                much better to start with symmetry determination and then
                obtain a refined structure. The refined structure has cell
                parameters and atomic positions shifted to the expected
                symmetry positions, which makes it much less sensitive to
                precision issues in enumlib. If you are already starting from
                an experimental cif, refinement should have already been done
                and it is not necessary. Defaults to False.
            enum_precision_parameter (float): Finite precision parameter for
                enumlib. Default of 0.001 is usually ok, but you might need to
                tweak it for certain cells.
            check_ordered_symmetry (bool): Whether to check the symmetry of
                the ordered sites. If the symmetry of the ordered sites is
                lower, the lowest-symmetry ordered sites are included in the
                enumeration. This is important if the ordered sites break
                symmetry in a way that is important for getting possible
                structures. But sometimes including ordered sites slows down
                enumeration to the point that it cannot be completed. Switch
                to False in those cases. Defaults to True.
            max_disordered_sites (int): An alternate parameter to
                max_cell_size. Will sequentially try larger and larger cell
                sizes until (i) getting a result or (ii) the number of
                disordered sites in the cell exceeds max_disordered_sites.
                Must set max_cell_size to None when using this parameter.
            sort_criteria (str): Sort by Ewald energy ("ewald", which requires
                oxidation states and is slow) or by number of sites ("nsites",
                much faster).
timeout (float): timeout in minutes to pass to EnumlibAdaptor """ self.symm_prec = symm_prec self.min_cell_size = min_cell_size self.max_cell_size = max_cell_size self.refine_structure = refine_structure self.enum_precision_parameter = enum_precision_parameter self.check_ordered_symmetry = check_ordered_symmetry self.max_disordered_sites = max_disordered_sites self.sort_criteria = sort_criteria self.timeout = timeout if max_cell_size and max_disordered_sites: raise ValueError("Cannot set both max_cell_size and " "max_disordered_sites!") def apply_transformation(self, structure, return_ranked_list=False): """ Returns either a single ordered structure or a sequence of all ordered structures. Args: structure: Structure to order. return_ranked_list (bool): Whether or not multiple structures are returned. If return_ranked_list is a number, that number of structures is returned. Returns: Depending on returned_ranked list, either a transformed structure or a list of dictionaries, where each dictionary is of the form {"structure" = .... , "other_arguments"} The list of ordered structures is ranked by ewald energy / atom, if the input structure is an oxidation state decorated structure. Otherwise, it is ranked by number of sites, with smallest number of sites first. """ try: num_to_return = int(return_ranked_list) except ValueError: num_to_return = 1 if self.refine_structure: finder = SpacegroupAnalyzer(structure, self.symm_prec) structure = finder.get_refined_structure() contains_oxidation_state = all( hasattr(sp, "oxi_state") and sp.oxi_state != 0 for sp in structure.composition.elements ) structures = None if structure.is_ordered: warnings.warn( "Enumeration skipped for structure with composition {} " "because it is ordered".format(structure.composition) ) structures = [structure.copy()] if self.max_disordered_sites: ndisordered = sum([1 for site in structure if not site.is_ordered]) if ndisordered > self.max_disordered_sites: raise ValueError("Too many disordered sites! 
({} > {})".format(ndisordered, self.max_disordered_sites)) max_cell_sizes = range( self.min_cell_size, int(math.floor(self.max_disordered_sites / ndisordered)) + 1, ) else: max_cell_sizes = [self.max_cell_size] for max_cell_size in max_cell_sizes: adaptor = EnumlibAdaptor( structure, min_cell_size=self.min_cell_size, max_cell_size=max_cell_size, symm_prec=self.symm_prec, refine_structure=False, enum_precision_parameter=self.enum_precision_parameter, check_ordered_symmetry=self.check_ordered_symmetry, timeout=self.timeout, ) try: adaptor.run() structures = adaptor.structures if structures: break except EnumError: warnings.warn("Unable to enumerate for max_cell_size = {}".format(max_cell_size)) if structures is None: raise ValueError("Unable to enumerate") original_latt = structure.lattice inv_latt = np.linalg.inv(original_latt.matrix) ewald_matrices = {} all_structures = [] for s in structures: new_latt = s.lattice transformation = np.dot(new_latt.matrix, inv_latt) transformation = tuple(tuple(int(round(cell)) for cell in row) for row in transformation) if contains_oxidation_state and self.sort_criteria == "ewald": if transformation not in ewald_matrices: s_supercell = structure * transformation ewald = EwaldSummation(s_supercell) ewald_matrices[transformation] = ewald else: ewald = ewald_matrices[transformation] energy = ewald.compute_sub_structure(s) all_structures.append({"num_sites": len(s), "energy": energy, "structure": s}) else: all_structures.append({"num_sites": len(s), "structure": s}) def sort_func(s): return ( s["energy"] / s["num_sites"] if contains_oxidation_state and self.sort_criteria == "ewald" else s["num_sites"] ) self._all_structures = sorted(all_structures, key=sort_func) if return_ranked_list: return self._all_structures[0:num_to_return] return self._all_structures[0]["structure"] def __str__(self): return "EnumerateStructureTransformation" def __repr__(self): return self.__str__() @property def inverse(self): """Returns: None""" return None @property def is_one_to_many(self): """Returns: True""" return True class SubstitutionPredictorTransformation(AbstractTransformation): """ This transformation takes a structure and uses the structure prediction module to find likely site substitutions. """ def __init__(self, threshold=1e-2, scale_volumes=True, **kwargs): r""" Args: threshold: Threshold for substitution. scale_volumes: Whether to scale volumes after substitution. **kwargs: Args for SubstitutionProbability class lambda_table, alpha """ self.kwargs = kwargs self.threshold = threshold self.scale_volumes = scale_volumes self._substitutor = SubstitutionPredictor(threshold=threshold, **kwargs) def apply_transformation(self, structure, return_ranked_list=False): """ Applies the transformation. Args: structure: Input Structure return_ranked_list: Number of structures to return. Returns: Predicted Structures. 
""" if not return_ranked_list: raise ValueError("SubstitutionPredictorTransformation doesn't" " support returning 1 structure") preds = self._substitutor.composition_prediction(structure.composition, to_this_composition=False) preds.sort(key=lambda x: x["probability"], reverse=True) outputs = [] for pred in preds: st = SubstitutionTransformation(pred["substitutions"]) output = { "structure": st.apply_transformation(structure), "probability": pred["probability"], "threshold": self.threshold, "substitutions": {}, } # dictionary keys have to be converted to strings for JSON for key, value in pred["substitutions"].items(): output["substitutions"][str(key)] = str(value) outputs.append(output) return outputs def __str__(self): return "SubstitutionPredictorTransformation" def __repr__(self): return self.__str__() @property def inverse(self): """Returns: None""" return None @property def is_one_to_many(self): """Returns: True""" return True class MagOrderParameterConstraint(MSONable): """ This class can be used to supply MagOrderingTransformation to just a specific subset of species or sites that satisfy the provided constraints. This can be useful for setting an order parameters for, for example, ferrimagnetic structures which might order on certain motifs, with the global order parameter dependent on how many sites satisfy that motif. """ def __init__( self, order_parameter, species_constraints=None, site_constraint_name=None, site_constraints=None, ): """ :param order_parameter (float): any number from 0.0 to 1.0, typically 0.5 (antiferromagnetic) or 1.0 (ferromagnetic) :param species_constraint (list): str or list of strings of Species symbols that the constraint should apply to :param site_constraint_name (str): name of the site property that the constraint should apply to, e.g. "coordination_no" :param site_constraints (list): list of values of the site property that the constraints should apply to """ # validation if site_constraints and site_constraints != [None] and not site_constraint_name: raise ValueError("Specify the name of the site constraint.") if not site_constraints and site_constraint_name: raise ValueError("Please specify some site constraints.") if not isinstance(species_constraints, list): species_constraints = [species_constraints] if not isinstance(site_constraints, list): site_constraints = [site_constraints] if order_parameter > 1 or order_parameter < 0: raise ValueError("Order parameter must lie between 0 and 1") if order_parameter != 0.5: warnings.warn( "Use care when using a non-standard order parameter, " "though it can be useful in some cases it can also " "lead to unintended behavior. Consult documentation." ) self.order_parameter = order_parameter self.species_constraints = species_constraints self.site_constraint_name = site_constraint_name self.site_constraints = site_constraints def satisfies_constraint(self, site): """ Checks if a periodic site satisfies the constraint. """ if not site.is_ordered: return False satisfies_constraints = self.species_constraints and str(site.specie) in self.species_constraints if self.site_constraint_name and self.site_constraint_name in site.properties: prop = site.properties[self.site_constraint_name] satisfies_constraints = prop in self.site_constraints return satisfies_constraints class MagOrderingTransformation(AbstractTransformation): """ This transformation takes a structure and returns a list of collinear magnetic orderings. For disordered structures, make an ordered approximation first. 
""" def __init__(self, mag_species_spin, order_parameter=0.5, energy_model=SymmetryModel(), **kwargs): """ :param mag_species_spin: A mapping of elements/species to their spin magnitudes, e.g. {"Fe3+": 5, "Mn3+": 4} :param order_parameter (float or list): if float, a specifies a global order parameter and can take values from 0.0 to 1.0 (e.g. 0.5 for antiferromagnetic or 1.0 for ferromagnetic), if list has to be a list of :class: `pymatgen.transformations.advanced_transformations.MagOrderParameterConstraint` to specify more complicated orderings, see documentation for MagOrderParameterConstraint more details on usage :param energy_model: Energy model to rank the returned structures, see :mod: `pymatgen.analysis.energy_models` for more information (note that this is not necessarily a physical energy). By default, returned structures use SymmetryModel() which ranks structures from most symmetric to least. :param kwargs: Additional kwargs that are passed to :class:`EnumerateStructureTransformation` such as min_cell_size etc. """ # checking for sensible order_parameter values if isinstance(order_parameter, float): # convert to constraint format order_parameter = [ MagOrderParameterConstraint( order_parameter=order_parameter, species_constraints=list(mag_species_spin.keys()), ) ] elif isinstance(order_parameter, list): ops = [isinstance(item, MagOrderParameterConstraint) for item in order_parameter] if not any(ops): raise ValueError("Order parameter not correctly defined.") else: raise ValueError("Order parameter not correctly defined.") self.mag_species_spin = mag_species_spin # store order parameter constraints as dicts to save implementing # to/from dict methods for MSONable compatibility self.order_parameter = [op.as_dict() for op in order_parameter] self.energy_model = energy_model self.enum_kwargs = kwargs @staticmethod def determine_min_cell(disordered_structure): """ Determine the smallest supercell that is able to enumerate the provided structure with the given order parameter """ def lcm(n1, n2): """ Find least common multiple of two numbers """ return n1 * n2 / gcd(n1, n2) # assumes all order parameters for a given species are the same mag_species_order_parameter = {} mag_species_occurrences = {} for idx, site in enumerate(disordered_structure): if not site.is_ordered: op = max(site.species.values()) # this very hacky bit of code only works because we know # that on disordered sites in this class, all species are the same # but have different spins, and this is comma-delimited sp = str(list(site.species.keys())[0]).split(",")[0] if sp in mag_species_order_parameter: mag_species_occurrences[sp] += 1 else: mag_species_order_parameter[sp] = op mag_species_occurrences[sp] = 1 smallest_n = [] for sp, order_parameter in mag_species_order_parameter.items(): denom = Fraction(order_parameter).limit_denominator(100).denominator num_atom_per_specie = mag_species_occurrences[sp] n_gcd = gcd(denom, num_atom_per_specie) smallest_n.append(lcm(int(n_gcd), denom) / n_gcd) return max(smallest_n) @staticmethod def _add_dummy_species(structure, order_parameters): """ :param structure: ordered Structure :param order_parameters: list of MagOrderParameterConstraints :return: A structure decorated with disordered DummySpecies on which to perform the enumeration. 
Note that the DummySpecies are super-imposed on to the original sites, to make it easier to retrieve the original site after enumeration is performed (this approach is preferred over a simple mapping since multiple species may have the same DummySpecies, depending on the constraints specified). This approach can also preserve site properties even after enumeration. """ dummy_struct = structure.copy() def generate_dummy_specie(): """ Generator which returns DummySpecies symbols Mma, Mmb, etc. """ subscript_length = 1 while True: for subscript in product(ascii_lowercase, repeat=subscript_length): yield "Mm" + "".join(subscript) subscript_length += 1 dummy_species_gen = generate_dummy_specie() # one dummy species for each order parameter constraint dummy_species_symbols = [next(dummy_species_gen) for i in range(len(order_parameters))] dummy_species = [ { DummySpecies(symbol, properties={"spin": Spin.up}): constraint.order_parameter, DummySpecies(symbol, properties={"spin": Spin.down}): 1 - constraint.order_parameter, } for symbol, constraint in zip(dummy_species_symbols, order_parameters) ] for idx, site in enumerate(dummy_struct): satisfies_constraints = [c.satisfies_constraint(site) for c in order_parameters] if satisfies_constraints.count(True) > 1: # site should either not satisfy any constraints, or satisfy # one constraint raise ValueError( "Order parameter constraints conflict for site: {}, {}".format(str(site.specie), site.properties) ) if any(satisfies_constraints): dummy_specie_idx = satisfies_constraints.index(True) dummy_struct.append(dummy_species[dummy_specie_idx], site.coords, site.lattice) return dummy_struct @staticmethod def _remove_dummy_species(structure): """ :return: Structure with dummy species removed, but their corresponding spin properties merged with the original sites. Used after performing enumeration. """ if not structure.is_ordered: raise Exception("Something went wrong with enumeration.") sites_to_remove = [] logger.debug("Dummy species structure:\n{}".format(str(structure))) for idx, site in enumerate(structure): if isinstance(site.specie, DummySpecies): sites_to_remove.append(idx) spin = site.specie._properties.get("spin", None) neighbors = structure.get_neighbors( site, 0.05, # arbitrary threshold, needs to be << any bond length # but >> floating point precision issues include_index=True, ) if len(neighbors) != 1: raise Exception("This shouldn't happen, found neighbors: {}".format(neighbors)) orig_site_idx = neighbors[0][2] orig_specie = structure[orig_site_idx].specie new_specie = Species( orig_specie.symbol, getattr(orig_specie, "oxi_state", None), properties={"spin": spin}, ) structure.replace( orig_site_idx, new_specie, properties=structure[orig_site_idx].properties, ) structure.remove_sites(sites_to_remove) logger.debug("Structure with dummy species removed:\n{}".format(str(structure))) return structure def _add_spin_magnitudes(self, structure): """ Replaces Spin.up/Spin.down with spin magnitudes specified by mag_species_spin. 
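        For example (illustrative values): with mag_species_spin={"Fe3+": 5},
        a site decorated with spin=Spin.down (i.e. -1) ends up as Fe3+ with
        spin=-5.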
        :param structure: Structure enumerated with Spin.up/Spin.down species
        :return: Structure with spins scaled to the magnitudes given in
            mag_species_spin
        """
        for idx, site in enumerate(structure):
            if getattr(site.specie, "_properties", None):
                spin = site.specie._properties.get("spin", None)
                sign = int(spin) if spin else 0
                if spin:
                    new_properties = site.specie._properties.copy()
                    # this very hacky bit of code only works because we know
                    # that on disordered sites in this class, all species are the same
                    # but have different spins, and this is comma-delimited
                    sp = str(site.specie).split(",")[0]
                    new_properties.update({"spin": sign * self.mag_species_spin.get(sp, 0)})
                    new_specie = Species(
                        site.specie.symbol,
                        getattr(site.specie, "oxi_state", None),
                        new_properties,
                    )
                    structure.replace(idx, new_specie, properties=site.properties)
        logger.debug("Structure with spin magnitudes:\n{}".format(str(structure)))
        return structure

    def apply_transformation(self, structure, return_ranked_list=False):
        """
        Apply MagOrderingTransformation to an input structure.
        :param structure: Any ordered structure.
        :param return_ranked_list: As in other Transformations.
        :return: Structure(s) with magnetic orderings applied (a ranked list
            if return_ranked_list is set).
        """

        if not structure.is_ordered:
            raise ValueError("Create an ordered approximation of " "your input structure first.")

        # retrieve order parameters
        order_parameters = [MagOrderParameterConstraint.from_dict(op_dict) for op_dict in self.order_parameter]
        # add dummy species on which to perform enumeration
        structure = self._add_dummy_species(structure, order_parameters)

        # trivial case
        if structure.is_ordered:
            structure = self._remove_dummy_species(structure)
            return [structure] if return_ranked_list > 1 else structure

        enum_kwargs = self.enum_kwargs.copy()

        enum_kwargs["min_cell_size"] = max(int(self.determine_min_cell(structure)), enum_kwargs.get("min_cell_size", 1))

        if enum_kwargs.get("max_cell_size", None):
            if enum_kwargs["min_cell_size"] > enum_kwargs["max_cell_size"]:
                warnings.warn(
                    "Specified max cell size ({}) is smaller "
                    "than the minimum enumerable cell size ({}), "
                    "changing max cell size to {}".format(
                        enum_kwargs["max_cell_size"],
                        enum_kwargs["min_cell_size"],
                        enum_kwargs["min_cell_size"],
                    )
                )
                enum_kwargs["max_cell_size"] = enum_kwargs["min_cell_size"]
        else:
            enum_kwargs["max_cell_size"] = enum_kwargs["min_cell_size"]

        t = EnumerateStructureTransformation(**enum_kwargs)

        alls = t.apply_transformation(structure, return_ranked_list=return_ranked_list)

        # handle the fact that EnumerateStructureTransformation can either
        # return a single Structure or a list
        if isinstance(alls, Structure):
            # remove dummy species and replace Spin.up or Spin.down
            # with spin magnitudes given in mag_species_spin arg
            alls = self._remove_dummy_species(alls)
            alls = self._add_spin_magnitudes(alls)
        else:
            for idx, _ in enumerate(alls):
                alls[idx]["structure"] = self._remove_dummy_species(alls[idx]["structure"])
                alls[idx]["structure"] = self._add_spin_magnitudes(alls[idx]["structure"])

        try:
            num_to_return = int(return_ranked_list)
        except ValueError:
            num_to_return = 1

        if num_to_return == 1 or not return_ranked_list:
            return alls[0]["structure"] if num_to_return else alls

        # remove duplicate structures and group according to energy model
        m = StructureMatcher(comparator=SpinComparator())

        def key(x):
            return SpacegroupAnalyzer(x, 0.1).get_space_group_number()

        out = []
        for _, g in groupby(sorted([d["structure"] for d in alls], key=key), key):
            g = list(g)
            grouped = m.group_structures(g)
            out.extend([{"structure": g[0], "energy": self.energy_model.get_energy(g[0])} for g in grouped])

        self._all_structures = sorted(out, key=lambda d: d["energy"])

        return self._all_structures[0:num_to_return]

    def __str__(self):
        return "MagOrderingTransformation"

    def __repr__(self):
        return self.__str__()

    @property
    def inverse(self):
        """Returns: None"""
        return None

    @property
    def is_one_to_many(self):
        """Returns: True"""
        return True


def _find_codopant(target, oxidation_state, allowed_elements=None):
    """
    Finds the element from "allowed elements" that (i) possesses the desired
    "oxidation state" and (ii) is closest in ionic radius to the target specie

    Args:
        target: (Species) provides target ionic radius.
        oxidation_state: (float) codopant oxidation state.
        allowed_elements: ([str]) List of allowed elements. If None,
            all elements are tried.

    Returns:
        (Species) with oxidation_state that has ionic radius closest to
        target.
    """
    ref_radius = target.ionic_radius
    candidates = []
    symbols = allowed_elements or [el.symbol for el in Element]
    for sym in symbols:
        try:
            with warnings.catch_warnings():
                warnings.simplefilter("ignore")
                sp = Species(sym, oxidation_state)
                r = sp.ionic_radius
                if r is not None:
                    candidates.append((r, sp))
        except Exception:
            pass
    return min(candidates, key=lambda l: abs(l[0] / ref_radius - 1))[1]


class DopingTransformation(AbstractTransformation):
    """
    A transformation that performs doping of a structure.
    """

    def __init__(
        self,
        dopant,
        ionic_radius_tol=float("inf"),
        min_length=10,
        alio_tol=0,
        codopant=False,
        max_structures_per_enum=100,
        allowed_doping_species=None,
        **kwargs,
    ):
        r"""
        Args:
            dopant (Species-like): E.g., Al3+. Must have oxidation state.
            ionic_radius_tol (float): Fractional allowable ionic radii
                mismatch for dopant to fit into a site. Default of inf means
                that any dopant with the right oxidation state is allowed.
            min_length (float): Min. lattice parameter between periodic
                images of dopant. Defaults to 10A for now.
            alio_tol (int): If this is not 0, attempt will be made to dope
                sites with oxidation_states +- alio_tol of the dopant. E.g.,
                1 means that the ions like Ca2+ and Ti4+ are considered as
                potential doping sites for Al3+.
            codopant (bool): If True, doping will be carried out with a
                codopant to maintain charge neutrality. Otherwise, vacancies
                will be used.
            max_structures_per_enum (float): Maximum number of structures to
                return per enumeration. Note that there can be more than one
                candidate doping site, and each site enumeration will return
                at max max_structures_per_enum structures. Defaults to 100.
            allowed_doping_species (list): Species that are allowed to be
                doping sites. This is an inclusionary list. If specified,
                any sites which are not in the list will be excluded as
                doping candidates.
            **kwargs:
                Same keyword args as :class:`EnumerateStructureTransformation`,
                i.e., min_cell_size, etc.
""" self.dopant = get_el_sp(dopant) self.ionic_radius_tol = ionic_radius_tol self.min_length = min_length self.alio_tol = alio_tol self.codopant = codopant self.max_structures_per_enum = max_structures_per_enum self.allowed_doping_species = allowed_doping_species self.kwargs = kwargs def apply_transformation(self, structure, return_ranked_list=False): """ Args: structure (Structure): Input structure to dope Returns: [{"structure": Structure, "energy": float}] """ comp = structure.composition logger.info("Composition: %s" % comp) for sp in comp: try: sp.oxi_state except AttributeError: analyzer = BVAnalyzer() structure = analyzer.get_oxi_state_decorated_structure(structure) comp = structure.composition break ox = self.dopant.oxi_state radius = self.dopant.ionic_radius compatible_species = [ sp for sp in comp if sp.oxi_state == ox and abs(sp.ionic_radius / radius - 1) < self.ionic_radius_tol ] if (not compatible_species) and self.alio_tol: # We only consider aliovalent doping if there are no compatible # isovalent species. compatible_species = [ sp for sp in comp if abs(sp.oxi_state - ox) <= self.alio_tol and abs(sp.ionic_radius / radius - 1) < self.ionic_radius_tol and sp.oxi_state * ox >= 0 ] if self.allowed_doping_species is not None: # Only keep allowed doping species. compatible_species = [ sp for sp in compatible_species if sp in [get_el_sp(s) for s in self.allowed_doping_species] ] logger.info("Compatible species: %s" % compatible_species) lengths = structure.lattice.abc scaling = [max(1, int(round(math.ceil(self.min_length / x)))) for x in lengths] logger.info("Lengths are %s" % str(lengths)) logger.info("Scaling = %s" % str(scaling)) all_structures = [] t = EnumerateStructureTransformation(**self.kwargs) for sp in compatible_species: supercell = structure * scaling nsp = supercell.composition[sp] if sp.oxi_state == ox: supercell.replace_species({sp: {sp: (nsp - 1) / nsp, self.dopant: 1 / nsp}}) logger.info("Doping %s for %s at level %.3f" % (sp, self.dopant, 1 / nsp)) elif self.codopant: codopant = _find_codopant(sp, 2 * sp.oxi_state - ox) supercell.replace_species({sp: {sp: (nsp - 2) / nsp, self.dopant: 1 / nsp, codopant: 1 / nsp}}) logger.info("Doping %s for %s + %s at level %.3f" % (sp, self.dopant, codopant, 1 / nsp)) elif abs(sp.oxi_state) < abs(ox): # Strategy: replace the target species with a # combination of dopant and vacancy. # We will choose the lowest oxidation state species as a # vacancy compensation species as it is likely to be lower in # energy sp_to_remove = min( [s for s in comp if s.oxi_state * ox > 0], key=lambda ss: abs(ss.oxi_state), ) if sp_to_remove == sp: common_charge = lcm(int(abs(sp.oxi_state)), int(abs(ox))) ndopant = common_charge / abs(ox) nsp_to_remove = common_charge / abs(sp.oxi_state) logger.info("Doping %d %s with %d %s." % (nsp_to_remove, sp, ndopant, self.dopant)) supercell.replace_species( { sp: { sp: (nsp - nsp_to_remove) / nsp, self.dopant: ndopant / nsp, } } ) else: ox_diff = int(abs(round(sp.oxi_state - ox))) vac_ox = int(abs(sp_to_remove.oxi_state)) common_charge = lcm(vac_ox, ox_diff) ndopant = common_charge / ox_diff nx_to_remove = common_charge / vac_ox nx = supercell.composition[sp_to_remove] logger.info( "Doping %d %s with %s and removing %d %s." 
% (ndopant, sp, self.dopant, nx_to_remove, sp_to_remove) ) supercell.replace_species( { sp: {sp: (nsp - ndopant) / nsp, self.dopant: ndopant / nsp}, sp_to_remove: {sp_to_remove: (nx - nx_to_remove) / nx}, } ) elif abs(sp.oxi_state) > abs(ox): # Strategy: replace the target species with dopant and also # remove some opposite charged species for charge neutrality if ox > 0: sp_to_remove = max(supercell.composition.keys(), key=lambda el: el.X) else: sp_to_remove = min(supercell.composition.keys(), key=lambda el: el.X) # Confirm species are of opposite oxidation states. assert sp_to_remove.oxi_state * sp.oxi_state < 0 ox_diff = int(abs(round(sp.oxi_state - ox))) anion_ox = int(abs(sp_to_remove.oxi_state)) nx = supercell.composition[sp_to_remove] common_charge = lcm(anion_ox, ox_diff) ndopant = common_charge / ox_diff nx_to_remove = common_charge / anion_ox logger.info( "Doping %d %s with %s and removing %d %s." % (ndopant, sp, self.dopant, nx_to_remove, sp_to_remove) ) supercell.replace_species( { sp: {sp: (nsp - ndopant) / nsp, self.dopant: ndopant / nsp}, sp_to_remove: {sp_to_remove: (nx - nx_to_remove) / nx}, } ) ss = t.apply_transformation(supercell, return_ranked_list=self.max_structures_per_enum) logger.info("%s distinct structures" % len(ss)) all_structures.extend(ss) logger.info("Total %s doped structures" % len(all_structures)) if return_ranked_list: return all_structures[:return_ranked_list] return all_structures[0]["structure"] @property def inverse(self): """Returns: None""" return None @property def is_one_to_many(self): """Returns: True""" return True class SlabTransformation(AbstractTransformation): """ A transformation that creates a slab from a structure. """ def __init__( self, miller_index, min_slab_size, min_vacuum_size, lll_reduce=False, center_slab=False, in_unit_planes=False, primitive=True, max_normal_search=None, shift=0, tol=0.1, ): """ Args: miller_index (3-tuple or list): miller index of slab min_slab_size (float): minimum slab size in angstroms min_vacuum_size (float): minimum size of vacuum lll_reduce (bool): whether to apply LLL reduction center_slab (bool): whether to center the slab primitive (bool): whether to reduce slabs to most primitive cell max_normal_search (int): maximum index to include in linear combinations of indices to find c lattice vector orthogonal to slab surface shift (float): shift to get termination tol (float): tolerance for primitive cell finding """ self.miller_index = miller_index self.min_slab_size = min_slab_size self.min_vacuum_size = min_vacuum_size self.lll_reduce = lll_reduce self.center_slab = center_slab self.in_unit_planes = in_unit_planes self.primitive = primitive self.max_normal_search = max_normal_search self.shift = shift self.tol = tol def apply_transformation(self, structure): """ Applies the transformation. Args: structure: Input Structure Returns: Slab Structures. """ sg = SlabGenerator( structure, self.miller_index, self.min_slab_size, self.min_vacuum_size, self.lll_reduce, self.center_slab, self.in_unit_planes, self.primitive, self.max_normal_search, ) slab = sg.get_slab(self.shift, self.tol) return slab @property def inverse(self): """Returns: None""" return None @property def is_one_to_many(self): """Returns: False""" return False class DisorderOrderedTransformation(AbstractTransformation): """ Not to be confused with OrderDisorderedTransformation, this transformation attempts to obtain a *disordered* structure from an input ordered structure. 
    This may or may not be physically plausible; further inspection of the
    returned structures is advised.

    The main purpose for this transformation is for structure matching to
    crystal prototypes for structures that have been derived from a parent
    prototype structure by substitutions or alloying additions.
    """

    def __init__(self, max_sites_to_merge=2):
        """
        Args:
            max_sites_to_merge: only merge this number of sites together
        """
        self.max_sites_to_merge = max_sites_to_merge

    def apply_transformation(self, structure, return_ranked_list=False):
        """
        Args:
            structure: ordered structure
            return_ranked_list: as in other pymatgen Transformations

        Returns:
            Transformed disordered structure(s)
        """

        if not structure.is_ordered:
            raise ValueError("This transformation is for ordered structures only.")

        partitions = self._partition_species(structure.composition, max_components=self.max_sites_to_merge)
        disorder_mappings = self._get_disorder_mappings(structure.composition, partitions)

        disordered_structures = []
        for mapping in disorder_mappings:
            disordered_structure = structure.copy()
            disordered_structure.replace_species(mapping)
            disordered_structures.append({"structure": disordered_structure, "mapping": mapping})

        if len(disordered_structures) == 0:
            return None
        if not return_ranked_list:
            return disordered_structures[0]["structure"]
        if len(disordered_structures) > return_ranked_list:
            disordered_structures = disordered_structures[0:return_ranked_list]

        return disordered_structures

    @property
    def inverse(self):
        """Returns: None"""
        return None

    @property
    def is_one_to_many(self):
        """Returns: True"""
        return True

    @staticmethod
    def _partition_species(composition, max_components=2):
        """
        Private method to split a list of species into
        various partitions.
        """

        def _partition(collection):
            # thanks https://stackoverflow.com/a/30134039
            if len(collection) == 1:
                yield [collection]
                return
            first = collection[0]
            for smaller in _partition(collection[1:]):
                # insert `first` in each of the subpartition's subsets
                for n, subset in enumerate(smaller):
                    yield smaller[:n] + [[first] + subset] + smaller[n + 1 :]
                # put `first` in its own subset
                yield [[first]] + smaller

        def _sort_partitions(partitions_to_sort):
            """
            Sort partitions by those we want to check first
            (typically, merging two sites into one is the
            one to try first).
            """
            partition_indices = [(idx, [len(p) for p in partition]) for idx, partition in enumerate(partitions_to_sort)]

            # sort by maximum length of partition first (try smallest maximums first)
            # and secondarily by number of partitions (most partitions first, i.e.
            # create the 'least disordered' structures first)
            partition_indices = sorted(partition_indices, key=lambda x: (max(x[1]), -len(x[1])))

            # merge at most max_component sites,
            # e.g. merge at most 2 species into 1 disordered site
            partition_indices = [x for x in partition_indices if max(x[1]) <= max_components]

            partition_indices.pop(0)  # this is just the input structure

            sorted_partitions = [partitions_to_sort[x[0]] for x in partition_indices]

            return sorted_partitions

        collection = list(composition.keys())
        partitions = list(_partition(collection))
        partitions = _sort_partitions(partitions)

        return partitions

    @staticmethod
    def _get_disorder_mappings(composition, partitions):
        """
        Private method to obtain the mapping to create
        a disordered structure from a given partition.
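        For example (illustrative composition): for equal amounts of Fe and
        Mn, a partition that merges the two onto one sublattice yields a
        mapping like {Fe: {Fe: 0.5, Mn: 0.5}, Mn: {Fe: 0.5, Mn: 0.5}},
        which replace_species() then applies to every matching site.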
""" def _get_replacement_dict_from_partition(partition): d = {} # to be passed to Structure.replace_species() for sp_list in partition: if len(sp_list) > 1: total_occ = sum([composition[sp] for sp in sp_list]) merged_comp = {sp: composition[sp] / total_occ for sp in sp_list} for sp in sp_list: d[sp] = merged_comp return d disorder_mapping = [_get_replacement_dict_from_partition(p) for p in partitions] return disorder_mapping class GrainBoundaryTransformation(AbstractTransformation): """ A transformation that creates a gb from a bulk structure. """ def __init__( self, rotation_axis, rotation_angle, expand_times=4, vacuum_thickness=0.0, ab_shift=None, normal=False, ratio=True, plane=None, max_search=20, tol_coi=1.0e-8, rm_ratio=0.7, quick_gen=False, ): """ Args: rotation_axis (list): Rotation axis of GB in the form of a list of integer e.g.: [1, 1, 0] rotation_angle (float, in unit of degree): rotation angle used to generate GB. Make sure the angle is accurate enough. You can use the enum* functions in this class to extract the accurate angle. e.g.: The rotation angle of sigma 3 twist GB with the rotation axis [1, 1, 1] and GB plane (1, 1, 1) can be 60.000000000 degree. If you do not know the rotation angle, but know the sigma value, we have provide the function get_rotation_angle_from_sigma which is able to return all the rotation angles of sigma value you provided. expand_times (int): The multiple times used to expand one unit grain to larger grain. This is used to tune the grain length of GB to warrant that the two GBs in one cell do not interact with each other. Default set to 4. vacuum_thickness (float): The thickness of vacuum that you want to insert between two grains of the GB. Default to 0. ab_shift (list of float, in unit of a, b vectors of Gb): in plane shift of two grains normal (logic): determine if need to require the c axis of top grain (first transformation matrix) perperdicular to the surface or not. default to false. ratio (list of integers): lattice axial ratio. If True, will try to determine automatically from structure. For cubic system, ratio is not needed and can be set to None. For tetragonal system, ratio = [mu, mv], list of two integers, that is, mu/mv = c2/a2. If it is irrational, set it to None. For orthorhombic system, ratio = [mu, lam, mv], list of three integers, that is, mu:lam:mv = c2:b2:a2. If irrational for one axis, set it to None. e.g. mu:lam:mv = c2,None,a2, means b2 is irrational. For rhombohedral system, ratio = [mu, mv], list of two integers, that is, mu/mv is the ratio of (1+2*cos(alpha))/cos(alpha). If irrational, set it to None. For hexagonal system, ratio = [mu, mv], list of two integers, that is, mu/mv = c2/a2. If it is irrational, set it to none. plane (list): Grain boundary plane in the form of a list of integers e.g.: [1, 2, 3]. If none, we set it as twist GB. The plane will be perpendicular to the rotation axis. max_search (int): max search for the GB lattice vectors that give the smallest GB lattice. If normal is true, also max search the GB c vector that perpendicular to the plane. For complex GB, if you want to speed up, you can reduce this value. But too small of this value may lead to error. tol_coi (float): tolerance to find the coincidence sites. When making approximations to the ratio needed to generate the GB, you probably need to increase this tolerance to obtain the correct number of coincidence sites. 
                To check whether the number of coincidence sites is correct, you
                can compare the generated GB object's sigma with the enum* sigma
                values (what the user expected from the input).
            rm_ratio (float): the criteria to remove the atoms which are too close
                with each other. rm_ratio * bond_length of the bulk system is the
                criteria of bond length, below which the atom will be removed.
                Default to 0.7.
            quick_gen (bool): whether to quickly generate a supercell; if set to
                True, there is no need to find the smallest cell.

        Returns:
            Grain boundary structure (gb (Structure) object).
        """
        self.rotation_axis = rotation_axis
        self.rotation_angle = rotation_angle
        self.expand_times = expand_times
        self.vacuum_thickness = vacuum_thickness
        self.ab_shift = ab_shift or [0, 0]
        self.normal = normal
        self.ratio = ratio
        self.plane = plane
        self.max_search = max_search
        self.tol_coi = tol_coi
        self.rm_ratio = rm_ratio
        self.quick_gen = quick_gen

    def apply_transformation(self, structure):
        """
        Applies the transformation.

        Args:
            structure: Input Structure

        Returns:
            Grain boundary Structure.
        """
        gbg = GrainBoundaryGenerator(structure)
        gb_struct = gbg.gb_from_parameters(
            self.rotation_axis,
            self.rotation_angle,
            expand_times=self.expand_times,
            vacuum_thickness=self.vacuum_thickness,
            ab_shift=self.ab_shift,
            normal=self.normal,
            ratio=gbg.get_ratio() if self.ratio is True else self.ratio,
            plane=self.plane,
            max_search=self.max_search,
            tol_coi=self.tol_coi,
            rm_ratio=self.rm_ratio,
            quick_gen=self.quick_gen,
        )
        return gb_struct

    @property
    def inverse(self):
        """Returns: None"""
        return None

    @property
    def is_one_to_many(self):
        """Returns: False"""
        return False


class CubicSupercellTransformation(AbstractTransformation):
    """
    A transformation that aims to generate a nearly cubic supercell structure
    from a structure.

    The algorithm solves for a transformation matrix that makes the
    supercell cubic. The matrix must have integer entries, so entries are
    rounded (in such a way that forces the matrix to be nonsingular). From
    the supercell resulting from this transformation matrix, vector
    projections are used to determine the side length of the largest cube
    that can fit inside the supercell. The algorithm will iteratively
    increase the size of the supercell until the largest inscribed cube's
    side length is at least 'min_length' and the number of atoms in the
    supercell falls in the range ``min_atoms < n < max_atoms``.
    """

    def __init__(
        self,
        min_atoms: Optional[int] = None,
        max_atoms: Optional[int] = None,
        min_length: float = 15.0,
        force_diagonal: bool = False,
    ):
        """
        Args:
            min_atoms: Minimum number of atoms allowed in the supercell.
            max_atoms: Maximum number of atoms allowed in the supercell.
            min_length: Minimum length of the smallest supercell lattice vector.
            force_diagonal: If True, return a transformation with a diagonal
                transformation matrix.
        """
        self.min_atoms = min_atoms if min_atoms else -np.Inf
        self.max_atoms = max_atoms if max_atoms else np.Inf
        self.min_length = min_length
        self.force_diagonal = force_diagonal
        self.transformation_matrix = None

    def apply_transformation(self, structure: Structure) -> Structure:
        """
        The algorithm solves for a transformation matrix that makes the
        supercell cubic. The matrix must have integer entries, so entries are
        rounded (in such a way that forces the matrix to be nonsingular). From
        the supercell resulting from this transformation matrix, vector
        projections are used to determine the side length of the largest cube
        that can fit inside the supercell.
        The algorithm will iteratively increase the size of the supercell
        until the largest inscribed cube's side length is at least
        'min_length' and the number of atoms in the supercell falls in the
        range defined by min_atoms and max_atoms.

        Returns:
            supercell: Transformed supercell.
        """
        lat_vecs = structure.lattice.matrix

        # boolean for if a sufficiently large supercell has been created
        sc_not_found = True

        if self.force_diagonal:
            scale = self.min_length / np.array(structure.lattice.abc)
            self.transformation_matrix = np.diag(np.ceil(scale).astype(int))
            st = SupercellTransformation(self.transformation_matrix)
            return st.apply_transformation(structure)

        # target_sc_size is used as the desired cubic side length
        target_sc_size = self.min_length
        while sc_not_found:
            target_sc_lat_vecs = np.eye(3, 3) * target_sc_size
            self.transformation_matrix = target_sc_lat_vecs @ np.linalg.inv(lat_vecs)

            # round the entries of T and force T to be nonsingular
            self.transformation_matrix = _round_and_make_arr_singular(self.transformation_matrix)  # type: ignore

            proposed_sc_lat_vecs = self.transformation_matrix @ lat_vecs  # type: ignore

            # Find the shortest dimension length and direction
            a = proposed_sc_lat_vecs[0]
            b = proposed_sc_lat_vecs[1]
            c = proposed_sc_lat_vecs[2]

            length1_vec = c - _proj(c, a)  # a-c plane
            length2_vec = a - _proj(a, c)
            length3_vec = b - _proj(b, a)  # b-a plane
            length4_vec = a - _proj(a, b)
            length5_vec = b - _proj(b, c)  # b-c plane
            length6_vec = c - _proj(c, b)
            length_vecs = np.array(
                [
                    length1_vec,
                    length2_vec,
                    length3_vec,
                    length4_vec,
                    length5_vec,
                    length6_vec,
                ]
            )

            # Get number of atoms
            st = SupercellTransformation(self.transformation_matrix)
            superstructure = st.apply_transformation(structure)
            num_at = superstructure.num_sites

            # Check if constraints are satisfied
            if (
                np.min(np.linalg.norm(length_vecs, axis=1)) >= self.min_length
                and self.min_atoms <= num_at <= self.max_atoms
            ):
                return superstructure

            # Increase threshold until proposed supercell meets requirements
            target_sc_size += 0.1
            if num_at > self.max_atoms:
                raise AttributeError(
                    "While trying to solve for the supercell, the max "
                    "number of atoms was exceeded. Try lowering the "
                    "minimum supercell side length (min_length)."
                )

        raise AttributeError("Unable to find cubic supercell")

    @property
    def inverse(self):
        """
        Returns: None
        """
        return None

    @property
    def is_one_to_many(self):
        """
        Returns: False
        """
        return False


class AddAdsorbateTransformation(AbstractTransformation):
    """
    Create adsorbate structures.
    """

    def __init__(
        self,
        adsorbate,
        selective_dynamics=False,
        height=0.9,
        mi_vec=None,
        repeat=None,
        min_lw=5.0,
        translate=True,
        reorient=True,
        find_args=None,
    ):
        """
        Use AdsorbateSiteFinder to add an adsorbate to a slab.
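
        A minimal usage sketch (hypothetical molecule and slab, for
        illustration only)::

            # `co` is assumed to be a pymatgen Molecule for CO and
            # `slab` a pymatgen Slab
            trans = AddAdsorbateTransformation(co, min_lw=8.0)
            candidates = trans.apply_transformation(slab, return_ranked_list=5)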
Args: adsorbate (Molecule): molecule to add as adsorbate selective_dynamics (bool): flag for whether to assign non-surface sites as fixed for selective dynamics height (float): height criteria for selection of surface sites mi_vec : vector corresponding to the vector concurrent with the miller index, this enables use with slabs that have been reoriented, but the miller vector must be supplied manually repeat (3-tuple or list): repeat argument for supercell generation min_lw (float): minimum length and width of the slab, only used if repeat is None translate (bool): flag on whether to translate the molecule so that its CoM is at the origin prior to adding it to the surface reorient (bool): flag on whether or not to reorient adsorbate along the miller index find_args (dict): dictionary of arguments to be passed to the call to self.find_adsorption_sites, e.g. {"distance":2.0} """ self.adsorbate = adsorbate self.selective_dynamics = selective_dynamics self.height = height self.mi_vec = mi_vec self.repeat = repeat self.min_lw = min_lw self.translate = translate self.reorient = reorient self.find_args = find_args def apply_transformation(self, structure, return_ranked_list=False): """ Args: structure: Must be a Slab structure return_ranked_list: Whether or not multiple structures are returned. If return_ranked_list is a number, up to that number of structures is returned. Returns: Slab with adsorbate """ sitefinder = AdsorbateSiteFinder( structure, selective_dynamics=self.selective_dynamics, height=self.height, mi_vec=self.mi_vec, ) structures = sitefinder.generate_adsorption_structures( self.adsorbate, repeat=self.repeat, min_lw=self.min_lw, translate=self.translate, reorient=self.reorient, find_args=self.find_args, ) if not return_ranked_list: return structures[0] return [{"structure": structure} for structure in structures[:return_ranked_list]] @property def inverse(self): """Returns: None""" return None @property def is_one_to_many(self): """Returns: True""" return True def _round_and_make_arr_singular(arr: np.ndarray) -> np.ndarray: """ This function rounds all elements of a matrix to the nearest integer, unless the rounding scheme causes the matrix to be singular, in which case elements of zero rows or columns in the rounded matrix with the largest absolute valued magnitude in the unrounded matrix will be rounded to the next integer away from zero rather than to the nearest integer. The transformation is as follows. First, all entries in 'arr' will be rounded to the nearest integer to yield 'arr_rounded'. If 'arr_rounded' has any zero rows, then one element in each zero row of 'arr_rounded' corresponding to the element in 'arr' of that row with the largest absolute valued magnitude will be rounded to the next integer away from zero (see the '_round_away_from_zero(x)' function) rather than the nearest integer. This process is then repeated for zero columns. Also note that if 'arr' already has zero rows or columns, then this function will not change those rows/columns. Args: arr: Input matrix Returns: Transformed matrix. """ def round_away_from_zero(x): """ Returns 'x' rounded to the next integer away from 0. If 'x' is zero, then returns zero. E.g. -1.2 rounds to -2.0. 1.2 rounds to 2.0. 
""" abs_x = abs(x) return math.ceil(abs_x) * (abs_x / x) if x != 0 else 0 arr_rounded = np.around(arr) # Zero rows in 'arr_rounded' make the array singular, so force zero rows to # be nonzero if (~arr_rounded.any(axis=1)).any(): # Check for zero rows in T_rounded # indices of zero rows zero_row_idxs = np.where(~arr_rounded.any(axis=1))[0] for zero_row_idx in zero_row_idxs: # loop over zero rows zero_row = arr[zero_row_idx, :] # Find the element of the zero row with the largest absolute # magnitude in the original (non-rounded) array (i.e. 'arr') matches = np.absolute(zero_row) == np.amax(np.absolute(zero_row)) col_idx_to_fix = np.where(matches)[0] # Break ties for the largest absolute magnitude r_idx = np.random.randint(len(col_idx_to_fix)) col_idx_to_fix = col_idx_to_fix[r_idx] # Round the chosen element away from zero arr_rounded[zero_row_idx, col_idx_to_fix] = round_away_from_zero(arr[zero_row_idx, col_idx_to_fix]) # Repeat process for zero columns if (~arr_rounded.any(axis=0)).any(): # Check for zero columns in T_rounded zero_col_idxs = np.where(~arr_rounded.any(axis=0))[0] for zero_col_idx in zero_col_idxs: zero_col = arr[:, zero_col_idx] matches = np.absolute(zero_col) == np.amax(np.absolute(zero_col)) row_idx_to_fix = np.where(matches)[0] for i in row_idx_to_fix: arr_rounded[i, zero_col_idx] = round_away_from_zero(arr[i, zero_col_idx]) return arr_rounded.astype(int) class SubstituteSurfaceSiteTransformation(AbstractTransformation): """ Use AdsorptionSiteFinder to perform substitution-type doping on the surface and returns all possible configurations where one dopant is substituted per surface. Can substitute one surface or both. """ def __init__( self, atom, selective_dynamics=False, height=0.9, mi_vec=None, target_species=None, sub_both_sides=False, range_tol=1e-2, dist_from_surf=0, ): """ Args: atom (str): atom corresponding to substitutional dopant selective_dynamics (bool): flag for whether to assign non-surface sites as fixed for selective dynamics height (float): height criteria for selection of surface sites mi_vec : vector corresponding to the vector concurrent with the miller index, this enables use with slabs that have been reoriented, but the miller vector must be supplied manually target_species: List of specific species to substitute sub_both_sides (bool): If true, substitute an equivalent site on the other surface range_tol (float): Find viable substitution sites at a specific distance from the surface +- this tolerance dist_from_surf (float): Distance from the surface to find viable substitution sites, defaults to 0 to substitute at the surface """ self.atom = atom self.selective_dynamics = selective_dynamics self.height = height self.mi_vec = mi_vec self.target_species = target_species self.sub_both_sides = sub_both_sides self.range_tol = range_tol self.dist_from_surf = dist_from_surf def apply_transformation(self, structure, return_ranked_list=False): """ Args: structure: Must be a Slab structure return_ranked_list: Whether or not multiple structures are returned. If return_ranked_list is a number, up to that number of structures is returned. 
        Returns:
            Slab with sites substituted
        """
        sitefinder = AdsorbateSiteFinder(
            structure,
            selective_dynamics=self.selective_dynamics,
            height=self.height,
            mi_vec=self.mi_vec,
        )

        structures = sitefinder.generate_substitution_structures(
            self.atom,
            target_species=self.target_species,
            sub_both_sides=self.sub_both_sides,
            range_tol=self.range_tol,
            dist_from_surf=self.dist_from_surf,
        )

        if not return_ranked_list:
            return structures[0]
        return [{"structure": structure} for structure in structures[:return_ranked_list]]

    @property
    def inverse(self):
        """Returns: None"""
        return None

    @property
    def is_one_to_many(self):
        """Returns: True"""
        return True


def _proj(b, a):
    """
    Returns vector projection (np.ndarray) of vector b (np.ndarray)
    onto vector a (np.ndarray)
    """
    return (b.T @ (a / np.linalg.norm(a))) * (a / np.linalg.norm(a))


class SQSTransformation(AbstractTransformation):
    """
    A transformation that creates a special quasirandom structure (SQS)
    from a structure with partial occupancies.
    """

    def __init__(
        self,
        scaling,
        cluster_size_and_shell=None,
        search_time=60,
        directory=None,
        instances=None,
        temperature=1,
        wr=1,
        wn=1,
        wd=0.5,
        tol=1e-3,
        best_only=True,
        remove_duplicate_structures=True,
        reduction_algo="LLL",
    ):
        """
        Args:
            scaling (int or list): Scaling factor to determine supercell. Two options are possible:
                a. (preferred) Scales number of atoms, e.g., for a structure with 8 atoms,
                   scaling=4 would lead to a 32 atom supercell
                b. A sequence of three scaling factors, e.g., [2, 1, 1], which
                   specifies that the supercell should have dimensions 2a x b x c
            cluster_size_and_shell (Optional[Dict[int, int]]): Dictionary of cluster interactions with entries in
                the form number of atoms: nearest neighbor shell

        Keyword Args:
            search_time (float): Time spent looking for the ideal SQS in minutes (default: 60)
            directory (str): Directory to run mcsqs calculation and store files (default: None
                runs calculations in a temp directory)
            instances (int): Specifies the number of parallel instances of mcsqs to run
                (default: number of cpu cores detected by Python)
            temperature (int or float): Monte Carlo temperature (default: 1), "T" in atat code
            wr (int or float): Weight assigned to range of perfect correlation match in objective
                function (default = 1)
            wn (int or float): Multiplicative decrease in weight per additional point in cluster (default: 1)
            wd (int or float): Exponent of decay in weight as function of cluster diameter (default: 0.5)
            tol (int or float): Tolerance for matching correlations (default: 1e-3)
            best_only (bool): only return structures with lowest objective function
            remove_duplicate_structures (bool): only return unique structures
            reduction_algo (str): The lattice reduction algorithm to use.
                Currently supported options are "niggli" or "LLL".
                "False" does not reduce structure.
        """
        self.scaling = scaling
        self.search_time = search_time
        self.cluster_size_and_shell = cluster_size_and_shell
        self.directory = directory
        self.instances = instances
        self.temperature = temperature
        self.wr = wr
        self.wn = wn
        self.wd = wd
        self.tol = tol
        self.best_only = best_only
        self.remove_duplicate_structures = remove_duplicate_structures
        self.reduction_algo = reduction_algo

    @staticmethod
    def _get_max_neighbor_distance(struc, shell):
        """
        Calculate maximum nearest neighbor distance

        Args:
            struc: pymatgen Structure object
            shell: nearest neighbor shell, such that shell=1 is the first nearest
                neighbor, etc.
        Returns:
            maximum nearest neighbor distance, in angstroms
        """
        mdnn = MinimumDistanceNN()

        distances = []
        for site_num, site in enumerate(struc):
            shell_info = mdnn.get_nn_shell_info(struc, site_num, shell)
            for entry in shell_info:
                image = entry["image"]
                distance = site.distance(struc[entry["site_index"]], jimage=image)
                distances.append(distance)

        return max(distances)

    @staticmethod
    def _get_disordered_substructure(struc_disordered):
        """
        Converts disordered structure into a substructure consisting of only disordered sites

        Args:
            struc_disordered: pymatgen disordered Structure object

        Returns:
            pymatgen Structure object representing a substructure of disordered sites
        """
        disordered_substructure = struc_disordered.copy()

        idx_to_remove = []
        for idx, site in enumerate(disordered_substructure.sites):
            if site.is_ordered:
                idx_to_remove.append(idx)
        disordered_substructure.remove_sites(idx_to_remove)

        return disordered_substructure

    @staticmethod
    def _sqs_cluster_estimate(struc_disordered, cluster_size_and_shell: Optional[Dict[int, int]] = None):
        """
        Set up an ATAT cluster.out file for a given structure and set of constraints

        Args:
            struc_disordered: disordered pymatgen Structure object
            cluster_size_and_shell: dict of integers {cluster: shell}

        Returns:
            dict of {cluster size: distance in angstroms} for mcsqs calculation
        """
        cluster_size_and_shell = cluster_size_and_shell or {2: 3, 3: 2, 4: 1}

        disordered_substructure = SQSTransformation._get_disordered_substructure(struc_disordered)

        clusters = {}
        for cluster_size, shell in cluster_size_and_shell.items():
            max_distance = SQSTransformation._get_max_neighbor_distance(disordered_substructure, shell)
            clusters[cluster_size] = max_distance + 0.01  # add small tolerance

        return clusters

    def apply_transformation(self, structure, return_ranked_list=False):
        """
        Applies SQS transformation

        Args:
            structure (pymatgen Structure): pymatgen Structure with partial occupancies
            return_ranked_list (int or bool): number of structures to return

        Returns:
            pymatgen Structure which is an SQS of the input structure
        """
        if return_ranked_list and self.instances is None:
            raise ValueError("mcsqs has no instances, so cannot return a ranked list")
        if (
            isinstance(return_ranked_list, int)
            and isinstance(self.instances, int)
            and return_ranked_list > self.instances
        ):
            raise ValueError("return_ranked_list cannot be greater than the number of instances")

        clusters = self._sqs_cluster_estimate(structure, self.cluster_size_and_shell)

        # useful for debugging and understanding
        self._last_used_clusters = clusters

        sqs = run_mcsqs(
            structure=structure,
            clusters=clusters,
            scaling=self.scaling,
            search_time=self.search_time,
            directory=self.directory,
            instances=self.instances,
            temperature=self.temperature,
            wr=self.wr,
            wn=self.wn,
            wd=self.wd,
            tol=self.tol,
        )

        return self._get_unique_bestsqs_strucs(
            sqs,
            best_only=self.best_only,
            return_ranked_list=return_ranked_list,
            remove_duplicate_structures=self.remove_duplicate_structures,
            reduction_algo=self.reduction_algo,
        )

    @staticmethod
    def _get_unique_bestsqs_strucs(sqs, best_only, return_ranked_list, remove_duplicate_structures, reduction_algo):
        """
        Gets unique sqs structures with lowest objective function. Requires an
        mcsqs output that has been run in parallel, otherwise returns Sqs.bestsqs

        Args:
            sqs (Sqs): Sqs class object.
            best_only (bool): only return structures with lowest objective function.
            return_ranked_list (int or bool): Number of structures to return.
            remove_duplicate_structures (bool): only return unique structures.
            reduction_algo (str): The lattice reduction algorithm to use.
Currently supported options are "niggli" or "LLL". "False" does not reduce structure. Returns: list of dicts of the form {'structure': Structure, 'objective_function': ...}, unless run in serial (returns a single structure Sqs.bestsqs) """ if not return_ranked_list: return_struc = sqs.bestsqs # reduce structure if reduction_algo: return_struc = return_struc.get_reduced_structure(reduction_algo=reduction_algo) # return just the structure return return_struc strucs = [] for d in sqs.allsqs: # filter for best structures only if enabled, else use full sqs.all_sqs list if (not best_only) or (best_only and d["objective_function"] == sqs.objective_function): struc = d["structure"] # add temporary objective_function attribute to access objective_function after grouping struc.objective_function = d["objective_function"] strucs.append(struc) if remove_duplicate_structures: matcher = StructureMatcher() # sort by unique structures ... can take a while for a long list of strucs unique_strucs_grouped = matcher.group_structures(strucs) # get unique structures only strucs = [group[0] for group in unique_strucs_grouped] # sort structures by objective function strucs.sort(key=lambda x: x.objective_function if isinstance(x.objective_function, float) else -np.inf) to_return = [{"structure": struc, "objective_function": struc.objective_function} for struc in strucs] for d in to_return: # delete temporary objective_function attribute del d["structure"].objective_function # reduce structure if reduction_algo: d["structure"] = d["structure"].get_reduced_structure(reduction_algo=reduction_algo) if isinstance(return_ranked_list, int): return to_return[:return_ranked_list] return to_return @property def inverse(self): """Returns: None""" return None @property def is_one_to_many(self): """Returns: True""" return True class MonteCarloRattleTransformation(AbstractTransformation): r""" Uses a Monte Carlo rattle procedure to randomly perturb the sites in a structure. This class requires the hiPhive package to be installed. Rattling atom `i` is carried out as a Monte Carlo move that is accepted with a probability determined from the minimum interatomic distance :math:`d_{ij}`. If :math:`\\min(d_{ij})` is smaller than :math:`d_{min}` the move is only accepted with a low probability. This process is repeated for each atom a number of times meaning the magnitude of the final displacements is not *directly* connected to `rattle_std`. """ @requires(hiphive, "hiphive is required for MonteCarloRattleTransformation") def __init__(self, rattle_std: float, min_distance: float, seed: Optional[int] = None, **kwargs): """ Args: rattle_std: Rattle amplitude (standard deviation in normal distribution). Note: this value is not *directly* connected to the final average displacement for the structures min_distance: Interatomic distance used for computing the probability for each rattle move. seed: Seed for setting up NumPy random state from which random numbers are generated. If ``None``, a random seed will be generated (default). This option allows the output of this transformation to be deterministic. **kwargs: Additional keyword arguments to be passed to the hiPhive mc_rattle function. 
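
        A minimal usage sketch (illustrative parameter values; requires the
        hiPhive package)::

            # `structure` is assumed to be a pymatgen Structure
            trans = MonteCarloRattleTransformation(rattle_std=0.01, min_distance=2.0, seed=42)
            rattled = trans.apply_transformation(structure)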
""" self.rattle_std = rattle_std self.min_distance = min_distance self.seed = seed if not seed: # if seed is None, use a random RandomState seed but make sure # we store that the original seed was None seed = np.random.randint(1, 1000000000) self.random_state = np.random.RandomState(seed) # pylint: disable=E1101 self.kwargs = kwargs def apply_transformation(self, structure: Structure) -> Structure: """ Apply the transformation. Args: structure: Input Structure Returns: Structure with sites perturbed. """ from hiphive.structure_generation.rattle import mc_rattle # type: ignore atoms = AseAtomsAdaptor.get_atoms(structure) seed = self.random_state.randint(1, 1000000000) displacements = mc_rattle(atoms, self.rattle_std, self.min_distance, seed=seed, **self.kwargs) transformed_structure = Structure( structure.lattice, structure.species, structure.cart_coords + displacements, coords_are_cartesian=True, ) return transformed_structure def __str__(self): return "{} : rattle_std = {}".format(__name__, self.rattle_std) def __repr__(self): return self.__str__() @property def inverse(self): """ Returns: None """ return None @property def is_one_to_many(self): """ Returns: False """ return False
richardtran415/pymatgen
pymatgen/transformations/advanced_transformations.py
Python
mit
88,505
0.001955
from buck import format_watchman_query_params, glob_internal, LazyBuildEnvPartial from buck import subdir_glob, BuildFileContext from pathlib import Path, PurePosixPath, PureWindowsPath import os import shutil import tempfile import unittest class FakePathMixin(object): def glob(self, pattern): return self.glob_results.get(pattern) def is_file(self): return True class FakePosixPath(FakePathMixin, PurePosixPath): pass class FakeWindowsPath(FakePathMixin, PureWindowsPath): pass def fake_path(fake_path_class, path, glob_results={}): # Path does magic in __new__ with its args; it's hard to add more without # changing that class. So we use a wrapper function to diddle with # FakePath's members. result = fake_path_class(path) result.glob_results = {} for pattern, paths in glob_results.iteritems(): result.glob_results[pattern] = [result / fake_path_class(p) for p in paths] return result class TestBuckPlatformBase(object): def test_glob_includes_simple(self): search_base = self.fake_path( 'foo', glob_results={'*.java': ['A.java', 'B.java']}) self.assertGlobMatches( ['A.java', 'B.java'], glob_internal( includes=['*.java'], excludes=[], include_dotfiles=False, search_base=search_base)) def test_glob_includes_sort(self): search_base = self.fake_path( 'foo', glob_results={'*.java': ['A.java', 'E.java', 'D.java', 'C.java', 'B.java']}) self.assertGlobMatches( ['A.java', 'B.java', 'C.java', 'D.java', 'E.java'], glob_internal( includes=['*.java'], excludes=[], include_dotfiles=False, search_base=search_base)) def test_glob_includes_multi(self): search_base = self.fake_path( 'foo', glob_results={ 'bar/*.java': ['bar/A.java', 'bar/B.java'], 'baz/*.java': ['baz/C.java', 'baz/D.java'], }) self.assertGlobMatches( ['bar/A.java', 'bar/B.java', 'baz/C.java', 'baz/D.java'], glob_internal( includes=['bar/*.java', 'baz/*.java'], excludes=[], include_dotfiles=False, search_base=search_base)) def test_glob_excludes_double_star(self): search_base = self.fake_path( 'foo', glob_results={ '**/*.java': ['A.java', 'B.java', 'Test.java'], }) self.assertGlobMatches( ['A.java', 'B.java'], glob_internal( includes=['**/*.java'], excludes=['**/*Test.java'], include_dotfiles=False, search_base=search_base)) def test_glob_excludes_multi(self): search_base = self.fake_path( 'foo', glob_results={ 'bar/*.java': ['bar/A.java', 'bar/B.java'], 'baz/*.java': ['baz/C.java', 'baz/D.java'], }) self.assertGlobMatches( ['bar/B.java', 'baz/D.java'], glob_internal( includes=['bar/*.java', 'baz/*.java'], excludes=['*/[AC].java'], include_dotfiles=False, search_base=search_base)) def test_subdir_glob(self): build_env = BuildFileContext(None, None, None, None, None, None, None, None) search_base = self.fake_path( 'foo', glob_results={ 'lib/bar/*.h': ['lib/bar/A.h', 'lib/bar/B.h'], 'lib/baz/*.h': ['lib/baz/C.h', 'lib/baz/D.h'], }) self.assertGlobMatches( { 'bar/B.h': 'lib/bar/B.h', 'bar/A.h': 'lib/bar/A.h', 'baz/D.h': 'lib/baz/D.h', 'baz/C.h': 'lib/baz/C.h', }, subdir_glob([ ('lib', 'bar/*.h'), ('lib', 'baz/*.h')], build_env=build_env, search_base=search_base)) def test_subdir_glob_with_prefix(self): build_env = BuildFileContext(None, None, None, None, None, None, None, None) search_base = self.fake_path( 'foo', glob_results={ 'lib/bar/*.h': ['lib/bar/A.h', 'lib/bar/B.h'], }) self.assertGlobMatches( { 'Prefix/bar/B.h': 'lib/bar/B.h', 'Prefix/bar/A.h': 'lib/bar/A.h', }, subdir_glob([('lib', 'bar/*.h')], prefix='Prefix', build_env=build_env, search_base=search_base)) def test_glob_excludes_relative(self): search_base = self.fake_path( 'foo', 
glob_results={ '**/*.java': ['foo/A.java', 'foo/bar/B.java', 'bar/C.java'], }) self.assertGlobMatches( ['foo/A.java', 'foo/bar/B.java'], glob_internal( includes=['**/*.java'], excludes=['bar/*.java'], include_dotfiles=False, search_base=search_base)) def test_glob_includes_skips_dotfiles(self): search_base = self.fake_path( 'foo', glob_results={'*.java': ['A.java', '.B.java']}) self.assertGlobMatches( ['A.java'], glob_internal( includes=['*.java'], excludes=[], include_dotfiles=False, search_base=search_base)) def test_glob_includes_does_not_skip_dotfiles_if_include_dotfiles(self): search_base = self.fake_path( 'foo', glob_results={'*.java': ['A.java', '.B.java']}) self.assertGlobMatches( ['.B.java', 'A.java'], glob_internal( includes=['*.java'], excludes=[], include_dotfiles=True, search_base=search_base)) def test_lazy_build_env_partial(self): def cobol_binary( name, deps=[], build_env=None): return (name, deps, build_env) testLazy = LazyBuildEnvPartial(cobol_binary) testLazy.build_env = {} self.assertEqual( ('HAL', [1, 2, 3], {}), testLazy.invoke(name='HAL', deps=[1, 2, 3])) testLazy.build_env = {'abc': 789} self.assertEqual( ('HAL', [1, 2, 3], {'abc': 789}), testLazy.invoke(name='HAL', deps=[1, 2, 3])) def test_explicit_exclude_with_file_separator_excludes(self): search_base = self.fake_path( 'foo', glob_results={'java/**/*.java': ['java/Include.java', 'java/Exclude.java']}) self.assertGlobMatches( ['java/Include.java'], glob_internal( includes=['java/**/*.java'], excludes=['java/Exclude.java'], include_dotfiles=False, search_base=search_base)) class TestBuckPosix(TestBuckPlatformBase, unittest.TestCase): @staticmethod def fake_path(*args, **kwargs): return fake_path(FakePosixPath, *args, **kwargs) def assertGlobMatches(self, expected, actual): self.assertEqual(expected, actual) class TestBuckWindows(TestBuckPlatformBase, unittest.TestCase): @staticmethod def fake_path(*args, **kwargs): return fake_path(FakeWindowsPath, *args, **kwargs) def assertGlobMatches(self, expected, actual): # Fix the path separator to make test writing easier fixed_expected = None if isinstance(expected, list): fixed_expected = [] for path in expected: fixed_expected.append(path.replace('/', '\\')) else: fixed_expected = {} for key, value in expected.items(): fixed_expected.update({key.replace('/', '\\'): value.replace('/', '\\')}) self.assertEqual(fixed_expected, actual) class TestBuck(unittest.TestCase): def test_glob_double_star_integration(self): d = tempfile.mkdtemp() try: subdir = os.path.join(d, 'b', 'a', 'c', 'a') os.makedirs(subdir) f = open(os.path.join(subdir, 'A.java'), 'w') f.close() f = open(os.path.join(subdir, 'B.java'), 'w') f.close() f = open(os.path.join(subdir, 'Test.java'), 'w') f.close() f = open(os.path.join(subdir, '.tmp.java'), 'w') f.close() os.makedirs(os.path.join(subdir, 'NotAFile.java')) self.assertEquals( [ os.path.join('b', 'a', 'c', 'a', 'A.java'), os.path.join('b', 'a', 'c', 'a', 'B.java'), ], glob_internal( includes=['b/a/**/*.java'], excludes=['**/*Test.java'], include_dotfiles=False, search_base=Path(d))) finally: shutil.rmtree(d) def test_case_preserved(self): d = tempfile.mkdtemp() try: subdir = os.path.join(d, 'java') os.makedirs(subdir) open(os.path.join(subdir, 'Main.java'), 'w').close() self.assertEquals( [ os.path.join('java', 'Main.java'), ], glob_internal( includes=['java/Main.java'], excludes=[], include_dotfiles=False, search_base=Path(d))) finally: shutil.rmtree(d) def test_watchman_query_params_includes(self): query_params = format_watchman_query_params( 
['**/*.java'], [], False, '/path/to/glob') self.assertEquals( { 'relative_root': '/path/to/glob', 'path': [''], 'fields': ['name'], 'expression': [ 'allof', 'exists', ['anyof', ['type', 'f'], ['type', 'l']], ['anyof', ['match', '**/*.java', 'wholename', {}]], ] }, query_params) def test_watchman_query_params_includes_and_excludes(self): query_params = format_watchman_query_params( ['**/*.java'], ['**/*Test.java'], False, '/path/to/glob') self.assertEquals( { 'relative_root': '/path/to/glob', 'path': [''], 'fields': ['name'], 'expression': [ 'allof', 'exists', ['anyof', ['type', 'f'], ['type', 'l']], ['anyof', ['match', '**/*.java', 'wholename', {}]], ['not', ['anyof', ['match', '**/*Test.java', 'wholename', {}]]], ] }, query_params) if __name__ == '__main__': unittest.main()
Learn-Android-app/buck
src/com/facebook/buck/json/buck_test.py
Python
apache-2.0
11,264
0.00071
""" Subspace is a modified implementation of the Kademlia protocol for `Twisted <http://twistedmatrix.com>`_. """ version_info = (0, 2) version = '.'.join(map(str, version_info))
cpacia/Subspace
subspace/__init__.py
Python
mit
179
0.005587
# Copyright 2016 Bridgewater Associates # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ .. module: security_monkey.watchers.rds.rds_security_group :platform: Unix .. version:: $$VERSION$$ .. moduleauthor:: Bridgewater OSS <opensource@bwater.com> """ from security_monkey.decorators import record_exception, iter_account_region from security_monkey.watcher import Watcher from security_monkey.watcher import ChangeItem from security_monkey import app class RDSSecurityGroup(Watcher): index = 'rdssecuritygroup' i_am_singular = 'RDS Security Group' i_am_plural = 'RDS Security Groups' def __init__(self, accounts=None, debug=False): super(RDSSecurityGroup, self).__init__(accounts=accounts, debug=debug) @record_exception() def get_all_dbsecurity_groups(self, **kwargs): from security_monkey.common.sts_connect import connect sgs = [] rds = connect(kwargs['account_name'], 'boto3.rds.client', region=kwargs['region'], assumed_role=kwargs['assumed_role']) marker = None while True: if marker: response = self.wrap_aws_rate_limited_call( rds.describe_db_security_groups, Marker=marker) else: response = self.wrap_aws_rate_limited_call( rds.describe_db_security_groups) sgs.extend(response.get('DBSecurityGroups', [])) if response.get('Marker'): marker = response.get('Marker') else: break return sgs def slurp(self): """ :returns: item_list - list of RDS Security Groups. 
        :returns: exception_map - A dict where the keys are a tuple containing the
            location of the exception and the value is the actual exception
        """
        self.prep_for_slurp()

        @iter_account_region(index=self.index, accounts=self.accounts, service_name='rds')
        def slurp_items(**kwargs):
            item_list = []
            exception_map = {}
            kwargs['exception_map'] = exception_map
            app.logger.debug("Checking {}/{}/{}".format(self.index, kwargs['account_name'], kwargs['region']))
            sgs = self.get_all_dbsecurity_groups(**kwargs)

            if sgs:
                app.logger.debug("Found {} {}".format(
                    len(sgs), self.i_am_plural))

                for sg in sgs:
                    name = sg.get('DBSecurityGroupName')

                    if self.check_ignore_list(name):
                        continue

                    vpc_id = None
                    # boto3 returns plain dicts, so check for the key rather
                    # than using hasattr (which is always False for dicts)
                    if 'VpcId' in sg:
                        vpc_id = sg.get('VpcId')
                        name = "{} (in {})".format(name, vpc_id)

                    item_config = {
                        "name": name,
                        "description": sg.get('DBSecurityGroupDescription'),
                        "owner_id": sg.get('OwnerId'),
                        "region": kwargs['region'],
                        "ec2_groups": [],
                        "ip_ranges": [],
                        "vpc_id": vpc_id
                    }

                    for ipr in sg.get('IPRanges', []):
                        ipr_config = {
                            "cidr_ip": ipr.get('CIDRIP'),
                            "status": ipr.get('Status'),
                        }
                        item_config["ip_ranges"].append(ipr_config)
                    item_config["ip_ranges"] = sorted(item_config["ip_ranges"])

                    for ec2_sg in sg.get('EC2SecurityGroups', []):
                        ec2sg_config = {
                            "name": ec2_sg.get('EC2SecurityGroupName'),
                            "owner_id": ec2_sg.get('EC2SecurityGroupOwnerId'),
                            "Status": ec2_sg.get('Status'),
                        }
                        item_config["ec2_groups"].append(ec2sg_config)
                    item_config["ec2_groups"] = sorted(
                        item_config["ec2_groups"])

                    arn = sg.get('DBSecurityGroupArn')
                    item_config['arn'] = arn

                    item = RDSSecurityGroupItem(region=kwargs['region'],
                                                account=kwargs['account_name'],
                                                name=name, arn=arn,
                                                config=item_config,
                                                source_watcher=self)
                    item_list.append(item)

            return item_list, exception_map
        return slurp_items()


class RDSSecurityGroupItem(ChangeItem):
    def __init__(self, region=None, account=None, name=None, arn=None,
                 config=None, source_watcher=None):
        super(RDSSecurityGroupItem, self).__init__(
            index=RDSSecurityGroup.index,
            region=region,
            account=account,
            name=name,
            arn=arn,
            new_config=config if config else {},
            source_watcher=source_watcher)
markofu/security_monkey
security_monkey/watchers/rds/rds_security_group.py
Python
apache-2.0
5,590
0.001073
#!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = 'Yue-Wen FANG'
__maintainer__ = "Yue-Wen FANG"
__email__ = 'fyuewen@gmail.com'
__license__ = 'Apache License 2.0'
__creation_date__ = 'Dec. 28, 2018'

"""
9-3. Users: Make a class called User. Create two attributes called first_name
and last_name, and then create several other attributes that are typically
stored in a user profile. Make a method called describe_user() that prints a
summary of the user's information. Make another method called greet_user()
that prints a personalized greeting to the user. Create several instances
representing different users, and call both methods for each user.
"""

class User:
    """
    a class for User
    """
    def __init__(self, first_name, last_name, gender, age, email='f@cn'):
        self.name = first_name + ' ' + last_name
        self.gender = gender
        self.age = age
        self.email = email  # if no email is specified, the default will be used

    def describe_user(self):
        print('The profile of ' + self.name + ":")
        print('Gender: ', self.gender)
        print('Age: ', self.age)
        print('Email: ', self.email)

    def greet_user(self):
        print('Hello, ' + self.name + '!')

Tiantian_Li = User('Tiantian', 'Li', 'Male', '20', email='Li@cn')
Tiantian_Li.describe_user()
Tiantian_Li.greet_user()
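
# A short usage sketch (the people below are invented): the exercise asks for
# several instances, each calling both methods.
anna = User('Anna', 'Smith', 'Female', '34', email='anna@example.com')
anna.describe_user()
anna.greet_user()

bob = User('Bob', 'Jones', 'Male', '41')  # falls back to the default email
bob.describe_user()
bob.greet_user()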
yw-fang/readingnotes
machine-learning/Matthes-crash-course/chapt09/scripts/user_03.py
Python
apache-2.0
1,263
0.009516
# -*- coding: utf-8 -*-
'''
Manage VPCs
=================

.. versionadded:: 2015.8.0

Create and destroy VPCs. Be aware that this interacts with Amazon's services,
and so may incur charges.

This module uses ``boto``, which can be installed via package, or pip.

This module accepts explicit vpc credentials but can also utilize
IAM roles assigned to the instance through Instance Profiles. Dynamic
credentials are then automatically obtained from AWS API and no further
configuration is necessary. More information available `here
<http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/iam-roles-for-amazon-ec2.html>`_.

If IAM roles are not used you need to specify them either in a pillar file or
in the minion's config file:

.. code-block:: yaml

    vpc.keyid: GKTADJGHEIQSXMKKRBJ08H
    vpc.key: askdjghsdfjkghWupUjasdflkdfklgjsdfjajkghs

It's also possible to specify ``key``, ``keyid`` and ``region`` via a profile,
either passed in as a dict, or as a string to pull from pillars or minion
config:

.. code-block:: yaml

    myprofile:
        keyid: GKTADJGHEIQSXMKKRBJ08H
        key: askdjghsdfjkghWupUjasdflkdfklgjsdfjajkghs
        region: us-east-1

.. code-block:: yaml

    Ensure VPC exists:
        boto_vpc.present:
            - name: myvpc
            - cidr_block: 10.10.11.0/24
            - dns_hostnames: True
            - region: us-east-1
            - keyid: GKTADJGHEIQSXMKKRBJ08H
            - key: askdjghsdfjkghWupUjasdflkdfklgjsdfjajkghs

    Ensure subnet exists:
        boto_vpc.subnet_present:
            - name: mysubnet
            - vpc_id: vpc-123456
            - cidr_block: 10.0.0.0/16
            - region: us-east-1
            - keyid: GKTADJGHEIQSXMKKRBJ08H
            - key: askdjghsdfjkghWupUjasdflkdfklgjsdfjajkghs

    Ensure internet gateway exists:
        boto_vpc.internet_gateway_present:
            - name: myigw
            - vpc_name: myvpc
            - region: us-east-1
            - keyid: GKTADJGHEIQSXMKKRBJ08H
            - key: askdjghsdfjkghWupUjasdflkdfklgjsdfjajkghs

    Ensure route table exists:
        boto_vpc.route_table_present:
            - name: my_route_table
            - vpc_id: vpc-123456
            - routes:
              - destination_cidr_block: 0.0.0.0/0
                instance_id: i-123456
                interface_id: eni-123456
            - subnet_names:
              - subnet1
              - subnet2
            - region: us-east-1
            - keyid: GKTADJGHEIQSXMKKRBJ08H
            - key: askdjghsdfjkghWupUjasdflkdfklgjsdfjajkghs
'''

# Import Python Libs
from __future__ import absolute_import
import logging

# Import Salt Libs
import salt.utils.dictupdate as dictupdate

log = logging.getLogger(__name__)


def __virtual__():
    '''
    Only load if boto is available.
    '''
    return 'boto_vpc' if 'boto_vpc.exists' in __salt__ else False


def present(name, cidr_block, instance_tenancy=None, dns_support=None,
            dns_hostnames=None, tags=None, region=None, key=None, keyid=None,
            profile=None):
    '''
    Ensure VPC exists.

    name
        Name of the VPC.

    cidr_block
        The range of IPs in CIDR format, for example: 10.0.0.0/24. Block size
        must be between /16 and /28 netmask.

    instance_tenancy
        Instances launched in this VPC will be single-tenant or dedicated
        hardware.

    dns_support
        Indicates whether the DNS resolution is supported for the VPC.

    dns_hostnames
        Indicates whether the instances launched in the VPC get DNS hostnames.

    tags
        A list of tags.

    region
        Region to connect to.

    key
        Secret key to be used.

    keyid
        Access key to be used.

    profile
        A dict with region, key and keyid, or a pillar key (string) that
        contains a dict with region, key and keyid.
    '''
    ret = {'name': name,
           'result': True,
           'comment': '',
           'changes': {}
           }

    r = __salt__['boto_vpc.exists'](name=name, tags=tags, region=region,
                                    key=key, keyid=keyid, profile=profile)

    if 'error' in r:
        ret['result'] = False
        ret['comment'] = 'Failed to create VPC: {0}.'.format(r['error']['message'])
        return ret

    if not r.get('exists'):
        if __opts__['test']:
            ret['comment'] = 'VPC {0} is set to be created.'.format(name)
            ret['result'] = None
            return ret
        r = __salt__['boto_vpc.create'](cidr_block, instance_tenancy, name,
                                        dns_support, dns_hostnames, tags,
                                        region, key, keyid, profile)
        if not r.get('created'):
            ret['result'] = False
            ret['comment'] = 'Failed to create VPC: {0}.'.format(r['error']['message'])
            return ret
        _describe = __salt__['boto_vpc.describe'](r['id'], region=region,
                                                  key=key, keyid=keyid,
                                                  profile=profile)
        ret['changes']['old'] = {'vpc': None}
        ret['changes']['new'] = _describe
        ret['comment'] = 'VPC {0} created.'.format(name)
        return ret
    ret['comment'] = 'VPC present.'
    return ret


def absent(name, tags=None, region=None, key=None, keyid=None, profile=None):
    '''
    Ensure VPC with passed properties is absent.

    name
        Name of the VPC.

    tags
        A list of tags. All tags must match.

    region
        Region to connect to.

    key
        Secret key to be used.

    keyid
        Access key to be used.

    profile
        A dict with region, key and keyid, or a pillar key (string) that
        contains a dict with region, key and keyid.
    '''
    ret = {'name': name,
           'result': True,
           'comment': '',
           'changes': {}
           }

    r = __salt__['boto_vpc.get_id'](name=name, tags=tags, region=region,
                                    key=key, keyid=keyid, profile=profile)
    if 'error' in r:
        ret['result'] = False
        ret['comment'] = 'Failed to delete VPC: {0}.'.format(r['error']['message'])
        return ret

    _id = r.get('id')
    if not _id:
        ret['comment'] = '{0} VPC does not exist.'.format(name)
        return ret

    if __opts__['test']:
        ret['comment'] = 'VPC {0} is set to be removed.'.format(name)
        ret['result'] = None
        return ret
    r = __salt__['boto_vpc.delete'](name=name, tags=tags, region=region,
                                    key=key, keyid=keyid, profile=profile)
    if not r['deleted']:
        ret['result'] = False
        ret['comment'] = 'Failed to delete VPC: {0}.'.format(r['error']['message'])
        return ret
    ret['changes']['old'] = {'vpc': _id}
    ret['changes']['new'] = {'vpc': None}
    ret['comment'] = 'VPC {0} deleted.'.format(name)
    return ret


def subnet_present(name, cidr_block, vpc_name=None, vpc_id=None,
                   availability_zone=None, tags=None, region=None, key=None,
                   keyid=None, profile=None):
    '''
    Ensure a subnet exists.

    name
        Name of the subnet.

    cidr_block
        The range of IPs for the subnet, in CIDR format. For example:
        10.0.0.0/24. Block size must be between /16 and /28 netmask.

    vpc_name
        Name of the VPC in which the subnet should be placed. Either vpc_name
        or vpc_id must be provided.

    vpc_id
        Id of the VPC in which the subnet should be placed. Either vpc_name
        or vpc_id must be provided.

    availability_zone
        AZ in which the subnet should be placed.

    tags
        A list of tags.

    region
        Region to connect to.

    key
        Secret key to be used.

    keyid
        Access key to be used.

    profile
        A dict with region, key and keyid, or a pillar key (string) that
        contains a dict with region, key and keyid.
''' ret = {'name': name, 'result': True, 'comment': '', 'changes': {} } r = __salt__['boto_vpc.subnet_exists'](subnet_name=name, tags=tags, region=region, key=key, keyid=keyid, profile=profile) if 'error' in r: ret['result'] = False ret['comment'] = 'Failed to create subnet: {0}.'.format(r['error']['message']) return ret if not r.get('exists'): if __opts__['test']: ret['comment'] = 'Subnet {0} is set to be created.'.format(name) ret['result'] = None return ret r = __salt__['boto_vpc.create_subnet'](subnet_name=name, cidr_block=cidr_block, availability_zone=availability_zone, vpc_name=vpc_name, vpc_id=vpc_id, tags=tags, region=region, key=key, keyid=keyid, profile=profile) if not r.get('created'): ret['result'] = False ret['comment'] = 'Failed to create subnet: {0}'.format(r['error']['message']) return ret _describe = __salt__['boto_vpc.describe_subnet'](r['id'], region=region, key=key, keyid=keyid, profile=profile) ret['changes']['old'] = {'subnet': None} ret['changes']['new'] = _describe ret['comment'] = 'Subnet {0} created.'.format(name) return ret ret['comment'] = 'Subnet present.' return ret def subnet_absent(name=None, subnet_id=None, region=None, key=None, keyid=None, profile=None): ''' Ensure subnet with passed properties is absent. name Name of the subnet. region Region to connect to. key Secret key to be used. keyid Access key to be used. profile A dict with region, key and keyid, or a pillar key (string) that contains a dict with region, key and keyid. ''' ret = {'name': name, 'result': True, 'comment': '', 'changes': {} } r = __salt__['boto_vpc.get_resource_id']('subnet', name=name, region=region, key=key, keyid=keyid, profile=profile) if 'error' in r: ret['result'] = False ret['comment'] = 'Failed to delete subnet: {0}.'.format(r['error']['message']) return ret _id = r.get('id') if not _id: ret['comment'] = '{0} subnet does not exist.'.format(name) return ret if __opts__['test']: ret['comment'] = 'Subnet {0} ({1}) is set to be removed.'.format(name, r['id']) ret['result'] = None return ret r = __salt__['boto_vpc.delete_subnet'](subnet_name=name, region=region, key=key, keyid=keyid, profile=profile) if not r.get('deleted'): ret['result'] = False ret['comment'] = 'Failed to delete subnet: {0}'.format(r['error']['message']) return ret ret['changes']['old'] = {'subnet': _id} ret['changes']['new'] = {'subnet': None} ret['comment'] = 'Subnet {0} deleted.'.format(name) return ret def internet_gateway_present(name, vpc_name=None, vpc_id=None, tags=None, region=None, key=None, keyid=None, profile=None): ''' Ensure an internet gateway exists. name Name of the internet gateway. vpc_name Name of the VPC to which the internet gateway should be attached. vpc_id Id of the VPC to which the internet_gateway should be attached. Only one of vpc_name or vpc_id may be provided. tags A list of tags. region Region to connect to. key Secret key to be used. keyid Access key to be used. profile A dict with region, key and keyid, or a pillar key (string) that contains a dict with region, key and keyid. 
''' ret = {'name': name, 'result': True, 'comment': '', 'changes': {} } r = __salt__['boto_vpc.resource_exists']('internet_gateway', name=name, region=region, key=key, keyid=keyid, profile=profile) if 'error' in r: ret['result'] = False ret['comment'] = 'Failed to create internet gateway: {0}.'.format(r['error']['message']) return ret if not r.get('exists'): if __opts__['test']: ret['comment'] = 'Internet gateway {0} is set to be created.'.format(name) ret['result'] = None return ret r = __salt__['boto_vpc.create_internet_gateway'](internet_gateway_name=name, vpc_name=vpc_name, vpc_id=vpc_id, tags=tags, region=region, key=key, keyid=keyid, profile=profile) if not r.get('created'): ret['result'] = False ret['comment'] = 'Failed to create internet gateway: {0}'.format(r['error']['message']) return ret ret['changes']['old'] = {'internet_gateway': None} ret['changes']['new'] = {'internet_gateway': r['id']} ret['comment'] = 'Internet gateway {0} created.'.format(name) return ret ret['comment'] = 'Internet gateway {0} present.'.format(name) return ret def internet_gateway_absent(name, detach=False, region=None, key=None, keyid=None, profile=None): ''' Ensure the named internet gateway is absent. name Name of the internet gateway. detach First detach the internet gateway from a VPC, if attached. region Region to connect to. key Secret key to be used. keyid Access key to be used. profile A dict with region, key and keyid, or a pillar key (string) that contains a dict with region, key and keyid. ''' ret = {'name': name, 'result': True, 'comment': '', 'changes': {} } r = __salt__['boto_vpc.get_resource_id']('internet_gateway', name=name, region=region, key=key, keyid=keyid, profile=profile) if 'error' in r: ret['result'] = False ret['comment'] = 'Failed to delete internet gateway: {0}.'.format(r['error']['message']) return ret igw_id = r['id'] if not igw_id: ret['comment'] = 'Internet gateway {0} does not exist.'.format(name) return ret if __opts__['test']: ret['comment'] = 'Internet gateway {0} is set to be removed.'.format(name) ret['result'] = None return ret r = __salt__['boto_vpc.delete_internet_gateway'](internet_gateway_name=name, detach=detach, region=region, key=key, keyid=keyid, profile=profile) if not r.get('deleted'): ret['result'] = False ret['comment'] = 'Failed to delete internet gateway: {0}.'.format(r['error']['message']) return ret ret['changes']['old'] = {'internet_gateway': igw_id} ret['changes']['new'] = {'internet_gateway': None} ret['comment'] = 'Internet gateway {0} deleted.'.format(name) return ret def route_table_present(name, vpc_name=None, vpc_id=None, routes=None, subnet_ids=None, subnet_names=None, tags=None, region=None, key=None, keyid=None, profile=None): ''' Ensure route table with routes exists and is associated to a VPC. Example:: .. code-block:: yaml boto_vpc.route_table_present: - name: my_route_table - vpc_id: vpc-123456 - routes: - destination_cidr_block: 0.0.0.0/0 instance_id: i-123456 interface_id: eni-123456 - subnet_names: - subnet1 - subnet2 name Name of the route table. vpc_name Name of the VPC with which the route table should be associated. vpc_id Id of the VPC with which the route table should be associated. Either vpc_name or vpc_id must be provided. routes A list of routes. subnet_ids A list of subnet ids to associate subnet_names A list of subnet names to associate tags A list of tags. region Region to connect to. key Secret key to be used. keyid Access key to be used. 
    profile
        A dict with region, key and keyid, or a pillar key (string) that
        contains a dict with region, key and keyid.
    '''
    ret = {'name': name,
           'result': True,
           'comment': '',
           'changes': {}
           }

    _ret = _route_table_present(name=name, vpc_name=vpc_name, vpc_id=vpc_id,
                                tags=tags, region=region, key=key,
                                keyid=keyid, profile=profile)
    ret['changes'] = _ret['changes']
    ret['comment'] = ' '.join([ret['comment'], _ret['comment']])
    if not _ret['result']:
        ret['result'] = _ret['result']
        if ret['result'] is False:
            return ret
    _ret = _routes_present(route_table_name=name, routes=routes, tags=tags,
                           region=region, key=key, keyid=keyid,
                           profile=profile)
    ret['changes'] = dictupdate.update(ret['changes'], _ret['changes'])
    ret['comment'] = ' '.join([ret['comment'], _ret['comment']])
    if not _ret['result']:
        ret['result'] = _ret['result']
        if ret['result'] is False:
            return ret
    _ret = _subnets_present(route_table_name=name, subnet_ids=subnet_ids,
                            subnet_names=subnet_names, tags=tags,
                            region=region, key=key, keyid=keyid,
                            profile=profile)
    ret['changes'] = dictupdate.update(ret['changes'], _ret['changes'])
    ret['comment'] = ' '.join([ret['comment'], _ret['comment']])
    if not _ret['result']:
        ret['result'] = _ret['result']
        if ret['result'] is False:
            return ret
    return ret


def _route_table_present(name, vpc_name=None, vpc_id=None, tags=None,
                         region=None, key=None, keyid=None, profile=None):
    ret = {'name': name,
           'result': True,
           'comment': '',
           'changes': {}
           }

    r = __salt__['boto_vpc.get_resource_id'](resource='route_table',
                                             name=name, region=region,
                                             key=key, keyid=keyid,
                                             profile=profile)
    if 'error' in r:
        ret['result'] = False
        ret['comment'] = 'Failed to create route table: {0}.'.format(r['error']['message'])
        return ret

    _id = r.get('id')
    if not _id:
        if __opts__['test']:
            msg = 'Route table {0} is set to be created.'.format(name)
            ret['comment'] = msg
            ret['result'] = None
            return ret

        r = __salt__['boto_vpc.create_route_table'](route_table_name=name,
                                                    vpc_name=vpc_name,
                                                    vpc_id=vpc_id, tags=tags,
                                                    region=region, key=key,
                                                    keyid=keyid,
                                                    profile=profile)
        if not r.get('created'):
            ret['result'] = False
            ret['comment'] = 'Failed to create route table: {0}.'.format(r['error']['message'])
            return ret
        ret['changes']['old'] = {'route_table': None}
        ret['changes']['new'] = {'route_table': r['id']}
        ret['comment'] = 'Route table {0} created.'.format(name)
        return ret
    ret['comment'] = 'Route table {0} ({1}) present.'.format(name, _id)
    return ret


def _routes_present(route_table_name, routes, tags=None, region=None,
                    key=None, keyid=None, profile=None):
    ret = {'name': route_table_name,
           'result': True,
           'comment': '',
           'changes': {}
           }

    route_table = __salt__['boto_vpc.describe_route_table'](route_table_name=route_table_name,
                                                            tags=tags,
                                                            region=region,
                                                            key=key,
                                                            keyid=keyid,
                                                            profile=profile)
    if 'error' in route_table:
        msg = 'Could not retrieve configuration for route table {0}: {1}.'.format(route_table_name,
                                                                                  route_table['error']['message'])
        ret['comment'] = msg
        ret['result'] = False
        return ret

    _routes = []
    if routes:
        route_keys = ['gateway_id', 'instance_id', 'destination_cidr_block', 'interface_id']
        for i in routes:
            _r = dict((k, i.get(k)) for k in route_keys)
            if i.get('internet_gateway_name'):
                r = __salt__['boto_vpc.get_resource_id']('internet_gateway',
                                                         name=i['internet_gateway_name'],
                                                         region=region,
                                                         key=key, keyid=keyid,
                                                         profile=profile)
                if 'error' in r:
                    msg = 'Error looking up id for internet gateway {0}: {1}'.format(i.get('internet_gateway_name'),
                                                                                     r['error']['message'])
                    ret['comment'] = msg
                    ret['result'] = False
                    return ret
                if r['id'] is None:
                    msg = 'Internet 
gateway {0} does not exist.'.format(i) ret['comment'] = msg ret['result'] = False return ret _r['gateway_id'] = r['id'] _routes.append(_r) to_delete = [] to_create = [] for route in _routes: if route not in route_table['routes']: to_create.append(dict(route)) for route in route_table['routes']: if route not in _routes: if route['gateway_id'] != 'local': to_delete.append(route) if to_create or to_delete: if __opts__['test']: msg = 'Route table {0} set to have routes modified.'.format(route_table_name) ret['comment'] = msg ret['result'] = None return ret if to_delete: for r in to_delete: res = __salt__['boto_vpc.delete_route'](route_table_id=route_table['id'], destination_cidr_block=r['destination_cidr_block'], region=region, key=key, keyid=keyid, profile=profile) if not res['deleted']: msg = 'Failed to delete route {0} from route table {1}: {2}.'.format(r['destination_cidr_block'], route_table_name, res['error']['message']) ret['comment'] = msg ret['result'] = False return ret ret['comment'] = 'Deleted route {0} from route table {1}.'.format(r['destination_cidr_block'], route_table_name) if to_create: for r in to_create: res = __salt__['boto_vpc.create_route'](route_table_id=route_table['id'], region=region, key=key, keyid=keyid, profile=profile, **r) if not res['created']: msg = 'Failed to create route {0} in route table {1}: {2}.'.format(r['destination_cidr_block'], route_table_name, res['error']['message']) ret['comment'] = msg ret['result'] = False return ret ret['comment'] = 'Created route {0} in route table {1}.'.format(r['destination_cidr_block'], route_table_name) ret['changes']['old'] = {'routes': route_table['routes']} route = __salt__['boto_vpc.describe_route_table'](route_table_name=route_table_name, tags=tags, region=region, key=key, keyid=keyid, profile=profile) ret['changes']['new'] = {'routes': route['routes']} return ret def _subnets_present(route_table_name, subnet_ids=None, subnet_names=None, tags=None, region=None, key=None, keyid=None, profile=None): ret = {'name': route_table_name, 'result': True, 'comment': '', 'changes': {} } if not subnet_ids: subnet_ids = [] # Look up subnet ids if subnet_names: for i in subnet_names: r = __salt__['boto_vpc.get_resource_id']('subnet', name=i, region=region, key=key, keyid=keyid, profile=profile) if 'error' in r: msg = 'Error looking up subnet ids: {0}'.format(r['error']['message']) ret['comment'] = msg ret['result'] = False return ret if r['id'] is None: msg = 'Subnet {0} does not exist.'.format(i) ret['comment'] = msg ret['result'] = False return ret subnet_ids.append(r['id']) # Describe routing table route_table = __salt__['boto_vpc.describe_route_table'](route_table_name=route_table_name, tags=tags, region=region, key=key, keyid=keyid, profile=profile) if not route_table: msg = 'Could not retrieve configuration for route table {0}.'.format(route_table_name) ret['comment'] = msg ret['result'] = False return ret assoc_ids = [x['subnet_id'] for x in route_table['associations']] to_create = [x for x in subnet_ids if x not in assoc_ids] to_delete = [] for x in route_table['associations']: # Don't remove the main route table association if x['subnet_id'] not in subnet_ids and x['subnet_id'] is not None: to_delete.append(x['id']) if to_create or to_delete: if __opts__['test']: msg = 'Subnet associations for route table {0} set to be modified.'.format(route_table_name) ret['comment'] = msg ret['result'] = None return ret if to_delete: for r_asc in to_delete: r = __salt__['boto_vpc.disassociate_route_table'](r_asc, region, key, keyid, 
profile) if 'error' in r: msg = 'Failed to dissociate {0} from route table {1}: {2}.'.format(r_asc, route_table_name, r['error']['message']) ret['comment'] = msg ret['result'] = False return ret ret['comment'] = 'Dissociated subnet {0} from route table {1}.'.format(r_asc, route_table_name) if to_create: for sn in to_create: r = __salt__['boto_vpc.associate_route_table'](route_table_id=route_table['id'], subnet_id=sn, region=region, key=key, keyid=keyid, profile=profile) if 'error' in r: msg = 'Failed to associate subnet {0} with route table {1}: {2}.'.format(sn, route_table_name, r['error']['message']) ret['comment'] = msg ret['result'] = False return ret ret['comment'] = 'Associated subnet {0} with route table {1}.'.format(sn, route_table_name) ret['changes']['old'] = {'subnets_associations': route_table['associations']} new_sub = __salt__['boto_vpc.describe_route_table'](route_table_name=route_table_name, tags=tags, region=region, key=key, keyid=keyid, profile=profile) ret['changes']['new'] = {'subnets_associations': new_sub['associations']} return ret def route_table_absent(name, region=None, key=None, keyid=None, profile=None): ''' Ensure the named route table is absent. name Name of the route table. region Region to connect to. key Secret key to be used. keyid Access key to be used. profile A dict with region, key and keyid, or a pillar key (string) that contains a dict with region, key and keyid. ''' ret = {'name': name, 'result': True, 'comment': '', 'changes': {} } r = __salt__['boto_vpc.get_resource_id']('route_table', name=name, region=region, key=key, keyid=keyid, profile=profile) if 'error' in r: ret['result'] = False ret['comment'] = r['error']['message'] return ret rtbl_id = r['id'] if not rtbl_id: ret['comment'] = 'Route table {0} does not exist.'.format(name) return ret if __opts__['test']: ret['comment'] = 'Route table {0} is set to be removed.'.format(name) ret['result'] = None return ret r = __salt__['boto_vpc.delete_route_table'](route_table_name=name, region=region, key=key, keyid=keyid, profile=profile) if 'error' in r: ret['result'] = False ret['comment'] = 'Failed to delete route table: {0}'.format(r['error']['message']) return ret ret['changes']['old'] = {'route_table': rtbl_id} ret['changes']['new'] = {'route_table': None} ret['comment'] = 'Route table {0} deleted.'.format(name) return ret
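

# --- Minimal standalone sketch (invented data, independent of Salt) of the
# desired-vs-actual diff that _routes_present() performs above: desired
# routes missing from the table are created; table routes that are not
# desired are deleted, except the implicit 'local' route.
def _diff_routes_example(desired, actual):
    to_create = [r for r in desired if r not in actual]
    to_delete = [r for r in actual
                 if r not in desired and r.get('gateway_id') != 'local']
    return to_create, to_delete

# _diff_routes_example(
#     [{'destination_cidr_block': '0.0.0.0/0', 'gateway_id': 'igw-new'}],
#     [{'destination_cidr_block': '10.0.0.0/16', 'gateway_id': 'local'},
#      {'destination_cidr_block': '0.0.0.0/0', 'gateway_id': 'igw-old'}])
# -> creates the igw-new route and deletes the igw-old route.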
smallyear/linuxLearn
salt/salt/states/boto_vpc.py
Python
apache-2.0
30,718
0.002637
# Copyright 2014 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from common.chrome_proxy_benchmark import ChromeProxyBenchmark from integration_tests import chrome_proxy_measurements as measurements from integration_tests import chrome_proxy_pagesets as pagesets from telemetry import benchmark NON_SAFE_BROWSING_BROWSERS = ['mac', 'linux', 'win', 'chromeos', 'android-webview', 'android-webview-shell'] class ChromeProxyClientVersion(ChromeProxyBenchmark): tag = 'client_version' test = measurements.ChromeProxyClientVersion page_set = pagesets.SyntheticPageSet @classmethod def Name(cls): return 'chrome_proxy_benchmark.client_version.synthetic' class ChromeProxyClientType(ChromeProxyBenchmark): tag = 'client_type' test = measurements.ChromeProxyClientType page_set = pagesets.ClientTypePageSet @classmethod def Name(cls): return 'chrome_proxy_benchmark.client_type.client_type' class ChromeProxyLoFi(ChromeProxyBenchmark): tag = 'lo_fi' test = measurements.ChromeProxyLoFi page_set = pagesets.LoFiPageSet @classmethod def Name(cls): return 'chrome_proxy_benchmark.lo_fi.lo_fi' class ChromeProxyExpDirective(ChromeProxyBenchmark): tag = 'exp_directive' test = measurements.ChromeProxyExpDirective page_set = pagesets.ExpDirectivePageSet @classmethod def Name(cls): return 'chrome_proxy_benchmark.exp_directive.exp_directive' class ChromeProxyPassThrough(ChromeProxyBenchmark): tag = 'pass_through' test = measurements.ChromeProxyPassThrough page_set = pagesets.PassThroughPageSet @classmethod def Name(cls): return 'chrome_proxy_benchmark.pass_through.pass_through' class ChromeProxyBypass(ChromeProxyBenchmark): tag = 'bypass' test = measurements.ChromeProxyBypass page_set = pagesets.BypassPageSet @classmethod def Name(cls): return 'chrome_proxy_benchmark.bypass.bypass' class ChromeProxyCorsBypass(ChromeProxyBenchmark): tag = 'bypass' test = measurements.ChromeProxyCorsBypass page_set = pagesets.CorsBypassPageSet @classmethod def Name(cls): return 'chrome_proxy_benchmark.bypass.corsbypass' class ChromeProxyBlockOnce(ChromeProxyBenchmark): tag = 'block_once' test = measurements.ChromeProxyBlockOnce page_set = pagesets.BlockOncePageSet @classmethod def Name(cls): return 'chrome_proxy_benchmark.block_once.block_once' @benchmark.Disabled(*NON_SAFE_BROWSING_BROWSERS) # Safebrowsing is enabled for Android and iOS. class ChromeProxySafeBrowsingOn(ChromeProxyBenchmark): tag = 'safebrowsing_on' test = measurements.ChromeProxySafebrowsingOn # Override CreateStorySet so that we can instantiate SafebrowsingPageSet # with a non default param. def CreateStorySet(self, options): del options # unused return pagesets.SafebrowsingPageSet(expect_timeout=True) @classmethod def Name(cls): return 'chrome_proxy_benchmark.safebrowsing_on.safebrowsing' @benchmark.Enabled(*NON_SAFE_BROWSING_BROWSERS) # Safebrowsing is switched off for Android Webview and all desktop platforms. 
class ChromeProxySafeBrowsingOff(ChromeProxyBenchmark): tag = 'safebrowsing_off' test = measurements.ChromeProxySafebrowsingOff page_set = pagesets.SafebrowsingPageSet @classmethod def Name(cls): return 'chrome_proxy_benchmark.safebrowsing_off.safebrowsing' class ChromeProxyHTTPFallbackProbeURL(ChromeProxyBenchmark): tag = 'fallback_probe' test = measurements.ChromeProxyHTTPFallbackProbeURL page_set = pagesets.SyntheticPageSet @classmethod def Name(cls): return 'chrome_proxy_benchmark.fallback_probe.synthetic' class ChromeProxyHTTPFallbackViaHeader(ChromeProxyBenchmark): tag = 'fallback_viaheader' test = measurements.ChromeProxyHTTPFallbackViaHeader page_set = pagesets.FallbackViaHeaderPageSet @classmethod def Name(cls): return 'chrome_proxy_benchmark.fallback_viaheader.fallback_viaheader' class ChromeProxyHTTPToDirectFallback(ChromeProxyBenchmark): tag = 'http_to_direct_fallback' test = measurements.ChromeProxyHTTPToDirectFallback page_set = pagesets.HTTPToDirectFallbackPageSet @classmethod def Name(cls): return ('chrome_proxy_benchmark.http_to_direct_fallback.' 'http_to_direct_fallback') class ChromeProxyReenableAfterBypass(ChromeProxyBenchmark): tag = 'reenable_after_bypass' test = measurements.ChromeProxyReenableAfterBypass page_set = pagesets.ReenableAfterBypassPageSet @classmethod def Name(cls): return 'chrome_proxy_benchmark.reenable_after_bypass.reenable_after_bypass' class ChromeProxySmoke(ChromeProxyBenchmark): tag = 'smoke' test = measurements.ChromeProxySmoke page_set = pagesets.SmokePageSet @classmethod def Name(cls): return 'chrome_proxy_benchmark.smoke.smoke' class ChromeProxyClientConfig(ChromeProxyBenchmark): tag = 'client_config' test = measurements.ChromeProxyClientConfig page_set = pagesets.SyntheticPageSet @classmethod def Name(cls): return 'chrome_proxy_benchmark.client_config.synthetic' @benchmark.Enabled('desktop') class ChromeProxyVideoDirect(benchmark.Benchmark): tag = 'video' test = measurements.ChromeProxyVideoValidation page_set = pagesets.VideoDirectPageSet @classmethod def Name(cls): return 'chrome_proxy_benchmark.video.direct' @benchmark.Enabled('desktop') class ChromeProxyVideoProxied(benchmark.Benchmark): tag = 'video' test = measurements.ChromeProxyVideoValidation page_set = pagesets.VideoProxiedPageSet @classmethod def Name(cls): return 'chrome_proxy_benchmark.video.proxied' @benchmark.Enabled('desktop') class ChromeProxyVideoCompare(benchmark.Benchmark): """Comparison of direct and proxied video fetches. This benchmark runs the ChromeProxyVideoDirect and ChromeProxyVideoProxied benchmarks, then compares their results. """ tag = 'video' test = measurements.ChromeProxyVideoValidation page_set = pagesets.VideoComparePageSet @classmethod def Name(cls): return 'chrome_proxy_benchmark.video.compare' @benchmark.Enabled('desktop') class ChromeProxyVideoFrames(benchmark.Benchmark): """Check for video frames similar to original video.""" tag = 'video' test = measurements.ChromeProxyInstrumentedVideoValidation page_set = pagesets.VideoFramePageSet @classmethod def Name(cls): return 'chrome_proxy_benchmark.video.frames' @benchmark.Enabled('desktop') class ChromeProxyVideoAudio(benchmark.Benchmark): """Check that audio is similar to original video.""" tag = 'video' test = measurements.ChromeProxyInstrumentedVideoValidation page_set = pagesets.VideoAudioPageSet @classmethod def Name(cls): return 'chrome_proxy_benchmark.video.audio'
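

# --- Hypothetical example (not an actual benchmark in this suite): a new
# ChromeProxyBenchmark follows the same tag/test/page_set pattern used above.
# The measurement and page set are reused from existing ones; the tag
# 'example' and the class name are invented.
class ChromeProxyExample(ChromeProxyBenchmark):
  tag = 'example'
  test = measurements.ChromeProxySmoke
  page_set = pagesets.SmokePageSet

  @classmethod
  def Name(cls):
    return 'chrome_proxy_benchmark.example.example'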
SaschaMester/delicium
tools/chrome_proxy/integration_tests/chrome_proxy_benchmark.py
Python
bsd-3-clause
6,825
0.016703
#!/usr/bin/env python # ---------------------------------------------------------------------- # Numenta Platform for Intelligent Computing (NuPIC) # Copyright (C) 2015, Numenta, Inc. Unless you have an agreement # with Numenta, Inc., for a separate license for this software code, the # following terms and conditions apply: # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero Public License version 3 as # published by the Free Software Foundation. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. # See the GNU Affero Public License for more details. # # You should have received a copy of the GNU Affero Public License # along with this program. If not, see http://www.gnu.org/licenses. # # http://numenta.org/licenses/ # ---------------------------------------------------------------------- """ Plot sequence prediction & perturbation experiment result """ import os from matplotlib import pyplot as plt import matplotlib as mpl from plot import plotAccuracy from plot import computeAccuracy from plot import readExperiment mpl.rcParams['pdf.fonttype'] = 42 plt.ion() plt.close('all') if __name__ == '__main__': experiments = [] experiments.append(os.path.join("tdnn/results", "high-order-distributed-random-perturbed-long-window/seed0.0learning_window3000.0", "0.log")) experiments.append(os.path.join("tm/results", "high-order-distributed-random-perturbed/seed0.0", "0.log")) experiments.append(os.path.join("lstm/results", "high-order-distributed-random-perturbed", "seed0.0learning_window3000.0", "0.log")) # experiments.append(os.path.join("tdnn/results", # "high-order-distributed-random-perturbed/seed0.0learning_window3000.0", # "0.log")) # experiments.append(os.path.join("tdnn/results", # "high-order-distributed-random-perturbed-short-window/seed0.0learning_window3000.0", # "0.log")) for experiment in experiments: data = readExperiment(experiment) (accuracy, x) = computeAccuracy(data['predictions'], data['truths'], data['iterations'], resets=data['resets'], randoms=data['randoms']) # perturbAt = data['sequenceCounter'][10000] plotAccuracy((accuracy, x), data['trains'], window=200, type=type, label='NoiseExperiment', hideTraining=True, lineSize=1.0) # plt.xlim([1200, 1750]) plt.xlabel('# of sequences seen') plt.axvline(x=10000, color='k') plt.legend(['HTM', 'LSTM-3000', 'TDNN'], loc=4) plt.savefig('./result/model_performance_high_order_prediction.pdf')
ThomasMiconi/nupic.research
projects/sequence_prediction/discrete_sequences/plotPerturbExperiment.py
Python
agpl-3.0
3,263
0.005516
#! /usr/bin/python3 import sys import argparse import itertools from PIL import Image if __name__ == '__main__': argparser = argparse.ArgumentParser() argparser.add_argument('input') argparser.add_argument('palette',type=argparse.FileType('wb')) argparser.add_argument('pattern',type=argparse.FileType('wb')) argparser.add_argument('name',type=argparse.FileType('wb')) argparser.add_argument('attribute',type=argparse.FileType('wb')) argparser.add_argument('-O','--optimize',action='store_true') args = argparser.parse_args() image = Image.open(args.input) palette = set() tiles = set() attribute_colors = [] for ay in range(15): attribute_colors.append([]) for ax in range(16): colors = set() for ny in [ay*2+i for i in range(2)]: for nx in [ax*2+i for i in range(2)]: for py in [ny*8+i for i in range(8)]: for px in [nx*8+i for i in range(8)]: pixel = image.getpixel((px,py)) colors.add(pixel) if len(colors) > 4: print('Too many colors in attribute {},{}'.format(ax,ay)) exit(1) palette.add(frozenset(colors)) attribute_colors[ay].append(frozenset(colors)) final_palette = set() for line in palette: small = False for other_line in palette: if other_line > line: small = True break if not small: final_palette.add(line) palette = final_palette if len(palette) > 4: print('Too many colors in image, {} sets'.format(len(final_palette))) for line in palette: print(','.join(map(hex,sorted(list(line))))) exit(1) bgs = set() for line in palette: for color in line: valid = True for other_line in palette: if color not in other_line and len(other_line) >= 4: valid = False break if valid: bgs.add(color) if len(bgs) < 1: print('No shared background color!') exit(1) raw_palette = [] bg = sorted(list(bgs))[0] for line in palette: raw = [bg] for color in sorted(list(line)): if color not in raw: raw.append(color) raw_palette.append(raw) lines_p = tuple([tuple(map(lambda i: (bg,*i),itertools.permutations(tuple(line[1:])))) for line in raw_palette]) palette_options = tuple(itertools.product(*lines_p)) pattern_options = [] tile_maps = [] attribute_tables = [] i = 0 for palette_option in palette_options: patterns = set() tile_map = [[None for j in range(32)] for i in range(30)] attribute_table = [] for ay in range(15): attribute_row = [] for ax in range(16): colors = None for line in palette_option: if set(line) >= attribute_colors[ay][ax]: colors = line break attribute_row.append(palette_option.index(colors)) for ny in [ay*2+i for i in range(2)]: for nx in [ax*2+i for i in range(2)]: tile = [] for py in [ny*8+i for i in range(8)]: row = [] for px in [nx*8+i for i in range(8)]: pixel = image.getpixel((px,py)) row.append(colors.index(pixel)) tile.append(tuple(row)) patterns.add(tuple(tile)) tile_map[ny][nx] = tuple(tile) attribute_table.append(attribute_row) print('{} of {}: maybe {} tiles...'.format(i,len(palette_options),len(patterns)).ljust(40), end='\r') i += 1 pattern_options.append(patterns) tile_maps.append(tile_map) attribute_tables.append(attribute_table) if not args.optimize and len(patterns) <= 256: break best_index = pattern_options.index(sorted(pattern_options,key=len)[0]) print() print('{} tiles'.format(len(pattern_options[best_index]))) for l in range(3,-1,-1): print(l) if l < len(palette_options[best_index]): line = palette_options[best_index][l] if len(line) < 4: line += tuple([bg for i in range(4-len(line))]) args.palette.write(bytes(line[::-1])) else: args.palette.write(bytes([bg for i in range(4)])) pattern_table = tuple(pattern_options[best_index]) for row in tile_maps[best_index]: for tile in row: 
args.name.write(bytes([pattern_table.index(tile)])) for tile in pattern_table: for plane in range(2): for row in tile: bits = 0 for pixel in row: bits <<= 1 if pixel & (plane+1): bits |= 1 args.pattern.write(bytes([bits])) print(len(tile_maps[best_index]),len(tile_maps[best_index][0])) for ay in range(8): for ax in range(8): bits = attribute_tables[best_index][ay*2][ax*2] bits |= attribute_tables[best_index][ay*2][ax*2+1] << 2 if ay < 7: bits |= attribute_tables[best_index][ay*2+1][ax*2] << 4 bits |= attribute_tables[best_index][ay*2+1][ax*2+1] << 6 args.attribute.write(bytes([bits]))
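

# --- Standalone sketch of the attribute-byte packing used above: four 2-bit
# palette indices (top-left, top-right, bottom-left, bottom-right quadrants)
# share a single byte. The sample values are invented.
def pack_attribute(tl, tr, bl, br):
    return tl | (tr << 2) | (bl << 4) | (br << 6)

assert pack_attribute(1, 2, 3, 0) == 0b00111001  # 0x39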
DrKylstein/gfxtools
nesimage.py
Python
mit
5,828
0.007378
# -*- coding: utf-8 -*- # # Copyright 2012-2015 Spotify AB # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from helpers import unittest import luigi import namespace_test_helper # declares another Foo in namespace mynamespace class Foo(luigi.Task): pass class FooSubclass(Foo): pass class TestNamespacing(unittest.TestCase): def test_vanilla(self): self.assertEqual(Foo.task_namespace, None) self.assertEqual(Foo.task_family, "Foo") self.assertEqual(str(Foo()), "Foo()") self.assertEqual(FooSubclass.task_namespace, None) self.assertEqual(FooSubclass.task_family, "FooSubclass") self.assertEqual(str(FooSubclass()), "FooSubclass()") def test_namespace(self): self.assertEqual(namespace_test_helper.Foo.task_namespace, "mynamespace") self.assertEqual(namespace_test_helper.Foo.task_family, "mynamespace.Foo") self.assertEqual(str(namespace_test_helper.Foo(1)), "mynamespace.Foo(p=1)") self.assertEqual(namespace_test_helper.Bar.task_namespace, "othernamespace") self.assertEqual(namespace_test_helper.Bar.task_family, "othernamespace.Bar") self.assertEqual(str(namespace_test_helper.Bar(1)), "othernamespace.Bar(p=1)")
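

# A plausible sketch (invented class and namespace names) of how a helper
# like namespace_test_helper gets its namespaced tasks: luigi.namespace()
# applies to every Task class defined after the call.
luigi.namespace('examplenamespace')


class ExampleTask(luigi.Task):
    p = luigi.IntParameter()


luigi.namespace()  # reset so later definitions stay un-namespaced


class TestNamespacingSketch(unittest.TestCase):
    def test_example_namespace(self):
        self.assertEqual(ExampleTask.task_namespace, 'examplenamespace')
        self.assertEqual(str(ExampleTask(p=1)), 'examplenamespace.ExampleTask(p=1)')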
ivannotes/luigi
test/namespace_test.py
Python
apache-2.0
1,739
0.00345
import os
import sys
import subprocess
import logging
from itertools import izip_longest


# itertools recipe
def grouper(n, iterable, fillvalue=None):
    "grouper(3, 'ABCDEFG', 'x') --> ABC DEF Gxx"
    args = [iter(iterable)] * n
    return izip_longest(fillvalue=fillvalue, *args)


def harddrive_enumerator():
    """
    Generator to get all (fixed) drive letters in the computer
    Returns tuples of (DriveName, VolumeName) - eg. ("D:", "Samsung Station")
    """
    logger = logging.getLogger("keepitup")
    drivesDetailedList = []
    if sys.platform == "win32":
        logger.debug("Enumerating win32 hard drives")
        getDrivesProc = subprocess.Popen('wmic logicaldisk where drivetype=3 get name, VolumeName /format:list',
                                         shell=True, stdout=subprocess.PIPE)
        output, err = getDrivesProc.communicate()
        logger.debug("Enumerated hard drives output: %s", output)
        drivesDetailedList = output.split(os.linesep)
    elif sys.platform in ["linux2", "darwin"]:
        logger.debug("Enumerating linux/osx hard drives")
        raise NotImplementedError()
    else:
        logger.error("Cannot enumerate hard drives - unrecognized OS: %s", sys.platform)
        raise NotImplementedError()

    # fillvalue='' keeps the membership tests below safe when the list has an
    # odd number of entries (the default None would raise a TypeError).
    for name, volumeName in grouper(2, drivesDetailedList, fillvalue=''):
        if "Name=" in name and "VolumeName" in volumeName:
            name = name[len("Name="):].strip()
            volumeName = volumeName[len("VolumeName="):].strip()
            yield name, volumeName
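

# Usage sketch for the grouper() recipe above (sample lines invented): it
# pairs up the interleaved "Name=..."/"VolumeName=..." lines emitted by wmic.
if __name__ == '__main__':
    sample = ['Name=C:', 'VolumeName=System', 'Name=D:', 'VolumeName=Data']
    print list(grouper(2, sample))
    # -> [('Name=C:', 'VolumeName=System'), ('Name=D:', 'VolumeName=Data')]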
yoavfrancis/KeepItUp
KeepItUp/harddrive_enumerator.py
Python
mit
1,589
0.003776
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Tests for pdeo.databases.sql"""

import unittest
# import responses
from pdeo.databases import sql

# if version_info[0] == 2:  # utf8 for python2
#     from codecs import open


class PdeoDatabaseSqlTestCase(unittest.TestCase):
    def setUp(self):
        pass

    def tearDown(self):
        pass

    def testEntryPoints(self):
        sql.Database
oczkers/pdeo
tests/databases/test_sql.py
Python
gpl-3.0
407
0
# -------------------------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. # -------------------------------------------------------------------------------------------- from enum import Enum class TagUpdateOperation(str, Enum): merge = "Merge" replace = "Replace" delete = "Delete"
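

# A small illustration (not part of the original module): because the enum
# also subclasses str, members compare equal to their wire-format values and
# can be constructed from them.
if __name__ == '__main__':
    assert TagUpdateOperation.merge == "Merge"
    assert TagUpdateOperation("Replace") is TagUpdateOperation.replace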
yugangw-msft/azure-cli
src/azure-cli/azure/cli/command_modules/resource/parameters.py
Python
mit
474
0.00211
# -*- coding: utf-8 -*- '''twoq ordering mixins''' from threading import local from itertools import product, groupby from random import choice, shuffle, sample from twoq.support import zip_longest, imap class RandomMixin(local): '''random mixin''' def choice(self): '''random choice of/from incoming things''' with self._context(): return self._append(choice(list(self._iterable))) def sample(self, n): ''' random sampling drawn from `n` incoming things @param n: number of incoming things ''' with self._context(): return self._xtend(sample(list(self._iterable), n)) def shuffle(self): '''randomly order incoming things''' with self._context(): iterable = list(self._iterable) shuffle(iterable) return self._xtend(iterable) class OrderMixin(local): '''order mixin''' def group(self): ''' group incoming things, optionally using current call for key function ''' call_, list_ = self._call, list with self._context(): return self._xtend(imap( lambda x: [x[0], list_(x[1])], groupby(self._iterable, call_) )) def grouper(self, n, fill=None): ''' split incoming things into sequences of length `n`, using `fill` thing to pad incomplete sequences @param n: number of things @param fill: fill thing (default: None) ''' with self._context(): return self._xtend( zip_longest(fillvalue=fill, *[iter(self._iterable)] * n) ) def reverse(self): '''reverse order of incoming things''' with self._context(): return self._xtend(reversed(list(self._iterable))) def sort(self): ''' sort incoming things, optionally using current call as key function ''' call_ = self._call with self._context(): return self._xtend(sorted(self._iterable, key=call_)) class CombineMixin(local): '''combination mixin''' def product(self, n=1): ''' nested for each loops repeated `n` times @param n: number of repetitions (default: 1) ''' with self._context(): return self._xtend(product(*self._iterable, repeat=n)) class OrderingMixin(OrderMixin, RandomMixin, CombineMixin): '''ordering mixin'''
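

# --- Reference sketch in plain itertools (no queue machinery) of two
# behaviors the mixins above wrap; the sample values are invented.
if __name__ == '__main__':
    # OrderMixin.group: consecutive runs keyed by a callable (identity here)
    runs = [[k, list(g)] for k, g in groupby([1, 1, 2, 2, 2, 3])]
    assert runs == [[1, [1, 1]], [2, [2, 2, 2]], [3, [3]]]
    # CombineMixin.product(n): cartesian product with repetition
    assert list(product([0, 1], repeat=2)) == [(0, 0), (0, 1), (1, 0), (1, 1)]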
lcrees/twoq
twoq/ordering.py
Python
bsd-3-clause
2,474
0
#!/usr/bin/env python # This file is part of VoltDB. # Copyright (C) 2008-2017 VoltDB Inc. # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. # IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR # OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, # ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR # OTHER DEALINGS IN THE SOFTWARE. from NotANormalizer import NotANormalizer from SQLCoverageReport import generate_html_reports def safecmp(x, y): """Calls the 'standard' safecmp function, which performs a comparison similar to cmp, including iterating over lists, but two None values are considered equal, and a TypeError is avoided when a None value and a datetime are corresponding members of a list. """ return NotANormalizer.safecmp(x,y) def normalize(table, sql): """Do nothing other than returning the table. """ return NotANormalizer.normalize(table, sql) def compare_results(suite, seed, statements_path, hsql_path, jni_path, output_dir, report_invalid, report_all, extra_stats, comparison_database, modified_sql_path, max_mismatches=0, within_minutes=0): """Just calls SQLCoverageReport.generate_html_reports(...). """ return generate_html_reports(suite, seed, statements_path, hsql_path, jni_path, output_dir, report_invalid, report_all, extra_stats, comparison_database, modified_sql_path, max_mismatches, within_minutes, cntonly=True)
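

# Smoke illustration drawn from the docstrings above: safecmp behaves like
# cmp, so equal inputs should compare as 0 and two Nones count as equal.
# This is an assumption based on the docstring, not on the VoltDB test suite.
if __name__ == '__main__':
    assert safecmp(None, None) == 0
    assert safecmp([1, None, 'a'], [1, None, 'a']) == 0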
deerwalk/voltdb
tests/sqlcoverage/normalizer/not-a-normalizer.py
Python
agpl-3.0
2,364
0.002538
#!/usr/bin/python import getopt import sys import pymqi, CMQC, CMQCFC STATE_OK = 0 STATE_WARNING = 1 STATE_CRITICAL = 2 STATE_UNKNOWN = 3 def usage(): print """Usage: rbh_check_mq_channel_status -H <HostName> -g <QMGRName> -p <PortNumber> -a <ChannelName for connection> -t <ChannelName for test>""" def show_help(): usage() print """ Checks MQ channel status -H, --host Host name -g, --qmgr Queue Manager Name -p, --port-number port number (default 1414) -a, --channel-name-conn channel name for connection -t, --channel-name channel name for test example: rbh_check_mq_channel_status.py -H host1 -g QM1 -a SYSTEM.ADMIN.SVRCONN -t nameofthechannel """ def exit_with_state(exit_code): global qmgr try: qmgr.disconnect() except: pass sys.exit(exit_code) def main(): try: opts, args = getopt.getopt(sys.argv[1:], "hH:g:p:a:t:", ["help", "host","qmgr=","port=","channel-name=","channel-name-conn="]) except getopt.GetoptError, err: print str(err) # will print something like "option -a not recognized" usage() sys.exit(2) hostName=None qmgrName=None portNumber=1414 channelNameConn=None channelNameTest=None for o, a in opts: if o in ("-h", "--help"): show_help() sys.exit() elif o in ("-H", "--host"): hostName = a elif o in ("-g", "--qmgr"): qmgrName = a elif o in ("-p", "--port"): portNumber = int(a) elif o in ("-a", "--channel-name-conn"): channelNameConn = a elif o in ("-t", "--channel-name"): channelNameTest = a else: assert False, "unhandled option" if not (hostName and portNumber and channelNameTest and qmgrName and channelNameConn): usage() exit_with_state(STATE_UNKNOWN) # if len(channelNameConn) > MQ_CHANNEL_NAME_LENGTH: # print "UNKNOWN - Channel name are too long." conn_info="%s(%s)" % (hostName,portNumber) global qmgr try: qmgr = pymqi.connect(qmgrName,channelNameConn,conn_info) except pymqi.MQMIError, e: print "UNKNOWN - unable to connect to Qmanager, reason: %s" % (e) exit_with_state(STATE_UNKNOWN) channel_name = '' try: pcf = pymqi.PCFExecute(qmgr) channel_names = pcf.MQCMD_INQUIRE_CHANNEL({CMQCFC.MQCACH_CHANNEL_NAME: channelNameTest}) if channel_names[0]: channel_name = channel_names[0][CMQCFC.MQCACH_CHANNEL_NAME].rstrip() channel_type = channel_names[0][CMQCFC.MQIACH_CHANNEL_TYPE] else: print("CRITICAL - Channel %s does not exists." % (channelNameTest)) exit_with_state(STATE_UNKNOWN) except pymqi.MQMIError,e : print("UNKNOWN - Can not list MQ channels. 
reason: %s" % (e)) exit_with_state(STATE_UNKNOWN) status_available = True try: attrs = "MQCACH_CHANNEL_NAME MQIACH_BYTES_RCVD MQIACH_BYTES_SENT" pcf = pymqi.PCFExecute(qmgr) channels = pcf.MQCMD_INQUIRE_CHANNEL_STATUS({CMQCFC.MQCACH_CHANNEL_NAME: channelNameTest}) except pymqi.MQMIError, e: if e.comp == CMQC.MQCC_FAILED and e.reason == CMQCFC.MQRCCF_CHL_STATUS_NOT_FOUND: status_available = False pass else: print "UNKNOWN - Can not get status information, reason: %s" % (e) exit_with_state(STATE_UNKNOWN) infomsg = {CMQCFC.MQCHS_INACTIVE:"Channel is inactive", CMQCFC.MQCHS_BINDING:"Channel is negotiating with the partner.", CMQCFC.MQCHS_STARTING:"Channel is waiting to become active.", CMQCFC.MQCHS_RUNNING:"Channel is transferring or waiting for messages.", CMQCFC.MQCHS_PAUSED:"Channel is paused.", CMQCFC.MQCHS_STOPPING:"Channel is in process of stopping.", CMQCFC.MQCHS_RETRYING:"Channel is reattempting to establish connection.", CMQCFC.MQCHS_STOPPED:"Channel is stopped.", CMQCFC.MQCHS_REQUESTING:"Requester channel is requesting connection.", CMQCFC.MQCHS_INITIALIZING:"Channel is initializing."} if status_available: status = channels[0][CMQCFC.MQIACH_CHANNEL_STATUS] msg = "Channel: %s state is %s (%s)" % (channel_name,status,infomsg[status]) if (status == CMQCFC.MQCHS_RUNNING or (status == CMQCFC.MQCHS_INACTIVE and not channel_type in (CMQC.MQCHT_REQUESTER,CMQC.MQCHT_CLUSSDR))): print("OK - %s" % (msg)) exit_with_state(STATE_OK) if status in (CMQCFC.MQCHS_PAUSED,CMQCFC.MQCHS_STOPPED): print("CRITICAL - %s" % (msg)) exit_with_state(STATE_CRITICAL) else: print("WARNING - %s" % (msg)) exit_with_state(STATE_WARNING) else: if channel_type in (CMQC.MQCHT_REQUESTER,CMQC.MQCHT_CLUSSDR): print("CRITICAL - Channel %s is defined, but status is not available. As this channel is defined as CLUSDR or REQUESTER type channel, therefore it should be running." % (channelNameTest)) exit_with_state(STATE_CRITICAL) else: print("OK - Channel %s is defined, but status is not available. This may indicate that the channel has not been used." % (channelNameTest)) exit_with_state(STATE_OK) if __name__ == "__main__": main()
klapper/nagios-plugins-mq
check_mq_channel.py
Python
mit
5,483
0.010213
import time from PyQt4 import QtGui, QtCore, QtOpenGL from PyQt4.QtOpenGL import QGLWidget import OpenGL.GL as gl import OpenGL.arrays.vbo as glvbo import numpy as np import raster import slider import draw_texture import qt_helpers raster_width = 1024 raster_height = 64 raster_n_neurons = 64 spikes_per_frame = 5 class GLPlotWidget(QGLWidget): # default window size width, height = 600, 600 t_last_msg = time.time() spike_count = 0 last_time = None def initializeGL(self): # program for drawing spikes self.raster = raster.RasterProgram(raster_width, raster_height, raster_n_neurons) self.raster.link() # program for fading sparkleplot self.slider = slider.SlideProgram(raster_width, raster_height) self.slider.link() # program for rendering a texture on the screen self.draw_texture = draw_texture.DrawTextureProgram() self.draw_texture.link() def paintGL(self): now = time.time() if self.last_time is None: decay = 0.0 self.dt = None else: dt = now - self.last_time if self.dt is None: self.dt = dt else: #self.dt = dt self.dt = (0.9) * self.dt + (0.1) * dt self.last_time = now if self.dt is not None: self.slider.swap_frame_buffer(int(self.dt/0.001)) self.slider.paint_slid() #data = self.data data = np.random.randint(raster_n_neurons, size=spikes_per_frame).astype('int32') # generate spike data self.spike_count += len(data) # paint the spikes onto the sparkle plot self.slider.swap_frame_buffer(0, False) self.raster.paint_spikes(data) # switch to rendering on the screen gl.glBindFramebuffer(gl.GL_FRAMEBUFFER, 0) gl.glViewport(0, 0, self.width, self.height) # draw the sparkle plot on the screen self.draw_texture.paint(self.slider.get_current_texture()) # print out spike rate now = time.time() if now > self.t_last_msg + 1: dt = now - self.t_last_msg rate = self.spike_count * 0.000001 / dt print 'Mspikes per second = %g' % rate self.spike_count = 0 self.t_last_msg = now # flag a redraw self.update() def resizeGL(self, width, height): """Called upon window resizing: reinitialize the viewport.""" # update the window size self.width, self.height = width, height # paint within the whole window gl.glViewport(0, 0, width, height) if __name__ == '__main__': # define a Qt window with an OpenGL widget inside it class TestWindow(QtGui.QMainWindow): def __init__(self): super(TestWindow, self).__init__() # initialize the GL widget self.widget = GLPlotWidget() # put the window at the screen position (100, 100) self.setGeometry(100, 100, self.widget.width, self.widget.height) self.setCentralWidget(self.widget) self.show() # show the window win = qt_helpers.create_window(TestWindow)
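

# --- Standalone sketch of the frame-time smoothing in paintGL() above: an
# exponential moving average with a 0.9/0.1 split, pulled out as a function.
# The sample values in the comment are invented.
def smooth_dt(prev_dt, new_dt, alpha=0.1):
    """Return the EMA-smoothed frame time; seeds with new_dt on first call."""
    if prev_dt is None:
        return new_dt
    return (1.0 - alpha) * prev_dt + alpha * new_dt

# smooth_dt(None, 0.016) -> 0.016; smooth_dt(0.016, 0.020) -> 0.0164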
tcstewar/opengl_texture_rendering
sparkle/main_raster.py
Python
gpl-2.0
3,315
0.00181
# -*- coding: utf-8 -*- ## ## ## This file is part of Indico. ## Copyright (C) 2002 - 2014 European Organization for Nuclear Research (CERN). ## ## Indico is free software; you can redistribute it and/or ## modify it under the terms of the GNU General Public License as ## published by the Free Software Foundation; either version 3 of the ## License, or (at your option) any later version. ## ## Indico is distributed in the hope that it will be useful, but ## WITHOUT ANY WARRANTY; without even the implied warranty of ## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ## General Public License for more details. ## ## You should have received a copy of the GNU General Public License ## along with Indico;if not, see <http://www.gnu.org/licenses/>. """ Some utils for unit tests """ # system imports from flask import session from functools import wraps import unittest import new import contextlib # indico imports from indico.util.contextManager import ContextManager from indico.util.fossilize import clearCache from indico.util.i18n import setLocale # indico legacy imports from MaKaC.common.logger import Logger from indico.web.flask.app import make_app loadedFeatures = [] class FeatureLoadingObject(object): def __init__(self): self._activeFeatures = [] def _configFeature(self, ftr, obj): global loadedFeatures if type(ftr) == str: modName, ftrName = ftr.split('.') ftrClsName = "%s_Feature" % ftrName mod = __import__('indico.tests.python.unit.%s' % modName, globals(), locals(), [ftrClsName]) ftr = mod.__dict__[ftrClsName] else: pass for name, func in ftr.__dict__.iteritems(): if name.startswith('_action_'): setattr(obj, name[7:], new.instancemethod(func, obj, obj.__class__)) elif name.startswith('_context_'): setattr(obj, name, new.instancemethod(func, obj, obj.__class__)) ftrObj = ftr() if ftr not in loadedFeatures: ftrObj.start(obj) loadedFeatures.append(ftr) return ftrObj def _configFeatures(self, obj): # process requirements for ftr in self._requires: ftrObj = self._configFeature(ftr, obj) self._activeFeatures.append(ftrObj) def _unconfigFeatures(self, obj): global loadedFeatures for ftr in self._activeFeatures[::-1]: ftr.destroy(obj) del loadedFeatures[:] del self._activeFeatures[:] class IndicoTestFeature(FeatureLoadingObject): _requires = [] def start(self, obj): self._configFeatures(obj) def destroy(self, obj): self._unconfigFeatures(obj) def with_context(context): """ Decorator """ def wrapper(method): @wraps(method) def testWrapped(self, *args, **kwargs): with self._context(context): return method(self, *args, **kwargs) return testWrapped return wrapper class ContextManager_Feature(IndicoTestFeature): """ Creates a context manager """ def start(self, obj): super(ContextManager_Feature, self).start(obj) # create the context ContextManager.destroy() def destroy(self, obj): super(ContextManager_Feature, self).destroy(obj) ContextManager.destroy() class RequestEnvironment_Feature(IndicoTestFeature): """ Creates an environment that should be similar to a regular request """ def _action_endRequest(self): self._do._notify('requestFinished') def _action_startRequest(self): self._do._notify('requestStarted') def _action_make_app_request_context(self): app = make_app() env = { 'environ_base': { 'REMOTE_ADDR': '127.0.0.1' } } return app.test_request_context(**env) def _action_mock_session_user(self): # None of the current tests actually require a user in the session. 
# If this changes, assign a avatar mock object here session.user = None def _context_request(self): self._startRequest() with self._make_app_request_context(): self._mock_session_user() setLocale('en_GB') yield self._endRequest() class IndicoTestCase(unittest.TestCase, FeatureLoadingObject): """ IndicoTestCase is a normal TestCase on steroids. It allows you to load "features" that will empower your test classes """ _requires = [] def __init__(self, *args, **kwargs): unittest.TestCase.__init__(self, *args, **kwargs) FeatureLoadingObject.__init__(self) def setUp(self): setLocale('en_GB') Logger.removeHandler('smtp') clearCache() # init/clear fossil cache self._configFeatures(self) def tearDown(self): self._unconfigFeatures(self) @contextlib.contextmanager def _context(self, *contexts, **kwargs): ctxs = [] res = [] for ctxname in contexts: ctx = getattr(self, '_context_%s' % ctxname)(**kwargs) res.append(ctx.next()) ctxs.append(ctx) yield res if len(res) > 1 else res[0] for ctx in ctxs[::-1]: ctx.next()
Ictp/indico
indico/tests/python/unit/util.py
Python
gpl-3.0
5,388
0.002598
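A minimal usage sketch for the test helpers above; the test class and assertion are hypothetical, but the 'util.RequestEnvironment' feature name does resolve to RequestEnvironment_Feature through the _configFeature import logic in this same module:

from indico.tests.python.unit.util import IndicoTestCase, with_context


class SomethingTest(IndicoTestCase):
    # Hypothetical test class, for illustration only. Real tests usually
    # require additional features as well (e.g. a DB feature that provides
    # the self._do object used by _action_startRequest/_action_endRequest).
    _requires = ['util.RequestEnvironment']

    @with_context('request')
    def testInsideRequest(self):
        # body runs inside _context_request: a Flask test request context
        # with session.user mocked and the locale set to en_GB
        self.assertEqual(1 + 1, 2)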
""" This module is essentially a broker to xmodule/tabs.py -- it was originally introduced to perform some LMS-specific tab display gymnastics for the Entrance Exams feature """ from django.conf import settings from django.utils.translation import ugettext as _, ugettext_noop from courseware.access import has_access from courseware.entrance_exams import user_must_complete_entrance_exam from student.models import UserProfile from openedx.core.lib.course_tabs import CourseTabPluginManager from student.models import CourseEnrollment from xmodule.tabs import CourseTab, CourseTabList, key_checker from xmodule.tabs import StaticTab class EnrolledTab(CourseTab): """ A base class for any view types that require a user to be enrolled. """ @classmethod def is_enabled(cls, course, user=None): if user is None: return True return bool(CourseEnrollment.is_enrolled(user, course.id) or has_access(user, 'staff', course, course.id)) class CoursewareTab(EnrolledTab): """ The main courseware view. """ type = 'courseware' title = ugettext_noop('Courseware') priority = 10 view_name = 'courseware' is_movable = False is_default = False is_visible_to_sneak_peek = True class CourseInfoTab(CourseTab): """ The course info view. """ type = 'course_info' title = ugettext_noop('Course Info') priority = 20 view_name = 'info' tab_id = 'info' is_movable = False is_default = False is_visible_to_sneak_peek = True @classmethod def is_enabled(cls, course, user=None): return True class SyllabusTab(EnrolledTab): """ A tab for the course syllabus. """ type = 'syllabus' title = ugettext_noop('Syllabus') priority = 30 view_name = 'syllabus' allow_multiple = True is_default = False is_visible_to_sneak_peek = True @classmethod def is_enabled(cls, course, user=None): if not super(SyllabusTab, cls).is_enabled(course, user=user): return False return getattr(course, 'syllabus_present', False) class ProgressTab(EnrolledTab): """ The course progress view. """ type = 'progress' title = ugettext_noop('Progress') priority = 40 view_name = 'progress' is_hideable = True is_default = False @classmethod def is_enabled(cls, course, user=None): # pylint: disable=unused-argument if not super(ProgressTab, cls).is_enabled(course, user=user): return False return not course.hide_progress_tab class TextbookTabsBase(CourseTab): """ Abstract class for textbook collection tabs classes. """ # Translators: 'Textbooks' refers to the tab in the course that leads to the course' textbooks title = ugettext_noop("Textbooks") is_collection = True is_default = False @classmethod def is_enabled(cls, course, user=None): # pylint: disable=unused-argument return user is None or user.is_authenticated() @classmethod def items(cls, course): """ A generator for iterating through all the SingleTextbookTab book objects associated with this collection of textbooks. """ raise NotImplementedError() class TextbookTabs(TextbookTabsBase): """ A tab representing the collection of all textbook tabs. 
""" type = 'textbooks' priority = None view_name = 'book' @classmethod def is_enabled(cls, course, user=None): # pylint: disable=unused-argument parent_is_enabled = super(TextbookTabs, cls).is_enabled(course, user) return settings.FEATURES.get('ENABLE_TEXTBOOK') and parent_is_enabled @classmethod def items(cls, course): for index, textbook in enumerate(course.textbooks): yield SingleTextbookTab( name=textbook.title, tab_id='textbook/{0}'.format(index), view_name=cls.view_name, index=index ) class PDFTextbookTabs(TextbookTabsBase): """ A tab representing the collection of all PDF textbook tabs. """ type = 'pdf_textbooks' priority = None view_name = 'pdf_book' @classmethod def items(cls, course): for index, textbook in enumerate(course.pdf_textbooks): yield SingleTextbookTab( name=textbook['tab_title'], tab_id='pdftextbook/{0}'.format(index), view_name=cls.view_name, index=index ) class HtmlTextbookTabs(TextbookTabsBase): """ A tab representing the collection of all Html textbook tabs. """ type = 'html_textbooks' priority = None view_name = 'html_book' @classmethod def items(cls, course): for index, textbook in enumerate(course.html_textbooks): yield SingleTextbookTab( name=textbook['tab_title'], tab_id='htmltextbook/{0}'.format(index), view_name=cls.view_name, index=index ) class LinkTab(CourseTab): """ Abstract class for tabs that contain external links. """ link_value = '' def __init__(self, tab_dict=None, name=None, link=None): self.link_value = tab_dict['link'] if tab_dict else link def link_value_func(_course, _reverse_func): """ Returns the link_value as the link. """ return self.link_value self.type = tab_dict['type'] tab_dict['link_func'] = link_value_func super(LinkTab, self).__init__(tab_dict) def __getitem__(self, key): if key == 'link': return self.link_value else: return super(LinkTab, self).__getitem__(key) def __setitem__(self, key, value): if key == 'link': self.link_value = value else: super(LinkTab, self).__setitem__(key, value) def to_json(self): to_json_val = super(LinkTab, self).to_json() to_json_val.update({'link': self.link_value}) return to_json_val def __eq__(self, other): if not super(LinkTab, self).__eq__(other): return False return self.link_value == other.get('link') @classmethod def is_enabled(cls, course, user=None): # pylint: disable=unused-argument return True class ExternalDiscussionCourseTab(LinkTab): """ A course tab that links to an external discussion service. """ type = 'external_discussion' # Translators: 'Discussion' refers to the tab in the courseware that leads to the discussion forums title = ugettext_noop('Discussion') priority = None is_default = False @classmethod def validate(cls, tab_dict, raise_error=True): """ Validate that the tab_dict for this course tab has the necessary information to render. """ return (super(ExternalDiscussionCourseTab, cls).validate(tab_dict, raise_error) and key_checker(['link'])(tab_dict, raise_error)) @classmethod def is_enabled(cls, course, user=None): # pylint: disable=unused-argument if not super(ExternalDiscussionCourseTab, cls).is_enabled(course, user=user): return False return course.discussion_link class ExternalLinkCourseTab(LinkTab): """ A course tab containing an external link. """ type = 'external_link' priority = None is_default = False # An external link tab is not added to a course by default allow_multiple = True @classmethod def validate(cls, tab_dict, raise_error=True): """ Validate that the tab_dict for this course tab has the necessary information to render. 
""" return (super(ExternalLinkCourseTab, cls).validate(tab_dict, raise_error) and key_checker(['link', 'name'])(tab_dict, raise_error)) class SingleTextbookTab(CourseTab): """ A tab representing a single textbook. It is created temporarily when enumerating all textbooks within a Textbook collection tab. It should not be serialized or persisted. """ type = 'single_textbook' is_movable = False is_collection_item = True priority = None def __init__(self, name, tab_id, view_name, index): def link_func(course, reverse_func, index=index): """ Constructs a link for textbooks from a view name, a course, and an index. """ return reverse_func(view_name, args=[unicode(course.id), index]) tab_dict = dict() tab_dict['name'] = name tab_dict['tab_id'] = tab_id tab_dict['link_func'] = link_func super(SingleTextbookTab, self).__init__(tab_dict) def to_json(self): raise NotImplementedError('SingleTextbookTab should not be serialized.') def get_course_tab_list(request, course): """ Retrieves the course tab list from xmodule.tabs and manipulates the set as necessary """ user = request.user is_user_enrolled = user.is_authenticated() and CourseEnrollment.is_enrolled(user, course.id) xmodule_tab_list = CourseTabList.iterate_displayable( course, user=user, settings=settings, is_user_authenticated=user.is_authenticated(), is_user_staff=has_access(user, 'staff', course, course.id), is_user_enrolled=is_user_enrolled, is_user_sneakpeek=not UserProfile.has_registered(user), ) # Now that we've loaded the tabs for this course, perform the Entrance Exam work. # If the user has to take an entrance exam, we'll need to hide away all but the # "Courseware" tab. The tab is then renamed as "Entrance Exam". course_tab_list = [] for tab in xmodule_tab_list: if user_must_complete_entrance_exam(request, user, course): # Hide all of the tabs except for 'Courseware' # Rename 'Courseware' tab to 'Entrance Exam' if tab.type is not 'courseware': continue tab.name = _("Entrance Exam") course_tab_list.append(tab) # Add in any dynamic tabs, i.e. those that are not persisted course_tab_list += _get_dynamic_tabs(course, user) return course_tab_list def _get_dynamic_tabs(course, user): """ Returns the dynamic tab types for the current user. Note: dynamic tabs are those that are not persisted in the course, but are instead added dynamically based upon the user's role. """ dynamic_tabs = list() for tab_type in CourseTabPluginManager.get_tab_types(): if getattr(tab_type, "is_dynamic", False): tab = tab_type(dict()) if tab.is_enabled(course, user=user): dynamic_tabs.append(tab) dynamic_tabs.sort(key=lambda dynamic_tab: dynamic_tab.name) return dynamic_tabs
jbassen/edx-platform
lms/djangoapps/courseware/tabs.py
Python
agpl-3.0
10,813
0.001665
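A sketch of how a new tab type could plug into the pattern above; everything here (class name, type string, view name, course flag) is illustrative rather than part of the real codebase:

class NotesTab(EnrolledTab):
    """ A hypothetical enrolled-only tab, shown only to illustrate the pattern. """
    type = 'notes_example'
    title = ugettext_noop('Notes')
    priority = 50
    view_name = 'notes_example_view'  # assumed to be a registered view name
    is_default = False

    @classmethod
    def is_enabled(cls, course, user=None):
        # enrolled/staff check from EnrolledTab, then a per-course switch
        if not super(NotesTab, cls).is_enabled(course, user=user):
            return False
        return getattr(course, 'notes_enabled', False)  # assumed course flag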
############################################################################### ## ## Copyright (C) 2006-2011, University of Utah. ## All rights reserved. ## Contact: contact@vistrails.org ## ## This file is part of VisTrails. ## ## "Redistribution and use in source and binary forms, with or without ## modification, are permitted provided that the following conditions are met: ## ## - Redistributions of source code must retain the above copyright notice, ## this list of conditions and the following disclaimer. ## - Redistributions in binary form must reproduce the above copyright ## notice, this list of conditions and the following disclaimer in the ## documentation and/or other materials provided with the distribution. ## - Neither the name of the University of Utah nor the names of its ## contributors may be used to endorse or promote products derived from ## this software without specific prior written permission. ## ## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" ## AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, ## THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR ## PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR ## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, ## EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, ## PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; ## OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, ## WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR ## OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE." ## ############################################################################### from core.configuration import ConfigurationObject identifier = 'edu.utah.sci.vistrails.persistence.exp' version = '0.2.0' name = 'Persistence' configuration = ConfigurationObject(global_db=(None, str), local_db=(None, str), git_bin=(None, str), search_dbs=(None, str), compress_by_default=False, debug=False)
CMUSV-VisTrails/WorkflowRecommendation
vistrails/packages/persistence_exp/__init__.py
Python
bsd-3-clause
2,337
0.0184
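A hedged sketch of how package code might read the configuration declared above; treat the attribute-access API and the default fallback as assumptions, since the (default, type) tuples are resolved by VisTrails at runtime:

# Sketch only: ConfigurationObject fields are assumed to be readable as
# attributes once VisTrails has merged in any user-supplied values.
def resolve_git_binary(default_path='git'):
    git_bin = getattr(configuration, 'git_bin', None)
    return git_bin or default_path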
#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2010-2016, MIT Probabilistic Computing Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Analyze and save .bdb files for Satellites. This script simultaneously serves two purposes: - To prepare an analyzed .bdb file for distribution to clients of the Satellites demo - To prepare a series of .bdb files for consumption by the stability probing script probe.py. Each produced file is named with a 'satellites' prefix. The file name additionally captures - a date stamp, - the running user, - the number of models [analysis snapshots only], and - the number of analysis iterations [analysis snapshots only]. For each foo.bdb, this program saves a foo-meta.txt containing the following information: - name and sha256sum of the file described; - # models; # iterations; time taken; initial entropy; parallelism level; date stamp; user stamp; - crosscat version; bayeslite version; bdbcontrib version (including a full copy of this driver script); and - logscore history plot [distributable .bdb only]. """ # Speed rules of thumb: # - 64 models and 1500 iterations took ~30 minutes on probcomp in late September # - 64 * 5 models and 300 iterations took ~18 minutes on probcomp 10/5/15 # - 64 * 50 models and 30 iterations took ~36 minutes on probcomp 10/5/15 import argparse import datetime import hashlib import logging import matplotlib matplotlib.use('Agg') import matplotlib.pyplot as plt import os import subprocess import sys import time import bayeslite import bayeslite.metamodels.crosscat import bdbcontrib import bdbcontrib.crosscat_utils import bdbcontrib.bql_utils import crosscat import crosscat.MultiprocessingEngine as ccme from bdbcontrib.experiments.probe import log def doit(out_dir, num_models, num_iters, checkpoint_freq, seed): then = time.time() timestamp = datetime.datetime.fromtimestamp(then).strftime('%Y-%m-%d') user = subprocess.check_output(["whoami"]).strip() host = subprocess.check_output(["hostname"]).strip() filestamp = '-' + timestamp + '-' + user def out_file_name(base, ext): return out_dir + '/' + base + filestamp + ext csv_file = os.path.join(os.path.dirname(__file__), 'satellites.csv') bdb_file = out_file_name('satellites', '.bdb') # so we can build bdb models os.environ['BAYESDB_WIZARD_MODE']='1' if not os.path.isdir(out_dir): os.makedirs(out_dir) if os.path.exists(bdb_file): print 'Error: File', bdb_file, 'already exists. Please remove it.' 
sys.exit(1) # create database mapped to filesystem log('opening bdb on disk: %s' % bdb_file) bdb = bayeslite.bayesdb_open(pathname=bdb_file, builtin_metamodels=False) def execute(bql): log("executing %s" % bql) bdb.execute(bql) # read csv into table log('reading data from %s' % csv_file) bayeslite.bayesdb_read_csv_file(bdb, 'satellites', csv_file, header=True, create=True, ifnotexists=True) # Add a "not applicable" orbit sub-type log('adding "not applicable" orbit sub-type') bdb.sql_execute('''UPDATE satellites SET type_of_orbit = 'N/A' WHERE (class_of_orbit = 'GEO' OR class_of_orbit = 'MEO') AND type_of_orbit = 'NaN' ''') # nullify "NaN" log('nullifying NaN') bdbcontrib.bql_utils.nullify(bdb, 'satellites', 'NaN') # register crosscat metamodel cc = ccme.MultiprocessingEngine(seed=seed) ccmm = bayeslite.metamodels.crosscat.CrosscatMetamodel(cc) bayeslite.bayesdb_register_metamodel(bdb, ccmm) # create the crosscat generator using execute(''' CREATE GENERATOR satellites_cc FOR satellites USING crosscat ( GUESS(*), name IGNORE, Country_of_Operator CATEGORICAL, Operator_Owner CATEGORICAL, Users CATEGORICAL, Purpose CATEGORICAL, Class_of_Orbit CATEGORICAL, Type_of_Orbit CATEGORICAL, Perigee_km NUMERICAL, Apogee_km NUMERICAL, Eccentricity NUMERICAL, Period_minutes NUMERICAL, Launch_Mass_kg NUMERICAL, Dry_Mass_kg NUMERICAL, Power_watts NUMERICAL, Date_of_Launch NUMERICAL, Anticipated_Lifetime NUMERICAL, Contractor CATEGORICAL, Country_of_Contractor CATEGORICAL, Launch_Site CATEGORICAL, Launch_Vehicle CATEGORICAL, Source_Used_for_Orbital_Data CATEGORICAL, longitude_radians_of_geo NUMERICAL, Inclination_radians NUMERICAL ) ''') execute('INITIALIZE %d MODELS FOR satellites_cc' % (num_models,)) cur_iter_ct = 0 def snapshot(): log('vacuuming') bdb.sql_execute('vacuum') cur_infix = '-%dm-%di' % (num_models, cur_iter_ct) save_file_name = out_file_name('satellites', cur_infix + '.bdb') meta_file_name = out_file_name('satellites', cur_infix + '-meta.txt') log('recording snapshot ' + save_file_name) os.system("cp %s %s" % (bdb_file, save_file_name)) report(save_file_name, meta_file_name) def record_metadata(f, saved_file_name, sha_sum, total_time, plot_file_name=None): f.write("DB file " + saved_file_name + "\n") f.write(sha_sum) f.write("built from " + csv_file + "\n") f.write("by %s@%s\n" % (user, host)) f.write("at seed %s\n" % seed) f.write("in %3.2f seconds\n" % total_time) f.write("with %s models analyzed for %s iterations\n" % (num_models, num_iters)) f.write("by bayeslite %s, with crosscat %s and bdbcontrib %s\n" % (bayeslite.__version__, crosscat.__version__, bdbcontrib.__version__)) if plot_file_name is not None: f.write("diagnostics recorded to %s\n" % plot_file_name) f.flush() def report(saved_file_name, metadata_file, echo=False, plot_file_name=None): sha256 = hashlib.sha256() with open(saved_file_name, 'rb') as fd: for chunk in iter(lambda: fd.read(65536), ''): sha256.update(chunk) sha_sum = sha256.hexdigest() + '\n' total_time = time.time() - then with open(metadata_file, 'w') as fd: record_metadata(fd, saved_file_name, sha_sum, total_time, plot_file_name) fd.write('using script ') fd.write('-' * 57) fd.write('\n') fd.flush() os.system("cat %s >> %s" % (__file__, metadata_file)) if echo: record_metadata(sys.stdout, saved_file_name, sha_sum, total_time, plot_file_name) def final_report(): # create a diagnostics plot plot_file_name = out_file_name('satellites', '-logscores.pdf') log('writing diagnostic plot to %s' % plot_file_name) _fig = 
bdbcontrib.crosscat_utils.plot_crosscat_chain_diagnostics( bdb, 'logscore', 'satellites_cc') plt.savefig(plot_file_name) final_metadata_file = out_file_name('satellites', '-meta.txt') report(bdb_file, final_metadata_file, echo=True, plot_file_name=plot_file_name) snapshot() while cur_iter_ct < num_iters: execute('ANALYZE satellites_cc FOR %d ITERATIONS WAIT' % checkpoint_freq) cur_iter_ct += checkpoint_freq snapshot() final_report() log('closing bdb %s' % bdb_file) bdb.close() os.system("cd %s && ln -s satellites%s.bdb satellites.bdb" % (out_dir, filestamp)) parser = argparse.ArgumentParser( description="Analyze and save .bdb files for Satellites.") parser.add_argument( '-o', '--outdir', default="output", help="Directory to save generated .bdb files [default: \"output\"]") parser.add_argument( '-m', '--models', type=int, help="Number of models to analyze") parser.add_argument( '-i', '--iters', type=int, help="Number of iterations of analysis to run") parser.add_argument( '-c', '--checkpoint_freq', type=int, help="Frequency of checkpoints to take [default: no checkpoints]") parser.add_argument( '-s', '--seed', type=int, default=0, help="Initial entropy [default: 0]") def main(): args = parser.parse_args() logging.basicConfig(level=logging.INFO) doit(args.outdir, args.models, args.iters, args.checkpoint_freq, args.seed) if __name__ == '__main__': main()
probcomp/bdbcontrib
examples/satellites/build_bdbs.py
Python
apache-2.0
9,056
0.002761
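Given the argparse setup above, a representative programmatic run of doit() (mirroring `python build_bdbs.py -o output -m 64 -i 1500 -c 300 -s 0`, and matching the "64 models and 1500 iterations" rule of thumb quoted in the header) would be:

# Sketch: the same arguments the CLI flags would produce.
doit(out_dir='output', num_models=64, num_iters=1500,
     checkpoint_freq=300, seed=0)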
# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*- # # Copyright (C) 2017 Canonical Ltd # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License version 3 as # published by the Free Software Foundation. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. from typing import Any, Dict, Set from snapcraft import project from snapcraft.internal.project_loader import grammar from snapcraft.internal import pluginhandler, repo from ._package_transformer import package_transformer class PartGrammarProcessor: """Process part properties that support grammar. Stage packages example: >>> from unittest import mock >>> import snapcraft >>> # Pretend that all packages are valid >>> repo = mock.Mock() >>> repo.is_valid.return_value = True >>> plugin = mock.Mock() >>> plugin.stage_packages = [{'try': ['foo']}] >>> processor = PartGrammarProcessor( ... plugin=plugin, ... properties={}, ... project=snapcraft.project.Project(), ... repo=repo) >>> processor.get_stage_packages() {'foo'} Build packages example: >>> from unittest import mock >>> import snapcraft >>> # Pretend that all packages are valid >>> repo = mock.Mock() >>> repo.is_valid.return_value = True >>> plugin = mock.Mock() >>> plugin.build_packages = [{'try': ['foo']}] >>> processor = PartGrammarProcessor( ... plugin=plugin, ... properties={}, ... project=snapcraft.project.Project(), ... repo=repo) >>> processor.get_build_packages() {'foo'} Source example: >>> from unittest import mock >>> import snapcraft >>> plugin = mock.Mock() >>> plugin.properties = {'source': [{'on amd64': 'foo'}, 'else fail']} >>> processor = PartGrammarProcessor( ... plugin=plugin, ... properties=plugin.properties, ... project=snapcraft.project.Project(), ... repo=None) >>> processor.get_source() 'foo' """ def __init__( self, *, plugin: pluginhandler.PluginHandler, properties: Dict[str, Any], project: project.Project, repo: "repo.Ubuntu" ) -> None: self._project = project self._repo = repo self._build_snap_grammar = getattr(plugin, "build_snaps", []) self.__build_snaps = set() # type: Set[str] self._build_package_grammar = getattr(plugin, "build_packages", []) self.__build_packages = set() # type: Set[str] self._stage_package_grammar = getattr(plugin, "stage_packages", []) self.__stage_packages = set() # type: Set[str] source_grammar = properties.get("source", [""]) if not isinstance(source_grammar, list): self._source_grammar = [source_grammar] else: self._source_grammar = source_grammar self.__source = "" def get_source(self) -> str: if not self.__source: # The grammar is array-based, even though we only support a single # source. 
processor = grammar.GrammarProcessor( self._source_grammar, self._project, lambda s: True ) source_array = processor.process() if len(source_array) > 0: self.__source = source_array.pop() return self.__source def get_build_snaps(self) -> Set[str]: if not self.__build_snaps: processor = grammar.GrammarProcessor( self._build_snap_grammar, self._project, repo.snaps.SnapPackage.is_valid_snap, ) self.__build_snaps = processor.process() return self.__build_snaps def get_build_packages(self) -> Set[str]: if not self.__build_packages: processor = grammar.GrammarProcessor( self._build_package_grammar, self._project, self._repo.build_package_is_valid, transformer=package_transformer, ) self.__build_packages = processor.process() return self.__build_packages def get_stage_packages(self) -> Set[str]: if not self.__stage_packages: processor = grammar.GrammarProcessor( self._stage_package_grammar, self._project, self._repo.is_valid, transformer=package_transformer, ) self.__stage_packages = processor.process() return self.__stage_packages
sergiusens/snapcraft
snapcraft/internal/project_loader/grammar_processing/_part_grammar_processor.py
Python
gpl-3.0
4,927
0
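The class docstring demonstrates stage packages, build packages, and source; a parallel sketch for an 'on' grammar clause with build packages, under the same mocking assumptions as the doctests, would be:

# Sketch in the docstring's doctest spirit; values are illustrative.
from unittest import mock
import snapcraft

plugin = mock.Mock()
plugin.build_packages = [{'on amd64': ['gcc']}, 'make']
repo = mock.Mock()
repo.build_package_is_valid.return_value = True  # pretend packages exist
processor = PartGrammarProcessor(
    plugin=plugin,
    properties={},
    project=snapcraft.project.Project(),
    repo=repo)
# On an amd64 host this is expected to yield {'gcc', 'make'}.
print(processor.get_build_packages())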
# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Unit tests for send email function""" import os import sys import pytest sys.path.append(os.path.realpath(os.path.dirname(__file__) + "../..")) from send_email_function import main @pytest.fixture def mock_env(monkeypatch): """Setting mock environment variables""" monkeypatch.setenv("SIGNED_URL", "True") monkeypatch.setenv("FROM_EMAIL", "sender@gmail.com") monkeypatch.setenv("TO_EMAILS", "recepient@gmail.com") monkeypatch.setenv("EMAIL_SUBJECT", "BigQuery email export") monkeypatch.setenv("SENDGRID_API_KEY", "SG.key") monkeypatch.setenv("SIGNED_URL_EXPIRATION", "24") def test_raise_exception(): """Tests that KeyError exception is raised when no env vars are set""" with pytest.raises(KeyError): main.get_env('SIGNED_URL') @pytest.mark.parametrize("test_input,expected", [("SIGNED_URL", "True"), ("FROM_EMAIL", "sender@gmail.com"), ("TO_EMAILS", "recepient@gmail.com"), ("EMAIL_SUBJECT", "BigQuery email export"), ("SENDGRID_API_KEY", "SG.key"), ("SIGNED_URL_EXPIRATION", "24")]) def test_get_env(mock_env, test_input, expected): """Tests reading of env vars""" assert main.get_env(test_input) == expected @pytest.mark.parametrize( "test_input,expected", [("gs://bucket/object.txt", "https://storage.cloud.google.com/bucket/object.txt"), ("gs://bucket/dir/object.txt", "https://storage.cloud.google.com/bucket/dir/object.txt"), ("gs://bucket/dir/subdir/object.json", "https://storage.cloud.google.com/bucket/dir/subdir/object.json")]) def test_get_auth_url(test_input, expected): """Tests creation of authenticated GCS URL""" assert main.get_auth_url(test_input) == expected
GoogleCloudPlatform/professional-services
examples/bq-email-exports/tests/send_email_function/test_main.py
Python
apache-2.0
2,421
0.000413
def test1(): SINK(SOURCE) def test2(): s = SOURCE SINK(s) def source(): return SOURCE def sink(arg): SINK(arg) def test3(): t = source() SINK(t) def test4(): t = SOURCE sink(t) def test5(): t = source() sink(t) def test6(cond): if cond: t = "Safe" else: t = SOURCE if cond: SINK(t) def test7(cond): if cond: t = SOURCE else: t = "Safe" if cond: SINK(t) def source2(arg): return source(arg) def sink2(arg): sink(arg) def sink3(cond, arg): if cond: sink(arg) def test8(cond): t = source2() sink2(t) #False positive def test9(cond): if cond: t = "Safe" else: t = SOURCE sink3(cond, t) def test10(cond): if cond: t = SOURCE else: t = "Safe" sink3(cond, t) def hub(arg): return arg def test11(): t = SOURCE t = hub(t) SINK(t) def test12(): t = "safe" t = hub(t) SINK(t) import module def test13(): t = module.dangerous SINK(t) def test14(): t = module.safe SINK(t) def test15(): t = module.safe2 SINK(t) def test16(): t = module.dangerous_func() SINK(t) def test20(cond): if cond: t = CUSTOM_SOURCE else: t = SOURCE if cond: CUSTOM_SINK(t) else: SINK(t) def test21(cond): if cond: t = CUSTOM_SOURCE else: t = SOURCE if not cond: CUSTOM_SINK(t) else: SINK(t) def test22(cond): if cond: t = CUSTOM_SOURCE else: t = SOURCE t = TAINT_FROM_ARG(t) if cond: CUSTOM_SINK(t) else: SINK(t) from module import dangerous as unsafe SINK(unsafe) def test23(): with SOURCE as t: SINK(t) def test24(): s = SOURCE SANITIZE(s) SINK(s) def test_update_extend(x, y): l = [SOURCE] d = {"key" : SOURCE} x.extend(l) y.update(d) SINK(x[0]) SINK(y["key"]) l2 = list(l) d2 = dict(d) def test_truth(): t = SOURCE if t: SINK(t) else: SINK(t) if not t: SINK(t) else: SINK(t)
github/codeql
python/ql/test/library-tests/taint/dataflow/test.py
Python
mit
2,191
0.016431
""" json_io.py Functions related to reading/writing/mapping json """ import json import ijson from re import sub from datetime import datetime from os import listdir, SEEK_END from sys import exc_info, stdout from random import choice, randrange from nlp import feature TWEET_LINK_RE = "https://t.co/(\w)+" TWEET_HANDLE_RE = "@(\w)+" def list_from_json(json_file): """Return a list corresponding to contents of json file""" with open(json_file, 'r') as fp: return json.load(fp) def list_to_json(lst, path, old_format=True): """Save a list of tweets to a json file at corresponding path. old_format (optional, default=true): dump using sorted keys, indenting. Set to false for streaming friendlier format """ if old_format: with open(path, 'w') as fp: json.dump(lst, fp, sort_keys=True, indent=4) else: with open(path, 'w') as fp: for i, tweet in enumerate(lst): json.dump({"text": tweet["text"], "id": tweet['id'], "media": tweet["media"], "urls": tweet["urls"]}, fp) if i != len(lst) - 1: fp.write('\n') def merge_json_filenames(json_lst): """ Return filename encapsulating date range of passed in jsons ex: merge_json_filnames(["path/to/jsons/2017-01-27_2017-02-04.json", "path/to/jsons/2017-02-02_2017-02-09.json"]) returns "2017-01-27_2017-02-09.json" """ # Get earliest and latest date of jsons for naming purposes of merged file. parse_date_from_filename = lambda fn: fn.split('/')[-1].split('.')[0].split('_') sorted_dates = sorted([datetime.strptime(date, "%Y-%m-%d") for fn in json_lst for date in parse_date_from_filename(fn)]) from_date = datetime.strftime(sorted_dates[0], "%Y-%m-%d") to_date = datetime.strftime(sorted_dates[-1], "%Y-%m-%d") return "{}_{}.json".format(from_date, to_date) def tweet_map(json_file, tweet_func, save=False): """ Apply a function to each tweet in a json file json_file - path to tweet json file tweet_func - function that takes in a 'tweet' object, and returns a 'tweet' object save (optional) - overwrite json_file with modified json returns list where each tweet has tweet_func applied to it """ mapped_tweets = [] with open(json_file, 'r') as f: # stream through f using ijson.items for tweet in ijson.items(f, "item"): mapped_tweets.append(tweet_func(tweet)) if save: list_to_json(mapped_tweets, json_file) return mapped_tweets def tweet_map(tweets, tweet_func): """ Apply a function to each tweet in a list of tweets """ return [tweet_func(tweet) for tweet in tweets] def tweet_iterate(json_file, key=None): """ Stream through objects in a json file json_file - path to tweet json file key (optional) - single key value of interest (ex: return only "text" field, or only "id" field of each tweet) """ with open(json_file, 'r') as f: if key: for tweet in ijson.items(f, "item.{}".format(key)): yield tweet else: for tweet in ijson.items(f, "item"): yield tweet def replaceLinksMentions(tweet): """ Take tweet and return tweet with new field "ner_text" where links and handles are replaced by tokens """ # replace embedded urls/media with [url], [media], or [url_media] ner_text = tweet["text"] if tweet["media"] or tweet["urls"]: if tweet['media'] and tweet['urls']: replacement_word = 'UrlMediaTOK' elif tweet['media']: replacement_word = "MediaTOK" else: replacement_word = "UrlTok" # replace twitter links with appropriate tag ner_text = sub(TWEET_LINK_RE, replacement_word, ner_text) # replace handles with appropriate tag ner_text = sub(TWEET_HANDLE_RE, "NameTOK", ner_text) tweet["ner_text"] = ner_text return tweet def fileName(features_path, source, sarcastic, i=None): return features_path + source 
+ ('sarcastic-' if sarcastic else 'serious-') + str(i) + ".json" def openFiles(features_path, sarcastic, source, n, mode='a'): """ takes in a directory path, a sarcastic boolean value, a source type and n Returns n file pointers in the specified (defaul append) mode with a large buffer located in the feature_path directory. feature_path= feats sarcastic = True source = tweet- n=5 Will create files like so: feats/tweet-sarcastic-0.json feats/tweet-sarcastic-1.json ... feats/tweet-sarcastic-5.json """ return [open(fileName(features_path, source, sarcastic, i), mode, buffering=2**24) for i in range(n)] def closeFiles(openFiles): """ Takes in a list of open file pointers flushes the buffer (done in file.close()) and closes the files. """ for file in openFiles: file.close() def processRandomizeJson(sarcastic, json_path, features_path, source, n, cleanTokens): """ takes in a sarcastic boolean, a path to json files, a path to store processed features, a source type an the number of files to create For each json file in the json_path directory it processes the features and saves it randomly to 1 of n files constructed using the openFiles function Periodically prints the file and time it took to process as well as the number of items processed so far. """ files = openFiles(features_path, sarcastic, source, n, mode='a') try: totalCount = 0 for filename in listdir(json_path): startTime = datetime.now() for line in open(json_path+filename): text = json.loads(line)['text'] features = feature(text, cleanTokens) featuresJson = json.dumps(features) + '\n' choice(files).write(featuresJson) totalCount += 1 stopTime = datetime.now() print("File %s\ttime:\t%s" % (filename, (stopTime - startTime))) print("Processed %d json lines"%totalCount) stdout.flush() closeFiles(files) except: closeFiles(files) print("Unexpected error:\n") for e in exc_info(): print(e) def loadProcessedFeatures(features_path, source, sarcastic, n=0, feature_filename=None, random=True, reduce=0): if feature_filename: with open(feature_path+feature_filename) as file: for line in file: yield (json.loads(line), sarcastic) elif random: with open(fileName(features_path, source, sarcastic, randrange(n))) as file: for line in file: yield (json.loads(line), sarcastic) else: if reduce != 0: cache = [] files = openFiles(features_path, sarcastic, source, n, mode='r') for file in files: for line in file: cache.append(line) if len(cache)==reduce: yield (json.loads(choice(cache)), sarcastic) cache = [] else: files = openFiles(features_path, sarcastic, source, n, mode='r') for file in files: for line in file: yield (json.loads(line), sarcastic)
TheWeiTheTruthAndTheLight/senior-design
src/json_io.py
Python
mit
7,324
0.005188
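A short sketch tying the helpers above together: stream tweets from a capture file, replace links and handles with tokens, and save the result. The file names are illustrative:

# Sketch: normalize every tweet in a (hypothetical) capture file.
tweets = [replaceLinksMentions(t)
          for t in tweet_iterate("2017-01-27_2017-02-04.json")]
list_to_json(tweets, "2017-01-27_2017-02-04.ner.json")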
import demistomock as demisto from CommonServerPython import * from CommonServerUserPython import * from datetime import datetime, timezone from typing import Union, Any, Dict from dateparser import parse import urllib3 import traceback # Disable insecure warnings urllib3.disable_warnings() ''' GLOBAL VARIABLES ''' MALICIOUS_DICTIONARY: Dict[Any, int] = { 'low': Common.DBotScore.GOOD, 'medium': Common.DBotScore.SUSPICIOUS, 'high': Common.DBotScore.BAD } MALICIOUS_THRESHOLD = MALICIOUS_DICTIONARY.get(demisto.params().get('threshold', 'high')) ''' CLIENT ''' class Client: """ The integration's client """ def __init__(self, params: Dict[str, str]): self.cs_client: CrowdStrikeClient = CrowdStrikeClient(params=params) self.query_params: Dict[str, str] = {'offset': 'offset', 'limit': 'limit', 'sort': 'sort', 'free_search': 'q'} self.date_params: Dict[str, Dict[str, str]] = { 'created_date': {'operator': '', 'api_key': 'created_date'}, 'last_updated_date': {'operator': '', 'api_key': 'last_updated'}, 'max_last_modified_date': {'operator': '<=', 'api_key': 'last_modified_date'}, 'min_last_activity_date': {'operator': '>=', 'api_key': 'first_activity_date'}, 'max_last_activity_date': {'operator': '<=', 'api_key': 'last_activity_date'}, } def build_request_params(self, args: Dict[str, Any]) -> Dict[str, Any]: """ Build the params dict for the request :param args: Cortex XSOAR args :return: The params dict """ params: Dict[str, Any] = {key: args.get(arg) for arg, key in self.query_params.items()} query = args.get('query') params['filter'] = query if query else self.build_filter_query(args) return assign_params(**params) def build_filter_query(self, args: Dict[str, str]) -> str: """ Builds the filter query in Falcon Query Language (FQL) :param args: Cortex XSOAR args :return: The query """ filter_query: str = str() for key in args: if key not in self.query_params: if key not in self.date_params: values: List[str] = argToList(args[key], ',') for value in values: filter_query += f"{key}:'{value}'+" else: operator: Optional[str] = self.date_params.get(key, {}).get('operator') api_key: Optional[str] = self.date_params.get(key, {}).get('api_key') # Parsing date argument of ISO format or free language into datetime object, # replacing TZ with UTC, taking its timestamp format and rounding it up. 
filter_query += f"{api_key}:" \ f"{operator}{int(parse(args[key]).replace(tzinfo=timezone.utc).timestamp())}+" if filter_query.endswith('+'): filter_query = filter_query[:-1] return filter_query def get_indicator(self, indicator_value: str, indicator_type: str) -> Dict[str, Any]: # crowdstrike do not allow passing single quotes - so we encode them # we are not encoding the entire indicator value, as the other reserved chars (such as + and &) are allowed indicator_value = indicator_value.replace("'", "%27") args: Dict[str, Any] = { 'indicator': indicator_value, 'limit': 1 } if indicator_type == 'hash': args['type'] = get_indicator_hash_type(indicator_value) elif indicator_type == 'ip': args['type'] = 'ip_address' else: args['type'] = indicator_type params: Dict[str, Any] = self.build_request_params(args) return self.cs_client.http_request(method='GET', url_suffix='intel/combined/indicators/v1', params=params) def cs_actors(self, args: Dict[str, str]) -> Dict[str, Any]: params: Dict[str, Any] = self.build_request_params(args) return self.cs_client.http_request(method='GET', url_suffix='intel/combined/actors/v1', params=params) def cs_indicators(self, args: Dict[str, str]) -> Dict[str, Any]: params: Dict[str, Any] = self.build_request_params(args) return self.cs_client.http_request(method='GET', url_suffix='intel/combined/indicators/v1', params=params) def cs_reports(self, args: Dict[str, str]) -> Dict[str, Any]: params: Dict[str, Any] = self.build_request_params(args) return self.cs_client.http_request(method='GET', url_suffix='intel/combined/reports/v1', params=params) ''' HELPER FUNCTIONS ''' def get_dbot_score_type(indicator_type: str) -> Union[Exception, DBotScoreType, str]: """ Returns the dbot score type :param indicator_type: The indicator type :return: The dbot score type """ if indicator_type == 'ip': return DBotScoreType.IP elif indicator_type == 'domain': return DBotScoreType.DOMAIN elif indicator_type == 'file' or indicator_type == 'hash': return DBotScoreType.FILE elif indicator_type == 'url': return DBotScoreType.URL else: raise DemistoException('Indicator type is not supported.') def get_score_from_resource(r: Dict[str, Any]) -> int: """ Calculates the DBotScore for the resource :param r: The resource :return: The DBotScore """ malicious_confidence: int = MALICIOUS_DICTIONARY.get(r.get('malicious_confidence'), 0) if malicious_confidence == 3 or MALICIOUS_THRESHOLD == 1: score = 3 elif malicious_confidence == 2 or MALICIOUS_THRESHOLD == 2: score = 2 else: score = 1 return score def get_indicator_hash_type(indicator_value: str) -> Union[str, Exception]: """ Calculates the type of the hash :param indicator_value: The hash value :return: The hash type """ length: int = len(indicator_value) if length == 32: return 'hash_md5' elif length == 40: return 'hash_sha1' elif length == 64: return 'hash_sha256' else: raise DemistoException(f'Invalid hash. Hash length is: {length}. 
Please provide either MD5 (32 length)' f', SHA1 (40 length) or SHA256 (64 length) hash.') def get_indicator_object(indicator_value: Any, indicator_type: str, dbot_score: Common.DBotScore) \ -> Union[Common.IP, Common.URL, Common.File, Common.Domain, None]: """ Returns the corresponding indicator common object :param indicator_value: The indicator value :param indicator_type: The indicator value :param dbot_score: The indicator DBotScore :return: The indicator common object """ if indicator_type == 'ip': return Common.IP( ip=indicator_value, dbot_score=dbot_score ) elif indicator_type == 'url': return Common.URL( url=indicator_value, dbot_score=dbot_score ) elif indicator_type == 'hash': hash_type: Union[str, Exception] = get_indicator_hash_type(indicator_value) if hash_type == 'hash_md5': return Common.File( md5=indicator_value, dbot_score=dbot_score ) elif hash_type == 'hash_sha1': return Common.File( sha1=indicator_value, dbot_score=dbot_score ) else: return Common.File( sha256=indicator_value, dbot_score=dbot_score ) elif indicator_type == 'domain': return Common.Domain( domain=indicator_value, dbot_score=dbot_score ) else: return None def build_indicator(indicator_value: str, indicator_type: str, title: str, client: Client) -> List[CommandResults]: """ Builds an indicator entry :param indicator_value: The indicator value :param indicator_type: The indicator type :param title: The title to show to the user :param client: The integration's client :return: The indicator entry """ res: Dict[str, Any] = client.get_indicator(indicator_value, indicator_type) resources: List[Any] = res.get('resources', []) results: List[CommandResults] = [] if resources: for r in resources: output = get_indicator_outputs(r) score = get_score_from_resource(r) dbot_score = Common.DBotScore( indicator=indicator_value, indicator_type=get_dbot_score_type(indicator_type), integration_name='CrowdStrike Falcon Intel v2', malicious_description='High confidence', score=score ) indicator = get_indicator_object(indicator_value, indicator_type, dbot_score) results.append(CommandResults( outputs=output, outputs_prefix='FalconIntel.Indicator', outputs_key_field='ID', indicator=indicator, readable_output=tableToMarkdown(name=title, t=output, headerTransform=pascalToSpace), raw_response=res )) else: results.append(CommandResults( readable_output=f'No indicator found for {indicator_value}.' 
)) return results def get_values(items_list: List[Any], return_type: str = 'str', keys: Union[str, List[Any]] = 'value') \ -> Union[str, List[Union[str, Dict]]]: """ Returns the values of list's items :param items_list: The items list :param return_type: Whether to return string or list :param keys: The key to get the data :return: The values list """ new_list: List[Any] = list() if isinstance(keys, str): new_list = [item.get(keys) for item in items_list] elif isinstance(keys, list): new_list = [{underscoreToCamelCase(f): item.get(f) for f in item if f in keys} for item in items_list] if return_type == 'list': return new_list return ', '.join(str(item) for item in new_list) def get_indicator_outputs(resource: Dict[str, Any]) -> Dict[str, Any]: """ Build the output and extra context of an indicator :param resource: The indicator's object :return: The indicator's human readable """ output: Dict[str, Any] = dict() if resource: indicator_id = resource.get('id') indicator_value = resource.get('indicator') indicator_type = resource.get('type') last_update = resource.get('last_update') publish_date = resource.get('publish_date') malicious_confidence = resource.get('malicious_confidence') reports = resource.get('reports') actors = resource.get('actors') malware_families = resource.get('malware_families') kill_chains = resource.get('kill_chains') domain_types = resource.get('domain_types') ip_address_types = resource.get('ip_address_types') relations: List[Any] = resource.get('relations', [])[:10] labels: List[Any] = resource.get('labels', [])[:10] output = assign_params(**{ 'ID': indicator_id, 'Type': indicator_type, 'Value': indicator_value, 'LastUpdate': datetime.fromtimestamp(last_update, timezone.utc).isoformat() if last_update else None, 'PublishDate': datetime.fromtimestamp(publish_date, timezone.utc).isoformat() if publish_date else None, 'MaliciousConfidence': malicious_confidence, 'Reports': reports, 'Actors': actors, 'MalwareFamilies': malware_families, 'KillChains': kill_chains, 'DomainTypes': domain_types, 'IPAddressTypes': ip_address_types, 'Relations': [f'{item.get("Type")}: {item.get("Indicator")}' for item in # type: ignore get_values(relations, return_type='list', keys=['indicator', 'type'])], 'Labels': get_values(labels, return_type='list', keys='name') }) return output ''' COMMANDS ''' def run_test_module(client: Client) -> Union[str, Exception]: """ If a client is successfully constructed then an access token was successfully created, therefore the username and password are valid and a connection was made. On top of the above, this function validates the http request to indicators endpoint. 
:param client: the client object with an access token :return: ok if got a valid access token and not all the quota is used at the moment """ client.cs_client.http_request('GET', 'intel/combined/indicators/v1', params={'limit': 1}) return 'ok' def file_command(files: List, client: Client) -> List[CommandResults]: results: List[CommandResults] = [] for file in files: results += build_indicator(file, 'hash', 'Falcon Intel file reputation:\n', client) return results def ip_command(ips: List, client: Client) -> List[CommandResults]: results: List[CommandResults] = [] for ip in ips: results += build_indicator(ip, 'ip', 'Falcon Intel IP reputation:\n', client) return results def url_command(urls: List, client: Client) -> List[CommandResults]: results: List[CommandResults] = [] for url in urls: results += build_indicator(url, 'url', 'Falcon Intel URL reputation:\n', client) return results def domain_command(domains: List, client: Client) -> List[CommandResults]: results: List[CommandResults] = [] for domain in domains: results += build_indicator(domain, 'domain', 'Falcon Intel domain reputation:\n', client) return results def cs_actors_command(client: Client, args: Dict[str, str]) -> CommandResults: res: Dict[str, Any] = client.cs_actors(args) resources: List[Any] = res.get('resources', []) outputs: List[Dict[str, Any]] = list() md_outputs: List[Dict[str, Any]] = list() md: str = str() title: str = 'Falcon Intel Actor search:' if resources: for r in resources: image_url = r.get('image', {}).get('url') name = r.get('name') actor_id = r.get('id') url = r.get('url') slug = r.get('slug') short_description = r.get('short_description') first_activity_date = r.get('first_activity_date') last_activity_date = r.get('last_activity_date') active = r.get('active') known_as = r.get('known_as') target_industries = r.get('target_industries', []) target_countries = r.get('target_countries', []) origins = r.get('origins', []) motivations = r.get('motivations', []) capability = r.get('capability', {}).get('value') group = r.get('group') region = r.get('region', {}).get('value') kill_chain = r.get('kill_chain') output: Dict[str, Any] = assign_params(**{ 'ImageURL': image_url, 'Name': name, 'ID': actor_id, 'URL': url, 'Slug': slug, 'ShortDescription': short_description, 'FirstActivityDate': datetime.fromtimestamp(first_activity_date, timezone.utc).isoformat() if first_activity_date else None, 'LastActivityDate': datetime.fromtimestamp(last_activity_date, timezone.utc).isoformat() if last_activity_date else None, 'Active': active, 'KnownAs': known_as, 'TargetIndustries': get_values(target_industries, return_type='list'), 'TargetCountries': get_values(target_countries, return_type='list'), 'Origins': get_values(origins, return_type='list'), 'Motivations': get_values(motivations, return_type='list'), 'Capability': capability, 'Group': group, 'Region': region, 'KillChains': kill_chain }) outputs.append(output) md_output: Dict[str, Any] = output for key in ('URL', 'ImageURL'): if key in md_output: value = md_output[key] md_output[key] = f'[{value}]({value})' md_outputs.append(md_output) else: md = 'No actors found.' 
    results: CommandResults = CommandResults(
        outputs=outputs,
        outputs_key_field='ID',
        outputs_prefix='FalconIntel.Actor',
        readable_output=md if md else tableToMarkdown(name=title, t=md_outputs, headerTransform=pascalToSpace),
        raw_response=res
    )

    return results


def cs_indicators_command(client: Client, args: Dict[str, str]) -> List[CommandResults]:
    res: Dict[str, Any] = client.cs_indicators(args)
    resources: List[Any] = res.get('resources', [])
    results: List[CommandResults] = []
    title: str = 'Falcon Intel Indicator search:'

    if resources:
        for r in resources:
            output = get_indicator_outputs(r)
            indicator_value = output.get('Value')
            indicator_type = output.get('Type')
            indicator: Optional[Common.Indicator] = None
            if indicator_type in ('hash_md5', 'hash_sha256', 'hash_sha1', 'ip_address', 'url', 'domain'):
                if indicator_type in ('hash_md5', 'hash_sha1', 'hash_sha256'):
                    indicator_type = 'hash'
                elif indicator_type == 'ip_address':
                    indicator_type = 'ip'
                score = get_score_from_resource(r)
                dbot_score = Common.DBotScore(
                    indicator=indicator_value,
                    indicator_type=get_dbot_score_type(indicator_type),
                    integration_name='CrowdStrike Falcon Intel v2',
                    malicious_description='High confidence',
                    score=score
                )
                indicator = get_indicator_object(indicator_value, indicator_type, dbot_score)
            results.append(CommandResults(
                outputs=output,
                outputs_prefix='FalconIntel.Indicator',
                outputs_key_field='ID',
                readable_output=tableToMarkdown(name=title, t=output, headerTransform=pascalToSpace),
                raw_response=res,
                indicator=indicator
            ))
    else:
        results.append(CommandResults(
            readable_output='No indicators found.'
        ))

    return results


def cs_reports_command(client: Client, args: Dict[str, str]) -> CommandResults:
    res: Dict[str, Any] = client.cs_reports(args)
    resources: List[Any] = res.get('resources', [])
    outputs: List[Dict[str, Any]] = list()
    md_outputs: List[Dict[str, Any]] = list()
    md: str = str()
    title: str = 'Falcon Intel Report search:'

    if resources:
        for r in resources:
            report_id: int = r.get('id')
            url: str = r.get('url')
            name: str = r.get('name')
            report_type: str = r.get('type', {}).get('name')
            sub_type: str = r.get('sub_type', {}).get('name')
            slug: str = r.get('slug')
            created_date: int = r.get('created_date')
            last_modified_date: int = r.get('last_modified_date')
            short_description: str = r.get('short_description')
            target_industries: List[Any] = r.get('target_industries', [])
            target_countries: List[Any] = r.get('target_countries', [])
            motivations: List[Any] = r.get('motivations', [])
            tags: List[Any] = r.get('tags', [])
            actors: List[Any] = r.get('actors', [])
            output: Dict[str, Any] = assign_params(**{
                'ID': report_id,
                'URL': url,
                'Name': name,
                'Type': report_type,
                'SubType': sub_type,
                'Slug': slug,
                'CreatedDate': datetime.fromtimestamp(created_date, timezone.utc).isoformat()
                if created_date else None,
                'LastModifiedDate': datetime.fromtimestamp(last_modified_date, timezone.utc).isoformat()
                if last_modified_date else None,
                'ShortDescription': short_description,
                'TargetIndustries': get_values(target_industries, return_type='list'),
                'TargetCountries': get_values(target_countries, return_type='list'),
                'Motivations': get_values(motivations, return_type='list'),
                'Tags': get_values(tags, return_type='list'),
                'Actors': get_values(actors, return_type='list', keys='name')
            })
            outputs.append(output)
            md_output: Dict[str, Any] = output
            if 'URL' in md_output:
                value = md_output['URL']
                md_output['URL'] = f'[{value}]({value})'
            md_outputs.append(md_output)
    else:
        md = 'No reports found.'
results: CommandResults = CommandResults( outputs_prefix='FalconIntel.Report', outputs=outputs, outputs_key_field='ID', readable_output=md if md else tableToMarkdown(name=title, t=outputs, headerTransform=pascalToSpace), raw_response=res ) return results def main(): params: Dict[str, str] = demisto.params() args: Dict[str, str] = demisto.args() results: Union[CommandResults, List[CommandResults]] try: command: str = demisto.command() LOG(f'Command being called in CrowdStrike Falcon Intel v2 is: {command}') client: Client = Client(params=params) if command == 'test-module': result: Union[str, Exception] = run_test_module(client) return_results(result) elif command == 'file': results = file_command(argToList(args['file']), client) return_results(results) elif command == 'ip': results = ip_command(argToList(args['ip']), client) return_results(results) elif command == 'url': results = url_command(argToList(args['url']), client) return_results(results) elif command == 'domain': results = domain_command(argToList(args['domain']), client) return_results(results) elif command == 'cs-actors': results = cs_actors_command(client, args) return_results(results) elif command == 'cs-indicators': results = cs_indicators_command(client, args) return_results(results) elif command == 'cs-reports': results = cs_reports_command(client, args) return_results(results) else: raise NotImplementedError(f'{command} command is not an existing CrowdStrike Falcon Intel v2 integration') except Exception as err: return_error(f'Unexpected error:\n{str(err)}', error=traceback.format_exc()) from CrowdStrikeApiModule import * # noqa: E402 if __name__ in ('__main__', 'builtin', 'builtins'): main()
demisto/content
Packs/CrowdStrikeIntel/Integrations/CrowdStrikeFalconIntel_v2/CrowdStrikeFalconIntel_v2.py
Python
mit
23,184
0.002459
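A sketch of the FQL string Client.build_filter_query assembles; the argument names are illustrative, and instantiating Client assumes valid CrowdStrike API credentials in the integration params:

# Sketch: expected filter for a mixed list argument plus a date bound.
client = Client(params=demisto.params())  # needs real API credentials
fql = client.build_filter_query({
    'type': 'domain,ip_address',
    'max_last_activity_date': '2020-01-01',
})
# fql == "type:'domain'+type:'ip_address'+last_activity_date:<=1577836800"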
# Copyright 2016 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from oslo_serialization import jsonutils as json from tempest.lib.common import rest_client class EndpointsClient(rest_client.RestClient): api_version = "v2.0" def create_endpoint(self, service_id, region_id, **kwargs): """Create an endpoint for service.""" post_body = { 'service_id': service_id, 'region': region_id, 'publicurl': kwargs.get('publicurl'), 'adminurl': kwargs.get('adminurl'), 'internalurl': kwargs.get('internalurl') } post_body = json.dumps({'endpoint': post_body}) resp, body = self.post('/endpoints', post_body) self.expected_success(200, resp.status) body = json.loads(body) return rest_client.ResponseBody(resp, body) def list_endpoints(self): """List Endpoints - Returns Endpoints.""" resp, body = self.get('/endpoints') self.expected_success(200, resp.status) body = json.loads(body) return rest_client.ResponseBody(resp, body) def delete_endpoint(self, endpoint_id): """Delete an endpoint.""" url = '/endpoints/%s' % endpoint_id resp, body = self.delete(url) self.expected_success(204, resp.status) return rest_client.ResponseBody(resp, body)
HybridF5/tempest_debug
tempest/services/identity/v2/json/endpoints_client.py
Python
apache-2.0
1,870
0
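A hedged usage sketch for the client above; building the auth_provider is elided because it depends on the surrounding tempest credential configuration:

# Sketch: auth_provider is assumed to come from tempest's credential setup.
client = EndpointsClient(auth_provider, 'identity', 'regionOne')
created = client.create_endpoint(
    service_id='11111111-2222-3333-4444-555555555555',  # hypothetical UUID
    region_id='regionOne',
    publicurl='http://keystone.example.com:5000/v2.0',
    adminurl='http://keystone.example.com:35357/v2.0',
    internalurl='http://keystone.example.com:5000/v2.0')
endpoints = client.list_endpoints()
client.delete_endpoint(created['endpoint']['id'])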
# Copyright (c) 2007-2008 The Hewlett-Packard Development Company # All rights reserved. # # The license below extends only to copyright in the software and shall # not be construed as granting a license to any other intellectual # property including but not limited to intellectual property relating # to a hardware implementation of the functionality of the software # licensed hereunder. You may use the software subject to the license # terms below provided that you ensure that this notice is replicated # unmodified and in its entirety in all distributions of the software, # modified or unmodified, in source code or in binary form. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer; # redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution; # neither the name of the copyright holders nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
# # Authors: Gabe Black microcode = ''' def macroop INS_M_R { # Find the constant we need to either add or subtract from rdi ruflag t0, 10 movi t3, t3, dsz, flags=(CEZF,), dataSize=asz subi t4, t0, dsz, dataSize=asz mov t3, t3, t4, flags=(nCEZF,), dataSize=asz zexti t2, reg, 15, dataSize=8 mfence ld t6, intseg, [1, t2, t0], "IntAddrPrefixIO << 3", addressSize=8, \ nonSpec=True st t6, es, [1, t0, rdi] mfence add rdi, rdi, t3, dataSize=asz }; def macroop INS_E_M_R { and t0, rcx, rcx, flags=(EZF,), dataSize=asz br label("end"), flags=(CEZF,) # Find the constant we need to either add or subtract from rdi ruflag t0, 10 movi t3, t3, dsz, flags=(CEZF,), dataSize=asz subi t4, t0, dsz, dataSize=asz mov t3, t3, t4, flags=(nCEZF,), dataSize=asz zexti t2, reg, 15, dataSize=8 mfence topOfLoop: ld t6, intseg, [1, t2, t0], "IntAddrPrefixIO << 3", addressSize=8, \ nonSpec=True st t6, es, [1, t0, rdi] subi rcx, rcx, 1, flags=(EZF,), dataSize=asz add rdi, rdi, t3, dataSize=asz br label("topOfLoop"), flags=(nCEZF,) end: mfence fault "NoFault" }; def macroop OUTS_R_M { # Find the constant we need to either add or subtract from rdi ruflag t0, 10 movi t3, t3, dsz, flags=(CEZF,), dataSize=asz subi t4, t0, dsz, dataSize=asz mov t3, t3, t4, flags=(nCEZF,), dataSize=asz zexti t2, reg, 15, dataSize=8 mfence ld t6, ds, [1, t0, rsi] st t6, intseg, [1, t2, t0], "IntAddrPrefixIO << 3", addressSize=8, \ nonSpec=True mfence add rsi, rsi, t3, dataSize=asz }; def macroop OUTS_E_R_M { and t0, rcx, rcx, flags=(EZF,), dataSize=asz br label("end"), flags=(CEZF,) # Find the constant we need to either add or subtract from rdi ruflag t0, 10 movi t3, t3, dsz, flags=(CEZF,), dataSize=asz subi t4, t0, dsz, dataSize=asz mov t3, t3, t4, flags=(nCEZF,), dataSize=asz zexti t2, reg, 15, dataSize=8 mfence topOfLoop: ld t6, ds, [1, t0, rsi] st t6, intseg, [1, t2, t0], "IntAddrPrefixIO << 3", addressSize=8, \ nonSpec=True subi rcx, rcx, 1, flags=(EZF,), dataSize=asz add rsi, rsi, t3, dataSize=asz br label("topOfLoop"), flags=(nCEZF,) end: mfence fault "NoFault" }; '''
austinharris/gem5-riscv
src/arch/x86/isa/insts/general_purpose/input_output/string_io.py
Python
bsd-3-clause
4,418
0
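A minimal standalone sketch, in ordinary Python rather than gem5 microcode, of the pointer bookkeeping the macroops above perform: the direction flag (read by `ruflag t0, 10`) selects whether the string pointer steps forward or backward by one operand size per transfer, and the REP variants count rcx down to zero. All names and values here are illustrative.

def rep_string_pointer_trace(ptr, count, op_size, df=0):
    """Yield successive pointer values for a REP INS/OUTS of `count` elements."""
    step = -op_size if df else op_size  # mirrors the CEZF/nCEZF select on t3
    for _ in range(count):
        yield ptr
        ptr += step

# Four word-sized transfers starting at 0x1000 with DF clear:
print(list(rep_string_pointer_trace(0x1000, 4, 2)))         # [4096, 4098, 4100, 4102]
# The same transfers with DF set walk the buffer backwards:
print(list(rep_string_pointer_trace(0x1000, 4, 2, df=1)))   # [4096, 4094, 4092, 4090]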
import sys
from datetime import datetime, timedelta
from array import array
from numpy import hsplit, asarray


class ECG:
    '''Checks validity of the selected .ecg file. If it is a valid .ecg file,
    creates an instance with all the data stored in the .ecg file'''

    def __init__(self, filename, enc='cp1250'):
        '''Default encoding is set to cp1250 - set accordingly to your needs'''
        self.leadNamesDict = {0:'Unknown', 1:'Bipolar', 2:'X bipolar', 3:'Y bipolar', 4:'Z bipolar', \
                              5:'I', 6:'II', 7:'III', 8:'VR', 9:'VL', 10:'VF', \
                              11:'V1', 12:'V2', 13:'V3', 14:'V4', 15:'V5', 16:'V6', \
                              17:'ES', 18:'AS', 19:'AI'}
        self.fn = filename
        self.enc = enc
        if not self.fn:
            raise NoneFileSpecified()
        with open(self.fn, mode='rb') as ecgFile:
            self.magicNumber = ecgFile.read(8).decode(self.enc)
            if self.magicNumber != 'ISHNE1.0':
                raise Exception('File does not have \'ISHNE1.0\' string in the first 8 bytes')
            self.crc = int.from_bytes(ecgFile.read(2), byteorder='little', signed=True)
            self.headerFixedLength = 512
            self.headerVariableLength = int.from_bytes(ecgFile.read(4), byteorder='little', signed=True)
            #get back to 10th byte where header starts
            ecgFile.seek(10)
            self.headerWhole = ecgFile.read(self.headerFixedLength + self.headerVariableLength)
            crc = int(self.compute_crc(self.headerWhole), 2)
            if (crc != self.crc):
                raise Exception('CRC check for file failed. Computed CRC: {0}, CRC in file: {1}'.format(crc, self.crc))
            #get back to 14th byte just after headerVariableLength
            ecgFile.seek(14)
            self.channelNumberOfSamples = int.from_bytes(ecgFile.read(4), byteorder='little', signed=True)
            self.headerVariableOffset = int.from_bytes(ecgFile.read(4), byteorder='little', signed=True)
            self.ecgBytesBlockOffset = int.from_bytes(ecgFile.read(4), byteorder='little', signed=True)
            self.fileVersion = int.from_bytes(ecgFile.read(2), byteorder='little', signed=True)
            self.patientFirstName = ecgFile.read(40).decode(self.enc)
            self.patientFirstName = self.patientFirstName.split('\x00', 1)[0]
            self.patientLastName = ecgFile.read(40).decode(self.enc)
            self.patientLastName = self.patientLastName.split('\x00', 1)[0]
            self.patientID = ecgFile.read(20).decode(self.enc)
            self.patientID = self.patientID.split('\x00', 1)[0]
            self.patientSex = int.from_bytes(ecgFile.read(2), byteorder='little', signed=True)
            self.patientRace = int.from_bytes(ecgFile.read(2), byteorder='little', signed=True)
            #patient date of birth as [dd,mm,yy]
            dob = list()
            for i in range(0,3):
                dob.append(int.from_bytes(ecgFile.read(2), byteorder='little', signed=True))
            self.patientDateOfBirth = datetime(dob[2], dob[1], dob[0])
            # date of test recording as [dd,mm,yy]
            dor = list()
            for i in range(0,3):
                dor.append(int.from_bytes(ecgFile.read(2), byteorder='little', signed=True))
            #date of file creation as [dd,mm,yy]
            dof = list()
            for i in range(0,3):
                dof.append(int.from_bytes(ecgFile.read(2), byteorder='little', signed=True))
            self.dateOfFileCreation = datetime(dof[2], dof[1], dof[0])
            #testStart - time of test beginning HH:MM:SS
            testStart = list()
            for i in range(0,3):
                testStart.append(int.from_bytes(ecgFile.read(2), byteorder='little', signed=True))
            self.datetimeStartOfTest = datetime(dor[2], dor[1], dor[0], testStart[0], testStart[1], testStart[2])
            self.numberOfLeads = int.from_bytes(ecgFile.read(2), byteorder='little', signed=True)
            self.leadsSpecs = list()
            self.leadsNames = list()
            for i in range(0,12):
                spec = int.from_bytes(ecgFile.read(2), byteorder='little', signed=True)
                self.leadsSpecs.append(spec)
                self.leadsNames.append(self.leadNamesDict[spec])
            self.leadsQuality = list()
            for i in range(0,12):
self.leadsQuality.append(int.from_bytes(ecgFile.read(2), byteorder='little', signed=True)) self.leadsResolution = list() for i in range(0,12): self.leadsResolution.append(int.from_bytes(ecgFile.read(2), byteorder='little', signed=False)) self.pacemaker = int.from_bytes(ecgFile.read(2), byteorder='little', signed=True) self.recorderType = ecgFile.read(40).decode(self.enc) self.recorderType = self.recorderType.split('\x00', 1)[0] self.samplingRate = int.from_bytes(ecgFile.read(2), byteorder='little', signed=True) self.datetimeEndOfTest = self.datetimeStartOfTest + timedelta(seconds=int(self.channelNumberOfSamples/self.samplingRate)) self.fileProperiaty = ecgFile.read(80).decode(self.enc) self.fileProperiaty = self.fileProperiaty.split('\x00', 1)[0] self.fileCopyright = ecgFile.read(80).decode(self.enc) self.fileCopyright = self.fileCopyright.split('\x00', 1)[0] self.reserved = ecgFile.read(80).decode(self.enc) self.reserved = self.reserved.split('\x00', 1)[0] self.reserved = ecgFile.read(80).decode(self.enc) self.reserved = self.reserved.split('\x00', 1)[0] self.headerVariable = ecgFile.read(self.headerVariableLength).decode(self.enc) if len(self.headerVariable) > 0: self.headerVariable = self.headerVariable.split('\x00', 1)[0] ecgFile.seek(self.ecgBytesBlockOffset) ecgBytes = array('h') ecgBytes.fromfile(ecgFile, self.channelNumberOfSamples * self.numberOfLeads) ecgBytesArray = asarray(ecgBytes) ecgBytesArray = ecgBytesArray.reshape(-1,self.numberOfLeads) self.ecgInChannels = hsplit(ecgBytesArray, self.numberOfLeads) def compute_crc(self, data: bytes): rol = lambda val, r_bits, max_bits: \ (val << r_bits%max_bits) & (2**max_bits-1) | \ ((val & (2**max_bits-1)) >> (max_bits-(r_bits%max_bits))) b = bytearray() data = bytearray(data) crc=0xFFFF crchi, crclo = divmod(crc, 0x100) for a in data: a = a ^ crchi crchi = a a = a >> 4 a = a ^ crchi crchi = crclo crclo = a a = rol(a,4,8) b=a a = rol(a,1,8) a = a & 0x1F crchi = a ^ crchi a = b & 0xF0 crchi = a ^ crchi b = rol(b,1,8) b = b & 0xE0 crclo = b ^ crclo checksum = bin(crchi) + bin(crclo) checksum = checksum[:9] + '0' + checksum[11:] return checksum class NoneFileSpecified(Exception): '''Filename can not be empty'''
panrobot/ishneECGviewer
ecgReader.py
Python
gpl-2.0
7,145
0.012596
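A hedged usage sketch for the ECG class above; 'example.ecg' is a placeholder path to an ISHNE 1.0 recording, and the attributes used are the ones the constructor populates.

from ecgReader import ECG

ecg = ECG('example.ecg')   # raises on a bad magic number or a CRC mismatch
print(ecg.patientFirstName, ecg.patientLastName, ecg.patientID)
print('leads:', ecg.leadsNames[:ecg.numberOfLeads])
print('sampling rate:', ecg.samplingRate, 'Hz')
print('recording:', ecg.datetimeStartOfTest, '->', ecg.datetimeEndOfTest)
lead0 = ecg.ecgInChannels[0]   # one numpy column of raw samples per lead
print('samples in first lead:', lead0.shape[0])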
# Copyright (c) 2014 by Ecreall under licence AGPL terms # available on http://www.gnu.org/licenses/agpl.html # licence: AGPL # author: Amen Souissi from dace.processdefinition.processdef import ProcessDefinition from dace.processdefinition.activitydef import ActivityDefinition from dace.processdefinition.gatewaydef import ( ExclusiveGatewayDefinition, ParallelGatewayDefinition) from dace.processdefinition.transitiondef import TransitionDefinition from dace.processdefinition.eventdef import ( StartEventDefinition, EndEventDefinition) from dace.objectofcollaboration.services.processdef_container import ( process_definition) from pontus.core import VisualisableElement from .behaviors import ( Addapplications, AddFacebookApplication, AddTwitterApplication, AddGoogleApplication, SeeApplication, EditApplication, RemoveApplication ) from lac import _ @process_definition(name='socialapplicationsprocess', id='socialapplicationsprocess') class SocialApplicationsProcess(ProcessDefinition, VisualisableElement): isUnique = True def __init__(self, **kwargs): super(SocialApplicationsProcess, self).__init__(**kwargs) self.title = _('Social applications process') self.description = _('Social applications process') def _init_definition(self): self.defineNodes( start = StartEventDefinition(), pg = ParallelGatewayDefinition(), addapplication = ActivityDefinition(contexts=[Addapplications, AddFacebookApplication, AddTwitterApplication, AddGoogleApplication], description=_("Add a social application"), title=_("Add a social application"), groups=[]), seeapplication = ActivityDefinition(contexts=[SeeApplication], description=_("See the application"), title=_("See the application"), groups=[]), editapplication = ActivityDefinition(contexts=[EditApplication], description=_("Edit the application"), title=_("Edit"), groups=[]), removeapplication = ActivityDefinition(contexts=[RemoveApplication], description=_("Remove the application"), title=_("Remove"), groups=[]), eg = ExclusiveGatewayDefinition(), end = EndEventDefinition(), ) self.defineTransitions( TransitionDefinition('start', 'pg'), TransitionDefinition('pg', 'addapplication'), TransitionDefinition('addapplication', 'eg'), TransitionDefinition('pg', 'seeapplication'), TransitionDefinition('seeapplication', 'eg'), TransitionDefinition('pg', 'editapplication'), TransitionDefinition('editapplication', 'eg'), TransitionDefinition('pg', 'removeapplication'), TransitionDefinition('removeapplication', 'eg'), TransitionDefinition('eg', 'end'), )
ecreall/lagendacommun
lac/content/processes/social_applications_management/definition.py
Python
agpl-3.0
3,478
0.008051
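For reference, a small standalone sketch of the control flow that _init_definition() above wires up: the parallel gateway fans out to the four activities and the exclusive gateway funnels each of them into the end event. The edge list below is transcribed from the TransitionDefinition calls.

edges = [
    ('start', 'pg'),
    ('pg', 'addapplication'), ('addapplication', 'eg'),
    ('pg', 'seeapplication'), ('seeapplication', 'eg'),
    ('pg', 'editapplication'), ('editapplication', 'eg'),
    ('pg', 'removeapplication'), ('removeapplication', 'eg'),
    ('eg', 'end'),
]
successors = {}
for src, dst in edges:
    successors.setdefault(src, []).append(dst)
print(successors['pg'])   # the four activities reachable from the parallel gateway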
import binascii import code import importlib import json import psycopg2 import readline import socket import struct import sys import time from Crypto.Cipher import AES from datetime import datetime from multiprocessing import Process from threading import Thread import config from plugins import sensor_ht, magnet, yeelight from utils import get_store from web.w import run_app as web_app conn = psycopg2.connect("dbname={} user={} password={}".format(config.DBNAME, config.DBUSER, config.DBPASS)) cursor = conn.cursor() MULTICAST = { 'mihome': ('224.0.0.50', 9898), 'yeelight': ('239.255.255.250', 1982) } SOCKET_BUFSIZE = 1024 IV = bytes([0x17, 0x99, 0x6d, 0x09, 0x3d, 0x28, 0xdd, 0xb3, 0xba, 0x69, 0x5a, 0x2e, 0x6f, 0x58, 0x56, 0x2e]) def receiver(service='mihome'): from plugins import gateway assert service in MULTICAST, 'No such service' store = get_store() address, port = MULTICAST.get(service) sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) sock.bind(("0.0.0.0", port)) mreq = struct.pack("=4sl", socket.inet_aton(address), socket.INADDR_ANY) sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, 32) sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_LOOP, 1) sock.setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, SOCKET_BUFSIZE) sock.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, mreq) sock.settimeout(20) # 2x of heartbeat period current = {} while True: try: data, _ = sock.recvfrom(SOCKET_BUFSIZE) # buffer size is 1024 bytes except socket.timeout: continue print(datetime.now().isoformat(), data) if service == 'mihome': message = json.loads(data.decode()) data = json.loads(message['data']) if message.get('model') in ('sensor_ht', 'weather.v1') and not sensor_ht.process(conn, cursor, current, message, data): continue elif message.get('model') == 'magnet': magnet.process(store, message, data) elif message.get('model') == 'gateway': gateway.process(store, message, data) current = {} elif service == 'yeelight': yeelight.process(data.decode()) def send_command(command, timeout=10): _, port = MULTICAST.get('mihome') if isinstance(command.get('data'), dict): command['data'] = json.dumps(command['data']) address = get_store().get('gateway_addr') if address is None: print("Didn't receive any heartbeat from gateway yet. 
Delaying request for 10 seconds.")
        time.sleep(10)
        address = get_store().get('gateway_addr')  # re-read; the heartbeat may have arrived while we slept

    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    sock.settimeout(timeout)
    sock.connect((address, port))
    sock.send(json.dumps(command).encode('ascii'))
    data = None
    try:
        data, addr = sock.recvfrom(SOCKET_BUFSIZE)
    except ConnectionRefusedError:
        print("send_command :: recvfrom() connection refused: {}:{}".format(address.decode(), port))
    except socket.timeout:
        print("send_command :: recvfrom() timed out: {}:{}".format(address.decode(), port))
    finally:
        sock.close()
    return data


def get_key():
    """Get current gateway key"""
    cipher = AES.new(config.MIHOME_GATEWAY_PASSWORD, AES.MODE_CBC, IV)
    encrypted = cipher.encrypt(get_store().get('gateway_token'))
    return binascii.hexlify(encrypted)


if __name__ == '__main__':
    if len(sys.argv) > 1 and sys.argv[1] == 'shell':
        vars = globals().copy()
        vars.update(locals())
        shell = code.InteractiveConsole(vars)
        shell.interact()
        sys.exit()

    Thread(target=web_app).start()

    for app_name in config.ENABLED_APPS:
        try:
            app = importlib.import_module('apps.{}'.format(app_name))
        except ImportError as e:
            print('Could not import app "{}": {}'.format(app_name, e))
            continue
        kwargs = {'store': get_store(), 'conn': conn, 'cursor': cursor}
        Process(target=app.run, kwargs=kwargs).start()
        print('Loaded app: {}'.format(app_name))

    for service in MULTICAST:
        Process(target=receiver, args=(service,)).start()

    # Discover Yeelight bulbs
    yeelight.discover()
aluminiumgeek/goodbye-mihome
mihome.py
Python
bsd-2-clause
4,258
0.001879
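A self-contained sketch of the handshake that get_key() above implements: the Mi Home gateway expects its current 16-byte token encrypted with the 16-character account password under AES-128-CBC with the fixed IV, hex-encoded. The password and token below are placeholders.

import binascii
from Crypto.Cipher import AES

IV = bytes([0x17, 0x99, 0x6d, 0x09, 0x3d, 0x28, 0xdd, 0xb3,
            0xba, 0x69, 0x5a, 0x2e, 0x6f, 0x58, 0x56, 0x2e])

def gateway_key(password: bytes, token: bytes) -> bytes:
    assert len(password) == 16 and len(token) == 16   # AES-128 key and block size
    return binascii.hexlify(AES.new(password, AES.MODE_CBC, IV).encrypt(token))

print(gateway_key(b'0123456789abcdef', b'fedcba9876543210'))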
import urllib def command_oraakkeli(bot, user, channel, args): """Asks a question from the oracle (http://www.lintukoto.net/viihde/oraakkeli/)""" if not args: return args = urllib.quote_plus(args) answer = getUrl("http://www.lintukoto.net/viihde/oraakkeli/index.php?kysymys=%s&html=0" % args).getContent() answer = unicode(answer) answer = answer.encode("utf-8") return bot.say(channel, "Oraakkeli vastaa: %s" % answer)
nigeljonez/newpyfibot
modules/module_oraakkeli.py
Python
bsd-3-clause
459
0.010893
""" PymageJ Copyright (C) 2015 Jochem Smit This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. """ import numpy as np import struct import re from collections import namedtuple import os import warnings #todo figure out if x,y coords should be relative or absolute! -> relative # http://rsb.info.nih.gov/ij/developer/source/ij/io/RoiDecoder.java.html # http://rsb.info.nih.gov/ij/developer/source/ij/io/RoiEncoder.java.html # clippers polynomial? # Base class for all ROI classes class ROIObject(object): header = {} # todo overwrite get/set and print warnings when not supported by imagej def __init__(self, name=None): self.name = name def area(self): raise NotImplementedError('Area not implemented') class ROIPolygon(ROIObject): type = 'polygon' def __init__(self, top, left, x_coords, y_coords, *args, **kwargs): self.top = top self.left = left self.x_coords = np.array(x_coords) self.y_coords = np.array(y_coords) super(ROIPolygon, self).__init__(*args, **kwargs) @property def bottom(self): return self.y_coords.max() + self.top @property def right(self): return self.x_coords.max() + self.left @property def width(self): return self.x_coords.max() - self.x_coords.min() + 1 @property def height(self): return self.y_coords.max() - self.y_coords.min() + 1 @property def area(self): raise NotImplementedError('Area of polygon ROI is not implemented') def __len__(self): return len(self.x_coords) class ROIRect(ROIObject): type = 'rect' def __init__(self, top, left, bottom, right, arc=0, *args, **kwargs): self.top = top self.left = left self.bottom = bottom self.right = right self.arc = arc super(ROIRect, self).__init__(*args, **kwargs) @property def width(self): return self.right - self.left @property def height(self): return self.bottom - self.top @property def area(self): if self.arc == 0: return self.width * self.height else: warnings.warn(r"Rounded rectangle area value differs from ImageJ value as it first 'smooths' to pixels") return self.width * self.height - ((4 - np.pi)*(self.arc/2.)**2) class ROIOval(ROIObject): type = 'oval' def __init__(self, top, left, bottom, right, *args, **kwargs): self.top = top self.left = left self.bottom = bottom self.right = right super(ROIOval, self).__init__(*args, **kwargs) @property def width(self): return self.right - self.left @property def height(self): return self.bottom - self.top @property def area(self): warnings.warn(r"Oval area value differs from ImageJ value as it first 'smooths' to pixels") return self.width*self.height*np.pi*0.25 class ROILine(ROIObject): type = 'line' def __init__(self, x1, y1, x2, y2, *args, **kwargs): self.x1 = x1 self.y1 = y1 self.x2 = x2 self.y2 = y2 super(ROILine, self).__init__(*args, **kwargs) @property def area(self): return 0 class ROIFreeLine(ROIObject): type = 'freeline' def __init__(self, top, left, x_coords, y_coords, *args, **kwargs): assert (len(x_coords) == len(y_coords)) self.top = top self.left = left 
self.x_coords = np.array(x_coords) self.y_coords = np.array(y_coords) super(ROIFreeLine, self).__init__(*args, **kwargs) @property def bottom(self): return self.y_coords.max() + self.top @property def right(self): return self.x_coords.max() + self.left @property def width(self): return self.x_coords.max() - self.x_coords.min() + 1 @property def height(self): return self.y_coords.max() - self.y_coords.min() + 1 @property def area(self): raise NotImplementedError("Freeline area not implemented") def __len__(self): return len(self.x_coords) class ROIPolyline(ROIObject): type = 'polyline' def __init__(self, top, left, x_coords, y_coords, *args, **kwargs): assert(len(x_coords) == len(y_coords)) self.top = top self.left = left self.x_coords = np.array(x_coords) self.y_coords = np.array(y_coords) super(ROIPolyline, self).__init__(*args, **kwargs) @property def bottom(self): return self.y_coords.max() + self.top @property def right(self): return self.x_coords.max() + self.left @property def width(self): return self.x_coords.max() - self.x_coords.min() + 1 @property def height(self): return self.y_coords.max() - self.y_coords.min() + 1 @property def area(self): raise NotImplementedError("Freeline area not implemented") def __len__(self): return len(self.x_coords) class ROINoRoi(ROIObject): type = 'no_roi' @property def area(self): return 0 class ROIFreehand(ROIObject): type = 'freehand' def __init__(self, top, left, x_coords, y_coords, *args, **kwargs): self.top = top self.left = left self.x_coords = np.array(x_coords) self.y_coords = np.array(y_coords) super(ROIFreehand, self).__init__(*args, **kwargs) @property def bottom(self): return self.y_coords.max() + self.top @property def right(self): return self.x_coords.max() + self.left @property def width(self): return self.x_coords.max() - self.x_coords.min() + 1 @property def height(self): return self.y_coords.max() - self.y_coords.min() + 1 @property def area(self): raise NotImplementedError('Area of freehand ROI is not implemented') def __len__(self): return len(self.x_coords) class ROITraced(ROIObject): type = 'traced' def __init__(self, top, left, bottom, right, x_coords, y_coords, *args, **kwargs): self.top = top self.left = left self.bottom = bottom self.right = right self.x_coords = x_coords self.y_coords = y_coords super(ROITraced, self).__init__(*args, **kwargs) @property def width(self): return self.x_coords.max() - self.x_coords.min() + 1 @property def height(self): return self.y_coords.max() - self.y_coords.min() + 1 @property def area(self): raise NotImplementedError('Area of traced ROI is not implemented') class ROIAngle(ROIObject): @property def area(self): return 0 class ROIPoint(ROIObject): @property def area(self): return 0 HeaderTuple = namedtuple('Header_variables', 'type size offset') class ROIFileObject(object): header1_fields = [ # 'VAR_NAME', 'type', offset' ['MAGIC', '4s', 0], ['VERSION_OFFSET', 'h', 4], ['TYPE', 'b', 6], ['TOP', 'h', 8], ['LEFT', 'h', 10], ['BOTTOM', 'h', 12], ['RIGHT', 'h', 14], ['N_COORDINATES', 'h', 16], ['X1', 'f', 18], ['Y1', 'f', 22], ['X2', 'f', 26], ['Y2', 'f', 30], ['XD', 'f', 18], # D vars for sub pixel resolution ROIs ['YD', 'f', 22], ['WIDTH', 'f', 26], ['HEIGHT', 'f', 30], ['STROKE_WIDTH', 'h', 34], ['SHAPE_ROI_SIZE', 'i', 36], ['STROKE_COLOR', 'i', 40], ['FILL_COLOR', 'i', 44], ['SUBTYPE', 'h', 48], ['OPTIONS', 'h', 50], ['ARROW_STYLE', 'b', 52], ['ELLIPSE_ASPECT_RATIO', 'b', 52], ['POINT_TYPE', 'b', 52], ['ARROW_HEAD_SIZE', 'b', 53], ['ROUNDED_RECT_ARC_SIZE', 'h', 54], ['POSITION', 'i', 56], 
['HEADER2_OFFSET', 'i', 60] #['COORDINATES', 'i', 64] ] header2_fields = [ ['C_POSITION', 'i', 4], ['Z_POSITION', 'i', 8], ['T_POSITION', 'i', 12], ['NAME_OFFSET', 'i', 16], ['NAME_LENGTH', 'i', 20], ['OVERLAY_LABEL_COLOR', 'i', 24], ['OVERLAY_FONT_SIZE', 'h', 28], ['AVAILABLE_BYTE1', 'b', 30], ['IMAGE_OPACITY', 'b', 31], ['IMAGE_SIZE', 'i', 32], ['FLOAT_STROKE_WIDTH', 'f', 36], ['ROI_PROPS_OFFSET', 'i', 40], ['ROI_PROPS_LENGTH', 'i', 44] ] roi_types_rev = {'polygon': 0, 'rect': 1, 'oval': 2, 'line': 3, 'freeline': 4, 'polyline': 5, 'no_roi': 6, 'freehand': 7, 'traced': 8, 'angle': 9, 'point': 10} roi_types = {0: 'polygon', 1: 'rect', 2: 'oval', 3: 'line', 4: 'freeline', 5: 'polyline', 6: 'no_roi', 7: 'freehand', 8: 'traced', 9: 'angle', 10: 'point'} @staticmethod def _type_size(_type): sizes = {'h': 2, 'f': 4, 'i': 4, 's': 1, 'b': 1} char = re.findall('\D', _type)[0] size = sizes[char] number = re.findall('\d', _type) if number: size *= int(number[0]) return size class ROIEncoder(ROIFileObject): header2_offset = 64 name_offset = 128 def __init__(self, path, roi_obj, name=None): self.path = path self.roi_obj = roi_obj self.name = name self._header1_dict = {e[0]: HeaderTuple(e[1], self._type_size(e[1]), e[2]) for e in self.header1_fields} self._header2_dict = {e[0]: HeaderTuple(e[1], self._type_size(e[1]), e[2]) for e in self.header2_fields} def write(self): self._write_var('MAGIC', b'Iout') self._write_var('VERSION_OFFSET', 226) # todo or 225? for key, val in self.roi_obj.header.items(): self._write_var(key, val) roi_writer = getattr(self, '_write_roi_' + self.roi_obj.type) roi_writer() def __enter__(self): self.f_obj = open(self.path, 'wb') pad = struct.pack('128b', *[0]*128)#*np.zeros(128)) self.f_obj.write(pad) return self def __exit__(self, exc_type, exc_val, exc_tb): self.f_obj.close() return False def _write_roi_polygon(self): self._write_var('TYPE', self.roi_types_rev[self.roi_obj.type]) self._write_var('TOP', self.roi_obj.top) self._write_var('LEFT', self.roi_obj.left) self._write_var('BOTTOM', self.roi_obj.bottom) self._write_var('RIGHT', self.roi_obj.right) self._write_var('N_COORDINATES', len(self.roi_obj)) self._write_var('HEADER2_OFFSET', self.header2_offset) self._write_var('NAME_OFFSET', self.name_offset) self._write_coords(np.concatenate((self.roi_obj.x_coords, self.roi_obj.y_coords))) self._write_name() def _write_roi_rect(self): self._write_var('TYPE', self.roi_types_rev[self.roi_obj.type]) self._write_var('TOP', self.roi_obj.top) self._write_var('LEFT', self.roi_obj.left) self._write_var('BOTTOM', self.roi_obj.bottom) self._write_var('RIGHT', self.roi_obj.right) self._write_var('HEADER2_OFFSET', self.header2_offset) self._write_var('NAME_OFFSET', self.name_offset) self._write_name() def _write_roi_oval(self): self._write_var('TYPE', self.roi_types_rev[self.roi_obj.type]) self._write_var('TOP', self.roi_obj.top) self._write_var('LEFT', self.roi_obj.left) self._write_var('BOTTOM', self.roi_obj.bottom) self._write_var('RIGHT', self.roi_obj.right) self._write_var('HEADER2_OFFSET', self.header2_offset) self._write_var('NAME_OFFSET', self.name_offset) self._write_name() def _write_roi_line(self): self._write_var('TYPE', self.roi_types_rev[self.roi_obj.type]) for var_name, attr_name in zip(['X1', 'Y1', 'X2', 'Y2'], ['x1', 'y1', 'x2', 'y2']): self._write_var(var_name, getattr(self.roi_obj, attr_name)) self._write_var('HEADER2_OFFSET', self.header2_offset) self._write_var('NAME_OFFSET', self.name_offset) self._write_name() def _write_roi_freeline(self): self._write_var('TYPE', 
self.roi_types_rev[self.roi_obj.type]) self._write_var('TOP', self.roi_obj.top) self._write_var('LEFT', self.roi_obj.left) self._write_var('BOTTOM', self.roi_obj.bottom) self._write_var('RIGHT', self.roi_obj.right) self._write_var('N_COORDINATES', len(self.roi_obj)) self._write_var('HEADER2_OFFSET', self.header2_offset) self._write_var('NAME_OFFSET', self.name_offset) self._write_coords(np.concatenate((self.roi_obj.x_coords, self.roi_obj.y_coords))) self._write_name() def _write_roi_polyline(self): self._write_var('TYPE', self.roi_types_rev[self.roi_obj.type]) self._write_var('TOP', self.roi_obj.top) self._write_var('LEFT', self.roi_obj.left) self._write_var('BOTTOM', self.roi_obj.bottom) self._write_var('RIGHT', self.roi_obj.right) self._write_var('N_COORDINATES', len(self.roi_obj)) self._write_var('HEADER2_OFFSET', self.header2_offset) self._write_var('NAME_OFFSET', self.name_offset) self._write_coords(np.concatenate((self.roi_obj.x_coords, self.roi_obj.y_coords))) self._write_name() def _write_roi_no_roi(self): raise NotImplementedError('Writing roi type no roi is not implemented') def _write_roi_freehand(self): self._write_var('TYPE', self.roi_types_rev[self.roi_obj.type]) self._write_var('TOP', self.roi_obj.top) self._write_var('LEFT', self.roi_obj.left) self._write_var('BOTTOM', self.roi_obj.bottom) self._write_var('RIGHT', self.roi_obj.right) self._write_var('N_COORDINATES', len(self.roi_obj)) self._write_var('HEADER2_OFFSET', self.header2_offset) self._write_var('NAME_OFFSET', self.name_offset) self._write_coords(np.concatenate((self.roi_obj.x_coords, self.roi_obj.y_coords))) self._write_name() def _write_roi_traced(self): raise NotImplementedError('Writing roi type traced is not implemented') def _write_roi_angle(self): raise NotImplementedError('Writing roi type angle is not implemented') def _write_roi_point(self): raise NotImplementedError('Writing roi type point is not implemented') def _write_var(self, var_name, value): #todo typechecking of var values if var_name in self._header1_dict: var = self._header1_dict[var_name] offset = var.offset elif var_name in self._header2_dict: var = self._header2_dict[var_name] offset = var.offset + self.header2_offset else: raise Exception('Header variable %s not found' % var_name) self.f_obj.seek(offset) binary = struct.pack('>' + var.type, value) self.f_obj.write(binary) def _write_name(self): if self.roi_obj.name: name = self.roi_obj.name else: name = os.path.basename( os.path.splitext(self.path)[0] ) self._write_var('NAME_LENGTH', len(name)) name = ''.join(i for j in zip(len(name)*' ', name) for i in j) # interleave with with spaces self.f_obj.seek(self.name_offset) self.f_obj.write(name.encode()) def _write_coords(self, coords): self.f_obj.seek(64) binary = struct.pack('>' + str(len(coords)) + 'h', *coords) self.f_obj.write(binary) @property def header2_offset(self): if hasattr(self.roi_obj, 'x_coords'): return 64 + len(self.roi_obj)*2*2 # Header1 size + 2 bytes per pair of coords else: return 64 @property def name_offset(self): return self.header2_offset + 64 # Name is after header2 which as size 64 class ROIDecoder(ROIFileObject): def __init__(self, roi_path): self.roi_path = roi_path self.header = {} # Output header dict self._header1_dict = {e[0]: HeaderTuple(e[1], self._type_size(e[1]), e[2]) for e in self.header1_fields} self._header2_dict = {e[0]: HeaderTuple(e[1], self._type_size(e[1]), e[2]) for e in self.header2_fields} def __enter__(self): self.f_obj = open(self.roi_path, 'rb') #todo check r r return self def __exit__(self, 
type, value, traceback): self.f_obj.close() return False def read_header_all(self): to_read_h1 = [e[0] for e in self.header1_fields] # Read everything in header1 to_read_h2 = [e[0] for e in self.header2_fields] # Read everything in header2 for h in to_read_h1 + to_read_h2: self._set_header(h) def read_header(self): if str(self._get_var('MAGIC')) != str(b'Iout'): raise IOError('Invalid ROI file, magic number mismatch') # to_read_h1 = ['VERSION_OFFSET', 'TYPE', 'SUBTYPE', 'TOP', 'LEFT', 'BOTTOM', 'RIGHT', 'N_COORDINATES', # 'STROKE_WIDTH', 'SHAPE_ROI_SIZE', 'STROKE_COLOR', 'FILL_COLOR', 'SUBTYPE', 'OPTIONS', 'POSITION', # 'HEADER2_OFFSET'] to_read_h1 = [e[0] for e in self.header1_fields] # Read everything in header2 to_read_h2 = [e[0] for e in self.header2_fields] # Read everything in header2 #todo why is this here and is it still nessecary set_zero = ['OVERLAY_LABEL_COLOR', 'OVERLAY_FONT_SIZE', 'IMAGE_OPACITY'] for h in to_read_h1 + to_read_h2: self.header[h] = self._get_var(h) # self._set_header(h) for h in set_zero: self.header[h] = 0 def get_roi(self): if not self.header: self.read_header() try: roi_reader = getattr(self, '_get_roi_' + self.roi_types[self.header['TYPE']]) except AttributeError: raise NotImplementedError('Reading roi type %s not implemented' % self.roi_types[self.header['TYPE']]) roi_obj = roi_reader() roi_obj.name = self._get_name() roi_obj.header = self.header return roi_obj def _get_roi_polygon(self): params = ['TOP', 'LEFT', 'BOTTOM', 'RIGHT'] for p in params: self._set_header(p) top, left, bottom, right = [self.header[p] for p in params] n_coords = self.header['N_COORDINATES'] self.f_obj.seek(64) binary = self.f_obj.read(2*n_coords*2) coords = np.array(struct.unpack('>' + str(2*n_coords) + 'h', binary)) x_coords = np.array(coords[:n_coords]) y_coords = np.array(coords[n_coords:]) return ROIPolygon(top, left, x_coords, y_coords) def _get_roi_rect(self): self._set_header('ROUNDED_RECT_ARC_SIZE') arc = self.header['ROUNDED_RECT_ARC_SIZE'] params = ['TOP', 'LEFT', 'BOTTOM', 'RIGHT'] for p in params: self._set_header(p) top, left, bottom, right = [self.header[p] for p in params] return ROIRect(top, left, bottom, right, arc=arc) def _get_roi_oval(self): params = ['TOP', 'LEFT', 'BOTTOM', 'RIGHT'] for p in params: self._set_header(p) top, left, bottom, right = [self.header[p] for p in params] return ROIOval(top, left, bottom, right) def _get_roi_line(self): params = ['X1', 'Y1', 'X2', 'Y2'] for p in params: self._set_header(p) return ROILine(*[self.header[p] for p in params]) def _get_roi_freeline(self): params = ['TOP', 'LEFT', 'BOTTOM', 'RIGHT'] for p in params: self._set_header(p) top, left, bottom, right = [self.header[p] for p in params] n_coords = self.header['N_COORDINATES'] self.f_obj.seek(64) binary = self.f_obj.read(2*2*n_coords) # Two bytes per pair of coords coords = np.array(struct.unpack('>' + str(2*n_coords) + 'h', binary)) x_coords = np.array(coords[:n_coords]) y_coords = np.array(coords[n_coords:]) #todo read coord function return ROIFreeLine(top, left, x_coords, y_coords) def _get_roi_polyline(self): params = ['TOP', 'LEFT', 'BOTTOM', 'RIGHT'] for p in params: self._set_header(p) top, left, bottom, right = [self.header[p] for p in params] n_coords = self.header['N_COORDINATES'] self.f_obj.seek(64) binary = self.f_obj.read(2*2*n_coords) # Two bytes per pair of coords coords = np.array(struct.unpack('>' + str(2*n_coords) + 'h', binary)) x_coords = np.array(coords[:n_coords]) y_coords = np.array(coords[n_coords:]) return ROIPolyline(top, left, x_coords, 
y_coords)

    def _get_roi_no_roi(self):
        raise NotImplementedError('Reading roi type no roi is not implemented')

    def _get_roi_freehand(self):
        params = ['TOP', 'LEFT', 'BOTTOM', 'RIGHT']
        for p in params:
            self._set_header(p)
        top, left, bottom, right = [self.header[p] for p in params]
        n_coords = self.header['N_COORDINATES']
        self.f_obj.seek(64)
        binary = self.f_obj.read(2*2*n_coords)  # Two bytes per pair of coords
        coords = np.array(struct.unpack('>' + str(2*n_coords) + 'h', binary))
        x_coords = np.array(coords[:n_coords])
        y_coords = np.array(coords[n_coords:])
        return ROIFreehand(top, left, x_coords, y_coords)  #todo removed bottom, right, why?

    def _get_roi_traced(self):
        params = ['TOP', 'LEFT', 'BOTTOM', 'RIGHT']
        for p in params:
            self._set_header(p)
        top, left, bottom, right = [self.header[p] for p in params]
        n_coords = self.header['N_COORDINATES']
        self.f_obj.seek(64)
        binary = self.f_obj.read(2*n_coords*2)
        coords = np.array(struct.unpack('>' + str(2*n_coords) + 'h', binary))
        x_coords = np.array(coords[:n_coords])
        y_coords = np.array(coords[n_coords:])
        return ROITraced(top, left, bottom, right, x_coords, y_coords)

    def _get_roi_angle(self):
        raise NotImplementedError('Reading roi type angle is not implemented')

    def _get_roi_point(self):
        raise NotImplementedError('Reading roi type point is not implemented')

    #todo make public and docstring
    def _get_var(self, var_name):
        if var_name in self._header1_dict:
            var = self._header1_dict[var_name]
            offset = var.offset
        elif var_name in self._header2_dict:
            var = self._header2_dict[var_name]
            offset = var.offset + self._get_var('HEADER2_OFFSET')
        else:
            raise Exception('Header variable %s not found' % var_name)
        self.f_obj.seek(offset)
        binary = self.f_obj.read(var.size)
        return struct.unpack('>' + var.type, binary)[0]  # read header variable, big endian

    def _get_name(self):
        name_length = self._get_var('NAME_LENGTH')
        name_offset = self._get_var('NAME_OFFSET')
        self.f_obj.seek(name_offset)
        binary = self.f_obj.read(2*name_length)
        return b''.join(struct.unpack('>' + str(2*name_length) + 'c', binary)[1::2]).decode("utf-8")

    def _set_header(self, var_name):
        self.header[var_name] = self._get_var(var_name)


# PM code below
import zipfile


def read_imagej_roi_zip(filename, dict_format=True):
    roi_list = []
    with zipfile.ZipFile(filename) as zf:
        for name in zf.namelist():
            roi_path = zf.extract(name, '/tmp')
            roi = read_roi(roi_path)
            if roi is None:
                continue
            # strip the '.roi' extension from the archive member name
            label = str(name)
            if label.endswith('.roi'):
                label = label[:-4]
            if dict_format:
                roi_list.append({'label': label, 'polygons': roi.T})
            else:
                roi_list.append([label, roi])
    return roi_list


def read_roi(roi_path):
    try:
        with ROIDecoder(roi_path) as roi:
            r = roi.get_roi()
        r.x_coords = r.left + r.x_coords
        r.y_coords = r.top + r.y_coords
        return np.array([r.x_coords, r.y_coords])
    except Exception as other:
        print(roi_path, other)
        return None
nvladimus/zebrascope_targets
MultiviewRegistration/PymageJ-devel/pymagej/roi.py
Python
mit
24,830
0.002416
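A hedged round-trip sketch for the ROIEncoder/ROIDecoder pair above: write a rectangular ROI to disk and read it back. The import path is assumed from this file's location (pymagej/roi.py), and the file name is arbitrary.

from pymagej.roi import ROIRect, ROIEncoder, ROIDecoder

rect = ROIRect(top=10, left=20, bottom=60, right=120)
with ROIEncoder('rect.roi', rect) as encoder:
    encoder.write()

with ROIDecoder('rect.roi') as decoder:
    restored = decoder.get_roi()

print(restored.type, restored.width, restored.height, restored.area)  # rect 100 50 5000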
from aimacode.logic import PropKB from aimacode.planning import Action from aimacode.search import ( Node, Problem, ) from aimacode.utils import expr from lp_utils import ( FluentState, encode_state, decode_state, ) from my_planning_graph import PlanningGraph class AirCargoProblem(Problem): def __init__(self, cargos, planes, airports, initial: FluentState, goal: list): """ :param cargos: list of str cargos in the problem :param planes: list of str planes in the problem :param airports: list of str airports in the problem :param initial: FluentState object positive and negative literal fluents (as expr) describing initial state :param goal: list of expr literal fluents required for goal test """ self.state_map = initial.pos + initial.neg self.initial_state_TF = encode_state(initial, self.state_map) Problem.__init__(self, self.initial_state_TF, goal=goal) self.cargos = cargos self.planes = planes self.airports = airports self.actions_list = self.get_actions() def get_actions(self): ''' This method creates concrete actions (no variables) for all actions in the problem domain action schema and turns them into complete Action objects as defined in the aimacode.planning module. It is computationally expensive to call this method directly; however, it is called in the constructor and the results cached in the `actions_list` property. Returns: ---------- list<Action> list of Action objects ''' # creates concrete Action objects based on the domain action schema for: Load, Unload, and Fly # concrete actions definition: specific literal action that does not include variables as with the schema # for example, the action schema 'Load(c, p, a)' can represent the concrete actions 'Load(C1, P1, SFO)' # or 'Load(C2, P2, JFK)'. The actions for the planning problem must be concrete because the problems in # forward search and Planning Graphs must use Propositional Logic def load_actions(): '''Create all concrete Load actions and return a list :return: list of Action objects ''' loads = [] for a in self.airports: for p in self.planes: for c in self.cargos: precond_pos = [ expr("At({}, {})".format(c,a)), expr("At({}, {})".format(p,a)), ] precond_neg = [] effect_add = [ expr("In({}, {})".format(c,p)), ] effect_rem = [ expr("At({}, {})".format(c,a)) ] load = Action(expr("Load({}, {}, {})".format(c,p,a)), [precond_pos, precond_neg], [effect_add, effect_rem] ) loads.append(load) return loads def unload_actions(): '''Create all concrete Unload actions and return a list :return: list of Action objects ''' unloads = [] for a in self.airports: for p in self.planes: for c in self.cargos: precond_pos = [ expr("In({}, {})".format(c,p)), expr("At({}, {})".format(p,a)), ] precond_neg = [] effect_add = [ expr("At({}, {})".format(c,a)), ] effect_rem = [ expr("In({}, {})".format(c,p)) ] unload = Action( expr("Unload({}, {}, {})".format(c,p,a)), [precond_pos, precond_neg], [effect_add, effect_rem] ) unloads.append(unload) return unloads def fly_actions(): '''Create all concrete Fly actions and return a list :return: list of Action objects ''' flys = [] for fr in self.airports: for to in self.airports: if fr != to: for p in self.planes: precond_pos = [expr("At({}, {})".format(p, fr)), ] precond_neg = [] effect_add = [expr("At({}, {})".format(p, to))] effect_rem = [expr("At({}, {})".format(p, fr))] fly = Action(expr("Fly({}, {}, {})".format(p, fr, to)), [precond_pos, precond_neg], [effect_add, effect_rem]) flys.append(fly) return flys return load_actions() + unload_actions() + fly_actions() def actions(self, state: str) -> list: 
""" Return the actions that can be executed in the given state. :param state: str state represented as T/F string of mapped fluents (state variables) e.g. 'FTTTFF' :return: list of Action objects """ possible_actions = [] actual_state = decode_state(state, self.state_map) for a in self.actions_list: sat = True for p in a.precond_pos: if p not in actual_state.pos: sat = False break if sat: for n in a.precond_neg: if n not in actual_state.neg: sat = False break if sat: possible_actions.append(a) return possible_actions def result(self, state: str, action: Action): """ Return the state that results from executing the given action in the given state. The action must be one of self.actions(state). :param state: state entering node :param action: Action applied :return: resulting state after action """ actual_state = decode_state(state,self.state_map) pos_list = action.effect_add + [ p for p in actual_state.pos if p not in action.effect_rem] neg_list = action.effect_rem + [ p for p in actual_state.neg if p not in action.effect_add] new_state = FluentState(pos_list, neg_list) return encode_state(new_state, self.state_map) def goal_test(self, state: str) -> bool: """ Test the state to see if goal is reached :param state: str representing state :return: bool """ actual_state = decode_state(state,self.state_map) for s in self.goal: if s not in actual_state.pos: return False return True def h_1(self, node: Node): # note that this is not a true heuristic h_const = 1 return h_const def h_pg_levelsum(self, node: Node): ''' This heuristic uses a planning graph representation of the problem state space to estimate the sum of all actions that must be carried out from the current state in order to satisfy each individual goal condition. ''' # requires implemented PlanningGraph class pg = PlanningGraph(self, node.state) pg_levelsum = pg.h_levelsum() return pg_levelsum def h_ignore_preconditions(self, node: Node): ''' This heuristic estimates the minimum number of actions that must be carried out from the current state in order to satisfy all of the goal conditions by ignoring the preconditions required for an action to be executed. 
''' # TODO implement (see Russell-Norvig Ed-3 10.2.3 or Russell-Norvig Ed-2 11.2) count = 0 goal_tf = encode_state(FluentState(self.goal,[]),self.state_map) for ii in range(len(goal_tf)): if goal_tf[ii] == 'T' and node.state[ii] == 'F': count = count + 1 return count def air_cargo_p1() -> AirCargoProblem: cargos = ['C1', 'C2'] planes = ['P1', 'P2'] airports = ['JFK', 'SFO'] pos = [expr('At(C1, SFO)'), expr('At(C2, JFK)'), expr('At(P1, SFO)'), expr('At(P2, JFK)'), ] neg = [expr('At(C2, SFO)'), expr('In(C2, P1)'), expr('In(C2, P2)'), expr('At(C1, JFK)'), expr('In(C1, P1)'), expr('In(C1, P2)'), expr('At(P1, JFK)'), expr('At(P2, SFO)'), ] init = FluentState(pos, neg) goal = [expr('At(C1, JFK)'), expr('At(C2, SFO)'), ] return AirCargoProblem(cargos, planes, airports, init, goal) def air_cargo_p2() -> AirCargoProblem: cargos = ['C1', 'C2', 'C3'] planes = ['P1', 'P2', 'P3'] airports = ['JFK', 'SFO', 'ATL'] pos = [expr('At(C1, SFO)'), expr('At(C2, JFK)'), expr('At(C3, ATL)'), expr('At(P1, SFO)'), expr('At(P2, JFK)'), expr('At(P3, ATL)'), ] neg = [expr('At(C2, SFO)'), expr('At(C2, ATL)'), expr('In(C2, P1)'), expr('In(C2, P2)'), expr('In(C2, P3)'), expr('At(C1, JFK)'), expr('At(C1, ATL)'), expr('In(C1, P1)'), expr('In(C1, P2)'), expr('In(C1, P3)'), expr('At(C3, JFK)'), expr('At(C3, SFO)'), expr('In(C3, P1)'), expr('In(C3, P2)'), expr('In(C3, P3)'), expr('At(P1, JFK)'), expr('At(P1, ATL)'), expr('At(P2, SFO)'), expr('At(P2, ATL)'), expr('At(P3, JFK)'), expr('At(P3, SFO)'), ] init = FluentState(pos, neg) goal = [expr('At(C1, JFK)'), expr('At(C2, SFO)'), expr('At(C3, SFO)'), ] return AirCargoProblem(cargos, planes, airports, init, goal) def air_cargo_p3() -> AirCargoProblem: cargos = ['C1', 'C2', 'C3', 'C4'] planes = ['P1', 'P2'] airports = ['JFK', 'SFO', 'ATL', 'ORD'] pos = [expr('At(C1, SFO)'), expr('At(C2, JFK)'), expr('At(C3, ATL)'), expr('At(C4, ORD)'), expr('At(P1, SFO)'), expr('At(P2, JFK)'), ] neg = [expr('At(C2, SFO)'), expr('At(C2, ATL)'), expr('At(C2, ORD)'), expr('In(C2, P1)'), expr('In(C2, P2)'), expr('At(C1, JFK)'), expr('At(C1, ATL)'), expr('At(C1, ORD)'), expr('In(C1, P1)'), expr('In(C1, P2)'), expr('At(C3, JFK)'), expr('At(C3, SFO)'), expr('At(C3, ORD)'), expr('In(C3, P1)'), expr('In(C3, P2)'), expr('At(C4, JFK)'), expr('At(C4, SFO)'), expr('At(C4, ATL)'), expr('In(C4, P1)'), expr('In(C4, P2)'), expr('At(P1, JFK)'), expr('At(P1, ATL)'), expr('At(P1, ORD)'), expr('At(P2, SFO)'), expr('At(P2, ATL)'), expr('At(P2, ORD)'), ] init = FluentState(pos, neg) goal = [expr('At(C1, JFK)'), expr('At(C2, SFO)'), expr('At(C3, JFK)'), expr('At(C4, SFO)'), ] return AirCargoProblem(cargos, planes, airports, init, goal)
fbrei/aind
planning/my_air_cargo_problems.py
Python
mit
11,755
0.003318
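A hedged sketch of solving air_cargo_p1 above with an uninformed search; breadth_first_search is assumed to be available in the same aimacode.search module this file already imports Node and Problem from.

from aimacode.search import breadth_first_search
from my_air_cargo_problems import air_cargo_p1

problem = air_cargo_p1()
goal_node = breadth_first_search(problem)
if goal_node is not None:
    # solution() yields the actions along the path, e.g. Load(C1, P1, SFO)
    for action in goal_node.solution():
        print('{}{}'.format(action.name, action.args))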
# This file is part of Shuup.
#
# Copyright (c) 2012-2021, Shuup Commerce Inc. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
# test that admin actually saves catalog
from __future__ import unicode_literals

import datetime

import pytest
import pytz
from django.test import override_settings

from shuup.apps.provides import override_provides
from shuup.campaigns.admin_module.form_parts import CatalogBaseFormPart
from shuup.campaigns.admin_module.views import CatalogCampaignEditView
from shuup.campaigns.models.campaigns import CatalogCampaign
from shuup.testing.factories import get_default_shop
from shuup.testing.utils import apply_request_middleware

DEFAULT_CONDITION_FORMS = [
    "shuup.campaigns.admin_module.forms:ContactGroupConditionForm",
    "shuup.campaigns.admin_module.forms:ContactConditionForm",
]

DEFAULT_FILTER_FORMS = [
    "shuup.campaigns.admin_module.forms:ProductTypeFilterForm",
    "shuup.campaigns.admin_module.forms:ProductFilterForm",
    "shuup.campaigns.admin_module.forms:CategoryFilterForm",
]

DEFAULT_EFFECT_FORMS = [
    "shuup.campaigns.admin_module.forms:ProductDiscountAmountForm",
    "shuup.campaigns.admin_module.forms:ProductDiscountPercentageForm",
]


def get_form_parts(request, view, object):
    with override_provides("campaign_context_condition", DEFAULT_CONDITION_FORMS):
        with override_provides("campaign_catalog_filter", DEFAULT_FILTER_FORMS):
            with override_provides("campaign_product_discount_effect_form", DEFAULT_EFFECT_FORMS):
                initialized_view = view(request=request, kwargs={"pk": object.pk})
                return initialized_view.get_form_parts(object)


@pytest.mark.django_db
def test_admin_campaign_edit_view_works(rf, admin_user):
    shop = get_default_shop()
    view_func = CatalogCampaignEditView.as_view()
    request = apply_request_middleware(rf.get("/"), user=admin_user)
    campaign = CatalogCampaign.objects.create(name="test campaign", active=True, shop=shop)
    response = view_func(request, pk=campaign.pk)
    assert campaign.name in response.rendered_content

    response = view_func(request, pk=None)
    assert response.rendered_content


@pytest.mark.django_db
def test_campaign_new_mode_view_formsets(rf, admin_user):
    view = CatalogCampaignEditView
    get_default_shop()
    request = apply_request_middleware(rf.get("/"), user=admin_user)
    form_parts = get_form_parts(request, view, view.model())
    assert len(form_parts) == 1
    assert issubclass(form_parts[0].__class__, CatalogBaseFormPart)


@pytest.mark.django_db
def test_campaign_edit_view_formsets(rf, admin_user):
    view = CatalogCampaignEditView
    shop = get_default_shop()
    object = CatalogCampaign.objects.create(name="test campaign", active=True, shop=shop)
    request = apply_request_middleware(rf.get("/"), user=admin_user)
    form_parts = get_form_parts(request, view, object)
    # form parts should include forms plus one for the base form
    assert len(form_parts) == (len(DEFAULT_CONDITION_FORMS) + len(DEFAULT_FILTER_FORMS) + len(DEFAULT_EFFECT_FORMS) + 1)


@pytest.mark.django_db
def test_campaign_creation(rf, admin_user):
    """
    To keep things a little simpler, use only English as a language.
    """
    with override_settings(LANGUAGES=[("en", "en")]):
        view = CatalogCampaignEditView.as_view()
        data = {
            "base-name": "Test Campaign",
            "base-public_name__en": "Test Campaign",
            "base-shop": get_default_shop().id,
            "base-active": True,
            "base-basket_line_text": "Test campaign activated!",
        }
        campaigns_before = CatalogCampaign.objects.count()
        request = apply_request_middleware(rf.post("/", data=data), user=admin_user)
        response = view(request, pk=None)
        assert response.status_code in [200, 302]
        assert CatalogCampaign.objects.count() == (campaigns_before + 1)


@pytest.mark.django_db
def test_campaign_edit_save(rf, admin_user):
    """
    To keep things a little simpler, use only English as a language.
    """
    with override_settings(LANGUAGES=[("en", "en")]):
        shop = get_default_shop()
        object = CatalogCampaign.objects.create(name="test campaign", active=True, shop=shop)
        object.save()
        view = CatalogCampaignEditView.as_view()
        new_name = "Test Campaign"
        new_end_datetime = datetime.datetime(year=2016, month=6, day=20)
        assert object.name != new_name
        assert object.end_datetime is None
        data = {
            "base-name": new_name,
            "base-public_name__en": "Test Campaign",
            "base-shop": get_default_shop().id,
            "base-active": True,
            "base-basket_line_text": "Test campaign activated!",
            "base-start_datetime": datetime.datetime(year=2016, month=6, day=19),
            "base-end_datetime": new_end_datetime,
        }
        methods_before = CatalogCampaign.objects.count()
        # Conditions, filters and effects are tested separately
        with override_provides("campaign_context_condition", []):
            with override_provides("campaign_catalog_filter", []):
                with override_provides("campaign_product_discount_effect_form", []):
                    request = apply_request_middleware(rf.post("/", data=data), user=admin_user)
                    response = view(request, pk=object.pk)
                    assert response.status_code in [200, 302]
        assert CatalogCampaign.objects.count() == methods_before
        updated_object = CatalogCampaign.objects.get(pk=object.pk)
        assert updated_object.name == new_name
        assert updated_object.end_datetime == new_end_datetime.replace(tzinfo=pytz.UTC)


@pytest.mark.django_db
def test_campaign_end_date(rf, admin_user):
    """
    To keep things a little simpler, use only English as a language.
    """
    with override_settings(LANGUAGES=[("en", "en")]):
        shop = get_default_shop()
        old_name = "test_campaign"
        object = CatalogCampaign.objects.create(name=old_name, active=True, shop=shop)
        object.save()
        view = CatalogCampaignEditView.as_view()
        new_name = "Test Campaign"
        assert object.name != new_name
        data = {
            "base-name": new_name,
            "base-public_name__en": "Test Campaign",
            "base-shop": get_default_shop().id,
            "base-active": True,
            "base-basket_line_text": "Test campaign activated!",
            "base-start_datetime": datetime.datetime(year=2016, month=6, day=19),
            "base-end_datetime": datetime.datetime(year=2016, month=6, day=10),
        }
        methods_before = CatalogCampaign.objects.count()
        # Conditions, filters and effects are tested separately
        with override_provides("campaign_context_condition", []):
            with override_provides("campaign_catalog_filter", []):
                with override_provides("campaign_product_discount_effect_form", []):
                    request = apply_request_middleware(rf.post("/", data=data), user=admin_user)
                    response = view(request, pk=object.pk)
                    assert response.status_code in [200, 302]
                    content = response.render().content.decode("utf-8")
                    assert "Campaign end date can&#39;t be before a start date." in content
        assert CatalogCampaign.objects.count() == methods_before
        assert CatalogCampaign.objects.get(pk=object.pk).name == old_name
shoopio/shoop
shuup_tests/campaigns/test_catalog_campaign_admin.py
Python
agpl-3.0
7,668
0.002347
import logging
import re

from airflow.hooks import PigCliHook
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults


class PigOperator(BaseOperator):
    """
    Executes pig script.

    :param pig: the pig latin script to be executed
    :type pig: string
    :param pig_cli_conn_id: reference to the Pig CLI connection
    :type pig_cli_conn_id: string
    :param pigparams_jinja_translate: when True, pig params-type templating
        ${var} gets translated into jinja-type templating {{ var }}. Note that
        you may want to use this along with the
        ``DAG(user_defined_macros=myargs)`` parameter. View the DAG
        object documentation for more details.
    :type pigparams_jinja_translate: boolean
    """

    template_fields = ('pig',)
    template_ext = ('.pig', '.piglatin',)
    ui_color = '#f0e4ec'

    @apply_defaults
    def __init__(
            self, pig,
            pig_cli_conn_id='pig_cli_default',
            pigparams_jinja_translate=False,
            *args, **kwargs):
        super(PigOperator, self).__init__(*args, **kwargs)
        self.pigparams_jinja_translate = pigparams_jinja_translate
        self.pig = pig
        self.pig_cli_conn_id = pig_cli_conn_id

    def get_hook(self):
        return PigCliHook(pig_cli_conn_id=self.pig_cli_conn_id)

    def prepare_template(self):
        if self.pigparams_jinja_translate:
            self.pig = re.sub(
                r"(\$([a-zA-Z_][a-zA-Z0-9_]*))", r"{{ \g<2> }}", self.pig)

    def execute(self, context):
        logging.info('Executing: ' + self.pig)
        self.hook = self.get_hook()
        self.hook.run_cli(pig=self.pig)

    def on_kill(self):
        self.hook.kill()
dud225/incubator-airflow
airflow/operators/pig_operator.py
Python
apache-2.0
1,716
0.001166
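A hedged usage sketch for the operator above showing pigparams_jinja_translate in action: the ${ds} Pig parameter is rewritten to {{ ds }} by prepare_template() and then rendered by Airflow's normal templating. The DAG arguments are illustrative, and the import path may differ across Airflow versions.

from datetime import datetime
from airflow.models import DAG
from airflow.operators.pig_operator import PigOperator

dag = DAG('pig_example', start_date=datetime(2016, 1, 1))

run_pig = PigOperator(
    task_id='run_pig',
    pig="sh echo ${ds};",          # rendered as "sh echo {{ ds }};"
    pigparams_jinja_translate=True,
    dag=dag)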
# -*- coding: utf-8 -*- # Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # Generated code. DO NOT EDIT! # # Snippet for Check # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. # To install the latest published package dependency, execute the following: # python3 -m pip install google-cloud-service-control # [START servicecontrol_v1_generated_ServiceController_Check_async] from google.cloud import servicecontrol_v1 async def sample_check(): # Create a client client = servicecontrol_v1.ServiceControllerAsyncClient() # Initialize request argument(s) request = servicecontrol_v1.CheckRequest( ) # Make the request response = await client.check(request=request) # Handle the response print(response) # [END servicecontrol_v1_generated_ServiceController_Check_async]
googleapis/python-service-control
samples/generated_samples/servicecontrol_v1_generated_service_controller_check_async.py
Python
apache-2.0
1,437
0.000696
import sys sys.path.append("helper") import web from helper import session web.config.debug = False urls = ( "/", "controller.start.index", "/1", "controller.start.one", "/2", "controller.start.two", ) app = web.application(urls, globals()) sessions = session.Sessions() if __name__ == "__main__": app.run()
0x00/web.py-jinja2-pyjade-bootstrap
app.py
Python
apache-2.0
331
0.036254
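A hypothetical sketch of the controller/start.py module the URL map above points at: web.py resolves each dotted string to a class and dispatches on the HTTP method name. The response bodies are placeholders.

import web

class index:
    def GET(self):
        return "index"

class one:
    def GET(self):
        return "page one"

class two:
    def GET(self):
        return "page two"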
# vim: tabstop=4 shiftwidth=4 softtabstop=4 # Copyright 2011 OpenStack LLC. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from lxml import etree import routes import unittest import webtest from melange import tests from melange.common import config from melange.common import wsgi class TestExtensions(unittest.TestCase): def test_extension_loads_with_melange_xmlns(self): options = {'config_file': tests.test_config_file()} conf, app = config.Config.load_paste_app('melangeapi', options, None) test_app = webtest.TestApp(app) response = test_app.get("/extensions.xml") root = etree.XML(response.body) self.assertEqual(root.tag.split('extensions')[0], "{http://docs.openstack.org/melange}") class ExtensionsTestApp(wsgi.Router): def __init__(self): mapper = routes.Mapper() super(ExtensionsTestApp, self).__init__(mapper) def app_factory(global_conf, **local_conf): conf = global_conf.copy() conf.update(local_conf) return ExtensionsTestApp()
rajarammallya/melange
melange/tests/unit/test_extensions.py
Python
apache-2.0
1,657
0.000604
import os,sys parentdir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) sys.path.insert(0,parentdir) from pyN import * single_neuron = AdExPopulation(name='neuron', N=1) brain = Network(populations=[single_neuron]) stim = [{'start':10,'stop':100,'mV':14,'neurons':[0]}] results = brain.simulate(experiment_name='Single AdEx Neuron',T=100,dt=0.25,integration_time=30,I_ext={'neuron':stim}, save_data='../data/', properties_to_save=['v','w','psc','I_ext']) save_plots(results,'./')
ericjang/pyN
pyn_examples/6_AxEx_neuron.py
Python
bsd-2-clause
496
0.042339
# -*- coding: utf-8 -*- from django.views.generic import FormView class ChangePassword(FormView): pass change_password = ChangePassword.as_view()
tokyo-jesus/wampum
user/private_views/_change_password.py
Python
gpl-3.0
152
0.013158
import random
import numpy as np
import pprint
import sys


def generate_random_params(max_rumtime_mins = 2880, param_seed = 1234):
    random.seed(param_seed)
    np.random.seed(param_seed)

    # duration
    TIME_TAKEN_FOR_ONE_SIMULATION = 40  # minutes

    # params we are concerned about
    #CLUSTER_SIZE = ["2-2"]
    CLUSTER_SIZE = ["5-5", "2-5"]
    REMAP_PERIOD = [0.96, 2.4, 4.8, 7.2]
    LATENESS_RATIO = [0.1, 0.2, 0.4, 0.6, 0.8, 1.0]
    LOAN_AMOUNT = [1,2,5,8,10]

    list_of_patterns = {}
    total_cum_time = 0
    total_permutations = len(CLUSTER_SIZE) * \
                         len(REMAP_PERIOD) * \
                         len(LOAN_AMOUNT)

    #print total_permutations
    #sys.exit()

    while ((total_cum_time < max_rumtime_mins) and (len(list_of_patterns) < total_permutations)):
        rand_CLUSTER_SIZE = random.choice(CLUSTER_SIZE)
        rand_REMAP_PERIOD = random.choice(REMAP_PERIOD)
        rand_LATENESS_RATIO = random.choice(LATENESS_RATIO)  # drawn but not used in the key; recorded as -1.0 below
        rand_LOAN_AMOUNT = random.choice(LOAN_AMOUNT)

        perm_key = "perm_" + \
                   str(rand_CLUSTER_SIZE) + "_" + \
                   str(rand_REMAP_PERIOD) + "_" + \
                   str(rand_LOAN_AMOUNT) + "_"

        fname_param_prefix = str(rand_CLUSTER_SIZE) + \
                             "_" + str(rand_REMAP_PERIOD) + \
                             "_" + str(rand_LOAN_AMOUNT)

        if (perm_key not in list_of_patterns):
            list_of_patterns[perm_key] = {
                                          "params" : {
                                                      "rand_CLUSTER_SIZE" : rand_CLUSTER_SIZE,
                                                      "rand_REMAP_PERIOD" : rand_REMAP_PERIOD,
                                                      "rand_LOAN_AMOUNT" : rand_LOAN_AMOUNT,
                                                      "rand_LATENESS_RATIO" : -1.0,
                                                      },
                                          "fname_param_prefix" : fname_param_prefix
                                          }

            total_cum_time += TIME_TAKEN_FOR_ONE_SIMULATION
        else:
            # duplicate permutation - ignore
            pass

    return list_of_patterns


#print "finished!"
#random_params = generate_random_params(max_rumtime_mins=5760)
#print pprint.pprint(random_params)
#print len(random_params)
roshantha9/AbstractManycoreSim
src/util_scripts/ccpbased_remapping_random_params_generation.py
Python
gpl-3.0
2,693
0.026365
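A usage sketch mirroring the commented-out calls at the bottom of the file above; the import path is an assumption based on the file's location.

import pprint
from util_scripts.ccpbased_remapping_random_params_generation import generate_random_params

params = generate_random_params(max_rumtime_mins=1440, param_seed=1234)
print(len(params), 'parameter permutations sampled')
pprint.pprint(next(iter(params.values())))   # one sampled entry: params dict plus fname prefix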
import asyncio
from typing import List

import pytest

from gql import Client, gql
from gql.transport.exceptions import (
    TransportClosed,
    TransportProtocolError,
    TransportQueryError,
)

from .conftest import WebSocketServerHelper

# Marking all tests in this file with the websockets marker
pytestmark = pytest.mark.websockets

invalid_query_str = """
    query getContinents {
      continents {
        code
        bloh
      }
    }
"""

invalid_query1_server_answer = (
    '{{"type":"next","id":"{query_id}",'
    '"payload":{{"errors":['
    '{{"message":"Cannot query field \\"bloh\\" on type \\"Continent\\".",'
    '"locations":[{{"line":4,"column":5}}],'
    '"extensions":{{"code":"INTERNAL_SERVER_ERROR"}}}}]}}}}'
)

invalid_query1_server = [invalid_query1_server_answer]


@pytest.mark.asyncio
@pytest.mark.parametrize("graphqlws_server", [invalid_query1_server], indirect=True)
@pytest.mark.parametrize("query_str", [invalid_query_str])
async def test_graphqlws_invalid_query(
    event_loop, client_and_graphqlws_server, query_str
):
    session, server = client_and_graphqlws_server

    query = gql(query_str)

    with pytest.raises(TransportQueryError) as exc_info:
        await session.execute(query)

    exception = exc_info.value

    assert isinstance(exception.errors, List)

    error = exception.errors[0]
    assert error["extensions"]["code"] == "INTERNAL_SERVER_ERROR"


invalid_subscription_str = """
    subscription getContinents {
      continents {
        code
        bloh
      }
    }
"""


async def server_invalid_subscription(ws, path):
    await WebSocketServerHelper.send_connection_ack(ws)
    await ws.recv()
    await ws.send(invalid_query1_server_answer.format(query_id=1))
    await WebSocketServerHelper.send_complete(ws, 1)
    await ws.wait_closed()


@pytest.mark.asyncio
@pytest.mark.parametrize(
    "graphqlws_server", [server_invalid_subscription], indirect=True
)
@pytest.mark.parametrize("query_str", [invalid_subscription_str])
async def test_graphqlws_invalid_subscription(
    event_loop, client_and_graphqlws_server, query_str
):
    session, server = client_and_graphqlws_server

    query = gql(query_str)

    with pytest.raises(TransportQueryError) as exc_info:
        async for result in session.subscribe(query):
            pass

    exception = exc_info.value

    assert isinstance(exception.errors, List)

    error = exception.errors[0]
    assert error["extensions"]["code"] == "INTERNAL_SERVER_ERROR"


async def server_no_ack(ws, path):
    await ws.wait_closed()


@pytest.mark.asyncio
@pytest.mark.parametrize("graphqlws_server", [server_no_ack], indirect=True)
@pytest.mark.parametrize("query_str", [invalid_query_str])
async def test_graphqlws_server_does_not_send_ack(
    event_loop, graphqlws_server, query_str
):
    from gql.transport.websockets import WebsocketsTransport

    url = f"ws://{graphqlws_server.hostname}:{graphqlws_server.port}/graphql"

    sample_transport = WebsocketsTransport(url=url, ack_timeout=1)

    with pytest.raises(asyncio.TimeoutError):
        async with Client(transport=sample_transport):
            pass


invalid_query_server_answer = (
    '{"id":"1","type":"error","payload":[{"message":"Cannot query field '
    '\\"helo\\" on type \\"Query\\". Did you mean \\"hello\\"?",'
    '"locations":[{"line":2,"column":3}]}]}'
)


async def server_invalid_query(ws, path):
    await WebSocketServerHelper.send_connection_ack(ws)
    result = await ws.recv()
    print(f"Server received: {result}")
    await ws.send(invalid_query_server_answer)
    await WebSocketServerHelper.wait_connection_terminate(ws)
    await ws.wait_closed()


@pytest.mark.asyncio
@pytest.mark.parametrize("graphqlws_server", [server_invalid_query], indirect=True)
async def test_graphqlws_sending_invalid_query(event_loop, client_and_graphqlws_server):
    session, server = client_and_graphqlws_server

    query = gql("{helo}")

    with pytest.raises(TransportQueryError) as exc_info:
        await session.execute(query)

    exception = exc_info.value

    assert isinstance(exception.errors, List)

    error = exception.errors[0]
    assert (
        error["message"]
        == 'Cannot query field "helo" on type "Query". Did you mean "hello"?'
    )


not_json_answer = ["BLAHBLAH"]
missing_type_answer = ["{}"]
missing_id_answer_1 = ['{"type": "next"}']
missing_id_answer_2 = ['{"type": "error"}']
missing_id_answer_3 = ['{"type": "complete"}']
data_without_payload = ['{"type": "next", "id":"1"}']
error_without_payload = ['{"type": "error", "id":"1"}']
error_with_payload_not_a_list = ['{"type": "error", "id":"1", "payload": "NOT A LIST"}']
payload_is_not_a_dict = ['{"type": "next", "id":"1", "payload": "BLAH"}']
empty_payload = ['{"type": "next", "id":"1", "payload": {}}']
sending_bytes = [b"\x01\x02\x03"]


@pytest.mark.asyncio
@pytest.mark.parametrize(
    "graphqlws_server",
    [
        not_json_answer,
        missing_type_answer,
        missing_id_answer_1,
        missing_id_answer_2,
        missing_id_answer_3,
        data_without_payload,
        error_without_payload,
        payload_is_not_a_dict,
        error_with_payload_not_a_list,
        empty_payload,
        sending_bytes,
    ],
    indirect=True,
)
async def test_graphqlws_transport_protocol_errors(
    event_loop, client_and_graphqlws_server
):
    session, server = client_and_graphqlws_server

    query = gql("query { hello }")

    with pytest.raises(TransportProtocolError):
        await session.execute(query)


async def server_without_ack(ws, path):
    # Sending something else than an ack
    await WebSocketServerHelper.send_complete(ws, 1)
    await ws.wait_closed()


@pytest.mark.asyncio
@pytest.mark.parametrize("graphqlws_server", [server_without_ack], indirect=True)
async def test_graphqlws_server_does_not_ack(event_loop, graphqlws_server):
    from gql.transport.websockets import WebsocketsTransport

    url = f"ws://{graphqlws_server.hostname}:{graphqlws_server.port}/graphql"
    print(f"url = {url}")

    sample_transport = WebsocketsTransport(url=url)

    with pytest.raises(TransportProtocolError):
        async with Client(transport=sample_transport):
            pass


async def server_closing_directly(ws, path):
    await ws.close()


@pytest.mark.asyncio
@pytest.mark.parametrize("graphqlws_server", [server_closing_directly], indirect=True)
async def test_graphqlws_server_closing_directly(event_loop, graphqlws_server):
    import websockets
    from gql.transport.websockets import WebsocketsTransport

    url = f"ws://{graphqlws_server.hostname}:{graphqlws_server.port}/graphql"
    print(f"url = {url}")

    sample_transport = WebsocketsTransport(url=url)

    with pytest.raises(websockets.exceptions.ConnectionClosed):
        async with Client(transport=sample_transport):
            pass


async def server_closing_after_ack(ws, path):
    await WebSocketServerHelper.send_connection_ack(ws)
    await ws.close()


@pytest.mark.asyncio
@pytest.mark.parametrize("graphqlws_server", [server_closing_after_ack], indirect=True)
async def test_graphqlws_server_closing_after_ack(
    event_loop, client_and_graphqlws_server
):
    import websockets

    session, server = client_and_graphqlws_server

    query = gql("query { hello }")

    with pytest.raises(websockets.exceptions.ConnectionClosed):
        await session.execute(query)

    await session.transport.wait_closed()

    with pytest.raises(TransportClosed):
        await session.execute(query)
graphql-python/gql
tests/test_graphqlws_exceptions.py
Python
mit
7,525
0.00093
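A note on the canned server answers above: the doubled braces in invalid_query1_server_answer exist so that str.format() substitutes only query_id and leaves the rest as literal JSON braces. A quick standalone illustration of that escaping rule:

answer = '{{"type":"next","id":"{query_id}","payload":{{}}}}'
print(answer.format(query_id=1))
# -> {"type":"next","id":"1","payload":{}}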
#!/usr/bin/env python

from runtest import TestBase

class TestCase(TestBase):
    def __init__(self):
        TestBase.__init__(self, 'namespace', lang="C++", result="""
# DURATION    TID     FUNCTION
            [ 7102] | main() {
   2.697 us [ 7102] |   operator new();
   0.842 us [ 7102] |   ns::ns1::foo::foo();
            [ 7102] |   ns::ns1::foo::bar() {
            [ 7102] |     ns::ns1::foo::bar1() {
   1.926 us [ 7102] |       ns::ns1::foo::bar2();
   2.169 us [ 7102] |     } /* ns::ns1::foo::bar1 */
   1.215 us [ 7102] |     free();
   3.897 us [ 7102] |   } /* ns::ns1::foo::bar */
   1.865 us [ 7102] |   operator delete();
   0.274 us [ 7102] |   operator new();
   0.115 us [ 7102] |   ns::ns2::foo::foo();
   1.566 us [ 7102] |   ns::ns2::foo::bar();
   0.168 us [ 7102] |   operator delete();
  78.921 us [ 7102] | } /* main */
""")

    def prepare(self):
        self.subcmd = 'record'
        return self.runcmd()

    def setup(self):
        self.subcmd = 'replay'
        self.option = '-N "bar3$" -Tns::ns2::foo::bar@depth=1'
namhyung/uftrace
tests/t036_replay_filter_N.py
Python
gpl-2.0
1,056
0.000947
#-------------------------------------------------------------------------------
# Name:        demo_python_3
#
# This file shows how to create and populate the ChParticleClones object.
# Also, shows how to use POV ray for postprocessing, thanks to the
# utility functions in the unit_POSTPROCESS of Chrono::Engine.
#
#-------------------------------------------------------------------------------
#!/usr/bin/env python

def main():
    pass

if __name__ == '__main__':
    main()


# Load the Chrono::Engine unit and the postprocessing unit!!!
import ChronoEngine_python_core as chrono
import ChronoEngine_python_postprocess as postprocess

# We will create two directories for saving some files, we need this:
import os

# Create a physical system,
my_system = chrono.ChSystemNSC()

# Set the default margins for collision detection, this is especially
# important for very large or very small objects.
chrono.ChCollisionModel.SetDefaultSuggestedEnvelope(0.001)
chrono.ChCollisionModel.SetDefaultSuggestedMargin(0.001)


# Create the set of the particle clones (many rigid bodies that
# share the same mass and collision shape, so they are memory efficient
# in case you want to simulate granular material)

body_particles = chrono.ChParticlesClones()
body_particles.SetMass(0.01)
# Note: the (2/5)*m*r**2 sphere inertia below relies on Python 3 true
# division; under Python 2 integer division, 2/5 would evaluate to 0.
inertia = 2/5*(pow(0.005, 2))*0.01
body_particles.SetInertiaXX(chrono.ChVectorD(inertia, inertia, inertia))

# Collision shape (shared by all particle clones) Must be defined BEFORE adding particles
body_particles.GetCollisionModel().ClearModel()
body_particles.GetCollisionModel().AddSphere(0.005)
body_particles.GetCollisionModel().BuildModel()
body_particles.SetCollide(True)

# add particles
for ix in range(0, 5):
    for iy in range(0, 5):
        for iz in range(0, 3):
            body_particles.AddParticle(chrono.ChCoordsysD(chrono.ChVectorD(ix/100, 0.1+iy/100, iz/100)))

# Visualization shape (shared by all particle clones)
body_particles_shape = chrono.ChSphereShape()
body_particles_shape.GetSphereGeometry().rad = 0.005
body_particles.GetAssets().push_back(body_particles_shape)

my_system.Add(body_particles)


# Create the floor: a simple fixed rigid body with a collision shape
# and a visualization shape

body_floor = chrono.ChBody()
body_floor.SetBodyFixed(True)

# Collision shape
body_floor.GetCollisionModel().ClearModel()
body_floor.GetCollisionModel().AddBox(0.1, 0.02, 0.1)  # hemi sizes
body_floor.GetCollisionModel().BuildModel()
body_floor.SetCollide(True)

# Visualization shape
body_floor_shape = chrono.ChBoxShape()
body_floor_shape.GetBoxGeometry().Size = chrono.ChVectorD(0.1, 0.02, 0.1)
body_floor_shape.SetColor(chrono.ChColor(0.5, 0.5, 0.5))
body_floor.GetAssets().push_back(body_floor_shape)

my_system.Add(body_floor)


# Create boxes that fall
# This is just for fun.

for ix in range(0, 2):
    for iz in range(0, 4):
        body_brick = chrono.ChBody()
        body_brick.SetPos(chrono.ChVectorD(0.05+ix*0.021, 0.04, 0+iz*0.021))
        body_brick.SetMass(0.02)
        inertia = 2/5*(pow(0.01, 2))*0.02
        body_brick.SetInertiaXX(chrono.ChVectorD(inertia, inertia, inertia))

        # Collision shape
        body_brick.GetCollisionModel().ClearModel()
        body_brick.GetCollisionModel().AddBox(0.01, 0.01, 0.01)  # hemi sizes
        body_brick.GetCollisionModel().BuildModel()
        body_brick.SetCollide(True)

        # Visualization shape
        body_brick_shape = chrono.ChBoxShape()
        body_brick_shape.GetBoxGeometry().Size = chrono.ChVectorD(0.01, 0.01, 0.01)
        body_brick.GetAssets().push_back(body_brick_shape)

        my_system.Add(body_brick)


# ---------------------------------------------------------------------
#
#  Render a short animation by generating scripts
#  to be used with POV-Ray
#

pov_exporter = postprocess.ChPovRay(my_system)

# Sets some file names for in-out processes.
pov_exporter.SetTemplateFile("../../../data/_template_POV.pov")
pov_exporter.SetOutputScriptFile("rendering_frames.pov")
if not os.path.exists("output"):
    os.mkdir("output")
if not os.path.exists("anim"):
    os.mkdir("anim")
pov_exporter.SetOutputDataFilebase("output/my_state")
pov_exporter.SetPictureFilebase("anim/picture")

pov_exporter.SetCamera(chrono.ChVectorD(0.2, 0.3, 0.5), chrono.ChVectorD(0, 0, 0), 35)
pov_exporter.SetLight(chrono.ChVectorD(-2, 2, -1), chrono.ChColor(1.1, 1.2, 1.2), True)
pov_exporter.SetPictureSize(640, 480)
pov_exporter.SetAmbientLight(chrono.ChColor(2, 2, 2))

# Add additional POV objects/lights/materials in the following way
pov_exporter.SetCustomPOVcommandsScript(
'''
light_source{ <1,3,1.5> color rgb<1.1,1.1,1.1> }
Grid(0.05,0.04, rgb<0.7,0.7,0.7>, rgbt<1,1,1,1>)
''')

# Tell which physical items you want to render
pov_exporter.AddAll()

# Tell that you want to render the contacts
pov_exporter.SetShowContacts(True,
                             postprocess.ChPovRay.SYMBOL_VECTOR_SCALELENGTH,
                             0.2,     # scale
                             0.0007,  # width
                             0.1,     # max size
                             True, 0, 0.5)  # colormap on, blue at 0, red at 0.5

# 1) Create the two .pov and .ini files for POV-Ray (this must be done
#    only once at the beginning of the simulation).
pov_exporter.ExportScript()

#my_system.SetSolverType(chrono.ChSolver.Type_PMINRES)
my_system.SetMaxItersSolverSpeed(50)

# Perform a short simulation
while (my_system.GetChTime() < 0.7):
    my_system.DoStepDynamics(0.005)

    print('time=', my_system.GetChTime())

    # 2) Create the incremental nnnn.dat and nnnn.pov files that will be load
    #    by the pov .ini script in POV-Ray (do this at each simulation timestep)
    pov_exporter.ExportData()
amelmquist/chrono
src/demos/python/demo_particleclones.py
Python
bsd-3-clause
5,706
0.015247
def function4():
    return "function4"
bazelbuild/rules_python
gazelle/testdata/relative_imports/package2/module4.py
Python
apache-2.0
40
0
from __future__ import absolute_import

from .MockPrinter import MockPrinter
import mock
from random import random


class M201_Tests(MockPrinter):

    def setUp(self):
        self.printer.path_planner.native_planner.setAcceleration = mock.Mock()
        self.printer.axis_config = self.printer.AXIS_CONFIG_XY
        self.printer.speed_factor = 1.0

    def exercise(self):
        values = {}
        gcode = "M201"
        for i, v in enumerate(self.printer.acceleration):
            axis = self.printer.AXES[i]
            values[axis] = round(random() * 9000.0, 0)
            gcode += " {:s}{:.0f}".format(axis, values[axis])
        self.execute_gcode(gcode)
        return {
            "values": values,
            "call_args": self.printer.path_planner.native_planner.setAcceleration.call_args[0][0]
        }

    def test_gcodes_M201_all_axes_G21_mm(self):
        test_data = self.exercise()
        for i, axis in enumerate(self.printer.AXES):
            expected = round(test_data["values"][axis] * self.printer.factor / 3600.0, 4)
            result = test_data["call_args"][i]
            self.assertEqual(expected, result,
                             axis + ": expected {:.0f} but got {:.0f}".format(expected, result))

    def test_gcodes_M201_all_axes_G20_inches(self):
        self.printer.factor = 25.4
        test_data = self.exercise()
        for i, axis in enumerate(self.printer.AXES):
            expected = round(test_data["values"][axis] * self.printer.factor / 3600.0, 4)
            result = test_data["call_args"][i]
            self.assertEqual(expected, result,
                             axis + ": expected {:.0f} but got {:.0f}".format(expected, result))

    def test_gcodes_M201_CoreXY(self):
        self.printer.axis_config = self.printer.AXIS_CONFIG_CORE_XY
        while True:  # account for remote possibility of two equal random numbers for X and Y
            test_data = self.exercise()
            if test_data["values"]["X"] != test_data["values"]["Y"]:
                break
        self.assertEqual(
            test_data["call_args"][0], test_data["call_args"][1],
            "For CoreXY mechanics, X & Y values must match. But X={}, Y={} (mm/min / 3600)".format(
                test_data["call_args"][0], test_data["call_args"][1]))

    def test_gcodes_M201_H_belt(self):
        self.printer.axis_config = self.printer.AXIS_CONFIG_H_BELT
        while True:  # account for remote possibility of two equal random numbers for X and Y
            test_data = self.exercise()
            if test_data["values"]["X"] != test_data["values"]["Y"]:
                break
        self.assertEqual(
            test_data["call_args"][0], test_data["call_args"][1],
            "For H-Belt mechanics, X & Y values must match. But X={}, Y={} (mm/min / 3600)".format(
                test_data["call_args"][0], test_data["call_args"][1]))

    def test_gcodes_M201_Delta(self):
        self.printer.axis_config = self.printer.AXIS_CONFIG_DELTA
        while True:  # account for super, ultra-duper remote possibility of three equal random numbers for X, Y and Z
            test_data = self.exercise()
            if (test_data["values"]["X"] + test_data["values"]["Y"] + test_data["values"]["Z"]) != (
                    test_data["values"]["X"] * 3):
                break
        self.assertEqual(
            test_data["call_args"][0] + test_data["call_args"][1] + test_data["call_args"][2],
            test_data["call_args"][0] * 3,
            "For Delta mechanics, X, Y & Z values must match. But X={}, Y={}, Z={} (mm/min / 3600)".format(
                test_data["call_args"][0], test_data["call_args"][1], test_data["call_args"][2]))
intelligent-agent/redeem
tests/gcode/test_M201.py
Python
gpl-3.0
3,414
0.010838
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import sys
import unittest

from libcloud.utils.py3 import httplib
from libcloud.dns.types import ZoneDoesNotExistError
from libcloud.dns.types import RecordDoesNotExistError
from libcloud.dns.drivers.google import GoogleDNSDriver
from libcloud.common.google import (GoogleBaseAuthConnection,
                                    GoogleInstalledAppAuthConnection,
                                    GoogleBaseConnection)

from libcloud.test.common.test_google import GoogleAuthMockHttp
from libcloud.test import MockHttpTestCase, LibcloudTestCase
from libcloud.test.file_fixtures import DNSFileFixtures
from libcloud.test.secrets import DNS_PARAMS_GOOGLE, DNS_KEYWORD_PARAMS_GOOGLE


class GoogleTests(LibcloudTestCase):
    GoogleBaseConnection._get_token_info_from_file = lambda x: None
    GoogleBaseConnection._write_token_info_to_file = lambda x: None
    GoogleInstalledAppAuthConnection.get_code = lambda x: '1234'

    def setUp(self):
        GoogleDNSMockHttp.test = self
        GoogleDNSDriver.connectionCls.conn_classes = (GoogleDNSMockHttp,
                                                      GoogleDNSMockHttp)
        GoogleBaseAuthConnection.conn_classes = (GoogleAuthMockHttp,
                                                 GoogleAuthMockHttp)
        GoogleDNSMockHttp.type = None
        kwargs = DNS_KEYWORD_PARAMS_GOOGLE.copy()
        kwargs['auth_type'] = 'IA'
        self.driver = GoogleDNSDriver(*DNS_PARAMS_GOOGLE, **kwargs)

    def test_default_scopes(self):
        self.assertEqual(self.driver.scopes, None)

    def test_list_zones(self):
        zones = self.driver.list_zones()
        self.assertEqual(len(zones), 2)

    def test_list_records(self):
        zone = self.driver.list_zones()[0]
        records = self.driver.list_records(zone=zone)
        self.assertEqual(len(records), 3)

    def test_get_zone(self):
        zone = self.driver.get_zone('example-com')
        self.assertEqual(zone.id, 'example-com')
        self.assertEqual(zone.domain, 'example.com.')

    def test_get_zone_does_not_exist(self):
        GoogleDNSMockHttp.type = 'ZONE_DOES_NOT_EXIST'
        try:
            self.driver.get_zone('example-com')
        except ZoneDoesNotExistError:
            e = sys.exc_info()[1]
            self.assertEqual(e.zone_id, 'example-com')
        else:
            self.fail('Exception not thrown')

    def test_get_record(self):
        GoogleDNSMockHttp.type = 'FILTER_ZONES'
        zone = self.driver.list_zones()[0]
        record = self.driver.get_record(zone.id, "A:foo.example.com.")
        self.assertEqual(record.id, 'A:foo.example.com.')
        self.assertEqual(record.name, 'foo.example.com.')
        self.assertEqual(record.type, 'A')
        self.assertEqual(record.zone.id, 'example-com')

    def test_get_record_zone_does_not_exist(self):
        GoogleDNSMockHttp.type = 'ZONE_DOES_NOT_EXIST'
        try:
            self.driver.get_record('example-com', 'a:a')
        except ZoneDoesNotExistError:
            e = sys.exc_info()[1]
            self.assertEqual(e.zone_id, 'example-com')
        else:
            self.fail('Exception not thrown')

    def test_get_record_record_does_not_exist(self):
        GoogleDNSMockHttp.type = 'RECORD_DOES_NOT_EXIST'
        try:
            self.driver.get_record('example-com', "A:foo")
        except RecordDoesNotExistError:
            e = sys.exc_info()[1]
            self.assertEqual(e.record_id, 'A:foo')
        else:
            self.fail('Exception not thrown')

    def test_create_zone(self):
        extra = {'description': 'new domain for example.org'}
        zone = self.driver.create_zone('example.org.', extra)
        self.assertEqual(zone.domain, 'example.org.')
        self.assertEqual(zone.extra['description'], extra['description'])
        self.assertEqual(len(zone.extra['nameServers']), 4)

    def test_delete_zone(self):
        zone = self.driver.get_zone('example-com')
        res = self.driver.delete_zone(zone)
        self.assertTrue(res)


class GoogleDNSMockHttp(MockHttpTestCase):
    fixtures = DNSFileFixtures('google')

    def _dns_v1beta1_projects_project_name_managedZones(self, method, url,
                                                        body, headers):
        if method == 'POST':
            body = self.fixtures.load('zone_create.json')
        else:
            body = self.fixtures.load('zone_list.json')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _dns_v1beta1_projects_project_name_managedZones_FILTER_ZONES(
            self, method, url, body, headers):
        body = self.fixtures.load('zone_list.json')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _dns_v1beta1_projects_project_name_managedZones_example_com_rrsets_FILTER_ZONES(
            self, method, url, body, headers):
        body = self.fixtures.load('record.json')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _dns_v1beta1_projects_project_name_managedZones_example_com_rrsets(
            self, method, url, body, headers):
        body = self.fixtures.load('records_list.json')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _dns_v1beta1_projects_project_name_managedZones_example_com(
            self, method, url, body, headers):
        if method == 'GET':
            body = self.fixtures.load('managed_zones_1.json')
        elif method == 'DELETE':
            body = None
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _dns_v1beta1_projects_project_name_managedZones_example_com_ZONE_DOES_NOT_EXIST(
            self, method, url, body, headers):
        body = self.fixtures.load('get_zone_does_not_exists.json')
        return (httplib.NOT_FOUND, body, {},
                httplib.responses[httplib.NOT_FOUND])

    def _dns_v1beta1_projects_project_name_managedZones_example_com_RECORD_DOES_NOT_EXIST(
            self, method, url, body, headers):
        body = self.fixtures.load('managed_zones_1.json')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _dns_v1beta1_projects_project_name_managedZones_example_com_rrsets_RECORD_DOES_NOT_EXIST(
            self, method, url, body, headers):
        body = self.fixtures.load('no_record.json')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _dns_v1beta1_projects_project_name_managedZones_example_com_rrsets_ZONE_DOES_NOT_EXIST(
            self, method, url, body, headers):
        body = self.fixtures.load('get_zone_does_not_exists.json')
        return (httplib.NOT_FOUND, body, {},
                httplib.responses[httplib.NOT_FOUND])

    def _dns_v1beta1_projects_project_name_managedZones_example_com_FILTER_ZONES(
            self, method, url, body, headers):
        body = self.fixtures.load('zone.json')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])


if __name__ == '__main__':
    sys.exit(unittest.main())
Hybrid-Cloud/badam
patches_tool/aws_patch/aws_deps/libcloud/test/dns/test_google.py
Python
apache-2.0
7,924
0.000883
# -*- coding: utf-8 -*-
# Copyright (C) 2008-2011, Luis Pedro Coelho <luis@luispedro.org>
# vim: set ts=4 sts=4 sw=4 expandtab smartindent:
#
# License: MIT. See COPYING.MIT file in the milk distribution

from __future__ import division
from collections import defaultdict
import numpy as np
from .base import supervised_model

__all__ = [
    'kNN',
    ]

class kNN(object):
    '''
    k-Nearest Neighbour Classifier

    Naive implementation of a k-nearest neighbour classifier.

    C = kNN(k)

    Attributes:
    -----------
    k : integer
        number of neighbours to consider
    '''

    def __init__(self, k=1):
        self.k = k

    def train(self, features, labels, normalisedlabels=False, copy_features=False):
        features = np.asanyarray(features)
        labels = np.asanyarray(labels)
        if copy_features:
            features = features.copy()
            labels = labels.copy()
        features2 = np.sum(features**2, axis=1)
        return kNN_model(self.k, features, features2, labels)

class kNN_model(supervised_model):
    def __init__(self, k, features, features2, labels):
        self.k = k
        self.features = features
        self.f2 = features2
        self.labels = labels

    def apply(self, features):
        features = np.asanyarray(features)
        diff2 = np.dot(self.features, (-2.)*features)
        diff2 += self.f2
        neighbours = diff2.argsort()[:self.k]
        labels = self.labels[neighbours]
        votes = defaultdict(int)
        for L in labels:
            votes[L] += 1
        v, L = max((v, L) for L, v in votes.items())
        return L
arnaudsj/milk
milk/supervised/knn.py
Python
mit
1,620
0.006173
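The apply() method above never computes full Euclidean distances. Since ||x - q||^2 = ||x||^2 - 2 x.q + ||q||^2 and the ||q||^2 term is the same for every training row, ranking rows by ||x||^2 - 2 x.q (the precomputed f2 plus the dot product with -2q) yields the same nearest-neighbour order. A quick standalone sketch verifying that equivalence on toy data (not part of milk):

import numpy as np

rng = np.random.RandomState(0)
X = rng.rand(10, 3)   # training features, one row per sample
q = rng.rand(3)       # query point

full = ((X - q) ** 2).sum(axis=1)             # true squared distances
trick = (X ** 2).sum(axis=1) - 2 * X.dot(q)   # drops the constant ||q||^2

# Same ordering, hence the same k nearest neighbours
assert (full.argsort() == trick.argsort()).all()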
# *********************************************************************************************************************************
# Name:    api_example_python.py
# Desc:    full api example
# Auth:    john mcilwain (jmac) - (jmac@cdnetworks.com)
# Ver :    .90
# License:
#   This sample code is provided on an "AS IS" basis. THERE ARE NO
#   WARRANTIES, EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION THE IMPLIED
#   WARRANTIES OF NON-INFRINGEMENT, MERCHANTABILITY AND FITNESS FOR A PARTICULAR
#   PURPOSE, REGARDING THE SOFTWARE OR ITS USE AND OPERATION ALONE OR IN
#   COMBINATION WITH YOUR PRODUCTS.
# *********************************************************************************************************************************

import os
import sys
import json
import pygal
import re
import urllib.request

# .strip() removes the trailing newline that read() keeps, which would
# otherwise corrupt the query strings built below.
USER = os.popen('cat ./_user.db').read().strip()    # Create _user.db with your username inside
PASS = os.popen('cat ./_pass.db').read().strip()    # Create _pass.db with your password inside

SVCGRP = 'YourServiceGRP'    # Change to your desired SERVICE GROUP
APIKEY = 'YourDomainPAD'     # Change to your desired APIKEY (website)
TRAFFICDATA = '&fromDate=20170201&toDate=20170201&timeInterval=1'    # Change to your desired graph date/time
GRAPHFILE = 'api_example_python_graph'    # Change to your desired graph filename

APIENDPOINT = 'https://openapi.cdnetworks.com/api/rest/'    # Don't change
APIFORMAT = '&output=json'    # Don't change
API_SUCCESS = 0    # Don't change

# Command: LOGIN : send login, receive list of service groups (logical grouping, like a directory)
print('Control Groups')
url = APIENDPOINT + 'login?user=' + USER + '&pass=' + PASS + APIFORMAT
print('\tURL: ' + APIENDPOINT + 'login?user=xxx&pass=xxx')
parsed = json.load(urllib.request.urlopen(url))
retval = parsed['loginResponse']['resultCode']
print('\tloginResponse: resultCode = %s' % retval)

# Loop through and find SVCGRP specific Service Group
sessToken = ''
sessions = parsed['loginResponse']['session']
for session in sessions:
    if session['svcGroupName'] == SVCGRP:
        print('\tFound: %s' % session['svcGroupName'])
        print('\t\tSelected: %s' % session['sessionToken'])
        sessToken = session['sessionToken']
        break

# Command: APIKEYLIST : get list of APIs for service groups
print('\nAPI Key List')
url = APIENDPOINT + 'getApiKeyList?sessionToken=' + sessToken + APIFORMAT
print('\tURL: %s' % url)
parsed = json.load(urllib.request.urlopen(url))
retval = parsed['apiKeyInfo']['returnCode']
if retval != API_SUCCESS:
    print('API Failed, code: %s' % retval)
    sys.exit()
print('\tapiKeyInfo: returnCode = %s' % retval)

# Loop through and find the APIKEY specific API Key
apiKey = ''
apikeys = parsed['apiKeyInfo']['apiKeyInfoItem']
for apikey in apikeys:
    if apikey['serviceName'] == APIKEY:
        print('\tFound: %s' % apikey['serviceName'])
        print('\t\tSelected: %s' % apikey['apiKey'])
        apiKey = apikey['apiKey']
        break

# Command: EDGE TRAFFIC : get edge traffic raw data
print('\nTraffic/Edge')
url = APIENDPOINT + 'traffic/edge?sessionToken=' + sessToken + '&apiKey=' + apiKey + TRAFFICDATA + APIFORMAT
print('\tURL: %s' % url)
parsed = json.load(urllib.request.urlopen(url))
retval = parsed['trafficResponse']['returnCode']
if retval != API_SUCCESS:
    print('API Failed, code: %s' % retval)
    sys.exit()
print('\tapiKeyInfo: returnCode = %s' % retval)

# Show all Traffic details
chartListTimes = []
chartListTrans = []
trafficItems = parsed['trafficResponse']['trafficItem']
for item in trafficItems:
    print('\tFound: %s' % item['dateTime'])
    print('\tFound: %s' % item['dataTransferred'])
    chartListTimes.append(item['dateTime'])
    chartListTrans.append(item['dataTransferred'])

# Generate and save graph (create nice looking labels first)
chartListTimesPretty = []
for date in chartListTimes:
    # format with hyphens: 201702011700
    chartListTimesPretty.append("%s-%s-%s-%s" % (str(date)[:4], str(date)[4:6], str(date)[6:8], str(date)[8:]))

bar_chart = pygal.Bar(width=1024, height=768)
bar_chart.title = "Edge Traffic"
bar_chart.x_title = "Date/Time"
bar_chart.y_title = "Data Transferred (bytes)"
bar_chart.x_label_rotation = 270
bar_chart.legend_at_bottom = 1
bar_chart.x_labels = chartListTimesPretty
bar_chart.add(APIKEY, chartListTrans)
bar_chart.render_to_file(GRAPHFILE + '.svg')
bar_chart.render_to_png(GRAPHFILE + '.png')

# Command: LOGOUT : send token to invalidate
print('\nLogout')
url = APIENDPOINT + 'logout?sessionToken=' + sessToken + APIFORMAT
print('\tURL: %s' % url)
parsed = json.load(urllib.request.urlopen(url))
retval = parsed['logoutResponse']['resultCode']    # Ignoring retval
print('\tlogout: resultCode = %s' % retval)
johnmcilwain/cdnetworks
api_example_python.py
Python
apache-2.0
5,957
0.008058
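Building query strings by concatenation, as the script above does, breaks as soon as a credential contains a character that needs URL escaping. A safer variant of the login call is sketched below; it targets the same endpoint and reuses USER, PASS, and APIENDPOINT from the script, but uses the standard library to encode parameters rather than following the original code:

import json
import urllib.parse
import urllib.request

params = urllib.parse.urlencode({'user': USER, 'pass': PASS, 'output': 'json'})
with urllib.request.urlopen(APIENDPOINT + 'login?' + params) as resp:
    parsed = json.load(resp)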
from ply import lex, yacc


class Lexer:

    reserved = {
        #'ikawna' : 'IF',
        #'syapala' : 'ELSE',
        #'akona' : 'ELSEIF',
        #'kamipa' : 'WHILE',
        #'ibalik' : 'RETURN',
        'tayona' : 'MAIN',
        'ayokona' : 'END',
        'nbsb' : 'READ',
        'pda' : 'PRINT',
        #'paasa' : 'FOR',
        #'habang' : 'DO',
        'solo' : 'INTN',
        'pafall' : 'FLOATN',
        'feelingera' : 'CHARN',
        'assumera' : 'STRINGN',
        'friendzone' : 'BOOLN',
        #'lovemosya' : 'LT',
        #'lovekita' : 'GT',
        #'maslovemosya' : 'LEQ',
        #'maslovekita' : 'GEQ',
        #'pataskami' : 'EQ',
        #'lamangsiya' : 'NEQ',
        #'basted' : 'NOT',
        #'ot' : 'OR',
        #'at' : 'AND',
    }

    tokens = [
        'INT', 'FLOAT',
        'EOL', 'ID', 'STRING',
        'PLUS', 'MINUS', 'MUL', 'DIV', 'MOD', 'ASSIGN',
        'OPENPAR', 'CLOSEPAR',
        'OPENCURLY', 'CLOSECURLY',
        'COMMA',
        #'OPENBRACE','CLOSEBRACE'
    ] + list(reserved.values())
    #tokens += reserved.values()

    def t_ID(self, t):
        r'[a-zA-Z_][a-zA-Z_0-9]*'
        t.type = Lexer.reserved.get(t.value, 'ID')    # Check for reserved words
        # redis is case sensitive in hash keys but we want the sql to be case insensitive,
        # so we lowercase identifiers
        t.value = t.value.lower()
        return t

    # Read in a float. This rule has to be done before the int rule.
    def t_FLOAT(self, t):
        r'-?\d+\.\d*(e-?\d+)?'
        t.value = float(t.value)
        return t

    def t_INT(self, t):
        r'\d+'
        try:
            t.value = int(t.value)
        except ValueError:
            print("Integer value too large %d", t.value)
            t.value = 0
        return t

    def t_STRING(self, t):
        # TODO: unicode...
        # Note: this regex is from pyparsing,
        # TODO: may be better to refer to http://docs.python.org/reference/lexical_analysis.html
        '(?:"(?:[^"\\n\\r\\\\]|(?:"")|(?:\\\\x[0-9a-fA-F]+)|(?:\\\\.))*")|(?:\'(?:[^\'\\n\\r\\\\]|(?:\'\')|(?:\\\\x[0-9a-fA-F]+)|(?:\\\\.))*\')'
        t.value = eval(t.value)
        t.value[1:-1]
        return t

    # Tokens
    t_COMMA = r'\,'
    t_EOL = r';'
    #t_QUOTE = r'\"'
    t_OPENCURLY = r'\{'
    t_CLOSECURLY = r'\}'
    #t_OPENBRACE = r'\['
    #t_CLOSEBRACE = r'\]'
    t_PLUS = r'\+'
    t_MINUS = r'-'
    t_MUL = r'\*'
    t_DIV = r'/'
    t_MOD = r'%'
    t_ASSIGN = r'='
    t_OPENPAR = r'\('
    t_CLOSEPAR = r'\)'

    # Ignored characters
    t_ignore = " \t"

    '''
    literals = [ '{', '}' ]

    def t_lbrace(self, t):
        r'\{'
        t.type = '{'  # Set token type to the expected literal
        return t

    def t_rbrace(self, t):
        r'\}'
        t.type = '}'  # Set token type to the expected literal
        return t
    '''

    def t_COMMENT(self, t):
        r'\#.*'
        pass  # No return value. Token discarded

    def t_newline(self, t):
        r'\n+'
        t.lexer.lineno += t.value.count("\n")

    '''
    # Compute column.
    #     input is the input text string
    #     token is a token instance
    def find_column(input, token):
        last_cr = input.rfind('\n', 0, token.lexpos)
        if last_cr < 0:
            last_cr = 0
        column = (token.lexpos - last_cr) + 1
        return column
    '''

    def t_error(self, t):
        print("Illegal character '%s'" % t.value[0])
        t.lexer.skip(1)

    '''
    # EOF handling rule
    def t_eof(self, t):
        # Get more input (Example)
        more = raw_input('... ')
        if more:
            self.lexer.input(more)
            #return self.lexer.token()
        return None
    '''

    # Build the lexer
    def build(self, **kwargs):
        self.lexer = lex.lex(module=self, **kwargs)
        return self.lexer

    '''
    # Test it output
    def test(self, data):
        self.lexer.input(data)
        while True:
            tok = self.lexer.token()
            if not tok:
                break
            print(tok)
    '''

# Build the lexer and try it out
#m = Lexer()
#m.build()
#m.test("solo x = 4")
#m.test("ayokona 0;")
#m.test("pda () { x1 = [ 4 + 3 ] ; }")
#m.test("#\"hello\"")
#m.test("\"Hello World\"")
#m.test("\'Hi Universe!\'")
#m.test(" syapala() { \'Hi Universe!\' }")

variableNames = []
statementlist = []

# dictionary of names
names = { }


class Parser:

    tokens = Lexer.tokens

    # Parsing Rules
    precedence = (
        ('left', 'PLUS', 'MINUS'),
        ('left', 'MUL', 'DIV', 'MOD'),
        ('right', 'UMINUS'),
        # ('left', 'OR'),
        # ('left', 'AND'),
        # ('left', 'EQ', 'NEQ', 'LT', 'GT')
    )

    def p_program_start_start(self, t):
        'progStart : programHeading OPENCURLY decl statement endprog CLOSECURLY'
        t[0] = 0

    def p_program_main(self, t):
        'programHeading : MAIN OPENPAR CLOSEPAR'
        t[0] = 0

    def p_program_decl(self, t):
        'decl : type ID nextdecl EOL decl'
        variableNames.append(t[2])
        names[t[2]] = ''
        #print(names)

    def p_program_vempty(self, t):
        'decl : empty'

    def p_program_decl_value(self, t):
        'decl : type ID ASSIGN value nextdecl EOL decl '
        #print(variableNames)
        #variableNames.append(t[2])
        #for j in range(0, len(variableNames)):
        #    if variableNames[j] == t[2]:
        #        print(variableNames[j])
        #        print("variable already exist")
        #        break
        #    else:
        variableNames.append(t[2])
        names[t[2]] = t[4]

    def p_program_nextdecl(self, t):
        'nextdecl : COMMA ID nextdecl'
        variableNames.append(t[2])
        names[t[2]] = ''

    def p_program_declassign(self, t):
        'nextdecl : COMMA ID ASSIGN value nextdecl '
        variableNames.append(t[2])
        names[t[2]] = t[4]

    def p_program_emptydecl(self, t):
        'nextdecl : empty'

    def p_program_number(self, t):
        '''value : INT
                 | FLOAT'''
        t[0] = t[1]

    def p_program_type(self, t):
        '''type : INTN
                | FLOATN
                | CHARN
                | STRINGN
                | BOOLN'''
        t[0] = t[1]

    #def p_program_assignvar(self, t):
    #    'assignvar : ID ASSIGN expression EOL assignvar'
    #    names[t[1]] = t[3]

    #def p_program_varempty(self, t):
    #    'assignvar : empty'

    def p_program_print(self, t):
        '''statement : PRINT OPENPAR STRING CLOSEPAR EOL statement
                     | PRINT OPENPAR statement CLOSEPAR EOL statement'''
        #state = i + 1
        statementlist.append(t[3])
        #print(t[3])
        #print(statementlist)
        #print(state)

    def p_program_input(self, t):
        'statement : READ OPENPAR ID CLOSEPAR EOL statement'
        #x = input()
        statementlist.append("SCANVALUE")
        names[t[3]] = 'SCANVAL'
        #statementlist.append(t[3])

    #def p_program_if(self, t):
    #    'statement : FOR OPENPAR logicOp CLOSEPAR OPENCURLY statement CLOSECURLY'

    #def p_program_cond(self, t):
    #    'logicOp : ID EQ ID'

    def p_statement_assign(self, t):
        'statement : ID ASSIGN expression EOL statement'
        #print(t[3])
        #if(names[t[1]] == ''):
        names[t[1]] = t[3]

    def p_statement_emptyState(self, t):
        'statement : empty'
        pass

    def p_statement_expr(self, t):
        'statement : expression'
        t[0] = t[1]
        #print(t[1])  # prints the value of evaluated expression

    def p_expression_binop(self, t):
        '''expression : expression PLUS expression
                      | expression MINUS expression
                      | expression MUL expression
                      | expression DIV expression
                      | expression MOD expression'''
        if t[2] == '+':
            t[0] = t[1] + t[3]
        elif t[2] == '-':
            t[0] = t[1] - t[3]
        elif t[2] == '*':
            t[0] = t[1] * t[3]
        elif t[2] == '/':
            t[0] = t[1] / t[3]
        elif t[2] == '%':
            t[0] = t[1] % t[3]

    def p_expression_uminus(self, t):
        'expression : MINUS expression %prec UMINUS'
        t[0] = -t[2]

    def p_expression_group(self, t):
        'expression : OPENPAR expression CLOSEPAR'
        t[0] = t[2]

    def p_expression_number(self, t):
        '''expression : INT
                      | FLOAT'''
        t[0] = t[1]

    def p_expression_name(self, t):
        '''expression : ID'''
        try:
            t[0] = names[t[1]]
        except LookupError:
            #names[t[1]] = 0
            print("Undefined name '%s'" % t[1])
            t[0] = 0
            #exit(0)

    def p_empty(self, t):
        'empty :'
        pass

    def p_program_end(self, t):
        'endprog : END INT EOL'
        #print(statementlist)
        #print(names)
        x = len(statementlist)
        for i in range(0, x):
            if (statementlist[x-1] == 'SCANVALUE'):
                input()
            else:
                print(statementlist[x-1])
            x = x - 1
        if t[2] == 0:
            t[0] = 0
        else:
            print("Invalid return value")

    def p_error(self, t):
        print("Parser: Syntax error at '%s'" % t.value)

    def build(self, **kwargs):
        self.parser = yacc.yacc(module=self, **kwargs)
        return self.parser
habagat/hugot214
LexerParser.py
Python
gpl-3.0
9,650
0.023316
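Wiring the two classes together takes only a few lines. The driver below is a sketch, not part of the file above; the sample input string is mine, and it assumes ply is installed and can write its parser tables to the working directory. lex.lex() and yacc.yacc() both accept an object via module=, which is why each class exposes a build() method:

lexer = Lexer().build()

# Tokenize a declaration in the toy language and print each token.
lexer.input("solo x = 4;")
for tok in iter(lexer.token, None):
    print(tok)

parser = Parser().build()   # grammar tables are generated on first build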
#!/usr/bin/env python

from utils import *

########################################################
# Test: Node Tuning Operator: custom tuning is working #
########################################################
# Changes:                                             #
# skordas:                                             #
# Updating Test Case to work with OCP 4.4              #
########################################################


def cleanup():
    print("Cleaning after test")
    execute_command("oc delete tuned nf-conntrack-max -n openshift-cluster-node-tuning-operator")
    execute_command("oc delete project my-logging-project")


def test():
    # Test execution:
    print_title("Node Tuning Operator: Custom tuning is working")

    # Getting all nodes
    print_step("Gathering information about nodes")
    nodes = execute_command("oc get nodes --no-headers -o=custom-columns=NAME:.metadata.name").split("\n")
    del nodes[-1]
    passed("List of nodes:\n" + str(nodes))

    # Getting all tuned pods in project
    print_step("Getting information about tuned pods")
    tuned_pods = execute_command("oc get pods -n openshift-cluster-node-tuning-operator --no-headers -o=custom-columns=NAME:.metadata.name | grep tuned").split("\n")
    del tuned_pods[-1]  # split method is giving extra empty field after last line from response
    passed("List of tuned pods:\n" + str(tuned_pods))

    # Creating test project
    print_step("Create project and get information where app is running")
    execute_command("oc new-project my-logging-project")
    execute_command("oc create -f https://raw.githubusercontent.com/hongkailiu/svt-case-doc/master/files/pod_test.yaml")
    countdown(10)
    execute_command("oc label pod web -n my-logging-project tuned.openshift.io/elasticsearch=")
    # Getting node where pod with 'web' name is running
    node_where_app_is_running = execute_command("oc get pod web --no-headers -o=custom-columns=NODE:.spec.nodeName").rstrip()
    tuned_operator_pod = execute_command("oc get pods -n openshift-cluster-node-tuning-operator -o wide | grep {} | cut -d ' ' -f 1".format(node_where_app_is_running))

    # Creating a new profile
    print_step("Create new profile: router")
    execute_command("oc create -f content/tuned-nf-conntrack-max.yml")

    # Verification if new tuned exists
    print_step("Verify if new tuned exist")
    number_of_tuned_router = int(execute_command("oc get tuned -n openshift-cluster-node-tuning-operator | grep -c nf-conntrack-max"))
    print("Number of tuned nf-conntrack-max: {}".format(number_of_tuned_router))
    if number_of_tuned_router == 1:
        passed(None)
    else:
        fail("There should be one tuned router but it was: {}".format(number_of_tuned_router), cleanup)
        return False

    # Verification if correct tuned applied on node
    print_step("Verify if correct profile is active on node")
    tuned_profiles_actual = execute_command("oc get profiles.tuned.openshift.io {} -n openshift-cluster-node-tuning-operator -o json | jq -r '.spec.config.tunedProfile'".format(node_where_app_is_running)).rstrip()
    if tuned_profiles_actual.replace(" ", "") == "nf-conntrack-max":
        passed(None)
    else:
        fail("Expected value:\nnf-conntrack-max\nActual value:\n{}".format(tuned_profiles_actual), cleanup)
        return False

    # Checking net.netfilter.nf_conntrack_max values on all nodes:
    print_step("Check all nodes for net.netfilter.nf_conntrack_max value")
    for node in nodes:
        conntrack_max = execute_command_on_node(node, "sysctl net.netfilter.nf_conntrack_max | cut -d ' ' -f 3 | sed 's/\t/ /g'").rstrip()
        print("Node: {}".format(node))
        print("net.netfilter.nf_conntrack_max: {}".format(conntrack_max))
        if (node in node_where_app_is_running and conntrack_max != "1048578") or (node not in node_where_app_is_running and conntrack_max == "1048578"):
            fail("On node {} net.netfilter.nf_conntrack_max is {}".format(node, conntrack_max), cleanup)
            return False
    passed(None)

    # Checking logs on every pod:
    print_step("Check logs on every pod")
    for pod in tuned_pods:
        log = execute_command("oc logs {} -n openshift-cluster-node-tuning-operator | grep profile | tail -n1".format(pod)).rstrip()
        print("Pod: {}".format(pod))
        print('Log: {}'.format(log))
        if (pod in tuned_operator_pod and "nf-conntrack-max" not in log) or (pod not in tuned_operator_pod and "nf-conntrack-max" in log):
            fail("On pod: {} found log: {}".format(pod, log), cleanup)
            return False
    passed(None)

    # Cleaning after test
    print_step("Cleaning after test")
    cleanup()
    number_of_tuned_router = int(execute_command("oc get tuned | grep -c nf-conntrack-max"))
    if number_of_tuned_router == 0:
        passed(None)
    else:
        fail("It shouldn't be any tuned nf-conntrack-max, but it was: {}".format(number_of_tuned_router), cleanup)
        return False

    # All steps passed
    return True


if __name__ == "__main__":
    test()
mffiedler/svt
openshift_tooling/node_tuning_operator/nto_test_custom_tuning.py
Python
apache-2.0
5,119
0.003907
# -*- coding: utf-8 -*-

import sys
import datetime

from django.core.management.base import BaseCommand, CommandError
from django.core.cache import cache

from registrations.models import Registration
from forecasts import handlers
from forecasts.lib.forecast import Forecast


class Command(BaseCommand):

    def handle(self, *args, **options):

        print "Starting Daily Email Run: %s" % datetime.datetime.now()

        for registration in Registration.objects.filter(status=1,
                                                        latitude__isnull=False,
                                                        longitude__isnull=False):

            cache_key = "%.2f,%.2f" % (registration.latitude, registration.longitude)
            forecasts_list = cache.get(cache_key)

            # If we don't have a value, it was not found in the cache. Look up and cache it.
            if not forecasts_list:
                forecasts_list = Forecast.get_forecast(registration.latitude, registration.longitude)
                print "Caching the fetched forecasts for %s" % cache_key
                cache.set(cache_key, forecasts_list, 3600)

            success = handlers.send_forecast_email(registration, forecasts_list)

            if success:
                print "Forecast Email Sent: %s" % registration.email
            else:
                print "Error sending email."
greencoder/hopefullysunny-django
forecasts/management/commands/send_daily_forecasts.py
Python
mit
1,298
0.007704
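The cache key above rounds coordinates to two decimal places, so registrations within roughly a kilometre of each other (0.01 degree of latitude is about 1.1 km) share one cached forecast and therefore one upstream lookup per hour. A standalone sketch of the effect, with coordinates chosen by me for illustration:

# Two users a few blocks apart collapse to the same cache key.
key_a = "%.2f,%.2f" % (40.7101, -74.0060)
key_b = "%.2f,%.2f" % (40.7149, -74.0082)
assert key_a == key_b == "40.71,-74.01"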
# -*- coding: utf-8 -*-
#########################################################################
#
# Copyright (C) 2012 OpenPlans
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################

from django.contrib import admin

from geonode.base.admin import MediaTranslationAdmin, ResourceBaseAdminForm
from geonode.layers.models import Layer, Attribute, Style
from geonode.layers.models import LayerFile, UploadSession

import autocomplete_light


class LayerAdminForm(ResourceBaseAdminForm):

    class Meta:
        model = Layer


class AttributeInline(admin.TabularInline):
    model = Attribute


class LayerAdmin(MediaTranslationAdmin):
    list_display = (
        'id',
        'typename',
        'service_type',
        'title',
        'Floodplains',
        'SUC',
        'date',
        'category')
    list_display_links = ('id',)
    list_editable = ('title', 'category')
    list_filter = ('owner', 'category', 'restriction_code_type__identifier',
                   'date', 'date_type')

    # def get_queryset(self, request):
    #     return super(LayerAdmin,
    #                  self).get_queryset(request).prefetch_related('floodplain_tag', 'SUC_tag')

    def Floodplains(self, obj):
        return u", ".join(o.name for o in obj.floodplain_tag.all())

    def SUC(self, obj):
        return u", ".join(o.name for o in obj.SUC_tag.all())

    # def get_queryset(self, request):
    #     return super(LayerAdmin, self).get_queryset(request).prefetch_related('SUC_tag')

    # def SUC(self, obj):
    #     return u", ".join(o.name for o in obj.SUC_tag.all())

    inlines = [AttributeInline]
    search_fields = ('typename', 'title', 'abstract', 'purpose',)
    filter_horizontal = ('contacts',)
    date_hierarchy = 'date'
    readonly_fields = ('uuid', 'typename', 'workspace')
    form = LayerAdminForm


class AttributeAdmin(admin.ModelAdmin):
    model = Attribute
    list_display_links = ('id',)
    list_display = (
        'id',
        'layer',
        'attribute',
        'description',
        'attribute_label',
        'attribute_type',
        'display_order')
    list_filter = ('layer', 'attribute_type')
    search_fields = ('attribute', 'attribute_label',)


class StyleAdmin(admin.ModelAdmin):
    model = Style
    list_display_links = ('sld_title',)
    list_display = ('id', 'name', 'sld_title', 'workspace', 'sld_url')
    list_filter = ('workspace',)
    search_fields = ('name', 'workspace',)


class LayerFileInline(admin.TabularInline):
    model = LayerFile


class UploadSessionAdmin(admin.ModelAdmin):
    model = UploadSession
    list_display = ('date', 'user', 'processed')
    inlines = [LayerFileInline]


admin.site.register(Layer, LayerAdmin)
admin.site.register(Attribute, AttributeAdmin)
admin.site.register(Style, StyleAdmin)
admin.site.register(UploadSession, UploadSessionAdmin)
PhilLidar-DAD/geonode
geonode/layers/admin.py
Python
gpl-3.0
3,475
0.000288
"""Tests for user-friendly public interface to polynomial functions. """ from sympy.polys.polytools import ( Poly, PurePoly, poly, parallel_poly_from_expr, degree, degree_list, LC, LM, LT, pdiv, prem, pquo, pexquo, div, rem, quo, exquo, half_gcdex, gcdex, invert, subresultants, resultant, discriminant, terms_gcd, cofactors, gcd, gcd_list, lcm, lcm_list, trunc, monic, content, primitive, compose, decompose, sturm, gff_list, gff, sqf_norm, sqf_part, sqf_list, sqf, factor_list, factor, intervals, refine_root, count_roots, real_roots, nroots, ground_roots, nth_power_roots_poly, cancel, reduced, groebner, GroebnerBasis, is_zero_dimensional, _torational_factor_list, to_rational_coeffs) from sympy.polys.polyerrors import ( MultivariatePolynomialError, ExactQuotientFailed, PolificationFailed, ComputationFailed, UnificationFailed, RefinementFailed, GeneratorsNeeded, GeneratorsError, PolynomialError, CoercionFailed, DomainError, OptionError, FlagError) from sympy.polys.polyclasses import DMP from sympy.polys.fields import field from sympy.polys.domains import FF, ZZ, QQ, RR, EX from sympy.polys.domains.realfield import RealField from sympy.polys.orderings import lex, grlex, grevlex from sympy import ( S, Integer, Rational, Float, Mul, Symbol, sqrt, Piecewise, Derivative, exp, sin, tanh, expand, oo, I, pi, re, im, rootof, Eq, Tuple, Expr, diff) from sympy.core.basic import _aresame from sympy.core.compatibility import iterable from sympy.core.mul import _keep_coeff from sympy.utilities.pytest import raises, XFAIL from sympy.simplify import simplify from sympy.abc import a, b, c, d, p, q, t, w, x, y, z from sympy import MatrixSymbol def _epsilon_eq(a, b): for x, y in zip(a, b): if abs(x - y) > 1e-10: return False return True def _strict_eq(a, b): if type(a) == type(b): if iterable(a): if len(a) == len(b): return all(_strict_eq(c, d) for c, d in zip(a, b)) else: return False else: return isinstance(a, Poly) and a.eq(b, strict=True) else: return False def test_Poly_from_dict(): K = FF(3) assert Poly.from_dict( {0: 1, 1: 2}, gens=x, domain=K).rep == DMP([K(2), K(1)], K) assert Poly.from_dict( {0: 1, 1: 5}, gens=x, domain=K).rep == DMP([K(2), K(1)], K) assert Poly.from_dict( {(0,): 1, (1,): 2}, gens=x, domain=K).rep == DMP([K(2), K(1)], K) assert Poly.from_dict( {(0,): 1, (1,): 5}, gens=x, domain=K).rep == DMP([K(2), K(1)], K) assert Poly.from_dict({(0, 0): 1, (1, 1): 2}, gens=( x, y), domain=K).rep == DMP([[K(2), K(0)], [K(1)]], K) assert Poly.from_dict({0: 1, 1: 2}, gens=x).rep == DMP([ZZ(2), ZZ(1)], ZZ) assert Poly.from_dict( {0: 1, 1: 2}, gens=x, field=True).rep == DMP([QQ(2), QQ(1)], QQ) assert Poly.from_dict( {0: 1, 1: 2}, gens=x, domain=ZZ).rep == DMP([ZZ(2), ZZ(1)], ZZ) assert Poly.from_dict( {0: 1, 1: 2}, gens=x, domain=QQ).rep == DMP([QQ(2), QQ(1)], QQ) assert Poly.from_dict( {(0,): 1, (1,): 2}, gens=x).rep == DMP([ZZ(2), ZZ(1)], ZZ) assert Poly.from_dict( {(0,): 1, (1,): 2}, gens=x, field=True).rep == DMP([QQ(2), QQ(1)], QQ) assert Poly.from_dict( {(0,): 1, (1,): 2}, gens=x, domain=ZZ).rep == DMP([ZZ(2), ZZ(1)], ZZ) assert Poly.from_dict( {(0,): 1, (1,): 2}, gens=x, domain=QQ).rep == DMP([QQ(2), QQ(1)], QQ) assert Poly.from_dict({(1,): sin(y)}, gens=x, composite=False) == \ Poly(sin(y)*x, x, domain='EX') assert Poly.from_dict({(1,): y}, gens=x, composite=False) == \ Poly(y*x, x, domain='EX') assert Poly.from_dict({(1, 1): 1}, gens=(x, y), composite=False) == \ Poly(x*y, x, y, domain='ZZ') assert Poly.from_dict({(1, 0): y}, gens=(x, z), composite=False) == \ Poly(y*x, x, z, domain='EX') def 
test_Poly_from_list(): K = FF(3) assert Poly.from_list([2, 1], gens=x, domain=K).rep == DMP([K(2), K(1)], K) assert Poly.from_list([5, 1], gens=x, domain=K).rep == DMP([K(2), K(1)], K) assert Poly.from_list([2, 1], gens=x).rep == DMP([ZZ(2), ZZ(1)], ZZ) assert Poly.from_list([2, 1], gens=x, field=True).rep == DMP([QQ(2), QQ(1)], QQ) assert Poly.from_list([2, 1], gens=x, domain=ZZ).rep == DMP([ZZ(2), ZZ(1)], ZZ) assert Poly.from_list([2, 1], gens=x, domain=QQ).rep == DMP([QQ(2), QQ(1)], QQ) assert Poly.from_list([0, 1.0], gens=x).rep == DMP([RR(1.0)], RR) assert Poly.from_list([1.0, 0], gens=x).rep == DMP([RR(1.0), RR(0.0)], RR) raises(MultivariatePolynomialError, lambda: Poly.from_list([[]], gens=(x, y))) def test_Poly_from_poly(): f = Poly(x + 7, x, domain=ZZ) g = Poly(x + 2, x, modulus=3) h = Poly(x + y, x, y, domain=ZZ) K = FF(3) assert Poly.from_poly(f) == f assert Poly.from_poly(f, domain=K).rep == DMP([K(1), K(1)], K) assert Poly.from_poly(f, domain=ZZ).rep == DMP([1, 7], ZZ) assert Poly.from_poly(f, domain=QQ).rep == DMP([1, 7], QQ) assert Poly.from_poly(f, gens=x) == f assert Poly.from_poly(f, gens=x, domain=K).rep == DMP([K(1), K(1)], K) assert Poly.from_poly(f, gens=x, domain=ZZ).rep == DMP([1, 7], ZZ) assert Poly.from_poly(f, gens=x, domain=QQ).rep == DMP([1, 7], QQ) assert Poly.from_poly(f, gens=y) == Poly(x + 7, y, domain='ZZ[x]') raises(CoercionFailed, lambda: Poly.from_poly(f, gens=y, domain=K)) raises(CoercionFailed, lambda: Poly.from_poly(f, gens=y, domain=ZZ)) raises(CoercionFailed, lambda: Poly.from_poly(f, gens=y, domain=QQ)) assert Poly.from_poly(f, gens=(x, y)) == Poly(x + 7, x, y, domain='ZZ') assert Poly.from_poly( f, gens=(x, y), domain=ZZ) == Poly(x + 7, x, y, domain='ZZ') assert Poly.from_poly( f, gens=(x, y), domain=QQ) == Poly(x + 7, x, y, domain='QQ') assert Poly.from_poly( f, gens=(x, y), modulus=3) == Poly(x + 7, x, y, domain='FF(3)') K = FF(2) assert Poly.from_poly(g) == g assert Poly.from_poly(g, domain=ZZ).rep == DMP([1, -1], ZZ) raises(CoercionFailed, lambda: Poly.from_poly(g, domain=QQ)) assert Poly.from_poly(g, domain=K).rep == DMP([K(1), K(0)], K) assert Poly.from_poly(g, gens=x) == g assert Poly.from_poly(g, gens=x, domain=ZZ).rep == DMP([1, -1], ZZ) raises(CoercionFailed, lambda: Poly.from_poly(g, gens=x, domain=QQ)) assert Poly.from_poly(g, gens=x, domain=K).rep == DMP([K(1), K(0)], K) K = FF(3) assert Poly.from_poly(h) == h assert Poly.from_poly( h, domain=ZZ).rep == DMP([[ZZ(1)], [ZZ(1), ZZ(0)]], ZZ) assert Poly.from_poly( h, domain=QQ).rep == DMP([[QQ(1)], [QQ(1), QQ(0)]], QQ) assert Poly.from_poly(h, domain=K).rep == DMP([[K(1)], [K(1), K(0)]], K) assert Poly.from_poly(h, gens=x) == Poly(x + y, x, domain=ZZ[y]) raises(CoercionFailed, lambda: Poly.from_poly(h, gens=x, domain=ZZ)) assert Poly.from_poly( h, gens=x, domain=ZZ[y]) == Poly(x + y, x, domain=ZZ[y]) raises(CoercionFailed, lambda: Poly.from_poly(h, gens=x, domain=QQ)) assert Poly.from_poly( h, gens=x, domain=QQ[y]) == Poly(x + y, x, domain=QQ[y]) raises(CoercionFailed, lambda: Poly.from_poly(h, gens=x, modulus=3)) assert Poly.from_poly(h, gens=y) == Poly(x + y, y, domain=ZZ[x]) raises(CoercionFailed, lambda: Poly.from_poly(h, gens=y, domain=ZZ)) assert Poly.from_poly( h, gens=y, domain=ZZ[x]) == Poly(x + y, y, domain=ZZ[x]) raises(CoercionFailed, lambda: Poly.from_poly(h, gens=y, domain=QQ)) assert Poly.from_poly( h, gens=y, domain=QQ[x]) == Poly(x + y, y, domain=QQ[x]) raises(CoercionFailed, lambda: Poly.from_poly(h, gens=y, modulus=3)) assert Poly.from_poly(h, gens=(x, y)) == h assert 
Poly.from_poly( h, gens=(x, y), domain=ZZ).rep == DMP([[ZZ(1)], [ZZ(1), ZZ(0)]], ZZ) assert Poly.from_poly( h, gens=(x, y), domain=QQ).rep == DMP([[QQ(1)], [QQ(1), QQ(0)]], QQ) assert Poly.from_poly( h, gens=(x, y), domain=K).rep == DMP([[K(1)], [K(1), K(0)]], K) assert Poly.from_poly( h, gens=(y, x)).rep == DMP([[ZZ(1)], [ZZ(1), ZZ(0)]], ZZ) assert Poly.from_poly( h, gens=(y, x), domain=ZZ).rep == DMP([[ZZ(1)], [ZZ(1), ZZ(0)]], ZZ) assert Poly.from_poly( h, gens=(y, x), domain=QQ).rep == DMP([[QQ(1)], [QQ(1), QQ(0)]], QQ) assert Poly.from_poly( h, gens=(y, x), domain=K).rep == DMP([[K(1)], [K(1), K(0)]], K) assert Poly.from_poly( h, gens=(x, y), field=True).rep == DMP([[QQ(1)], [QQ(1), QQ(0)]], QQ) assert Poly.from_poly( h, gens=(x, y), field=True).rep == DMP([[QQ(1)], [QQ(1), QQ(0)]], QQ) def test_Poly_from_expr(): raises(GeneratorsNeeded, lambda: Poly.from_expr(S(0))) raises(GeneratorsNeeded, lambda: Poly.from_expr(S(7))) F3 = FF(3) assert Poly.from_expr(x + 5, domain=F3).rep == DMP([F3(1), F3(2)], F3) assert Poly.from_expr(y + 5, domain=F3).rep == DMP([F3(1), F3(2)], F3) assert Poly.from_expr(x + 5, x, domain=F3).rep == DMP([F3(1), F3(2)], F3) assert Poly.from_expr(y + 5, y, domain=F3).rep == DMP([F3(1), F3(2)], F3) assert Poly.from_expr(x + y, domain=F3).rep == DMP([[F3(1)], [F3(1), F3(0)]], F3) assert Poly.from_expr(x + y, x, y, domain=F3).rep == DMP([[F3(1)], [F3(1), F3(0)]], F3) assert Poly.from_expr(x + 5).rep == DMP([1, 5], ZZ) assert Poly.from_expr(y + 5).rep == DMP([1, 5], ZZ) assert Poly.from_expr(x + 5, x).rep == DMP([1, 5], ZZ) assert Poly.from_expr(y + 5, y).rep == DMP([1, 5], ZZ) assert Poly.from_expr(x + 5, domain=ZZ).rep == DMP([1, 5], ZZ) assert Poly.from_expr(y + 5, domain=ZZ).rep == DMP([1, 5], ZZ) assert Poly.from_expr(x + 5, x, domain=ZZ).rep == DMP([1, 5], ZZ) assert Poly.from_expr(y + 5, y, domain=ZZ).rep == DMP([1, 5], ZZ) assert Poly.from_expr(x + 5, x, y, domain=ZZ).rep == DMP([[1], [5]], ZZ) assert Poly.from_expr(y + 5, x, y, domain=ZZ).rep == DMP([[1, 5]], ZZ) def test_Poly__new__(): raises(GeneratorsError, lambda: Poly(x + 1, x, x)) raises(GeneratorsError, lambda: Poly(x + y, x, y, domain=ZZ[x])) raises(GeneratorsError, lambda: Poly(x + y, x, y, domain=ZZ[y])) raises(OptionError, lambda: Poly(x, x, symmetric=True)) raises(OptionError, lambda: Poly(x + 2, x, modulus=3, domain=QQ)) raises(OptionError, lambda: Poly(x + 2, x, domain=ZZ, gaussian=True)) raises(OptionError, lambda: Poly(x + 2, x, modulus=3, gaussian=True)) raises(OptionError, lambda: Poly(x + 2, x, domain=ZZ, extension=[sqrt(3)])) raises(OptionError, lambda: Poly(x + 2, x, modulus=3, extension=[sqrt(3)])) raises(OptionError, lambda: Poly(x + 2, x, domain=ZZ, extension=True)) raises(OptionError, lambda: Poly(x + 2, x, modulus=3, extension=True)) raises(OptionError, lambda: Poly(x + 2, x, domain=ZZ, greedy=True)) raises(OptionError, lambda: Poly(x + 2, x, domain=QQ, field=True)) raises(OptionError, lambda: Poly(x + 2, x, domain=ZZ, greedy=False)) raises(OptionError, lambda: Poly(x + 2, x, domain=QQ, field=False)) raises(NotImplementedError, lambda: Poly(x + 1, x, modulus=3, order='grlex')) raises(NotImplementedError, lambda: Poly(x + 1, x, order='grlex')) raises(GeneratorsNeeded, lambda: Poly({1: 2, 0: 1})) raises(GeneratorsNeeded, lambda: Poly([2, 1])) raises(GeneratorsNeeded, lambda: Poly((2, 1))) raises(GeneratorsNeeded, lambda: Poly(1)) f = a*x**2 + b*x + c assert Poly({2: a, 1: b, 0: c}, x) == f assert Poly(iter([a, b, c]), x) == f assert Poly([a, b, c], x) == f assert Poly((a, b, c), x) == f f 
= Poly({}, x, y, z) assert f.gens == (x, y, z) and f.as_expr() == 0 assert Poly(Poly(a*x + b*y, x, y), x) == Poly(a*x + b*y, x) assert Poly(3*x**2 + 2*x + 1, domain='ZZ').all_coeffs() == [3, 2, 1] assert Poly(3*x**2 + 2*x + 1, domain='QQ').all_coeffs() == [3, 2, 1] assert Poly(3*x**2 + 2*x + 1, domain='RR').all_coeffs() == [3.0, 2.0, 1.0] raises(CoercionFailed, lambda: Poly(3*x**2/5 + 2*x/5 + 1, domain='ZZ')) assert Poly( 3*x**2/5 + 2*x/5 + 1, domain='QQ').all_coeffs() == [S(3)/5, S(2)/5, 1] assert _epsilon_eq( Poly(3*x**2/5 + 2*x/5 + 1, domain='RR').all_coeffs(), [0.6, 0.4, 1.0]) assert Poly(3.0*x**2 + 2.0*x + 1, domain='ZZ').all_coeffs() == [3, 2, 1] assert Poly(3.0*x**2 + 2.0*x + 1, domain='QQ').all_coeffs() == [3, 2, 1] assert Poly( 3.0*x**2 + 2.0*x + 1, domain='RR').all_coeffs() == [3.0, 2.0, 1.0] raises(CoercionFailed, lambda: Poly(3.1*x**2 + 2.1*x + 1, domain='ZZ')) assert Poly(3.1*x**2 + 2.1*x + 1, domain='QQ').all_coeffs() == [S(31)/10, S(21)/10, 1] assert Poly(3.1*x**2 + 2.1*x + 1, domain='RR').all_coeffs() == [3.1, 2.1, 1.0] assert Poly({(2, 1): 1, (1, 2): 2, (1, 1): 3}, x, y) == \ Poly(x**2*y + 2*x*y**2 + 3*x*y, x, y) assert Poly(x**2 + 1, extension=I).get_domain() == QQ.algebraic_field(I) f = 3*x**5 - x**4 + x**3 - x** 2 + 65538 assert Poly(f, x, modulus=65537, symmetric=True) == \ Poly(3*x**5 - x**4 + x**3 - x** 2 + 1, x, modulus=65537, symmetric=True) assert Poly(f, x, modulus=65537, symmetric=False) == \ Poly(3*x**5 + 65536*x**4 + x**3 + 65536*x** 2 + 1, x, modulus=65537, symmetric=False) assert isinstance(Poly(x**2 + x + 1.0).get_domain(), RealField) def test_Poly__args(): assert Poly(x**2 + 1).args == (x**2 + 1,) def test_Poly__gens(): assert Poly((x - p)*(x - q), x).gens == (x,) assert Poly((x - p)*(x - q), p).gens == (p,) assert Poly((x - p)*(x - q), q).gens == (q,) assert Poly((x - p)*(x - q), x, p).gens == (x, p) assert Poly((x - p)*(x - q), x, q).gens == (x, q) assert Poly((x - p)*(x - q), x, p, q).gens == (x, p, q) assert Poly((x - p)*(x - q), p, x, q).gens == (p, x, q) assert Poly((x - p)*(x - q), p, q, x).gens == (p, q, x) assert Poly((x - p)*(x - q)).gens == (x, p, q) assert Poly((x - p)*(x - q), sort='x > p > q').gens == (x, p, q) assert Poly((x - p)*(x - q), sort='p > x > q').gens == (p, x, q) assert Poly((x - p)*(x - q), sort='p > q > x').gens == (p, q, x) assert Poly((x - p)*(x - q), x, p, q, sort='p > q > x').gens == (x, p, q) assert Poly((x - p)*(x - q), wrt='x').gens == (x, p, q) assert Poly((x - p)*(x - q), wrt='p').gens == (p, x, q) assert Poly((x - p)*(x - q), wrt='q').gens == (q, x, p) assert Poly((x - p)*(x - q), wrt=x).gens == (x, p, q) assert Poly((x - p)*(x - q), wrt=p).gens == (p, x, q) assert Poly((x - p)*(x - q), wrt=q).gens == (q, x, p) assert Poly((x - p)*(x - q), x, p, q, wrt='p').gens == (x, p, q) assert Poly((x - p)*(x - q), wrt='p', sort='q > x').gens == (p, q, x) assert Poly((x - p)*(x - q), wrt='q', sort='p > x').gens == (q, p, x) def test_Poly_zero(): assert Poly(x).zero == Poly(0, x, domain=ZZ) assert Poly(x/2).zero == Poly(0, x, domain=QQ) def test_Poly_one(): assert Poly(x).one == Poly(1, x, domain=ZZ) assert Poly(x/2).one == Poly(1, x, domain=QQ) def test_Poly__unify(): raises(UnificationFailed, lambda: Poly(x)._unify(y)) F3 = FF(3) F5 = FF(5) assert Poly(x, x, modulus=3)._unify(Poly(y, y, modulus=3))[2:] == ( DMP([[F3(1)], []], F3), DMP([[F3(1), F3(0)]], F3)) assert Poly(x, x, modulus=3)._unify(Poly(y, y, modulus=5))[2:] == ( DMP([[F5(1)], []], F5), DMP([[F5(1), F5(0)]], F5)) assert Poly(y, x, y)._unify(Poly(x, x, modulus=3))[2:] 
== (DMP([[F3(1), F3(0)]], F3), DMP([[F3(1)], []], F3)) assert Poly(x, x, modulus=3)._unify(Poly(y, x, y))[2:] == (DMP([[F3(1)], []], F3), DMP([[F3(1), F3(0)]], F3)) assert Poly(x + 1, x)._unify(Poly(x + 2, x))[2:] == (DMP([1, 1], ZZ), DMP([1, 2], ZZ)) assert Poly(x + 1, x, domain='QQ')._unify(Poly(x + 2, x))[2:] == (DMP([1, 1], QQ), DMP([1, 2], QQ)) assert Poly(x + 1, x)._unify(Poly(x + 2, x, domain='QQ'))[2:] == (DMP([1, 1], QQ), DMP([1, 2], QQ)) assert Poly(x + 1, x)._unify(Poly(x + 2, x, y))[2:] == (DMP([[1], [1]], ZZ), DMP([[1], [2]], ZZ)) assert Poly(x + 1, x, domain='QQ')._unify(Poly(x + 2, x, y))[2:] == (DMP([[1], [1]], QQ), DMP([[1], [2]], QQ)) assert Poly(x + 1, x)._unify(Poly(x + 2, x, y, domain='QQ'))[2:] == (DMP([[1], [1]], QQ), DMP([[1], [2]], QQ)) assert Poly(x + 1, x, y)._unify(Poly(x + 2, x))[2:] == (DMP([[1], [1]], ZZ), DMP([[1], [2]], ZZ)) assert Poly(x + 1, x, y, domain='QQ')._unify(Poly(x + 2, x))[2:] == (DMP([[1], [1]], QQ), DMP([[1], [2]], QQ)) assert Poly(x + 1, x, y)._unify(Poly(x + 2, x, domain='QQ'))[2:] == (DMP([[1], [1]], QQ), DMP([[1], [2]], QQ)) assert Poly(x + 1, x, y)._unify(Poly(x + 2, x, y))[2:] == (DMP([[1], [1]], ZZ), DMP([[1], [2]], ZZ)) assert Poly(x + 1, x, y, domain='QQ')._unify(Poly(x + 2, x, y))[2:] == (DMP([[1], [1]], QQ), DMP([[1], [2]], QQ)) assert Poly(x + 1, x, y)._unify(Poly(x + 2, x, y, domain='QQ'))[2:] == (DMP([[1], [1]], QQ), DMP([[1], [2]], QQ)) assert Poly(x + 1, x)._unify(Poly(x + 2, y, x))[2:] == (DMP([[1, 1]], ZZ), DMP([[1, 2]], ZZ)) assert Poly(x + 1, x, domain='QQ')._unify(Poly(x + 2, y, x))[2:] == (DMP([[1, 1]], QQ), DMP([[1, 2]], QQ)) assert Poly(x + 1, x)._unify(Poly(x + 2, y, x, domain='QQ'))[2:] == (DMP([[1, 1]], QQ), DMP([[1, 2]], QQ)) assert Poly(x + 1, y, x)._unify(Poly(x + 2, x))[2:] == (DMP([[1, 1]], ZZ), DMP([[1, 2]], ZZ)) assert Poly(x + 1, y, x, domain='QQ')._unify(Poly(x + 2, x))[2:] == (DMP([[1, 1]], QQ), DMP([[1, 2]], QQ)) assert Poly(x + 1, y, x)._unify(Poly(x + 2, x, domain='QQ'))[2:] == (DMP([[1, 1]], QQ), DMP([[1, 2]], QQ)) assert Poly(x + 1, x, y)._unify(Poly(x + 2, y, x))[2:] == (DMP([[1], [1]], ZZ), DMP([[1], [2]], ZZ)) assert Poly(x + 1, x, y, domain='QQ')._unify(Poly(x + 2, y, x))[2:] == (DMP([[1], [1]], QQ), DMP([[1], [2]], QQ)) assert Poly(x + 1, x, y)._unify(Poly(x + 2, y, x, domain='QQ'))[2:] == (DMP([[1], [1]], QQ), DMP([[1], [2]], QQ)) assert Poly(x + 1, y, x)._unify(Poly(x + 2, x, y))[2:] == (DMP([[1, 1]], ZZ), DMP([[1, 2]], ZZ)) assert Poly(x + 1, y, x, domain='QQ')._unify(Poly(x + 2, x, y))[2:] == (DMP([[1, 1]], QQ), DMP([[1, 2]], QQ)) assert Poly(x + 1, y, x)._unify(Poly(x + 2, x, y, domain='QQ'))[2:] == (DMP([[1, 1]], QQ), DMP([[1, 2]], QQ)) F, A, B = field("a,b", ZZ) assert Poly(a*x, x, domain='ZZ[a]')._unify(Poly(a*b*x, x, domain='ZZ(a,b)'))[2:] == \ (DMP([A, F(0)], F.to_domain()), DMP([A*B, F(0)], F.to_domain())) assert Poly(a*x, x, domain='ZZ(a)')._unify(Poly(a*b*x, x, domain='ZZ(a,b)'))[2:] == \ (DMP([A, F(0)], F.to_domain()), DMP([A*B, F(0)], F.to_domain())) raises(CoercionFailed, lambda: Poly(Poly(x**2 + x**2*z, y, field=True), domain='ZZ(x)')) f = Poly(t**2 + t/3 + x, t, domain='QQ(x)') g = Poly(t**2 + t/3 + x, t, domain='QQ[x]') assert f._unify(g)[2:] == (f.rep, f.rep) def test_Poly_free_symbols(): assert Poly(x**2 + 1).free_symbols == {x} assert Poly(x**2 + y*z).free_symbols == {x, y, z} assert Poly(x**2 + y*z, x).free_symbols == {x, y, z} assert Poly(x**2 + sin(y*z)).free_symbols == {x, y, z} assert Poly(x**2 + sin(y*z), x).free_symbols == {x, y, z} assert Poly(x**2 + sin(y*z), x, 


def test_PurePoly_free_symbols():
    assert PurePoly(x**2 + 1).free_symbols == set()
    assert PurePoly(x**2 + y*z).free_symbols == set()
    assert PurePoly(x**2 + y*z, x).free_symbols == {y, z}
    assert PurePoly(x**2 + sin(y*z)).free_symbols == set()
    assert PurePoly(x**2 + sin(y*z), x).free_symbols == {y, z}
    assert PurePoly(x**2 + sin(y*z), x, domain=EX).free_symbols == {y, z}


def test_Poly__eq__():
    assert (Poly(x, x) == Poly(x, x)) is True

    assert (Poly(x, x, domain=QQ) == Poly(x, x)) is True
    assert (Poly(x, x) == Poly(x, x, domain=QQ)) is True

    assert (Poly(x, x, domain=ZZ[a]) == Poly(x, x)) is True
    assert (Poly(x, x) == Poly(x, x, domain=ZZ[a])) is True

    assert (Poly(x*y, x, y) == Poly(x, x)) is False

    assert (Poly(x, x, y) == Poly(x, x)) is False
    assert (Poly(x, x) == Poly(x, x, y)) is False

    assert (Poly(x**2 + 1, x) == Poly(y**2 + 1, y)) is False
    assert (Poly(y**2 + 1, y) == Poly(x**2 + 1, x)) is False

    f = Poly(x, x, domain=ZZ)
    g = Poly(x, x, domain=QQ)

    assert f.eq(g) is True
    assert f.ne(g) is False

    assert f.eq(g, strict=True) is False
    assert f.ne(g, strict=True) is True

    t0 = Symbol('t0')

    f = Poly((t0/2 + x**2)*t**2 - x**2*t, t, domain='QQ[x,t0]')
    g = Poly((t0/2 + x**2)*t**2 - x**2*t, t, domain='ZZ(x,t0)')

    assert (f == g) is True


def test_PurePoly__eq__():
    assert (PurePoly(x, x) == PurePoly(x, x)) is True

    assert (PurePoly(x, x, domain=QQ) == PurePoly(x, x)) is True
    assert (PurePoly(x, x) == PurePoly(x, x, domain=QQ)) is True

    assert (PurePoly(x, x, domain=ZZ[a]) == PurePoly(x, x)) is True
    assert (PurePoly(x, x) == PurePoly(x, x, domain=ZZ[a])) is True

    assert (PurePoly(x*y, x, y) == PurePoly(x, x)) is False

    assert (PurePoly(x, x, y) == PurePoly(x, x)) is False
    assert (PurePoly(x, x) == PurePoly(x, x, y)) is False

    assert (PurePoly(x**2 + 1, x) == PurePoly(y**2 + 1, y)) is True
    assert (PurePoly(y**2 + 1, y) == PurePoly(x**2 + 1, x)) is True

    f = PurePoly(x, x, domain=ZZ)
    g = PurePoly(x, x, domain=QQ)

    assert f.eq(g) is True
    assert f.ne(g) is False

    assert f.eq(g, strict=True) is False
    assert f.ne(g, strict=True) is True

    f = PurePoly(x, x, domain=ZZ)
    g = PurePoly(y, y, domain=QQ)

    assert f.eq(g) is True
    assert f.ne(g) is False

    assert f.eq(g, strict=True) is False
    assert f.ne(g, strict=True) is True
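
# Poly.__eq__ compares mathematical content and tolerates differing
# coefficient domains, while eq(strict=True) also requires the domains to
# match; PurePoly goes further and ignores generator names entirely, which
# is why PurePoly(x**2 + 1, x) == PurePoly(y**2 + 1, y) above.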


def test_PurePoly_Poly():
    assert isinstance(PurePoly(Poly(x**2 + 1)), PurePoly) is True
    assert isinstance(Poly(PurePoly(x**2 + 1)), Poly) is True


def test_Poly_get_domain():
    assert Poly(2*x).get_domain() == ZZ

    assert Poly(2*x, domain='ZZ').get_domain() == ZZ
    assert Poly(2*x, domain='QQ').get_domain() == QQ

    assert Poly(x/2).get_domain() == QQ

    raises(CoercionFailed, lambda: Poly(x/2, domain='ZZ'))
    assert Poly(x/2, domain='QQ').get_domain() == QQ

    assert isinstance(Poly(0.2*x).get_domain(), RealField)


def test_Poly_set_domain():
    assert Poly(2*x + 1).set_domain(ZZ) == Poly(2*x + 1)
    assert Poly(2*x + 1).set_domain('ZZ') == Poly(2*x + 1)

    assert Poly(2*x + 1).set_domain(QQ) == Poly(2*x + 1, domain='QQ')
    assert Poly(2*x + 1).set_domain('QQ') == Poly(2*x + 1, domain='QQ')

    assert Poly(S(2)/10*x + S(1)/10).set_domain('RR') == Poly(0.2*x + 0.1)
    assert Poly(0.2*x + 0.1).set_domain('QQ') == Poly(S(2)/10*x + S(1)/10)

    raises(CoercionFailed, lambda: Poly(x/2 + 1).set_domain(ZZ))
    raises(CoercionFailed, lambda: Poly(x + 1, modulus=2).set_domain(QQ))

    raises(GeneratorsError, lambda: Poly(x*y, x, y).set_domain(ZZ[y]))


def test_Poly_get_modulus():
    assert Poly(x**2 + 1, modulus=2).get_modulus() == 2
    raises(PolynomialError, lambda: Poly(x**2 + 1).get_modulus())


def test_Poly_set_modulus():
    assert Poly(x**2 + 1, modulus=2).set_modulus(7) == Poly(x**2 + 1, modulus=7)
    assert Poly(x**2 + 5, modulus=7).set_modulus(2) == Poly(x**2 + 1, modulus=2)

    assert Poly(x**2 + 1).set_modulus(2) == Poly(x**2 + 1, modulus=2)

    raises(CoercionFailed, lambda: Poly(x/2 + 1).set_modulus(2))


def test_Poly_add_ground():
    assert Poly(x + 1).add_ground(2) == Poly(x + 3)


def test_Poly_sub_ground():
    assert Poly(x + 1).sub_ground(2) == Poly(x - 1)


def test_Poly_mul_ground():
    assert Poly(x + 1).mul_ground(2) == Poly(2*x + 2)


def test_Poly_quo_ground():
    assert Poly(2*x + 4).quo_ground(2) == Poly(x + 2)
    assert Poly(2*x + 3).quo_ground(2) == Poly(x + 1)


def test_Poly_exquo_ground():
    assert Poly(2*x + 4).exquo_ground(2) == Poly(x + 2)
    raises(ExactQuotientFailed, lambda: Poly(2*x + 3).exquo_ground(2))


def test_Poly_abs():
    assert Poly(-x + 1, x).abs() == abs(Poly(-x + 1, x)) == Poly(x + 1, x)


def test_Poly_neg():
    assert Poly(-x + 1, x).neg() == -Poly(-x + 1, x) == Poly(x - 1, x)


def test_Poly_add():
    assert Poly(0, x).add(Poly(0, x)) == Poly(0, x)
    assert Poly(0, x) + Poly(0, x) == Poly(0, x)

    assert Poly(1, x).add(Poly(0, x)) == Poly(1, x)
    assert Poly(1, x, y) + Poly(0, x) == Poly(1, x, y)
    assert Poly(0, x).add(Poly(1, x, y)) == Poly(1, x, y)
    assert Poly(0, x, y) + Poly(1, x, y) == Poly(1, x, y)

    assert Poly(1, x) + x == Poly(x + 1, x)
    assert Poly(1, x) + sin(x) == 1 + sin(x)

    assert Poly(x, x) + 1 == Poly(x + 1, x)
    assert 1 + Poly(x, x) == Poly(x + 1, x)


def test_Poly_sub():
    assert Poly(0, x).sub(Poly(0, x)) == Poly(0, x)
    assert Poly(0, x) - Poly(0, x) == Poly(0, x)

    assert Poly(1, x).sub(Poly(0, x)) == Poly(1, x)
    assert Poly(1, x, y) - Poly(0, x) == Poly(1, x, y)
    assert Poly(0, x).sub(Poly(1, x, y)) == Poly(-1, x, y)
    assert Poly(0, x, y) - Poly(1, x, y) == Poly(-1, x, y)

    assert Poly(1, x) - x == Poly(1 - x, x)
    assert Poly(1, x) - sin(x) == 1 - sin(x)

    assert Poly(x, x) - 1 == Poly(x - 1, x)
    assert 1 - Poly(x, x) == Poly(1 - x, x)


def test_Poly_mul():
    assert Poly(0, x).mul(Poly(0, x)) == Poly(0, x)
    assert Poly(0, x) * Poly(0, x) == Poly(0, x)

    assert Poly(2, x).mul(Poly(4, x)) == Poly(8, x)
    assert Poly(2, x, y) * Poly(4, x) == Poly(8, x, y)
    assert Poly(4, x).mul(Poly(2, x, y)) == Poly(8, x, y)
    assert Poly(4, x, y) * Poly(2, x, y) == Poly(8, x, y)

    assert Poly(1, x) * x == Poly(x, x)
    assert Poly(1, x) * sin(x) == sin(x)

    assert Poly(x, x) * 2 == Poly(2*x, x)
    assert 2 * Poly(x, x) == Poly(2*x, x)


def test_Poly_sqr():
    assert Poly(x*y, x, y).sqr() == Poly(x**2*y**2, x, y)


def test_Poly_pow():
    assert Poly(x, x).pow(10) == Poly(x**10, x)
    assert Poly(x, x).pow(Integer(10)) == Poly(x**10, x)

    assert Poly(2*y, x, y).pow(4) == Poly(16*y**4, x, y)
    assert Poly(2*y, x, y).pow(Integer(4)) == Poly(16*y**4, x, y)

    assert Poly(7*x*y, x, y)**3 == Poly(343*x**3*y**3, x, y)

    assert Poly(x*y + 1, x, y)**(-1) == (x*y + 1)**(-1)
    assert Poly(x*y + 1, x, y)**x == (x*y + 1)**x


def test_Poly_divmod():
    f, g = Poly(x**2), Poly(x)
    q, r = g, Poly(0, x)

    assert divmod(f, g) == (q, r)
    assert f // g == q
    assert f % g == r

    assert divmod(f, x) == (q, r)
    assert f // x == q
    assert f % x == r

    q, r = Poly(0, x), Poly(2, x)

    assert divmod(2, g) == (q, r)
    assert 2 // g == q
    assert 2 % g == r

    assert Poly(x)/Poly(x) == 1
    assert Poly(x**2)/Poly(x) == x
    assert Poly(x)/Poly(x**2) == 1/x
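
# Mixed Poly/Expr arithmetic above: the result stays a Poly when it is
# still polynomial in the generators (Poly(x, x) + 1), falls back to plain
# Expr arithmetic otherwise (Poly(1, x) + sin(x)), and true division
# always produces an Expr quotient (Poly(x)/Poly(x**2) == 1/x).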


def test_Poly_eq_ne():
    assert (Poly(x + y, x, y) == Poly(x + y, x, y)) is True
    assert (Poly(x + y, x) == Poly(x + y, x, y)) is False
    assert (Poly(x + y, x, y) == Poly(x + y, x)) is False
    assert (Poly(x + y, x) == Poly(x + y, x)) is True
    assert (Poly(x + y, y) == Poly(x + y, y)) is True

    assert (Poly(x + y, x, y) == x + y) is True
    assert (Poly(x + y, x) == x + y) is True
    assert (Poly(x + y, x, y) == x + y) is True
    assert (Poly(x + y, x) == x + y) is True
    assert (Poly(x + y, y) == x + y) is True

    assert (Poly(x + y, x, y) != Poly(x + y, x, y)) is False
    assert (Poly(x + y, x) != Poly(x + y, x, y)) is True
    assert (Poly(x + y, x, y) != Poly(x + y, x)) is True
    assert (Poly(x + y, x) != Poly(x + y, x)) is False
    assert (Poly(x + y, y) != Poly(x + y, y)) is False

    assert (Poly(x + y, x, y) != x + y) is False
    assert (Poly(x + y, x) != x + y) is False
    assert (Poly(x + y, x, y) != x + y) is False
    assert (Poly(x + y, x) != x + y) is False
    assert (Poly(x + y, y) != x + y) is False

    assert (Poly(x, x) == sin(x)) is False
    assert (Poly(x, x) != sin(x)) is True


def test_Poly_nonzero():
    assert not bool(Poly(0, x)) is True
    assert not bool(Poly(1, x)) is False


def test_Poly_properties():
    assert Poly(0, x).is_zero is True
    assert Poly(1, x).is_zero is False

    assert Poly(1, x).is_one is True
    assert Poly(2, x).is_one is False

    assert Poly(x - 1, x).is_sqf is True
    assert Poly((x - 1)**2, x).is_sqf is False

    assert Poly(x - 1, x).is_monic is True
    assert Poly(2*x - 1, x).is_monic is False

    assert Poly(3*x + 2, x).is_primitive is True
    assert Poly(4*x + 2, x).is_primitive is False

    assert Poly(1, x).is_ground is True
    assert Poly(x, x).is_ground is False

    assert Poly(x + y + z + 1).is_linear is True
    assert Poly(x*y*z + 1).is_linear is False

    assert Poly(x*y + z + 1).is_quadratic is True
    assert Poly(x*y*z + 1).is_quadratic is False

    assert Poly(x*y).is_monomial is True
    assert Poly(x*y + 1).is_monomial is False

    assert Poly(x**2 + x*y).is_homogeneous is True
    assert Poly(x**3 + x*y).is_homogeneous is False

    assert Poly(x).is_univariate is True
    assert Poly(x*y).is_univariate is False

    assert Poly(x*y).is_multivariate is True
    assert Poly(x).is_multivariate is False

    assert Poly(
        x**16 + x**14 - x**10 + x**8 - x**6 + x**2 + 1).is_cyclotomic is False
    assert Poly(
        x**16 + x**14 - x**10 - x**8 - x**6 + x**2 + 1).is_cyclotomic is True


def test_Poly_is_irreducible():
    assert Poly(x**2 + x + 1).is_irreducible is True
    assert Poly(x**2 + 2*x + 1).is_irreducible is False

    assert Poly(7*x + 3, modulus=11).is_irreducible is True
    assert Poly(7*x**2 + 3*x + 1, modulus=11).is_irreducible is False


def test_Poly_subs():
    assert Poly(x + 1).subs(x, 0) == 1

    assert Poly(x + 1).subs(x, x) == Poly(x + 1)
    assert Poly(x + 1).subs(x, y) == Poly(y + 1)

    assert Poly(x*y, x).subs(y, x) == x**2
    assert Poly(x*y, x).subs(x, y) == y**2


def test_Poly_replace():
    assert Poly(x + 1).replace(x) == Poly(x + 1)
    assert Poly(x + 1).replace(y) == Poly(y + 1)

    raises(PolynomialError, lambda: Poly(x + y).replace(z))

    assert Poly(x + 1).replace(x, x) == Poly(x + 1)
    assert Poly(x + 1).replace(x, y) == Poly(y + 1)

    assert Poly(x + y).replace(x, x) == Poly(x + y)
    assert Poly(x + y).replace(x, z) == Poly(z + y, z, y)

    assert Poly(x + y).replace(y, y) == Poly(x + y)
    assert Poly(x + y).replace(y, z) == Poly(x + z, x, z)

    raises(PolynomialError, lambda: Poly(x + y).replace(x, y))
    raises(PolynomialError, lambda: Poly(x + y).replace(z, t))

    assert Poly(x + y, x).replace(x, z) == Poly(z + y, z)
    assert Poly(x + y, y).replace(y, z) == Poly(x + z, z)

    raises(PolynomialError, lambda: Poly(x + y, x).replace(x, y))
    raises(PolynomialError, lambda: Poly(x + y, y).replace(y, x))
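
# subs() evaluates a generator at a value and may leave the polynomial
# ring, while replace() merely renames a generator; renaming onto an
# already-present generator is rejected with PolynomialError.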


def test_Poly_reorder():
    raises(PolynomialError, lambda: Poly(x + y).reorder(x, z))

    assert Poly(x + y, x, y).reorder(x, y) == Poly(x + y, x, y)
    assert Poly(x + y, x, y).reorder(y, x) == Poly(x + y, y, x)

    assert Poly(x + y, y, x).reorder(x, y) == Poly(x + y, x, y)
    assert Poly(x + y, y, x).reorder(y, x) == Poly(x + y, y, x)

    assert Poly(x + y, x, y).reorder(wrt=x) == Poly(x + y, x, y)
    assert Poly(x + y, x, y).reorder(wrt=y) == Poly(x + y, y, x)


def test_Poly_ltrim():
    f = Poly(y**2 + y*z**2, x, y, z).ltrim(y)
    assert f.as_expr() == y**2 + y*z**2 and f.gens == (y, z)

    raises(PolynomialError, lambda: Poly(x*y**2 + y**2, x, y).ltrim(y))


def test_Poly_has_only_gens():
    assert Poly(x*y + 1, x, y, z).has_only_gens(x, y) is True
    assert Poly(x*y + z, x, y, z).has_only_gens(x, y) is False

    raises(GeneratorsError, lambda: Poly(x*y**2 + y**2, x, y).has_only_gens(t))


def test_Poly_to_ring():
    assert Poly(2*x + 1, domain='ZZ').to_ring() == Poly(2*x + 1, domain='ZZ')
    assert Poly(2*x + 1, domain='QQ').to_ring() == Poly(2*x + 1, domain='ZZ')

    raises(CoercionFailed, lambda: Poly(x/2 + 1).to_ring())
    raises(DomainError, lambda: Poly(2*x + 1, modulus=3).to_ring())


def test_Poly_to_field():
    assert Poly(2*x + 1, domain='ZZ').to_field() == Poly(2*x + 1, domain='QQ')
    assert Poly(2*x + 1, domain='QQ').to_field() == Poly(2*x + 1, domain='QQ')

    assert Poly(x/2 + 1, domain='QQ').to_field() == Poly(x/2 + 1, domain='QQ')
    assert Poly(2*x + 1, modulus=3).to_field() == Poly(2*x + 1, modulus=3)

    assert Poly(2.0*x + 1.0).to_field() == Poly(2.0*x + 1.0)


def test_Poly_to_exact():
    assert Poly(2*x).to_exact() == Poly(2*x)
    assert Poly(x/2).to_exact() == Poly(x/2)

    assert Poly(0.1*x).to_exact() == Poly(x/10)


def test_Poly_retract():
    f = Poly(x**2 + 1, x, domain=QQ[y])

    assert f.retract() == Poly(x**2 + 1, x, domain='ZZ')
    assert f.retract(field=True) == Poly(x**2 + 1, x, domain='QQ')

    assert Poly(0, x, y).retract() == Poly(0, x, y)


def test_Poly_slice():
    f = Poly(x**3 + 2*x**2 + 3*x + 4)

    assert f.slice(0, 0) == Poly(0, x)
    assert f.slice(0, 1) == Poly(4, x)
    assert f.slice(0, 2) == Poly(3*x + 4, x)
    assert f.slice(0, 3) == Poly(2*x**2 + 3*x + 4, x)
    assert f.slice(0, 4) == Poly(x**3 + 2*x**2 + 3*x + 4, x)

    assert f.slice(x, 0, 0) == Poly(0, x)
    assert f.slice(x, 0, 1) == Poly(4, x)
    assert f.slice(x, 0, 2) == Poly(3*x + 4, x)
    assert f.slice(x, 0, 3) == Poly(2*x**2 + 3*x + 4, x)
    assert f.slice(x, 0, 4) == Poly(x**3 + 2*x**2 + 3*x + 4, x)


def test_Poly_coeffs():
    assert Poly(0, x).coeffs() == [0]
    assert Poly(1, x).coeffs() == [1]

    assert Poly(2*x + 1, x).coeffs() == [2, 1]

    assert Poly(7*x**2 + 2*x + 1, x).coeffs() == [7, 2, 1]
    assert Poly(7*x**4 + 2*x + 1, x).coeffs() == [7, 2, 1]

    assert Poly(x*y**7 + 2*x**2*y**3).coeffs('lex') == [2, 1]
    assert Poly(x*y**7 + 2*x**2*y**3).coeffs('grlex') == [1, 2]


def test_Poly_monoms():
    assert Poly(0, x).monoms() == [(0,)]
    assert Poly(1, x).monoms() == [(0,)]

    assert Poly(2*x + 1, x).monoms() == [(1,), (0,)]

    assert Poly(7*x**2 + 2*x + 1, x).monoms() == [(2,), (1,), (0,)]
    assert Poly(7*x**4 + 2*x + 1, x).monoms() == [(4,), (1,), (0,)]

    assert Poly(x*y**7 + 2*x**2*y**3).monoms('lex') == [(2, 3), (1, 7)]
    assert Poly(x*y**7 + 2*x**2*y**3).monoms('grlex') == [(1, 7), (2, 3)]


def test_Poly_terms():
    assert Poly(0, x).terms() == [((0,), 0)]
    assert Poly(1, x).terms() == [((0,), 1)]

    assert Poly(2*x + 1, x).terms() == [((1,), 2), ((0,), 1)]

    assert Poly(7*x**2 + 2*x + 1, x).terms() == [((2,), 7), ((1,), 2), ((0,), 1)]
    assert Poly(7*x**4 + 2*x + 1, x).terms() == [((4,), 7), ((1,), 2), ((0,), 1)]

    assert Poly(
        x*y**7 + 2*x**2*y**3).terms('lex') == [((2, 3), 2), ((1, 7), 1)]
    assert Poly(
        x*y**7 + 2*x**2*y**3).terms('grlex') == [((1, 7), 1), ((2, 3), 2)]
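
# coeffs()/monoms()/terms() accept a monomial order: 'lex' compares
# exponent tuples left to right, 'grlex' sorts by total degree first,
# which is why x*y**7 (degree 8) and 2*x**2*y**3 (degree 5) trade places
# between the two orders above.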


def test_Poly_all_coeffs():
    assert Poly(0, x).all_coeffs() == [0]
    assert Poly(1, x).all_coeffs() == [1]

    assert Poly(2*x + 1, x).all_coeffs() == [2, 1]

    assert Poly(7*x**2 + 2*x + 1, x).all_coeffs() == [7, 2, 1]
    assert Poly(7*x**4 + 2*x + 1, x).all_coeffs() == [7, 0, 0, 2, 1]


def test_Poly_all_monoms():
    assert Poly(0, x).all_monoms() == [(0,)]
    assert Poly(1, x).all_monoms() == [(0,)]

    assert Poly(2*x + 1, x).all_monoms() == [(1,), (0,)]

    assert Poly(7*x**2 + 2*x + 1, x).all_monoms() == [(2,), (1,), (0,)]
    assert Poly(7*x**4 + 2*x + 1, x).all_monoms() == \
        [(4,), (3,), (2,), (1,), (0,)]


def test_Poly_all_terms():
    assert Poly(0, x).all_terms() == [((0,), 0)]
    assert Poly(1, x).all_terms() == [((0,), 1)]

    assert Poly(2*x + 1, x).all_terms() == [((1,), 2), ((0,), 1)]

    assert Poly(7*x**2 + 2*x + 1, x).all_terms() == \
        [((2,), 7), ((1,), 2), ((0,), 1)]
    assert Poly(7*x**4 + 2*x + 1, x).all_terms() == \
        [((4,), 7), ((3,), 0), ((2,), 0), ((1,), 2), ((0,), 1)]


def test_Poly_termwise():
    f = Poly(x**2 + 20*x + 400)
    g = Poly(x**2 + 2*x + 4)

    def func(monom, coeff):
        (k,) = monom
        return coeff//10**(2 - k)

    assert f.termwise(func) == g

    def func(monom, coeff):
        (k,) = monom
        return (k,), coeff//10**(2 - k)

    assert f.termwise(func) == g


def test_Poly_length():
    assert Poly(0, x).length() == 0
    assert Poly(1, x).length() == 1
    assert Poly(x, x).length() == 1

    assert Poly(x + 1, x).length() == 2
    assert Poly(x**2 + 1, x).length() == 2
    assert Poly(x**2 + x + 1, x).length() == 3


def test_Poly_as_dict():
    assert Poly(0, x).as_dict() == {}
    assert Poly(0, x, y, z).as_dict() == {}

    assert Poly(1, x).as_dict() == {(0,): 1}
    assert Poly(1, x, y, z).as_dict() == {(0, 0, 0): 1}

    assert Poly(x**2 + 3, x).as_dict() == {(2,): 1, (0,): 3}
    assert Poly(x**2 + 3, x, y, z).as_dict() == {(2, 0, 0): 1, (0, 0, 0): 3}

    assert Poly(3*x**2*y*z**3 + 4*x*y + 5*x*z).as_dict() == \
        {(2, 1, 3): 3, (1, 1, 0): 4, (1, 0, 1): 5}


def test_Poly_as_expr():
    assert Poly(0, x).as_expr() == 0
    assert Poly(0, x, y, z).as_expr() == 0

    assert Poly(1, x).as_expr() == 1
    assert Poly(1, x, y, z).as_expr() == 1

    assert Poly(x**2 + 3, x).as_expr() == x**2 + 3
    assert Poly(x**2 + 3, x, y, z).as_expr() == x**2 + 3

    assert Poly(
        3*x**2*y*z**3 + 4*x*y + 5*x*z).as_expr() == 3*x**2*y*z**3 + 4*x*y + 5*x*z

    f = Poly(x**2 + 2*x*y**2 - y, x, y)

    assert f.as_expr() == -y + x**2 + 2*x*y**2

    assert f.as_expr({x: 5}) == 25 - y + 10*y**2
    assert f.as_expr({y: 6}) == -6 + 72*x + x**2

    assert f.as_expr({x: 5, y: 6}) == 379
    assert f.as_expr(5, 6) == 379

    raises(GeneratorsError, lambda: f.as_expr({z: 7}))


def test_Poly_lift():
    assert Poly(x**4 - I*x + 17*I, x, gaussian=True).lift() == \
        Poly(x**16 + 2*x**10 + 578*x**8 + x**4 - 578*x**2 + 83521, x,
             domain='QQ')


def test_Poly_deflate():
    assert Poly(0, x).deflate() == ((1,), Poly(0, x))
    assert Poly(1, x).deflate() == ((1,), Poly(1, x))
    assert Poly(x, x).deflate() == ((1,), Poly(x, x))

    assert Poly(x**2, x).deflate() == ((2,), Poly(x, x))
    assert Poly(x**17, x).deflate() == ((17,), Poly(x, x))

    assert Poly(
        x**2*y*z**11 + x**4*z**11).deflate() == ((2, 1, 11), Poly(x*y*z + x**2*z))


def test_Poly_inject():
    f = Poly(x**2*y + x*y**3 + x*y + 1, x)

    assert f.inject() == Poly(x**2*y + x*y**3 + x*y + 1, x, y)
    assert f.inject(front=True) == Poly(y**3*x + y*x**2 + y*x + 1, y, x)
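
# inject() moves generators out of the coefficient domain into the
# polynomial itself (above, y from the ZZ[y] coefficients of a Poly in x);
# eject(), tested next, is the inverse operation.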


def test_Poly_eject():
    f = Poly(x**2*y + x*y**3 + x*y + 1, x, y)

    assert f.eject(x) == Poly(x*y**3 + (x**2 + x)*y + 1, y, domain='ZZ[x]')
    assert f.eject(y) == Poly(y*x**2 + (y**3 + y)*x + 1, x, domain='ZZ[y]')

    ex = x + y + z + t + w
    g = Poly(ex, x, y, z, t, w)

    assert g.eject(x) == Poly(ex, y, z, t, w, domain='ZZ[x]')
    assert g.eject(x, y) == Poly(ex, z, t, w, domain='ZZ[x, y]')
    assert g.eject(x, y, z) == Poly(ex, t, w, domain='ZZ[x, y, z]')
    assert g.eject(w) == Poly(ex, x, y, z, t, domain='ZZ[w]')
    assert g.eject(t, w) == Poly(ex, x, y, z, domain='ZZ[w, t]')
    assert g.eject(z, t, w) == Poly(ex, x, y, domain='ZZ[w, t, z]')

    raises(DomainError, lambda: Poly(x*y, x, y, domain=ZZ[z]).eject(y))
    raises(NotImplementedError, lambda: Poly(x*y, x, y, z).eject(y))


def test_Poly_exclude():
    assert Poly(x, x, y).exclude() == Poly(x, x)
    assert Poly(x*y, x, y).exclude() == Poly(x*y, x, y)
    assert Poly(1, x, y).exclude() == Poly(1, x, y)


def test_Poly__gen_to_level():
    assert Poly(1, x, y)._gen_to_level(-2) == 0
    assert Poly(1, x, y)._gen_to_level(-1) == 1
    assert Poly(1, x, y)._gen_to_level(0) == 0
    assert Poly(1, x, y)._gen_to_level(1) == 1

    raises(PolynomialError, lambda: Poly(1, x, y)._gen_to_level(-3))
    raises(PolynomialError, lambda: Poly(1, x, y)._gen_to_level(2))

    assert Poly(1, x, y)._gen_to_level(x) == 0
    assert Poly(1, x, y)._gen_to_level(y) == 1

    assert Poly(1, x, y)._gen_to_level('x') == 0
    assert Poly(1, x, y)._gen_to_level('y') == 1

    raises(PolynomialError, lambda: Poly(1, x, y)._gen_to_level(z))
    raises(PolynomialError, lambda: Poly(1, x, y)._gen_to_level('z'))


def test_Poly_degree():
    assert Poly(0, x).degree() == -oo
    assert Poly(1, x).degree() == 0
    assert Poly(x, x).degree() == 1

    assert Poly(0, x).degree(gen=0) == -oo
    assert Poly(1, x).degree(gen=0) == 0
    assert Poly(x, x).degree(gen=0) == 1

    assert Poly(0, x).degree(gen=x) == -oo
    assert Poly(1, x).degree(gen=x) == 0
    assert Poly(x, x).degree(gen=x) == 1

    assert Poly(0, x).degree(gen='x') == -oo
    assert Poly(1, x).degree(gen='x') == 0
    assert Poly(x, x).degree(gen='x') == 1

    raises(PolynomialError, lambda: Poly(1, x).degree(gen=1))
    raises(PolynomialError, lambda: Poly(1, x).degree(gen=y))
    raises(PolynomialError, lambda: Poly(1, x).degree(gen='y'))

    assert Poly(1, x, y).degree() == 0
    assert Poly(2*y, x, y).degree() == 0
    assert Poly(x*y, x, y).degree() == 1

    assert Poly(1, x, y).degree(gen=x) == 0
    assert Poly(2*y, x, y).degree(gen=x) == 0
    assert Poly(x*y, x, y).degree(gen=x) == 1

    assert Poly(1, x, y).degree(gen=y) == 0
    assert Poly(2*y, x, y).degree(gen=y) == 1
    assert Poly(x*y, x, y).degree(gen=y) == 1

    assert degree(1, x) == 0
    assert degree(x, x) == 1

    assert degree(x*y**2, gen=x) == 1
    assert degree(x*y**2, gen=y) == 2

    assert degree(x*y**2, x, y) == 1
    assert degree(x*y**2, y, x) == 2

    raises(ComputationFailed, lambda: degree(1))


def test_Poly_degree_list():
    assert Poly(0, x).degree_list() == (-oo,)
    assert Poly(0, x, y).degree_list() == (-oo, -oo)
    assert Poly(0, x, y, z).degree_list() == (-oo, -oo, -oo)

    assert Poly(1, x).degree_list() == (0,)
    assert Poly(1, x, y).degree_list() == (0, 0)
    assert Poly(1, x, y, z).degree_list() == (0, 0, 0)

    assert Poly(x**2*y + x**3*z**2 + 1).degree_list() == (3, 1, 2)

    assert degree_list(1, x) == (0,)
    assert degree_list(x, x) == (1,)

    assert degree_list(x*y**2) == (1, 2)

    raises(ComputationFailed, lambda: degree_list(1))


def test_Poly_total_degree():
    assert Poly(x**2*y + x**3*z**2 + 1).total_degree() == 5
    assert Poly(x**2 + z**3).total_degree() == 3
    assert Poly(x*y*z + z**4).total_degree() == 4
    assert Poly(x**3 + x + 1).total_degree() == 3


def test_Poly_homogenize():
    assert Poly(x**2 + y).homogenize(z) == Poly(x**2 + y*z)
    assert Poly(x + y).homogenize(z) == Poly(x + y, x, y, z)
    assert Poly(x + y**2).homogenize(y) == Poly(x*y + y**2)
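
# homogenize(z) pads each term with a power of z so that every term
# reaches the same total degree, adding z as a new generator when needed;
# homogeneous_order(), tested next, recovers that common degree, or None
# if the polynomial is not homogeneous.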


def test_Poly_homogeneous_order():
    assert Poly(0, x, y).homogeneous_order() == -oo
    assert Poly(1, x, y).homogeneous_order() == 0
    assert Poly(x, x, y).homogeneous_order() == 1
    assert Poly(x*y, x, y).homogeneous_order() == 2

    assert Poly(x + 1, x, y).homogeneous_order() is None
    assert Poly(x*y + x, x, y).homogeneous_order() is None

    assert Poly(x**5 + 2*x**3*y**2 + 9*x*y**4).homogeneous_order() == 5
    assert Poly(x**5 + 2*x**3*y**3 + 9*x*y**4).homogeneous_order() is None


def test_Poly_LC():
    assert Poly(0, x).LC() == 0
    assert Poly(1, x).LC() == 1
    assert Poly(2*x**2 + x, x).LC() == 2

    assert Poly(x*y**7 + 2*x**2*y**3).LC('lex') == 2
    assert Poly(x*y**7 + 2*x**2*y**3).LC('grlex') == 1

    assert LC(x*y**7 + 2*x**2*y**3, order='lex') == 2
    assert LC(x*y**7 + 2*x**2*y**3, order='grlex') == 1


def test_Poly_TC():
    assert Poly(0, x).TC() == 0
    assert Poly(1, x).TC() == 1
    assert Poly(2*x**2 + x, x).TC() == 0


def test_Poly_EC():
    assert Poly(0, x).EC() == 0
    assert Poly(1, x).EC() == 1
    assert Poly(2*x**2 + x, x).EC() == 1

    assert Poly(x*y**7 + 2*x**2*y**3).EC('lex') == 1
    assert Poly(x*y**7 + 2*x**2*y**3).EC('grlex') == 2


def test_Poly_coeff():
    assert Poly(0, x).coeff_monomial(1) == 0
    assert Poly(0, x).coeff_monomial(x) == 0

    assert Poly(1, x).coeff_monomial(1) == 1
    assert Poly(1, x).coeff_monomial(x) == 0

    assert Poly(x**8, x).coeff_monomial(1) == 0
    assert Poly(x**8, x).coeff_monomial(x**7) == 0
    assert Poly(x**8, x).coeff_monomial(x**8) == 1
    assert Poly(x**8, x).coeff_monomial(x**9) == 0

    assert Poly(3*x*y**2 + 1, x, y).coeff_monomial(1) == 1
    assert Poly(3*x*y**2 + 1, x, y).coeff_monomial(x*y**2) == 3

    p = Poly(24*x*y*exp(8) + 23*x, x, y)

    assert p.coeff_monomial(x) == 23
    assert p.coeff_monomial(y) == 0
    assert p.coeff_monomial(x*y) == 24*exp(8)

    assert p.as_expr().coeff(x) == 24*y*exp(8) + 23
    raises(NotImplementedError, lambda: p.coeff(x))

    raises(ValueError, lambda: Poly(x + 1).coeff_monomial(0))
    raises(ValueError, lambda: Poly(x + 1).coeff_monomial(3*x))
    raises(ValueError, lambda: Poly(x + 1).coeff_monomial(3*x*y))


def test_Poly_nth():
    assert Poly(0, x).nth(0) == 0
    assert Poly(0, x).nth(1) == 0

    assert Poly(1, x).nth(0) == 1
    assert Poly(1, x).nth(1) == 0

    assert Poly(x**8, x).nth(0) == 0
    assert Poly(x**8, x).nth(7) == 0
    assert Poly(x**8, x).nth(8) == 1
    assert Poly(x**8, x).nth(9) == 0

    assert Poly(3*x*y**2 + 1, x, y).nth(0, 0) == 1
    assert Poly(3*x*y**2 + 1, x, y).nth(1, 2) == 3

    raises(ValueError, lambda: Poly(x*y + 1, x, y).nth(1))


def test_Poly_LM():
    assert Poly(0, x).LM() == (0,)
    assert Poly(1, x).LM() == (0,)
    assert Poly(2*x**2 + x, x).LM() == (2,)

    assert Poly(x*y**7 + 2*x**2*y**3).LM('lex') == (2, 3)
    assert Poly(x*y**7 + 2*x**2*y**3).LM('grlex') == (1, 7)

    assert LM(x*y**7 + 2*x**2*y**3, order='lex') == x**2*y**3
    assert LM(x*y**7 + 2*x**2*y**3, order='grlex') == x*y**7


def test_Poly_LM_custom_order():
    f = Poly(x**2*y**3*z + x**2*y*z**3 + x*y*z + 1)
    rev_lex = lambda monom: tuple(reversed(monom))

    assert f.LM(order='lex') == (2, 3, 1)
    assert f.LM(order=rev_lex) == (2, 1, 3)


def test_Poly_EM():
    assert Poly(0, x).EM() == (0,)
    assert Poly(1, x).EM() == (0,)
    assert Poly(2*x**2 + x, x).EM() == (1,)

    assert Poly(x*y**7 + 2*x**2*y**3).EM('lex') == (1, 7)
    assert Poly(x*y**7 + 2*x**2*y**3).EM('grlex') == (2, 3)


def test_Poly_LT():
    assert Poly(0, x).LT() == ((0,), 0)
    assert Poly(1, x).LT() == ((0,), 1)
    assert Poly(2*x**2 + x, x).LT() == ((2,), 2)

    assert Poly(x*y**7 + 2*x**2*y**3).LT('lex') == ((2, 3), 2)
    assert Poly(x*y**7 + 2*x**2*y**3).LT('grlex') == ((1, 7), 1)

    assert LT(x*y**7 + 2*x**2*y**3, order='lex') == 2*x**2*y**3
    assert LT(x*y**7 + 2*x**2*y**3, order='grlex') == x*y**7
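
# Naming convention exercised above: the L*-methods (LC/LM/LT) return the
# leading coefficient/monomial/term under the given order, while the
# E*-methods (EC/EM/ET) return the ending (lowest) ones, so the 'lex' and
# 'grlex' results mirror each other.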


def test_Poly_ET():
    assert Poly(0, x).ET() == ((0,), 0)
    assert Poly(1, x).ET() == ((0,), 1)
    assert Poly(2*x**2 + x, x).ET() == ((1,), 1)

    assert Poly(x*y**7 + 2*x**2*y**3).ET('lex') == ((1, 7), 1)
    assert Poly(x*y**7 + 2*x**2*y**3).ET('grlex') == ((2, 3), 2)


def test_Poly_max_norm():
    assert Poly(-1, x).max_norm() == 1
    assert Poly(0, x).max_norm() == 0
    assert Poly(1, x).max_norm() == 1


def test_Poly_l1_norm():
    assert Poly(-1, x).l1_norm() == 1
    assert Poly(0, x).l1_norm() == 0
    assert Poly(1, x).l1_norm() == 1


def test_Poly_clear_denoms():
    coeff, poly = Poly(x + 2, x).clear_denoms()
    assert coeff == 1 and poly == Poly(
        x + 2, x, domain='ZZ') and poly.get_domain() == ZZ

    coeff, poly = Poly(x/2 + 1, x).clear_denoms()
    assert coeff == 2 and poly == Poly(
        x + 2, x, domain='QQ') and poly.get_domain() == QQ

    coeff, poly = Poly(x/2 + 1, x).clear_denoms(convert=True)
    assert coeff == 2 and poly == Poly(
        x + 2, x, domain='ZZ') and poly.get_domain() == ZZ

    coeff, poly = Poly(x/y + 1, x).clear_denoms(convert=True)
    assert coeff == y and poly == Poly(
        x + y, x, domain='ZZ[y]') and poly.get_domain() == ZZ[y]

    coeff, poly = Poly(x/3 + sqrt(2), x, domain='EX').clear_denoms()
    assert coeff == 3 and poly == Poly(
        x + 3*sqrt(2), x, domain='EX') and poly.get_domain() == EX

    coeff, poly = Poly(
        x/3 + sqrt(2), x, domain='EX').clear_denoms(convert=True)
    assert coeff == 3 and poly == Poly(
        x + 3*sqrt(2), x, domain='EX') and poly.get_domain() == EX


def test_Poly_rat_clear_denoms():
    f = Poly(x**2/y + 1, x)
    g = Poly(x**3 + y, x)

    assert f.rat_clear_denoms(g) == \
        (Poly(x**2 + y, x), Poly(y*x**3 + y**2, x))

    f = f.set_domain(EX)
    g = g.set_domain(EX)

    assert f.rat_clear_denoms(g) == (f, g)


def test_Poly_integrate():
    assert Poly(x + 1).integrate() == Poly(x**2/2 + x)
    assert Poly(x + 1).integrate(x) == Poly(x**2/2 + x)
    assert Poly(x + 1).integrate((x, 1)) == Poly(x**2/2 + x)

    assert Poly(x*y + 1).integrate(x) == Poly(x**2*y/2 + x)
    assert Poly(x*y + 1).integrate(y) == Poly(x*y**2/2 + y)

    assert Poly(x*y + 1).integrate(x, x) == Poly(x**3*y/6 + x**2/2)
    assert Poly(x*y + 1).integrate(y, y) == Poly(x*y**3/6 + y**2/2)

    assert Poly(x*y + 1).integrate((x, 2)) == Poly(x**3*y/6 + x**2/2)
    assert Poly(x*y + 1).integrate((y, 2)) == Poly(x*y**3/6 + y**2/2)

    assert Poly(x*y + 1).integrate(x, y) == Poly(x**2*y**2/4 + x*y)
    assert Poly(x*y + 1).integrate(y, x) == Poly(x**2*y**2/4 + x*y)


def test_Poly_diff():
    assert Poly(x**2 + x).diff() == Poly(2*x + 1)
    assert Poly(x**2 + x).diff(x) == Poly(2*x + 1)
    assert Poly(x**2 + x).diff((x, 1)) == Poly(2*x + 1)

    assert Poly(x**2*y**2 + x*y).diff(x) == Poly(2*x*y**2 + y)
    assert Poly(x**2*y**2 + x*y).diff(y) == Poly(2*x**2*y + x)

    assert Poly(x**2*y**2 + x*y).diff(x, x) == Poly(2*y**2, x, y)
    assert Poly(x**2*y**2 + x*y).diff(y, y) == Poly(2*x**2, x, y)

    assert Poly(x**2*y**2 + x*y).diff((x, 2)) == Poly(2*y**2, x, y)
    assert Poly(x**2*y**2 + x*y).diff((y, 2)) == Poly(2*x**2, x, y)

    assert Poly(x**2*y**2 + x*y).diff(x, y) == Poly(4*x*y + 1)
    assert Poly(x**2*y**2 + x*y).diff(y, x) == Poly(4*x*y + 1)


def test_issue_9585():
    assert diff(Poly(x**2 + x)) == Poly(2*x + 1)
    assert diff(Poly(x**2 + x), x, evaluate=False) == \
        Derivative(Poly(x**2 + x), x)
    assert Derivative(Poly(x**2 + x), x).doit() == Poly(2*x + 1)
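
# integrate() and diff() act generator-wise and accept repeated generators
# or (gen, count) pairs; issue 9585 above checks that the top-level diff()
# treats a Poly like any other expression, including the unevaluated
# Derivative form.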


def test_Poly_eval():
    assert Poly(0, x).eval(7) == 0
    assert Poly(1, x).eval(7) == 1
    assert Poly(x, x).eval(7) == 7

    assert Poly(0, x).eval(0, 7) == 0
    assert Poly(1, x).eval(0, 7) == 1
    assert Poly(x, x).eval(0, 7) == 7

    assert Poly(0, x).eval(x, 7) == 0
    assert Poly(1, x).eval(x, 7) == 1
    assert Poly(x, x).eval(x, 7) == 7

    assert Poly(0, x).eval('x', 7) == 0
    assert Poly(1, x).eval('x', 7) == 1
    assert Poly(x, x).eval('x', 7) == 7

    raises(PolynomialError, lambda: Poly(1, x).eval(1, 7))
    raises(PolynomialError, lambda: Poly(1, x).eval(y, 7))
    raises(PolynomialError, lambda: Poly(1, x).eval('y', 7))

    assert Poly(123, x, y).eval(7) == Poly(123, y)
    assert Poly(2*y, x, y).eval(7) == Poly(2*y, y)
    assert Poly(x*y, x, y).eval(7) == Poly(7*y, y)

    assert Poly(123, x, y).eval(x, 7) == Poly(123, y)
    assert Poly(2*y, x, y).eval(x, 7) == Poly(2*y, y)
    assert Poly(x*y, x, y).eval(x, 7) == Poly(7*y, y)

    assert Poly(123, x, y).eval(y, 7) == Poly(123, x)
    assert Poly(2*y, x, y).eval(y, 7) == Poly(14, x)
    assert Poly(x*y, x, y).eval(y, 7) == Poly(7*x, x)

    assert Poly(x*y + y, x, y).eval({x: 7}) == Poly(8*y, y)
    assert Poly(x*y + y, x, y).eval({y: 7}) == Poly(7*x + 7, x)

    assert Poly(x*y + y, x, y).eval({x: 6, y: 7}) == 49
    assert Poly(x*y + y, x, y).eval({x: 7, y: 6}) == 48

    assert Poly(x*y + y, x, y).eval((6, 7)) == 49
    assert Poly(x*y + y, x, y).eval([6, 7]) == 49

    assert Poly(x + 1, domain='ZZ').eval(S(1)/2) == S(3)/2
    assert Poly(x + 1, domain='ZZ').eval(sqrt(2)) == sqrt(2) + 1

    raises(ValueError, lambda: Poly(x*y + y, x, y).eval((6, 7, 8)))
    raises(DomainError,
           lambda: Poly(x + 1, domain='ZZ').eval(S(1)/2, auto=False))

    # issue 6344
    alpha = Symbol('alpha')
    result = (2*alpha*z - 2*alpha + z**2 + 3)/(z**2 - 2*z + 1)

    f = Poly(x**2 + (alpha - 1)*x - alpha + 1, x, domain='ZZ[alpha]')
    assert f.eval((z + 1)/(z - 1)) == result

    g = Poly(x**2 + (alpha - 1)*x - alpha + 1, x, y, domain='ZZ[alpha]')
    assert g.eval((z + 1)/(z - 1)) == Poly(result, y, domain='ZZ(alpha,z)')


def test_Poly___call__():
    f = Poly(2*x*y + 3*x + y + 2*z)

    assert f(2) == Poly(5*y + 2*z + 6)
    assert f(2, 5) == Poly(2*z + 31)
    assert f(2, 5, 7) == 45
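
# Calling a Poly applies eval() generator by generator: a partial argument
# list returns a Poly in the remaining generators, and a full one yields a
# plain scalar, mirroring the eval() tests above.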


def test_parallel_poly_from_expr():
    assert parallel_poly_from_expr(
        [x - 1, x**2 - 1], x)[0] == [Poly(x - 1, x), Poly(x**2 - 1, x)]
    assert parallel_poly_from_expr(
        [Poly(x - 1, x), x**2 - 1], x)[0] == [Poly(x - 1, x), Poly(x**2 - 1, x)]
    assert parallel_poly_from_expr(
        [x - 1, Poly(x**2 - 1, x)], x)[0] == [Poly(x - 1, x), Poly(x**2 - 1, x)]
    assert parallel_poly_from_expr([Poly(
        x - 1, x), Poly(x**2 - 1, x)], x)[0] == [Poly(x - 1, x), Poly(x**2 - 1, x)]

    assert parallel_poly_from_expr(
        [x - 1, x**2 - 1], x, y)[0] == [Poly(x - 1, x, y), Poly(x**2 - 1, x, y)]
    assert parallel_poly_from_expr([Poly(
        x - 1, x), x**2 - 1], x, y)[0] == [Poly(x - 1, x, y), Poly(x**2 - 1, x, y)]
    assert parallel_poly_from_expr([x - 1, Poly(
        x**2 - 1, x)], x, y)[0] == [Poly(x - 1, x, y), Poly(x**2 - 1, x, y)]
    assert parallel_poly_from_expr([Poly(x - 1, x), Poly(
        x**2 - 1, x)], x, y)[0] == [Poly(x - 1, x, y), Poly(x**2 - 1, x, y)]

    assert parallel_poly_from_expr(
        [x - 1, x**2 - 1])[0] == [Poly(x - 1, x), Poly(x**2 - 1, x)]
    assert parallel_poly_from_expr(
        [Poly(x - 1, x), x**2 - 1])[0] == [Poly(x - 1, x), Poly(x**2 - 1, x)]
    assert parallel_poly_from_expr(
        [x - 1, Poly(x**2 - 1, x)])[0] == [Poly(x - 1, x), Poly(x**2 - 1, x)]
    assert parallel_poly_from_expr(
        [Poly(x - 1, x), Poly(x**2 - 1, x)])[0] == \
        [Poly(x - 1, x), Poly(x**2 - 1, x)]

    assert parallel_poly_from_expr(
        [1, x**2 - 1])[0] == [Poly(1, x), Poly(x**2 - 1, x)]
    assert parallel_poly_from_expr(
        [1, x**2 - 1])[0] == [Poly(1, x), Poly(x**2 - 1, x)]
    assert parallel_poly_from_expr(
        [1, Poly(x**2 - 1, x)])[0] == [Poly(1, x), Poly(x**2 - 1, x)]
    assert parallel_poly_from_expr(
        [1, Poly(x**2 - 1, x)])[0] == [Poly(1, x), Poly(x**2 - 1, x)]

    assert parallel_poly_from_expr(
        [x**2 - 1, 1])[0] == [Poly(x**2 - 1, x), Poly(1, x)]
    assert parallel_poly_from_expr(
        [x**2 - 1, 1])[0] == [Poly(x**2 - 1, x), Poly(1, x)]
    assert parallel_poly_from_expr(
        [Poly(x**2 - 1, x), 1])[0] == [Poly(x**2 - 1, x), Poly(1, x)]
    assert parallel_poly_from_expr(
        [Poly(x**2 - 1, x), 1])[0] == [Poly(x**2 - 1, x), Poly(1, x)]

    assert parallel_poly_from_expr([Poly(x, x, y), Poly(y, x, y)], x, y,
                                   order='lex')[0] == \
        [Poly(x, x, y, domain='ZZ'), Poly(y, x, y, domain='ZZ')]

    raises(PolificationFailed, lambda: parallel_poly_from_expr([0, 1]))


def test_pdiv():
    f, g = x**2 - y**2, x - y
    q, r = x + y, 0

    F, G, Q, R = [Poly(h, x, y) for h in (f, g, q, r)]

    assert F.pdiv(G) == (Q, R)
    assert F.prem(G) == R
    assert F.pquo(G) == Q
    assert F.pexquo(G) == Q

    assert pdiv(f, g) == (q, r)
    assert prem(f, g) == r
    assert pquo(f, g) == q
    assert pexquo(f, g) == q

    assert pdiv(f, g, x, y) == (q, r)
    assert prem(f, g, x, y) == r
    assert pquo(f, g, x, y) == q
    assert pexquo(f, g, x, y) == q

    assert pdiv(f, g, (x, y)) == (q, r)
    assert prem(f, g, (x, y)) == r
    assert pquo(f, g, (x, y)) == q
    assert pexquo(f, g, (x, y)) == q

    assert pdiv(F, G) == (Q, R)
    assert prem(F, G) == R
    assert pquo(F, G) == Q
    assert pexquo(F, G) == Q

    assert pdiv(f, g, polys=True) == (Q, R)
    assert prem(f, g, polys=True) == R
    assert pquo(f, g, polys=True) == Q
    assert pexquo(f, g, polys=True) == Q

    assert pdiv(F, G, polys=False) == (q, r)
    assert prem(F, G, polys=False) == r
    assert pquo(F, G, polys=False) == q
    assert pexquo(F, G, polys=False) == q

    raises(ComputationFailed, lambda: pdiv(4, 2))
    raises(ComputationFailed, lambda: prem(4, 2))
    raises(ComputationFailed, lambda: pquo(4, 2))
    raises(ComputationFailed, lambda: pexquo(4, 2))
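
# Pseudo-division (pdiv/prem/pquo/pexquo) scales the dividend by a power
# of the divisor's leading coefficient so the computation never leaves the
# coefficient ring; here f = (x - y)*(x + y) is exactly divisible by g,
# hence the zero pseudo-remainder.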


def test_div():
    f, g = x**2 - y**2, x - y
    q, r = x + y, 0

    F, G, Q, R = [Poly(h, x, y) for h in (f, g, q, r)]

    assert F.div(G) == (Q, R)
    assert F.rem(G) == R
    assert F.quo(G) == Q
    assert F.exquo(G) == Q

    assert div(f, g) == (q, r)
    assert rem(f, g) == r
    assert quo(f, g) == q
    assert exquo(f, g) == q

    assert div(f, g, x, y) == (q, r)
    assert rem(f, g, x, y) == r
    assert quo(f, g, x, y) == q
    assert exquo(f, g, x, y) == q

    assert div(f, g, (x, y)) == (q, r)
    assert rem(f, g, (x, y)) == r
    assert quo(f, g, (x, y)) == q
    assert exquo(f, g, (x, y)) == q

    assert div(F, G) == (Q, R)
    assert rem(F, G) == R
    assert quo(F, G) == Q
    assert exquo(F, G) == Q

    assert div(f, g, polys=True) == (Q, R)
    assert rem(f, g, polys=True) == R
    assert quo(f, g, polys=True) == Q
    assert exquo(f, g, polys=True) == Q

    assert div(F, G, polys=False) == (q, r)
    assert rem(F, G, polys=False) == r
    assert quo(F, G, polys=False) == q
    assert exquo(F, G, polys=False) == q

    raises(ComputationFailed, lambda: div(4, 2))
    raises(ComputationFailed, lambda: rem(4, 2))
    raises(ComputationFailed, lambda: quo(4, 2))
    raises(ComputationFailed, lambda: exquo(4, 2))

    f, g = x**2 + 1, 2*x - 4

    qz, rz = 0, x**2 + 1
    qq, rq = x/2 + 1, 5

    assert div(f, g) == (qq, rq)
    assert div(f, g, auto=True) == (qq, rq)
    assert div(f, g, auto=False) == (qz, rz)
    assert div(f, g, domain=ZZ) == (qz, rz)
    assert div(f, g, domain=QQ) == (qq, rq)
    assert div(f, g, domain=ZZ, auto=True) == (qq, rq)
    assert div(f, g, domain=ZZ, auto=False) == (qz, rz)
    assert div(f, g, domain=QQ, auto=True) == (qq, rq)
    assert div(f, g, domain=QQ, auto=False) == (qq, rq)

    assert rem(f, g) == rq
    assert rem(f, g, auto=True) == rq
    assert rem(f, g, auto=False) == rz
    assert rem(f, g, domain=ZZ) == rz
    assert rem(f, g, domain=QQ) == rq
    assert rem(f, g, domain=ZZ, auto=True) == rq
    assert rem(f, g, domain=ZZ, auto=False) == rz
    assert rem(f, g, domain=QQ, auto=True) == rq
    assert rem(f, g, domain=QQ, auto=False) == rq

    assert quo(f, g) == qq
    assert quo(f, g, auto=True) == qq
    assert quo(f, g, auto=False) == qz
    assert quo(f, g, domain=ZZ) == qz
    assert quo(f, g, domain=QQ) == qq
    assert quo(f, g, domain=ZZ, auto=True) == qq
    assert quo(f, g, domain=ZZ, auto=False) == qz
    assert quo(f, g, domain=QQ, auto=True) == qq
    assert quo(f, g, domain=QQ, auto=False) == qq

    f, g, q = x**2, 2*x, x/2

    assert exquo(f, g) == q
    assert exquo(f, g, auto=True) == q
    raises(ExactQuotientFailed, lambda: exquo(f, g, auto=False))
    raises(ExactQuotientFailed, lambda: exquo(f, g, domain=ZZ))
    assert exquo(f, g, domain=QQ) == q
    assert exquo(f, g, domain=ZZ, auto=True) == q
    raises(ExactQuotientFailed, lambda: exquo(f, g, domain=ZZ, auto=False))
    assert exquo(f, g, domain=QQ, auto=True) == q
    assert exquo(f, g, domain=QQ, auto=False) == q

    f, g = Poly(x**2), Poly(x)

    q, r = f.div(g)
    assert q.get_domain().is_ZZ and r.get_domain().is_ZZ
    r = f.rem(g)
    assert r.get_domain().is_ZZ
    q = f.quo(g)
    assert q.get_domain().is_ZZ
    q = f.exquo(g)
    assert q.get_domain().is_ZZ


def test_gcdex():
    f, g = 2*x, x**2 - 16
    s, t, h = x/32, -Rational(1, 16), 1

    F, G, S, T, H = [Poly(u, x, domain='QQ') for u in (f, g, s, t, h)]

    assert F.half_gcdex(G) == (S, H)
    assert F.gcdex(G) == (S, T, H)
    assert F.invert(G) == S

    assert half_gcdex(f, g) == (s, h)
    assert gcdex(f, g) == (s, t, h)
    assert invert(f, g) == s

    assert half_gcdex(f, g, x) == (s, h)
    assert gcdex(f, g, x) == (s, t, h)
    assert invert(f, g, x) == s

    assert half_gcdex(f, g, (x,)) == (s, h)
    assert gcdex(f, g, (x,)) == (s, t, h)
    assert invert(f, g, (x,)) == s

    assert half_gcdex(F, G) == (S, H)
    assert gcdex(F, G) == (S, T, H)
    assert invert(F, G) == S

    assert half_gcdex(f, g, polys=True) == (S, H)
    assert gcdex(f, g, polys=True) == (S, T, H)
    assert invert(f, g, polys=True) == S

    assert half_gcdex(F, G, polys=False) == (s, h)
    assert gcdex(F, G, polys=False) == (s, t, h)
    assert invert(F, G, polys=False) == s

    assert half_gcdex(100, 2004) == (-20, 4)
    assert gcdex(100, 2004) == (-20, 1, 4)
    assert invert(3, 7) == 5

    raises(DomainError, lambda: half_gcdex(x + 1, 2*x + 1, auto=False))
    raises(DomainError, lambda: gcdex(x + 1, 2*x + 1, auto=False))
    raises(DomainError, lambda: invert(x + 1, 2*x + 1, auto=False))


def test_revert():
    f = Poly(1 - x**2/2 + x**4/24 - x**6/720)
    g = Poly(61*x**6/720 + 5*x**4/24 + x**2/2 + 1)

    assert f.revert(8) == g


def test_subresultants():
    f, g, h = x**2 - 2*x + 1, x**2 - 1, 2*x - 2
    F, G, H = Poly(f), Poly(g), Poly(h)

    assert F.subresultants(G) == [F, G, H]
    assert subresultants(f, g) == [f, g, h]
    assert subresultants(f, g, x) == [f, g, h]
    assert subresultants(f, g, (x,)) == [f, g, h]
    assert subresultants(F, G) == [F, G, H]
    assert subresultants(f, g, polys=True) == [F, G, H]
    assert subresultants(F, G, polys=False) == [f, g, h]

    raises(ComputationFailed, lambda: subresultants(4, 2))


def test_resultant():
    f, g, h = x**2 - 2*x + 1, x**2 - 1, 0
    F, G = Poly(f), Poly(g)

    assert F.resultant(G) == h
    assert resultant(f, g) == h
    assert resultant(f, g, x) == h
    assert resultant(f, g, (x,)) == h
    assert resultant(F, G) == h
    assert resultant(f, g, polys=True) == h
    assert resultant(F, G, polys=False) == h
    assert resultant(f, g, includePRS=True) == (h, [f, g, 2*x - 2])

    f, g, h = x - a, x - b, a - b
    F, G, H = Poly(f), Poly(g), Poly(h)

    assert F.resultant(G) == H
    assert resultant(f, g) == h
    assert resultant(f, g, x) == h
    assert resultant(f, g, (x,)) == h
    assert resultant(F, G) == H
    assert resultant(f, g, polys=True) == H
    assert resultant(F, G, polys=False) == h

    raises(ComputationFailed, lambda: resultant(4, 2))
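
# The resultant of two polynomials vanishes exactly when they share a
# root: x**2 - 2*x + 1 and x**2 - 1 both vanish at x = 1, giving 0 above,
# while resultant(x - a, x - b) == a - b is nonzero for distinct roots.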


def test_discriminant():
    f, g = x**3 + 3*x**2 + 9*x - 13, -11664
    F = Poly(f)

    assert F.discriminant() == g
    assert discriminant(f) == g
    assert discriminant(f, x) == g
    assert discriminant(f, (x,)) == g
    assert discriminant(F) == g
    assert discriminant(f, polys=True) == g
    assert discriminant(F, polys=False) == g

    f, g = a*x**2 + b*x + c, b**2 - 4*a*c
    F, G = Poly(f), Poly(g)

    assert F.discriminant() == G
    assert discriminant(f) == g
    assert discriminant(f, x, a, b, c) == g
    assert discriminant(f, (x, a, b, c)) == g
    assert discriminant(F) == G
    assert discriminant(f, polys=True) == G
    assert discriminant(F, polys=False) == g

    raises(ComputationFailed, lambda: discriminant(4))


def test_dispersion():
    # We test only the API here. For more mathematical
    # tests see the dedicated test file.

    fp = poly((x + 1)*(x + 2), x)
    assert sorted(fp.dispersionset()) == [0, 1]
    assert fp.dispersion() == 1

    fp = poly(x**4 - 3*x**2 + 1, x)
    gp = fp.shift(-3)
    assert sorted(fp.dispersionset(gp)) == [2, 3, 4]
    assert fp.dispersion(gp) == 4


def test_gcd_list():
    F = [x**3 - 1, x**2 - 1, x**2 - 3*x + 2]

    assert gcd_list(F) == x - 1
    assert gcd_list(F, polys=True) == Poly(x - 1)

    assert gcd_list([]) == 0
    assert gcd_list([1, 2]) == 1
    assert gcd_list([4, 6, 8]) == 2

    assert gcd_list([x*(y + 42) - x*y - x*42]) == 0

    gcd = gcd_list([], x)
    assert gcd.is_Number and gcd is S.Zero

    gcd = gcd_list([], x, polys=True)
    assert gcd.is_Poly and gcd.is_zero

    raises(ComputationFailed, lambda: gcd_list([], polys=True))


def test_lcm_list():
    F = [x**3 - 1, x**2 - 1, x**2 - 3*x + 2]

    assert lcm_list(F) == x**5 - x**4 - 2*x**3 - x**2 + x + 2
    assert lcm_list(F, polys=True) == Poly(x**5 - x**4 - 2*x**3 - x**2 + x + 2)

    assert lcm_list([]) == 1
    assert lcm_list([1, 2]) == 2
    assert lcm_list([4, 6, 8]) == 24

    assert lcm_list([x*(y + 42) - x*y - x*42]) == 0

    lcm = lcm_list([], x)
    assert lcm.is_Number and lcm is S.One

    lcm = lcm_list([], x, polys=True)
    assert lcm.is_Poly and lcm.is_one

    raises(ComputationFailed, lambda: lcm_list([], polys=True))
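
# Empty-input conventions checked above: gcd_list([]) is 0 and
# lcm_list([]) is 1, the respective identities of gcd and lcm; supplying a
# generator (and optionally polys=True) controls whether the empty result
# is returned as a scalar or as a Poly.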


def test_gcd():
    f, g = x**3 - 1, x**2 - 1
    s, t = x**2 + x + 1, x + 1
    h, r = x - 1, x**4 + x**3 - x - 1

    F, G, S, T, H, R = [Poly(u) for u in (f, g, s, t, h, r)]

    assert F.cofactors(G) == (H, S, T)
    assert F.gcd(G) == H
    assert F.lcm(G) == R

    assert cofactors(f, g) == (h, s, t)
    assert gcd(f, g) == h
    assert lcm(f, g) == r

    assert cofactors(f, g, x) == (h, s, t)
    assert gcd(f, g, x) == h
    assert lcm(f, g, x) == r

    assert cofactors(f, g, (x,)) == (h, s, t)
    assert gcd(f, g, (x,)) == h
    assert lcm(f, g, (x,)) == r

    assert cofactors(F, G) == (H, S, T)
    assert gcd(F, G) == H
    assert lcm(F, G) == R

    assert cofactors(f, g, polys=True) == (H, S, T)
    assert gcd(f, g, polys=True) == H
    assert lcm(f, g, polys=True) == R

    assert cofactors(F, G, polys=False) == (h, s, t)
    assert gcd(F, G, polys=False) == h
    assert lcm(F, G, polys=False) == r

    f, g = 1.0*x**2 - 1.0, 1.0*x - 1.0
    h, s, t = g, 1.0*x + 1.0, 1.0

    assert cofactors(f, g) == (h, s, t)
    assert gcd(f, g) == h
    assert lcm(f, g) == f

    f, g = 1.0*x**2 - 1.0, 1.0*x - 1.0
    h, s, t = g, 1.0*x + 1.0, 1.0

    assert cofactors(f, g) == (h, s, t)
    assert gcd(f, g) == h
    assert lcm(f, g) == f

    assert cofactors(8, 6) == (2, 4, 3)
    assert gcd(8, 6) == 2
    assert lcm(8, 6) == 24

    f, g = x**2 - 3*x - 4, x**3 - 4*x**2 + x - 4
    l = x**4 - 3*x**3 - 3*x**2 - 3*x - 4
    h, s, t = x - 4, x + 1, x**2 + 1

    assert cofactors(f, g, modulus=11) == (h, s, t)
    assert gcd(f, g, modulus=11) == h
    assert lcm(f, g, modulus=11) == l

    f, g = x**2 + 8*x + 7, x**3 + 7*x**2 + x + 7
    l = x**4 + 8*x**3 + 8*x**2 + 8*x + 7
    h, s, t = x + 7, x + 1, x**2 + 1

    assert cofactors(f, g, modulus=11, symmetric=False) == (h, s, t)
    assert gcd(f, g, modulus=11, symmetric=False) == h
    assert lcm(f, g, modulus=11, symmetric=False) == l

    raises(TypeError, lambda: gcd(x))
    raises(TypeError, lambda: lcm(x))


def test_gcd_numbers_vs_polys():
    assert isinstance(gcd(3, 9), Integer)
    assert isinstance(gcd(3*x, 9), Integer)

    assert gcd(3, 9) == 3
    assert gcd(3*x, 9) == 3

    assert isinstance(gcd(S(3)/2, S(9)/4), Rational)
    assert isinstance(gcd(S(3)/2*x, S(9)/4), Rational)

    assert gcd(S(3)/2, S(9)/4) == S(3)/4
    assert gcd(S(3)/2*x, S(9)/4) == 1

    assert isinstance(gcd(3.0, 9.0), Float)
    assert isinstance(gcd(3.0*x, 9.0), Float)

    assert gcd(3.0, 9.0) == 1.0
    assert gcd(3.0*x, 9.0) == 1.0


def test_terms_gcd():
    assert terms_gcd(1) == 1
    assert terms_gcd(1, x) == 1

    assert terms_gcd(x - 1) == x - 1
    assert terms_gcd(-x - 1) == -x - 1

    assert terms_gcd(2*x + 3) == 2*x + 3
    assert terms_gcd(6*x + 4) == Mul(2, 3*x + 2, evaluate=False)

    assert terms_gcd(x**3*y + x*y**3) == x*y*(x**2 + y**2)
    assert terms_gcd(2*x**3*y + 2*x*y**3) == 2*x*y*(x**2 + y**2)
    assert terms_gcd(x**3*y/2 + x*y**3/2) == x*y/2*(x**2 + y**2)

    assert terms_gcd(x**3*y + 2*x*y**3) == x*y*(x**2 + 2*y**2)
    assert terms_gcd(2*x**3*y + 4*x*y**3) == 2*x*y*(x**2 + 2*y**2)
    assert terms_gcd(2*x**3*y/3 + 4*x*y**3/5) == 2*x*y/15*(5*x**2 + 6*y**2)

    assert terms_gcd(2.0*x**3*y + 4.1*x*y**3) == x*y*(2.0*x**2 + 4.1*y**2)
    assert _aresame(terms_gcd(2.0*x + 3), 2.0*x + 3)

    assert terms_gcd((3 + 3*x)*(x + x*y), expand=False) == \
        (3*x + 3)*(x*y + x)
    assert terms_gcd((3 + 3*x)*(x + x*sin(3 + 3*y)), expand=False, deep=True) == \
        3*x*(x + 1)*(sin(Mul(3, y + 1, evaluate=False)) + 1)
    assert terms_gcd(sin(x + x*y), deep=True) == \
        sin(x*(y + 1))

    eq = Eq(2*x, 2*y + 2*z*y)
    assert terms_gcd(eq) == eq
    assert terms_gcd(eq, deep=True) == Eq(2*x, 2*y*(z + 1))


def test_trunc():
    f, g = x**5 + 2*x**4 + 3*x**3 + 4*x**2 + 5*x + 6, x**5 - x**4 + x**2 - x
    F, G = Poly(f), Poly(g)

    assert F.trunc(3) == G
    assert trunc(f, 3) == g
    assert trunc(f, 3, x) == g
    assert trunc(f, 3, (x,)) == g
    assert trunc(F, 3) == G
    assert trunc(f, 3, polys=True) == G
    assert trunc(F, 3, polys=False) == g

    f, g = 6*x**5 + 5*x**4 + 4*x**3 + 3*x**2 + 2*x + 1, -x**4 + x**3 - x + 1
    F, G = Poly(f), Poly(g)

    assert F.trunc(3) == G
    assert trunc(f, 3) == g
    assert trunc(f, 3, x) == g
    assert trunc(f, 3, (x,)) == g
    assert trunc(F, 3) == G
    assert trunc(f, 3, polys=True) == G
    assert trunc(F, 3, polys=False) == g

    f = Poly(x**2 + 2*x + 3, modulus=5)

    assert f.trunc(2) == Poly(x**2 + 1, modulus=5)


def test_monic():
    f, g = 2*x - 1, x - S(1)/2
    F, G = Poly(f, domain='QQ'), Poly(g)

    assert F.monic() == G
    assert monic(f) == g
    assert monic(f, x) == g
    assert monic(f, (x,)) == g
    assert monic(F) == G
    assert monic(f, polys=True) == G
    assert monic(F, polys=False) == g

    raises(ComputationFailed, lambda: monic(4))

    assert monic(2*x**2 + 6*x + 4, auto=False) == x**2 + 3*x + 2
    raises(ExactQuotientFailed, lambda: monic(2*x + 6*x + 1, auto=False))

    assert monic(2.0*x**2 + 6.0*x + 4.0) == 1.0*x**2 + 3.0*x + 2.0
    assert monic(2*x**2 + 3*x + 4, modulus=5) == x**2 - x + 2


def test_content():
    f, F = 4*x + 2, Poly(4*x + 2)

    assert F.content() == 2
    assert content(f) == 2

    raises(ComputationFailed, lambda: content(4))

    f = Poly(2*x, modulus=3)

    assert f.content() == 1
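
# trunc(f, 3) reduces the coefficients modulo 3 into the symmetric range
# {-1, 0, 1}, e.g. 6*x**5 + 5*x**4 + ... + 1 becomes -x**4 + x**3 - x + 1
# above; content()/primitive(), tested next, split off the integer content
# instead.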


def test_primitive():
    f, g = 4*x + 2, 2*x + 1
    F, G = Poly(f), Poly(g)

    assert F.primitive() == (2, G)
    assert primitive(f) == (2, g)
    assert primitive(f, x) == (2, g)
    assert primitive(f, (x,)) == (2, g)
    assert primitive(F) == (2, G)
    assert primitive(f, polys=True) == (2, G)
    assert primitive(F, polys=False) == (2, g)

    raises(ComputationFailed, lambda: primitive(4))

    f = Poly(2*x, modulus=3)
    g = Poly(2.0*x, domain=RR)

    assert f.primitive() == (1, f)
    assert g.primitive() == (1.0, g)

    assert primitive(S('-3*x/4 + y + 11/8')) == \
        S('(1/8, -6*x + 8*y + 11)')


def test_compose():
    f = x**12 + 20*x**10 + 150*x**8 + 500*x**6 + 625*x**4 - 2*x**3 - 10*x + 9
    g = x**4 - 2*x + 9
    h = x**3 + 5*x

    F, G, H = map(Poly, (f, g, h))

    assert G.compose(H) == F
    assert compose(g, h) == f
    assert compose(g, h, x) == f
    assert compose(g, h, (x,)) == f
    assert compose(G, H) == F
    assert compose(g, h, polys=True) == F
    assert compose(G, H, polys=False) == f

    assert F.decompose() == [G, H]
    assert decompose(f) == [g, h]
    assert decompose(f, x) == [g, h]
    assert decompose(f, (x,)) == [g, h]
    assert decompose(F) == [G, H]
    assert decompose(f, polys=True) == [G, H]
    assert decompose(F, polys=False) == [g, h]

    raises(ComputationFailed, lambda: compose(4, 2))
    raises(ComputationFailed, lambda: decompose(4))

    assert compose(x**2 - y**2, x - y, x, y) == x**2 - 2*x*y
    assert compose(x**2 - y**2, x - y, y, x) == -y**2 + 2*x*y


def test_shift():
    assert Poly(x**2 - 2*x + 1, x).shift(2) == Poly(x**2 + 2*x + 1, x)


def test_sturm():
    f, F = x, Poly(x, domain='QQ')
    g, G = 1, Poly(1, x, domain='QQ')

    assert F.sturm() == [F, G]
    assert sturm(f) == [f, g]
    assert sturm(f, x) == [f, g]
    assert sturm(f, (x,)) == [f, g]
    assert sturm(F) == [F, G]
    assert sturm(f, polys=True) == [F, G]
    assert sturm(F, polys=False) == [f, g]

    raises(ComputationFailed, lambda: sturm(4))
    raises(DomainError, lambda: sturm(f, auto=False))

    f = Poly(S(1024)/(15625*pi**8)*x**5
             - S(4096)/(625*pi**8)*x**4
             + S(32)/(15625*pi**4)*x**3
             - S(128)/(625*pi**4)*x**2
             + S(1)/62500*x
             - S(1)/625, x, domain='ZZ(pi)')

    assert sturm(f) == \
        [Poly(x**3 - 100*x**2 + pi**4/64*x - 25*pi**4/16, x, domain='ZZ(pi)'),
         Poly(3*x**2 - 200*x + pi**4/64, x, domain='ZZ(pi)'),
         Poly((S(20000)/9 - pi**4/96)*x + 25*pi**4/18, x, domain='ZZ(pi)'),
         Poly((-3686400000000*pi**4 - 11520000*pi**8 - 9*pi**12) /
              (26214400000000 - 245760000*pi**4 + 576*pi**8), x,
              domain='ZZ(pi)')]


def test_gff():
    f = x**5 + 2*x**4 - x**3 - 2*x**2

    assert Poly(f).gff_list() == [(Poly(x), 1), (Poly(x + 2), 4)]
    assert gff_list(f) == [(x, 1), (x + 2, 4)]

    raises(NotImplementedError, lambda: gff(f))

    f = x*(x - 1)**3*(x - 2)**2*(x - 4)**2*(x - 5)

    assert Poly(f).gff_list() == [(
        Poly(x**2 - 5*x + 4), 1), (Poly(x**2 - 5*x + 4), 2), (Poly(x), 3)]
    assert gff_list(f) == [(x**2 - 5*x + 4, 1), (x**2 - 5*x + 4, 2), (x, 3)]

    raises(NotImplementedError, lambda: gff(f))


def test_sqf_norm():
    assert sqf_norm(x**2 - 2, extension=sqrt(3)) == \
        (1, x**2 - 2*sqrt(3)*x + 1, x**4 - 10*x**2 + 1)
    assert sqf_norm(x**2 - 3, extension=sqrt(2)) == \
        (1, x**2 - 2*sqrt(2)*x - 1, x**4 - 10*x**2 + 1)

    assert Poly(x**2 - 2, extension=sqrt(3)).sqf_norm() == \
        (1, Poly(x**2 - 2*sqrt(3)*x + 1, x, extension=sqrt(3)),
            Poly(x**4 - 10*x**2 + 1, x, domain='QQ'))

    assert Poly(x**2 - 3, extension=sqrt(2)).sqf_norm() == \
        (1, Poly(x**2 - 2*sqrt(2)*x - 1, x, extension=sqrt(2)),
            Poly(x**4 - 10*x**2 + 1, x, domain='QQ'))
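
# sqf_norm(f, extension=K) returns (s, g, r): a shift count s, the shifted
# polynomial g = f(x - s*theta) over the extension field, and its norm r
# over QQ, with s chosen so that r is square-free -- both norms above come
# out as x**4 - 10*x**2 + 1.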


def test_sqf():
    f = x**5 - x**3 - x**2 + 1
    g = x**3 + 2*x**2 + 2*x + 1
    h = x - 1

    p = x**4 + x**3 - x - 1

    F, G, H, P = map(Poly, (f, g, h, p))

    assert F.sqf_part() == P
    assert sqf_part(f) == p
    assert sqf_part(f, x) == p
    assert sqf_part(f, (x,)) == p
    assert sqf_part(F) == P
    assert sqf_part(f, polys=True) == P
    assert sqf_part(F, polys=False) == p

    assert F.sqf_list() == (1, [(G, 1), (H, 2)])
    assert sqf_list(f) == (1, [(g, 1), (h, 2)])
    assert sqf_list(f, x) == (1, [(g, 1), (h, 2)])
    assert sqf_list(f, (x,)) == (1, [(g, 1), (h, 2)])
    assert sqf_list(F) == (1, [(G, 1), (H, 2)])
    assert sqf_list(f, polys=True) == (1, [(G, 1), (H, 2)])
    assert sqf_list(F, polys=False) == (1, [(g, 1), (h, 2)])

    assert F.sqf_list_include() == [(G, 1), (H, 2)]

    raises(ComputationFailed, lambda: sqf_part(4))

    assert sqf(1) == 1
    assert sqf_list(1) == (1, [])

    assert sqf((2*x**2 + 2)**7) == 128*(x**2 + 1)**7

    assert sqf(f) == g*h**2
    assert sqf(f, x) == g*h**2
    assert sqf(f, (x,)) == g*h**2

    d = x**2 + y**2

    assert sqf(f/d) == (g*h**2)/d
    assert sqf(f/d, x) == (g*h**2)/d
    assert sqf(f/d, (x,)) == (g*h**2)/d

    assert sqf(x - 1) == x - 1
    assert sqf(-x - 1) == -x - 1

    assert sqf(x - 1) == x - 1
    assert sqf(6*x - 10) == Mul(2, 3*x - 5, evaluate=False)

    assert sqf((6*x - 10)/(3*x - 6)) == S(2)/3*((3*x - 5)/(x - 2))
    assert sqf(Poly(x**2 - 2*x + 1)) == (x - 1)**2

    f = 3 + x - x*(1 + x) + x**2

    assert sqf(f) == 3

    f = (x**2 + 2*x + 1)**20000000000

    assert sqf(f) == (x + 1)**40000000000
    assert sqf_list(f) == (1, [(x + 1, 40000000000)])
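
# sqf_list() returns (content, [(factor, multiplicity), ...]) with
# square-free, pairwise coprime factors, and sqf() re-multiplies that
# decomposition (f == g*h**2 above); this stays cheap even for the huge
# power example, since only the exponents grow.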


def test_factor():
    f = x**5 - x**3 - x**2 + 1

    u = x + 1
    v = x - 1
    w = x**2 + x + 1

    F, U, V, W = map(Poly, (f, u, v, w))

    assert F.factor_list() == (1, [(U, 1), (V, 2), (W, 1)])
    assert factor_list(f) == (1, [(u, 1), (v, 2), (w, 1)])
    assert factor_list(f, x) == (1, [(u, 1), (v, 2), (w, 1)])
    assert factor_list(f, (x,)) == (1, [(u, 1), (v, 2), (w, 1)])
    assert factor_list(F) == (1, [(U, 1), (V, 2), (W, 1)])
    assert factor_list(f, polys=True) == (1, [(U, 1), (V, 2), (W, 1)])
    assert factor_list(F, polys=False) == (1, [(u, 1), (v, 2), (w, 1)])

    assert F.factor_list_include() == [(U, 1), (V, 2), (W, 1)]

    assert factor_list(1) == (1, [])
    assert factor_list(6) == (6, [])
    assert factor_list(sqrt(3), x) == (1, [(3, S.Half)])
    assert factor_list((-1)**x, x) == (1, [(-1, x)])
    assert factor_list((2*x)**y, x) == (1, [(2, y), (x, y)])
    assert factor_list(sqrt(x*y), x) == (1, [(x*y, S.Half)])

    assert factor(6) == 6 and factor(6).is_Integer

    assert factor_list(3*x) == (3, [(x, 1)])
    assert factor_list(3*x**2) == (3, [(x, 2)])

    assert factor(3*x) == 3*x
    assert factor(3*x**2) == 3*x**2

    assert factor((2*x**2 + 2)**7) == 128*(x**2 + 1)**7

    assert factor(f) == u*v**2*w
    assert factor(f, x) == u*v**2*w
    assert factor(f, (x,)) == u*v**2*w

    g, p, q, r = x**2 - y**2, x - y, x + y, x**2 + 1

    assert factor(f/g) == (u*v**2*w)/(p*q)
    assert factor(f/g, x) == (u*v**2*w)/(p*q)
    assert factor(f/g, (x,)) == (u*v**2*w)/(p*q)

    p = Symbol('p', positive=True)
    i = Symbol('i', integer=True)
    r = Symbol('r', real=True)

    assert factor(sqrt(x*y)).is_Pow is True

    assert factor(sqrt(3*x**2 - 3)) == sqrt(3)*sqrt((x - 1)*(x + 1))
    assert factor(sqrt(3*x**2 + 3)) == sqrt(3)*sqrt(x**2 + 1)

    assert factor((y*x**2 - y)**i) == y**i*(x - 1)**i*(x + 1)**i
    assert factor((y*x**2 + y)**i) == y**i*(x**2 + 1)**i

    assert factor((y*x**2 - y)**t) == (y*(x - 1)*(x + 1))**t
    assert factor((y*x**2 + y)**t) == (y*(x**2 + 1))**t

    f = sqrt(expand((r**2 + 1)*(p + 1)*(p - 1)*(p - 2)**3))
    g = sqrt((p - 2)**3*(p - 1))*sqrt(p + 1)*sqrt(r**2 + 1)

    assert factor(f) == g
    assert factor(g) == g

    g = (x - 1)**5*(r**2 + 1)
    f = sqrt(expand(g))

    assert factor(f) == sqrt(g)

    f = Poly(sin(1)*x + 1, x, domain=EX)

    assert f.factor_list() == (1, [(f, 1)])

    f = x**4 + 1

    assert factor(f) == f
    assert factor(f, extension=I) == (x**2 - I)*(x**2 + I)
    assert factor(f, gaussian=True) == (x**2 - I)*(x**2 + I)
    assert factor(
        f, extension=sqrt(2)) == (x**2 + sqrt(2)*x + 1)*(x**2 - sqrt(2)*x + 1)

    f = x**2 + 2*sqrt(2)*x + 2

    assert factor(f, extension=sqrt(2)) == (x + sqrt(2))**2
    assert factor(f**3, extension=sqrt(2)) == (x + sqrt(2))**6

    assert factor(x**2 - 2*y**2, extension=sqrt(2)) == \
        (x + sqrt(2)*y)*(x - sqrt(2)*y)
    assert factor(2*x**2 - 4*y**2, extension=sqrt(2)) == \
        2*((x + sqrt(2)*y)*(x - sqrt(2)*y))

    assert factor(x - 1) == x - 1
    assert factor(-x - 1) == -x - 1

    assert factor(x - 1) == x - 1

    assert factor(6*x - 10) == Mul(2, 3*x - 5, evaluate=False)

    assert factor(x**11 + x + 1, modulus=65537, symmetric=True) == \
        (x**2 + x + 1)*(x**9 - x**8 + x**6 - x**5 + x**3 - x**2 + 1)
    assert factor(x**11 + x + 1, modulus=65537, symmetric=False) == \
        (x**2 + x + 1)*(x**9 + 65536*x**8 + x**6 + 65536*x**5 +
                        x**3 + 65536*x**2 + 1)

    f = x/pi + x*sin(x)/pi
    g = y/(pi**2 + 2*pi + 1) + y*sin(x)/(pi**2 + 2*pi + 1)

    assert factor(f) == x*(sin(x) + 1)/pi
    assert factor(g) == y*(sin(x) + 1)/(pi + 1)**2

    assert factor(Eq(
        x**2 + 2*x + 1, x**3 + 1)) == Eq((x + 1)**2, (x + 1)*(x**2 - x + 1))

    f = (x**2 - 1)/(x**2 + 4*x + 4)

    assert factor(f) == (x + 1)*(x - 1)/(x + 2)**2
    assert factor(f, x) == (x + 1)*(x - 1)/(x + 2)**2

    f = 3 + x - x*(1 + x) + x**2

    assert factor(f) == 3
    assert factor(f, x) == 3

    assert factor(1/(x**2 + 2*x + 1/x) - 1) == \
        -((1 - x + 2*x**2 + x**3)/(1 + 2*x**2 + x**3))

    assert factor(f, expand=False) == f
    raises(PolynomialError, lambda: factor(f, x, expand=False))

    raises(FlagError, lambda: factor(x**2 - 1, polys=True))

    assert factor([x, Eq(x**2 - y**2, Tuple(x**2 - z**2, 1/x + 1/y))]) == \
        [x, Eq((x - y)*(x + y), Tuple((x - z)*(x + z), (x + y)/x/y))]

    assert not isinstance(
        Poly(x**3 + x + 1).factor_list()[1][0][0], PurePoly) is True
    assert isinstance(
        PurePoly(x**3 + x + 1).factor_list()[1][0][0], PurePoly) is True

    assert factor(sqrt(-x)) == sqrt(-x)

    # issue 5917
    e = (-2*x*(-x + 1)*(x - 1)*(-x*(-x + 1)*(x - 1) - x*(x - 1)**2) *
         (x**2*(x - 1) - x*(x - 1) - x) -
         (-2*x**2*(x - 1)**2 - x*(-x + 1)*(-x*(-x + 1) + x*(x - 1))) *
         (x**2*(x - 1)**4 - x*(-x*(-x + 1)*(x - 1) - x*(x - 1)**2)))
    assert factor(e) == 0

    # deep option
    assert factor(sin(x**2 + x) + x, deep=True) == sin(x*(x + 1)) + x

    assert factor(sqrt(x**2)) == sqrt(x**2)


def test_factor_large():
    f = (x**2 + 4*x + 4)**10000000*(x**2 + 1)*(x**2 + 2*x + 1)**1234567
    g = ((x**2 + 2*x + 1)**3000*y**2 + (x**2 + 2*x + 1)**3000*2*y +
         (x**2 + 2*x + 1)**3000)

    assert factor(f) == (x + 2)**20000000*(x**2 + 1)*(x + 1)**2469134
    assert factor(g) == (x + 1)**6000*(y + 1)**2

    assert factor_list(
        f) == (1, [(x + 1, 2469134), (x + 2, 20000000), (x**2 + 1, 1)])
    assert factor_list(g) == (1, [(y + 1, 2), (x + 1, 6000)])

    f = (x**2 - y**2)**200000*(x**7 + 1)
    g = (x**2 + y**2)**200000*(x**7 + 1)

    assert factor(f) == \
        (x + 1)*(x - y)**200000*(x + y)**200000 * \
        (x**6 - x**5 + x**4 - x**3 + x**2 - x + 1)
    assert factor(g, gaussian=True) == \
        (x + 1)*(x - I*y)**200000*(x + I*y)**200000 * \
        (x**6 - x**5 + x**4 - x**3 + x**2 - x + 1)

    assert factor_list(f) == \
        (1, [(x + 1, 1), (x - y, 200000), (x + y, 200000),
             (x**6 - x**5 + x**4 - x**3 + x**2 - x + 1, 1)])
    assert factor_list(g, gaussian=True) == \
        (1, [(x + 1, 1), (x - I*y, 200000), (x + I*y, 200000),
             (x**6 - x**5 + x**4 - x**3 + x**2 - x + 1, 1)])


@XFAIL
def test_factor_noeval():
    assert factor(6*x - 10) == 2*(3*x - 5)
    assert factor((6*x - 10)/(3*x - 6)) == S(2)/3*((3*x - 5)/(x - 2))
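
# factor() understands extension/gaussian flags for factoring over
# algebraic number fields (x**4 + 1 splits over QQ(I) and over
# QQ(sqrt(2)) above) and modulus=p for GF(p), with symmetric= selecting
# the symmetric or non-negative coefficient representation.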


def test_intervals():
    assert intervals(0) == []
    assert intervals(1) == []

    assert intervals(x, sqf=True) == [(0, 0)]
    assert intervals(x) == [((0, 0), 1)]

    assert intervals(x**128) == [((0, 0), 128)]
    assert intervals([x**2, x**4]) == [((0, 0), {0: 2, 1: 4})]

    f = Poly((2*x/5 - S(17)/3)*(4*x + S(1)/257))

    assert f.intervals(sqf=True) == [(-1, 0), (14, 15)]
    assert f.intervals() == [((-1, 0), 1), ((14, 15), 1)]

    assert f.intervals(fast=True, sqf=True) == [(-1, 0), (14, 15)]
    assert f.intervals(fast=True) == [((-1, 0), 1), ((14, 15), 1)]

    assert f.intervals(eps=S(1)/10) == f.intervals(eps=0.1) == \
        [((-S(1)/258, 0), 1), ((S(85)/6, S(85)/6), 1)]
    assert f.intervals(eps=S(1)/100) == f.intervals(eps=0.01) == \
        [((-S(1)/258, 0), 1), ((S(85)/6, S(85)/6), 1)]
    assert f.intervals(eps=S(1)/1000) == f.intervals(eps=0.001) == \
        [((-S(1)/1002, 0), 1), ((S(85)/6, S(85)/6), 1)]
    assert f.intervals(eps=S(1)/10000) == f.intervals(eps=0.0001) == \
        [((-S(1)/1028, -S(1)/1028), 1), ((S(85)/6, S(85)/6), 1)]

    f = (2*x/5 - S(17)/3)*(4*x + S(1)/257)

    assert intervals(f, sqf=True) == [(-1, 0), (14, 15)]
    assert intervals(f) == [((-1, 0), 1), ((14, 15), 1)]

    assert intervals(f, eps=S(1)/10) == intervals(f, eps=0.1) == \
        [((-S(1)/258, 0), 1), ((S(85)/6, S(85)/6), 1)]
    assert intervals(f, eps=S(1)/100) == intervals(f, eps=0.01) == \
        [((-S(1)/258, 0), 1), ((S(85)/6, S(85)/6), 1)]
    assert intervals(f, eps=S(1)/1000) == intervals(f, eps=0.001) == \
        [((-S(1)/1002, 0), 1), ((S(85)/6, S(85)/6), 1)]
    assert intervals(f, eps=S(1)/10000) == intervals(f, eps=0.0001) == \
        [((-S(1)/1028, -S(1)/1028), 1), ((S(85)/6, S(85)/6), 1)]

    f = Poly((x**2 - 2)*(x**2 - 3)**7*(x + 1)*(7*x + 3)**3)

    assert f.intervals() == \
        [((-2, -S(3)/2), 7), ((-S(3)/2, -1), 1),
         ((-1, -1), 1), ((-1, 0), 3),
         ((1, S(3)/2), 1), ((S(3)/2, 2), 7)]

    assert intervals([x**5 - 200, x**5 - 201]) == \
        [((S(75)/26, S(101)/35), {0: 1}), ((S(309)/107, S(26)/9), {1: 1})]

    assert intervals([x**5 - 200, x**5 - 201], fast=True) == \
        [((S(75)/26, S(101)/35), {0: 1}), ((S(309)/107, S(26)/9), {1: 1})]

    assert intervals([x**2 - 200, x**2 - 201]) == \
        [((-S(71)/5, -S(85)/6), {1: 1}), ((-S(85)/6, -14), {0: 1}),
         ((14, S(85)/6), {0: 1}), ((S(85)/6, S(71)/5), {1: 1})]

    assert intervals([x + 1, x + 2, x - 1, x + 1, 1, x - 1, x - 1, (x - 2)**2]) == \
        [((-2, -2), {1: 1}), ((-1, -1), {0: 1, 3: 1}),
         ((1, 1), {2: 1, 5: 1, 6: 1}), ((2, 2), {7: 2})]

    f, g, h = x**2 - 2, x**4 - 4*x**2 + 4, x - 1

    assert intervals(f, inf=S(7)/4, sqf=True) == []
    assert intervals(f, inf=S(7)/5, sqf=True) == [(S(7)/5, S(3)/2)]
    assert intervals(f, sup=S(7)/4, sqf=True) == [(-2, -1), (1, S(3)/2)]
    assert intervals(f, sup=S(7)/5, sqf=True) == [(-2, -1)]

    assert intervals(g, inf=S(7)/4) == []
    assert intervals(g, inf=S(7)/5) == [((S(7)/5, S(3)/2), 2)]
    assert intervals(g, sup=S(7)/4) == [((-2, -1), 2), ((1, S(3)/2), 2)]
    assert intervals(g, sup=S(7)/5) == [((-2, -1), 2)]

    assert intervals([g, h], inf=S(7)/4) == []
    assert intervals([g, h], inf=S(7)/5) == [((S(7)/5, S(3)/2), {0: 2})]
    assert intervals([g, h], sup=S(7)/4) == \
        [((-2, -1), {0: 2}), ((1, 1), {1: 1}), ((1, S(3)/2), {0: 2})]
    assert intervals(
        [g, h], sup=S(7)/5) == [((-2, -1), {0: 2}), ((1, 1), {1: 1})]

    assert intervals([x + 2, x**2 - 2]) == \
        [((-2, -2), {0: 1}), ((-2, -1), {1: 1}), ((1, 2), {1: 1})]
    assert intervals([x + 2, x**2 - 2], strict=True) == \
        [((-2, -2), {0: 1}), ((-S(3)/2, -1), {1: 1}), ((1, 2), {1: 1})]

    f = 7*z**4 - 19*z**3 + 20*z**2 + 17*z + 20

    assert intervals(f) == []

    real_part, complex_part = intervals(f, all=True, sqf=True)

    assert real_part == []
    assert all(re(a) < re(r) < re(b) and im(a) < im(r) < im(b)
               for (a, b), r in zip(complex_part, nroots(f)))

    assert complex_part == [(-S(40)/7 - 40*I/7, 0),
                            (-S(40)/7, 40*I/7),
                            (-40*I/7, S(40)/7),
                            (0, S(40)/7 + 40*I/7)]

    real_part, complex_part = intervals(f, all=True, sqf=True, eps=S(1)/10)

    assert real_part == []
    assert all(re(a) < re(r) < re(b) and im(a) < im(r) < im(b)
               for (a, b), r in zip(complex_part, nroots(f)))

    raises(ValueError, lambda: intervals(x**2 - 2, eps=10**-100000))
    raises(ValueError, lambda: Poly(x**2 - 2).intervals(eps=10**-100000))
    raises(ValueError,
           lambda: intervals([x**2 - 2, x**2 - 3], eps=10**-100000))
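
# intervals() isolates real roots (and with all=True also complex ones,
# as rectangles) in disjoint intervals with rational endpoints; eps
# tightens the isolation width, sqf=True drops multiplicities, and
# unreasonably small eps values are rejected with ValueError.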
eps=10**-100000)) def test_refine_root(): f = Poly(x**2 - 2) assert f.refine_root(1, 2, steps=0) == (1, 2) assert f.refine_root(-2, -1, steps=0) == (-2, -1) assert f.refine_root(1, 2, steps=None) == (1, S(3)/2) assert f.refine_root(-2, -1, steps=None) == (-S(3)/2, -1) assert f.refine_root(1, 2, steps=1) == (1, S(3)/2) assert f.refine_root(-2, -1, steps=1) == (-S(3)/2, -1) assert f.refine_root(1, 2, steps=1, fast=True) == (1, S(3)/2) assert f.refine_root(-2, -1, steps=1, fast=True) == (-S(3)/2, -1) assert f.refine_root(1, 2, eps=S(1)/100) == (S(24)/17, S(17)/12) assert f.refine_root(1, 2, eps=1e-2) == (S(24)/17, S(17)/12) raises(PolynomialError, lambda: (f**2).refine_root(1, 2, check_sqf=True)) raises(RefinementFailed, lambda: (f**2).refine_root(1, 2)) raises(RefinementFailed, lambda: (f**2).refine_root(2, 3)) f = x**2 - 2 assert refine_root(f, 1, 2, steps=1) == (1, S(3)/2) assert refine_root(f, -2, -1, steps=1) == (-S(3)/2, -1) assert refine_root(f, 1, 2, steps=1, fast=True) == (1, S(3)/2) assert refine_root(f, -2, -1, steps=1, fast=True) == (-S(3)/2, -1) assert refine_root(f, 1, 2, eps=S(1)/100) == (S(24)/17, S(17)/12) assert refine_root(f, 1, 2, eps=1e-2) == (S(24)/17, S(17)/12) raises(PolynomialError, lambda: refine_root(1, 7, 8, eps=S(1)/100)) raises(ValueError, lambda: Poly(f).refine_root(1, 2, eps=10**-100000)) raises(ValueError, lambda: refine_root(f, 1, 2, eps=10**-100000)) def test_count_roots(): assert count_roots(x**2 - 2) == 2 assert count_roots(x**2 - 2, inf=-oo) == 2 assert count_roots(x**2 - 2, sup=+oo) == 2 assert count_roots(x**2 - 2, inf=-oo, sup=+oo) == 2 assert count_roots(x**2 - 2, inf=-2) == 2 assert count_roots(x**2 - 2, inf=-1) == 1 assert count_roots(x**2 - 2, sup=1) == 1 assert count_roots(x**2 - 2, sup=2) == 2 assert count_roots(x**2 - 2, inf=-1, sup=1) == 0 assert count_roots(x**2 - 2, inf=-2, sup=2) == 2 assert count_roots(x**2 - 2, inf=-1, sup=1) == 0 assert count_roots(x**2 - 2, inf=-2, sup=2) == 2 assert count_roots(x**2 + 2) == 0 assert count_roots(x**2 + 2, inf=-2*I) == 2 assert count_roots(x**2 + 2, sup=+2*I) == 2 assert count_roots(x**2 + 2, inf=-2*I, sup=+2*I) == 2 assert count_roots(x**2 + 2, inf=0) == 0 assert count_roots(x**2 + 2, sup=0) == 0 assert count_roots(x**2 + 2, inf=-I) == 1 assert count_roots(x**2 + 2, sup=+I) == 1 assert count_roots(x**2 + 2, inf=+I/2, sup=+I) == 0 assert count_roots(x**2 + 2, inf=-I, sup=-I/2) == 0 raises(PolynomialError, lambda: count_roots(1)) def test_Poly_root(): f = Poly(2*x**3 - 7*x**2 + 4*x + 4) assert f.root(0) == -S(1)/2 assert f.root(1) == 2 assert f.root(2) == 2 raises(IndexError, lambda: f.root(3)) assert Poly(x**5 + x + 1).root(0) == rootof(x**3 - x**2 + 1, 0) def test_real_roots(): assert real_roots(x) == [0] assert real_roots(x, multiple=False) == [(0, 1)] assert real_roots(x**3) == [0, 0, 0] assert real_roots(x**3, multiple=False) == [(0, 3)] assert real_roots(x*(x**3 + x + 3)) == [rootof(x**3 + x + 3, 0), 0] assert real_roots(x*(x**3 + x + 3), multiple=False) == [(rootof( x**3 + x + 3, 0), 1), (0, 1)] assert real_roots( x**3*(x**3 + x + 3)) == [rootof(x**3 + x + 3, 0), 0, 0, 0] assert real_roots(x**3*(x**3 + x + 3), multiple=False) == [(rootof( x**3 + x + 3, 0), 1), (0, 3)] f = 2*x**3 - 7*x**2 + 4*x + 4 g = x**3 + x + 1 assert Poly(f).real_roots() == [-S(1)/2, 2, 2] assert Poly(g).real_roots() == [rootof(g, 0)] def test_all_roots(): f = 2*x**3 - 7*x**2 + 4*x + 4 g = x**3 + x + 1 assert Poly(f).all_roots() == [-S(1)/2, 2, 2] assert Poly(g).all_roots() == [rootof(g, 0), rootof(g, 1), rootof(g, 2)] def 
test_nroots(): assert Poly(0, x).nroots() == [] assert Poly(1, x).nroots() == [] assert Poly(x**2 - 1, x).nroots() == [-1.0, 1.0] assert Poly(x**2 + 1, x).nroots() == [-1.0*I, 1.0*I] roots = Poly(x**2 - 1, x).nroots() assert roots == [-1.0, 1.0] roots = Poly(x**2 + 1, x).nroots() assert roots == [-1.0*I, 1.0*I] roots = Poly(x**2/3 - S(1)/3, x).nroots() assert roots == [-1.0, 1.0] roots = Poly(x**2/3 + S(1)/3, x).nroots() assert roots == [-1.0*I, 1.0*I] assert Poly(x**2 + 2*I, x).nroots() == [-1.0 + 1.0*I, 1.0 - 1.0*I] assert Poly( x**2 + 2*I, x, extension=I).nroots() == [-1.0 + 1.0*I, 1.0 - 1.0*I] assert Poly(0.2*x + 0.1).nroots() == [-0.5] roots = nroots(x**5 + x + 1, n=5) eps = Float("1e-5") assert re(roots[0]).epsilon_eq(-0.75487, eps) is S.true assert im(roots[0]) == 0.0 assert re(roots[1]) == -0.5 assert im(roots[1]).epsilon_eq(-0.86602, eps) is S.true assert re(roots[2]) == -0.5 assert im(roots[2]).epsilon_eq(+0.86602, eps) is S.true assert re(roots[3]).epsilon_eq(+0.87743, eps) is S.true assert im(roots[3]).epsilon_eq(-0.74486, eps) is S.true assert re(roots[4]).epsilon_eq(+0.87743, eps) is S.true assert im(roots[4]).epsilon_eq(+0.74486, eps) is S.true eps = Float("1e-6") assert re(roots[0]).epsilon_eq(-0.75487, eps) is S.false assert im(roots[0]) == 0.0 assert re(roots[1]) == -0.5 assert im(roots[1]).epsilon_eq(-0.86602, eps) is S.false assert re(roots[2]) == -0.5 assert im(roots[2]).epsilon_eq(+0.86602, eps) is S.false assert re(roots[3]).epsilon_eq(+0.87743, eps) is S.false assert im(roots[3]).epsilon_eq(-0.74486, eps) is S.false assert re(roots[4]).epsilon_eq(+0.87743, eps) is S.false assert im(roots[4]).epsilon_eq(+0.74486, eps) is S.false raises(DomainError, lambda: Poly(x + y, x).nroots()) raises(MultivariatePolynomialError, lambda: Poly(x + y).nroots()) assert nroots(x**2 - 1) == [-1.0, 1.0] roots = nroots(x**2 - 1) assert roots == [-1.0, 1.0] assert nroots(x + I) == [-1.0*I] assert nroots(x + 2*I) == [-2.0*I] raises(PolynomialError, lambda: nroots(0)) # issue 8296 f = Poly(x**4 - 1) assert f.nroots(2) == [w.n(2) for w in f.all_roots()] def test_ground_roots(): f = x**6 - 4*x**4 + 4*x**3 - x**2 assert Poly(f).ground_roots() == {S(1): 2, S(0): 2} assert ground_roots(f) == {S(1): 2, S(0): 2} def test_nth_power_roots_poly(): f = x**4 - x**2 + 1 f_2 = (x**2 - x + 1)**2 f_3 = (x**2 + 1)**2 f_4 = (x**2 + x + 1)**2 f_12 = (x - 1)**4 assert nth_power_roots_poly(f, 1) == f raises(ValueError, lambda: nth_power_roots_poly(f, 0)) raises(ValueError, lambda: nth_power_roots_poly(f, x)) assert factor(nth_power_roots_poly(f, 2)) == f_2 assert factor(nth_power_roots_poly(f, 3)) == f_3 assert factor(nth_power_roots_poly(f, 4)) == f_4 assert factor(nth_power_roots_poly(f, 12)) == f_12 raises(MultivariatePolynomialError, lambda: nth_power_roots_poly( x + y, 2, x, y)) def test_torational_factor_list(): p = expand(((x**2-1)*(x-2)).subs({x:x*(1 + sqrt(2))})) assert _torational_factor_list(p, x) == (-2, [ (-x*(1 + sqrt(2))/2 + 1, 1), (-x*(1 + sqrt(2)) - 1, 1), (-x*(1 + sqrt(2)) + 1, 1)]) p = expand(((x**2-1)*(x-2)).subs({x:x*(1 + 2**Rational(1, 4))})) assert _torational_factor_list(p, x) is None def test_cancel(): assert cancel(0) == 0 assert cancel(7) == 7 assert cancel(x) == x assert cancel(oo) == oo assert cancel((2, 3)) == (1, 2, 3) assert cancel((1, 0), x) == (1, 1, 0) assert cancel((0, 1), x) == (1, 0, 1) f, g, p, q = 4*x**2 - 4, 2*x - 2, 2*x + 2, 1 F, G, P, Q = [ Poly(u, x) for u in (f, g, p, q) ] assert F.cancel(G) == (1, P, Q) assert cancel((f, g)) == (1, p, q) assert cancel((f, g), x) == 
(1, p, q) assert cancel((f, g), (x,)) == (1, p, q) assert cancel((F, G)) == (1, P, Q) assert cancel((f, g), polys=True) == (1, P, Q) assert cancel((F, G), polys=False) == (1, p, q) f = (x**2 - 2)/(x + sqrt(2)) assert cancel(f) == f assert cancel(f, greedy=False) == x - sqrt(2) f = (x**2 - 2)/(x - sqrt(2)) assert cancel(f) == f assert cancel(f, greedy=False) == x + sqrt(2) assert cancel((x**2/4 - 1, x/2 - 1)) == (S(1)/2, x + 2, 1) assert cancel((x**2 - y)/(x - y)) == 1/(x - y)*(x**2 - y) assert cancel((x**2 - y**2)/(x - y), x) == x + y assert cancel((x**2 - y**2)/(x - y), y) == x + y assert cancel((x**2 - y**2)/(x - y)) == x + y assert cancel((x**3 - 1)/(x**2 - 1)) == (x**2 + x + 1)/(x + 1) assert cancel((x**3/2 - S(1)/2)/(x**2 - 1)) == (x**2 + x + 1)/(2*x + 2) assert cancel((exp(2*x) + 2*exp(x) + 1)/(exp(x) + 1)) == exp(x) + 1 f = Poly(x**2 - a**2, x) g = Poly(x - a, x) F = Poly(x + a, x) G = Poly(1, x) assert cancel((f, g)) == (1, F, G) f = x**3 + (sqrt(2) - 2)*x**2 - (2*sqrt(2) + 3)*x - 3*sqrt(2) g = x**2 - 2 assert cancel((f, g), extension=True) == (1, x**2 - 2*x - 3, x - sqrt(2)) f = Poly(-2*x + 3, x) g = Poly(-x**9 + x**8 + x**6 - x**5 + 2*x**2 - 3*x + 1, x) assert cancel((f, g)) == (1, -f, -g) f = Poly(y, y, domain='ZZ(x)') g = Poly(1, y, domain='ZZ[x]') assert f.cancel( g) == (1, Poly(y, y, domain='ZZ(x)'), Poly(1, y, domain='ZZ(x)')) assert f.cancel(g, include=True) == ( Poly(y, y, domain='ZZ(x)'), Poly(1, y, domain='ZZ(x)')) f = Poly(5*x*y + x, y, domain='ZZ(x)') g = Poly(2*x**2*y, y, domain='ZZ(x)') assert f.cancel(g, include=True) == ( Poly(5*y + 1, y, domain='ZZ(x)'), Poly(2*x*y, y, domain='ZZ(x)')) f = -(-2*x - 4*y + 0.005*(z - y)**2)/((z - y)*(-z + y + 2)) assert cancel(f).is_Mul == True P = tanh(x - 3.0) Q = tanh(x + 3.0) f = ((-2*P**2 + 2)*(-P**2 + 1)*Q**2/2 + (-2*P**2 + 2)*(-2*Q**2 + 2)*P*Q - (-2*P**2 + 2)*P**2*Q**2 + (-2*Q**2 + 2)*(-Q**2 + 1)*P**2/2 - (-2*Q**2 + 2)*P**2*Q**2)/(2*sqrt(P**2*Q**2 + 0.0001)) \ + (-(-2*P**2 + 2)*P*Q**2/2 - (-2*Q**2 + 2)*P**2*Q/2)*((-2*P**2 + 2)*P*Q**2/2 + (-2*Q**2 + 2)*P**2*Q/2)/(2*(P**2*Q**2 + 0.0001)**(S(3)/2)) assert cancel(f).is_Mul == True # issue 7022 A = Symbol('A', commutative=False) p1 = Piecewise((A*(x**2 - 1)/(x + 1), x > 1), ((x + 2)/(x**2 + 2*x), True)) p2 = Piecewise((A*(x - 1), x > 1), (1/x, True)) assert cancel(p1) == p2 assert cancel(2*p1) == 2*p2 assert cancel(1 + p1) == 1 + p2 assert cancel((x**2 - 1)/(x + 1)*p1) == (x - 1)*p2 assert cancel((x**2 - 1)/(x + 1) + p1) == (x - 1) + p2 p3 = Piecewise(((x**2 - 1)/(x + 1), x > 1), ((x + 2)/(x**2 + 2*x), True)) p4 = Piecewise(((x - 1), x > 1), (1/x, True)) assert cancel(p3) == p4 assert cancel(2*p3) == 2*p4 assert cancel(1 + p3) == 1 + p4 assert cancel((x**2 - 1)/(x + 1)*p3) == (x - 1)*p4 assert cancel((x**2 - 1)/(x + 1) + p3) == (x - 1) + p4 # issue 9363 M = MatrixSymbol('M', 5, 5) assert cancel(M[0,0] + 7) == M[0,0] + 7 expr = sin(M[1, 4] + M[2, 1] * 5 * M[4, 0]) - 5 * M[1, 2] / z assert cancel(expr) == (z*sin(M[1, 4] + M[2, 1] * 5 * M[4, 0]) - 5 * M[1, 2]) / z def test_reduced(): f = 2*x**4 + y**2 - x**2 + y**3 G = [x**3 - x, y**3 - y] Q = [2*x, 1] r = x**2 + y**2 + y assert reduced(f, G) == (Q, r) assert reduced(f, G, x, y) == (Q, r) H = groebner(G) assert H.reduce(f) == (Q, r) Q = [Poly(2*x, x, y), Poly(1, x, y)] r = Poly(x**2 + y**2 + y, x, y) assert _strict_eq(reduced(f, G, polys=True), (Q, r)) assert _strict_eq(reduced(f, G, x, y, polys=True), (Q, r)) H = groebner(G, polys=True) assert _strict_eq(H.reduce(f), (Q, r)) f = 2*x**3 + y**3 + 3*y G = groebner([x**2 + y**2 - 1, x*y 
- 2]) Q = [x**2 - x*y**3/2 + x*y/2 + y**6/4 - y**4/2 + y**2/4, -y**5/4 + y**3/2 + 3*y/4] r = 0 assert reduced(f, G) == (Q, r) assert G.reduce(f) == (Q, r) assert reduced(f, G, auto=False)[1] != 0 assert G.reduce(f, auto=False)[1] != 0 assert G.contains(f) is True assert G.contains(f + 1) is False assert reduced(1, [1], x) == ([1], 0) raises(ComputationFailed, lambda: reduced(1, [1])) def test_groebner(): assert groebner([], x, y, z) == [] assert groebner([x**2 + 1, y**4*x + x**3], x, y, order='lex') == [1 + x**2, -1 + y**4] assert groebner([x**2 + 1, y**4*x + x**3, x*y*z**3], x, y, z, order='grevlex') == [-1 + y**4, z**3, 1 + x**2] assert groebner([x**2 + 1, y**4*x + x**3], x, y, order='lex', polys=True) == \ [Poly(1 + x**2, x, y), Poly(-1 + y**4, x, y)] assert groebner([x**2 + 1, y**4*x + x**3, x*y*z**3], x, y, z, order='grevlex', polys=True) == \ [Poly(-1 + y**4, x, y, z), Poly(z**3, x, y, z), Poly(1 + x**2, x, y, z)] assert groebner([x**3 - 1, x**2 - 1]) == [x - 1] assert groebner([Eq(x**3, 1), Eq(x**2, 1)]) == [x - 1] F = [3*x**2 + y*z - 5*x - 1, 2*x + 3*x*y + y**2, x - 3*y + x*z - 2*z**2] f = z**9 - x**2*y**3 - 3*x*y**2*z + 11*y*z**2 + x**2*z**2 - 5 G = groebner(F, x, y, z, modulus=7, symmetric=False) assert G == [1 + x + y + 3*z + 2*z**2 + 2*z**3 + 6*z**4 + z**5, 1 + 3*y + y**2 + 6*z**2 + 3*z**3 + 3*z**4 + 3*z**5 + 4*z**6, 1 + 4*y + 4*z + y*z + 4*z**3 + z**4 + z**6, 6 + 6*z + z**2 + 4*z**3 + 3*z**4 + 6*z**5 + 3*z**6 + z**7] Q, r = reduced(f, G, x, y, z, modulus=7, symmetric=False, polys=True) assert sum([ q*g for q, g in zip(Q, G.polys)], r) == Poly(f, modulus=7) F = [x*y - 2*y, 2*y**2 - x**2] assert groebner(F, x, y, order='grevlex') == \ [y**3 - 2*y, x**2 - 2*y**2, x*y - 2*y] assert groebner(F, y, x, order='grevlex') == \ [x**3 - 2*x**2, -x**2 + 2*y**2, x*y - 2*y] assert groebner(F, order='grevlex', field=True) == \ [y**3 - 2*y, x**2 - 2*y**2, x*y - 2*y] assert groebner([1], x) == [1] assert groebner([x**2 + 2.0*y], x, y) == [1.0*x**2 + 2.0*y] raises(ComputationFailed, lambda: groebner([1])) assert groebner([x**2 - 1, x**3 + 1], method='buchberger') == [x + 1] assert groebner([x**2 - 1, x**3 + 1], method='f5b') == [x + 1] raises(ValueError, lambda: groebner([x, y], method='unknown')) def test_fglm(): F = [a + b + c + d, a*b + a*d + b*c + b*d, a*b*c + a*b*d + a*c*d + b*c*d, a*b*c*d - 1] G = groebner(F, a, b, c, d, order=grlex) B = [ 4*a + 3*d**9 - 4*d**5 - 3*d, 4*b + 4*c - 3*d**9 + 4*d**5 + 7*d, 4*c**2 + 3*d**10 - 4*d**6 - 3*d**2, 4*c*d**4 + 4*c - d**9 + 4*d**5 + 5*d, d**12 - d**8 - d**4 + 1, ] assert groebner(F, a, b, c, d, order=lex) == B assert G.fglm(lex) == B F = [9*x**8 + 36*x**7 - 32*x**6 - 252*x**5 - 78*x**4 + 468*x**3 + 288*x**2 - 108*x + 9, -72*t*x**7 - 252*t*x**6 + 192*t*x**5 + 1260*t*x**4 + 312*t*x**3 - 404*t*x**2 - 576*t*x + \ 108*t - 72*x**7 - 256*x**6 + 192*x**5 + 1280*x**4 + 312*x**3 - 576*x + 96] G = groebner(F, t, x, order=grlex) B = [ 203577793572507451707*t + 627982239411707112*x**7 - 666924143779443762*x**6 - \ 10874593056632447619*x**5 + 5119998792707079562*x**4 + 72917161949456066376*x**3 + \ 20362663855832380362*x**2 - 142079311455258371571*x + 183756699868981873194, 9*x**8 + 36*x**7 - 32*x**6 - 252*x**5 - 78*x**4 + 468*x**3 + 288*x**2 - 108*x + 9, ] assert groebner(F, t, x, order=lex) == B assert G.fglm(lex) == B F = [x**2 - x - 3*y + 1, -2*x + y**2 + y - 1] G = groebner(F, x, y, order=lex) B = [ x**2 - x - 3*y + 1, y**2 - 2*x + y - 1, ] assert groebner(F, x, y, order=grlex) == B assert G.fglm(grlex) == B def test_is_zero_dimensional(): assert 
is_zero_dimensional([x, y], x, y) is True assert is_zero_dimensional([x**3 + y**2], x, y) is False assert is_zero_dimensional([x, y, z], x, y, z) is True assert is_zero_dimensional([x, y, z], x, y, z, t) is False F = [x*y - z, y*z - x, x*y - y] assert is_zero_dimensional(F, x, y, z) is True F = [x**2 - 2*x*z + 5, x*y**2 + y*z**3, 3*y**2 - 8*z**2] assert is_zero_dimensional(F, x, y, z) is True def test_GroebnerBasis(): F = [x*y - 2*y, 2*y**2 - x**2] G = groebner(F, x, y, order='grevlex') H = [y**3 - 2*y, x**2 - 2*y**2, x*y - 2*y] P = [ Poly(h, x, y) for h in H ] assert isinstance(G, GroebnerBasis) is True assert len(G) == 3 assert G[0] == H[0] and not G[0].is_Poly assert G[1] == H[1] and not G[1].is_Poly assert G[2] == H[2] and not G[2].is_Poly assert G[1:] == H[1:] and not any(g.is_Poly for g in G[1:]) assert G[:2] == H[:2] and not any(g.is_Poly for g in G[1:]) assert G.exprs == H assert G.polys == P assert G.gens == (x, y) assert G.domain == ZZ assert G.order == grevlex assert G == H assert G == tuple(H) assert G == P assert G == tuple(P) assert G != [] G = groebner(F, x, y, order='grevlex', polys=True) assert G[0] == P[0] and G[0].is_Poly assert G[1] == P[1] and G[1].is_Poly assert G[2] == P[2] and G[2].is_Poly assert G[1:] == P[1:] and all(g.is_Poly for g in G[1:]) assert G[:2] == P[:2] and all(g.is_Poly for g in G[1:]) def test_poly(): assert poly(x) == Poly(x, x) assert poly(y) == Poly(y, y) assert poly(x + y) == Poly(x + y, x, y) assert poly(x + sin(x)) == Poly(x + sin(x), x, sin(x)) assert poly(x + y, wrt=y) == Poly(x + y, y, x) assert poly(x + sin(x), wrt=sin(x)) == Poly(x + sin(x), sin(x), x) assert poly(x*y + 2*x*z**2 + 17) == Poly(x*y + 2*x*z**2 + 17, x, y, z) assert poly(2*(y + z)**2 - 1) == Poly(2*y**2 + 4*y*z + 2*z**2 - 1, y, z) assert poly( x*(y + z)**2 - 1) == Poly(x*y**2 + 2*x*y*z + x*z**2 - 1, x, y, z) assert poly(2*x*( y + z)**2 - 1) == Poly(2*x*y**2 + 4*x*y*z + 2*x*z**2 - 1, x, y, z) assert poly(2*( y + z)**2 - x - 1) == Poly(2*y**2 + 4*y*z + 2*z**2 - x - 1, x, y, z) assert poly(x*( y + z)**2 - x - 1) == Poly(x*y**2 + 2*x*y*z + x*z**2 - x - 1, x, y, z) assert poly(2*x*(y + z)**2 - x - 1) == Poly(2*x*y**2 + 4*x*y*z + 2* x*z**2 - x - 1, x, y, z) assert poly(x*y + (x + y)**2 + (x + z)**2) == \ Poly(2*x*z + 3*x*y + y**2 + z**2 + 2*x**2, x, y, z) assert poly(x*y*(x + y)*(x + z)**2) == \ Poly(x**3*y**2 + x*y**2*z**2 + y*x**2*z**2 + 2*z*x**2* y**2 + 2*y*z*x**3 + y*x**4, x, y, z) assert poly(Poly(x + y + z, y, x, z)) == Poly(x + y + z, y, x, z) assert poly((x + y)**2, x) == Poly(x**2 + 2*x*y + y**2, x, domain=ZZ[y]) assert poly((x + y)**2, y) == Poly(x**2 + 2*x*y + y**2, y, domain=ZZ[x]) assert poly(1, x) == Poly(1, x) raises(GeneratorsNeeded, lambda: poly(1)) # issue 6184 assert poly(x + y, x, y) == Poly(x + y, x, y) assert poly(x + y, y, x) == Poly(x + y, y, x) def test_keep_coeff(): u = Mul(2, x + 1, evaluate=False) assert _keep_coeff(S(1), x) == x assert _keep_coeff(S(-1), x) == -x assert _keep_coeff(S(1.0), x) == 1.0*x assert _keep_coeff(S(-1.0), x) == -1.0*x assert _keep_coeff(S(1), 2*x) == 2*x assert _keep_coeff(S(2), x/2) == x assert _keep_coeff(S(2), sin(x)) == 2*sin(x) assert _keep_coeff(S(2), x + 1) == u assert _keep_coeff(x, 1/x) == 1 assert _keep_coeff(x + 1, S(2)) == u @XFAIL def test_poly_matching_consistency(): # Test for this issue: # https://github.com/sympy/sympy/issues/5514 assert I * Poly(x, x) == Poly(I*x, x) assert Poly(x, x) * I == Poly(I*x, x) @XFAIL def test_issue_5786(): assert expand(factor(expand( (x - I*y)*(z - I*t)), extension=[I])) == -I*t*x - 
t*y + x*z - I*y*z def test_noncommutative(): class foo(Expr): is_commutative=False e = x/(x + x*y) c = 1/( 1 + y) assert cancel(foo(e)) == foo(c) assert cancel(e + foo(e)) == c + foo(c) assert cancel(e*foo(c)) == c*foo(c) def test_to_rational_coeffs(): assert to_rational_coeffs( Poly(x**3 + y*x**2 + sqrt(y), x, domain='EX')) == None def test_factor_terms(): # issue 7067 assert factor_list(x*(x + y)) == (1, [(x, 1), (x + y, 1)]) assert sqf_list(x*(x + y)) == (1, [(x, 1), (x + y, 1)])
ChristinaZografou/sympy
sympy/polys/tests/test_polytools.py
Python
bsd-3-clause
106,569
0.001314
# (C) 2015, 2016 Elke Schaper

"""
    :synopsis: The Plate Class.

    .. moduleauthor:: Elke Schaper <elke.schaper@isb-sib.ch>
"""

import itertools
import logging
import pickle
import random
import re
import string

import GPy
import numpy as np
import pylab
import scipy.stats

import hts.data_tasks.gaussian_processes
from hts.data_tasks import prediction
from hts.plate_data import plate_data, data_issue, meta_data, plate_layout, readout

KNOWN_DATA_TYPES = ["plate_layout", "readout", "data_issue", "config_data"]
LETTERS = list(string.ascii_uppercase) + \
    ["".join(i) for i in itertools.product(string.ascii_uppercase, string.ascii_uppercase)]
MAX_WIDTH = 48
MAX_HEIGHT = 32
TRANSLATE_HUMANREADABLE_COORDINATE = {(LETTERS[cc[0]], str(cc[1] + 1)): cc
                                      for cc in itertools.product(range(MAX_HEIGHT), range(MAX_WIDTH))}
TRANSLATE_COORDINATE_HUMANREADABLE = {cc: (LETTERS[cc[0]], str(cc[0] + 1), str(cc[1] + 1))
                                      for cc in itertools.product(range(MAX_HEIGHT), range(MAX_WIDTH))}
LOG = logging.getLogger(__name__)


# TODO: Instead of creating a matrix for both coordinates, simply create a list each to save memory.

def translate_coordinate_humanreadable(coordinate, pattern=None):
    coordinate_human = TRANSLATE_COORDINATE_HUMANREADABLE[coordinate]
    if pattern:
        return pattern.format(coordinate_human[0], int(coordinate_human[2]))
    else:
        return coordinate_human


def translate_humanreadable_coordinate(humanreadable):
    pattern = re.compile(r'([a-zA-Z]+)0*(\d+)')
    match = re.match(pattern, humanreadable)
    if not match:
        LOG.error("pattern: {} did not match {}".format(pattern, humanreadable))
        raise ValueError("Could not parse the well coordinate: {}".format(humanreadable))

    humanreadable = (match.group(1), match.group(2))
    return TRANSLATE_HUMANREADABLE_COORDINATE[humanreadable]


class Plate:
    """ ``Plate`` describes all information connected to the readout_dict of a
    high throughput screen. This could be either several readouts of one plate,
    or the same readout across several plates.

    Attributes:
        name (str): Name of the plate
        width (int): Width of the plate
        height (int): Height of the plate
        KNOWN_DATA_TYPES[i] (subclass of plate_data.PlateData): The data
            associated to this Plate, e.g. a plate layout, or readouts.
    """

    def __str__(self):
        """ Create string for Plate instance. """
        if hasattr(self, "name"):
            name = self.name
        else:
            name = "<not named>"
        try:
            readout_dict = ("<Plate instance>\nname: {}\nread_outs: {}"
                            "\nNumber of read_outs: {}\nwidth: {}\nheight: {}".format(
                                name, str(self.readout.data.keys()),
                                len(self.readout.data), self.width, self.height))
        except:
            readout_dict = "<Plate instance>"
            LOG.warning("Could not create string of Plate instance.")

        return readout_dict

    def __init__(self, data, name, **kwargs):
        LOG.debug(data)

        self.name = name

        for data_type in KNOWN_DATA_TYPES:
            if data_type in data:
                if not isinstance(data[data_type], plate_data.PlateData):
                    raise Exception("type of {} data is {}, not plate_data.PlateData."
                                    .format(data_type, type(data[data_type])))
                setattr(self, data_type, data[data_type])
            else:
                setattr(self, data_type, None)

        if "height" in kwargs:
            self.height = kwargs.pop("height")
        if "width" in kwargs:
            self.width = kwargs.pop("width")

        # You are using this construct in many an __init__. Consider turning it into a decorator.
        for key, value in kwargs.items():
            if not hasattr(self, key):
                setattr(self, key, value)

        """ FORMERLY:
        # Make sure all readouts are equal in height and width.
        plate_heights = [i.height for i in self.readout.data.values()]
        plate_widths = [i.width for i in self.readout.data.values()]
        if len(set(plate_heights)) != 1 or len(set(plate_widths)) != 1:
            raise Exception("Plate widths and lengths in the parsed output "
                            "files are not all equal: plate_heights: {}, plate_widths: {} "
                            "".format(plate_heights, plate_widths))
        """

    def create(format, name=None, **kwargs):
        """ Create ``Plate`` instance.

        Create ``Plate`` instance.

        Args:
            path (str): Path to input file or directory
            format (str): Format of the input file, at current not specified
        """
        if format == "config":
            data = {}
            if "meta_data" in kwargs:
                data["meta_data"] = meta_data.MetaData.create(**kwargs["meta_data"])
            if "plate_layout" in kwargs:
                data["plate_layout"] = plate_layout.PlateLayout.create(**kwargs["plate_layout"])
            if "data_issue" in kwargs:
                data["data_issue"] = data_issue.DataIssue.create(**kwargs["data_issue"])
            if "readout" in kwargs:
                data["readout"] = readout.Readout.create(**kwargs["readout"])
            height = len(next(iter(next(iter(data.values())).data.values())))
            width = len(next(iter(next(iter(data.values())).data.values()))[0])
            if not name:
                name = next(iter(data.values())).name
            return Plate(data=data, height=height, width=width, name=name)
        elif format == 'pickle':
            with open(kwargs["path"], 'rb') as fh:
                return pickle.load(fh)
        else:
            raise Exception("Format: {} is not implemented in "
                            "Plate.create()".format(format))

    def add_data(self, data_type, data, force=False, tag=None):
        """ Add `data` of `data_type` to `self.config_data`.

        Add `data` of `data_type` to `self.config_data`.
        """
        if data_type == "meta_data" and not isinstance(data, meta_data.MetaData):
            raise Exception('data is not of type meta_data.MetaData, but {}'.format(type(data)))
        elif data_type == "plate_layout" and not isinstance(data, plate_layout.PlateLayout):
            raise Exception('data is not of type plate_layout.PlateLayout, but {}'.format(type(data)))
        elif data_type == "data_issue" and not isinstance(data, data_issue.DataIssue):
            raise Exception('data is not of type data_issue.DataIssue, but {}'.format(type(data)))
        elif data_type == "readout" and not isinstance(data, readout.Readout):
            raise Exception('data is not of type readout.Readout, but {}'.format(type(data)))

        if force or not hasattr(self, data_type) or not isinstance(getattr(self, data_type), plate_data.PlateData):
            setattr(self, data_type, data)
        else:
            getattr(self, data_type).add_data(data=data, tag=tag)

    def write(self, format, path=None, return_string=None, *args):
        """ Serialize and write ``Plate`` instances.

        Serialize ``Plate`` instance using the stated ``format``.

        Args:
            format (str): The output format: Currently only "pickle".
            path (str): Path to output file

        .. todo:: Write checks for ``format`` and ``path``.
        """
        if format == 'pickle':
            with open(path, 'wb') as fh:
                pickle.dump(self, fh)
        else:
            raise Exception('Format is unknown: {}'.format(format))

    def filter(self, value_data_type, value_data_tag, value_type=None,
               condition_data_type=None, condition_data_tag=None, condition=None,
               return_list=True):
        """ Get a list of values for defined `wells` of the data tagged with `data_tag`.
        If `value_type` is set, check if all values conform with `value_type`.

        Args:
            condition_data_type (str): Reference to PlateData instance on which wells are
                filtered for the condition.
            condition_data_tag (str): Data tag for condition_data_type
            condition (method): The condition expressed as a method.
            value_data_type (str): Reference to PlateData instance from which (for filtered
                wells) the values are retrieved.
            value_data_tag (str): Data tag for value_data_type.
            value_type (str): The type of the return values.
            return_list (bool): Returns a flattened list of all values

        Returns:
            (list of x), where x are of type `value_type`, if `value_type` is set.

        ..todo: rename method from filter to get_data
        """
        value_plate_data = getattr(self, value_data_type)
        if condition_data_type:
            condition_plate_data = getattr(self, condition_data_type)
            wells = condition_plate_data.get_wells(data_tag=condition_data_tag,
                                                   condition=condition)
            if return_list:
                return value_plate_data.get_values(wells=wells, data_tag=value_data_tag,
                                                   value_type=value_type)
            else:
                # ToDo: Return matrix of values, with None for wells that do not
                # fulfill the condition, and the value otherwise.
                data = np.empty([self.height, self.width])
                data[:] = np.NAN
                for well in wells:
                    value = value_plate_data.get_values(wells=[well], data_tag=value_data_tag,
                                                        value_type=value_type)
                    data[well[0], well[1]] = value[0]
                return data
        else:
            data = value_plate_data.data[value_data_tag]
            if return_list:
                return [item for sublist in data for item in sublist]
            else:
                return data

    #### Preprocessing functions

    def preprocess(self, methodname, **kwargs):
        method = getattr(self, methodname)
        method(**kwargs)

    def calculate_linearly_normalized_signal(self, unnormalized_key, normalized_0,
                                             normalized_1, normalized_key):
        """ Linearly normalize the data.

        .. math::

            normalized_i = \\frac{x_{unnormalized_i} - \\hat{x}_{low}}{\\hat{x}_{high} - \\hat{x}_{low}}

        normalized_0 are all wells (according to the plate layout) with mean(wells) == 0
        after normalization. normalized_1 are all wells (according to the plate layout)
        with mean(wells) == 1 after normalization.

        Args:
            unnormalized_key (str): The key for self.readout.data where the unnormalized
                ``Readout`` instance is stored.
            normalized_key (str): The key for self.readout.data where the resulting
                normalized ``Readout`` instance will be stored.
            normalized_0 (list of str): The names of all low fixtures in the plate layout
                (self.plate_layout).
            normalized_1 (list of str): The names of all high fixtures in the plate layout
                (self.plate_layout).
        """
        if normalized_key in self.readout.data:
            LOG.warning("The normalized_key {} is already in self.readout.data. "
                        "Skipping recalculation".format(normalized_key))
            return

        data_normalized_0 = self.filter(condition_data_type="plate_layout",
                                        condition_data_tag="layout",
                                        condition=lambda x: x in normalized_0,
                                        value_data_type="readout",
                                        value_data_tag=unnormalized_key)
        data_normalized_1 = self.filter(condition_data_type="plate_layout",
                                        condition_data_tag="layout",
                                        condition=lambda x: x in normalized_1,
                                        value_data_type="readout",
                                        value_data_tag=unnormalized_key)

        normalized_data = (self.readout.get_data(unnormalized_key) - np.mean(data_normalized_0)) / \
                          (np.mean(data_normalized_1) - np.mean(data_normalized_0))

        self.readout.add_data(data={normalized_key: normalized_data}, tag=normalized_key)

    def calculate_normalization_by_division(self, unnormalized_key, normalizer_key,
                                            normalized_key):
        """ The normalized data set is the element-wise division of the unnormalized data
        by the normalizer readout.

        Args:
            unnormalized_key (str): The key for self.readout.data where the unnormalized
                ``Readout`` instance is stored.
            normalizer_key (str): The key for self.readout.data of the readout used as
                the normalizer.
            normalized_key (str): The key for self.readout.data where the resulting
                normalized ``Readout`` instance will be stored.
        """
        if normalized_key in self.readout.data:
            LOG.warning("The normalized_key {} is already in self.readout.data. "
                        "Skipping recalculation".format(normalized_key))
            return

        relative_data = self.readout.get_data(unnormalized_key) / self.readout.get_data(normalizer_key)
        self.readout.add_data(data={normalized_key: relative_data}, tag=normalized_key)

    def subtract_readouts(self, data_tag_readout_minuend, data_tag_readout_subtrahend,
                          data_tag_readout_difference, **kwargs):
        if data_tag_readout_difference in self.readout.data:
            LOG.warning("The data_tag_readout_difference {} is already in self.readout.data. "
                        "Skipping recalculation".format(data_tag_readout_difference))
            return

        difference = self.readout.get_data(data_tag_readout_minuend) - \
            self.readout.get_data(data_tag_readout_subtrahend)
        self.readout.add_data(data={data_tag_readout_difference: difference},
                              tag=data_tag_readout_difference)

    def calculate_net_fret(self, donor_channel, acceptor_channel,
                           fluorophore_donor="fluorophore_donor",
                           fluorophore_acceptor="fluorophore_acceptor",
                           buffer="buffer", net_fret_key="net_fret"):
        """ Calculate the net FRET signal for a donor acceptor FRET setup.

        Calculate the net FRET signal for a donor acceptor FRET setup.
        Typical donor -> acceptor pairs include

        * 414nm CFP -> 475nm -> YFP 525nm
        * EU -> 615nm -> APC 665nm

        The following wells are needed, for both channels

        * `donor` Donor_fluorophore blank
        * `acceptor` Acceptor_fluorophore blank
        * `buffer` Buffer blank

        The proportionality factor for donor compensation is then calculated as

        .. math::

            p = \\frac{\\hat{donor}_{acceptor\\_channel} - \\hat{buffer}_{acceptor\\_channel}}{\\hat{donor}_{donor\\_channel} - \\hat{buffer}_{donor\\_channel}}

        Further, the net FRET signal `f` for all wells `x` may be calculated as:

        .. math::

            netfret = x_{acceptor\\_channel} - \\hat{acceptor}_{acceptor\\_channel} - p \\cdot (x_{donor\\_channel} - \\hat{buffer}_{donor\\_channel})

        Args:
            donor_channel (str): The key for self.readout.data where the donor_channel
                ``Readout`` instance is stored.
            acceptor_channel (str): The key for self.readout.data where the acceptor_channel
                ``Readout`` instance is stored.
            fluorophore_donor (str): The name of the donor fluorophore in self.plate_layout.
            fluorophore_acceptor (str): The name of the acceptor fluorophore in self.plate_layout.
            buffer (str): The name of the buffer in self.plate_layout.
            net_fret_key (str): The key for self.readout.data where the resulting net FRET
                ``Readout`` instance will be stored.
        """
        if net_fret_key in self.readout.data:
            LOG.warning("The net_fret_key {} is already in self.readout.data. "
                        "Skipping recalculation".format(net_fret_key))
            return

        mean_donor_donor_channel = np.mean(
            self.filter(condition_data_type="plate_layout", condition_data_tag="layout",
                        condition=lambda x: x == fluorophore_donor,
                        value_data_type="readout", value_data_tag=donor_channel))
        mean_acceptor_donor_channel = np.mean(
            self.filter(condition_data_type="plate_layout", condition_data_tag="layout",
                        condition=lambda x: x == fluorophore_acceptor,
                        value_data_type="readout", value_data_tag=donor_channel))
        mean_buffer_donor_channel = np.mean(
            self.filter(condition_data_type="plate_layout", condition_data_tag="layout",
                        condition=lambda x: x == buffer,
                        value_data_type="readout", value_data_tag=donor_channel))
        mean_donor_acceptor_channel = np.mean(
            self.filter(condition_data_type="plate_layout", condition_data_tag="layout",
                        condition=lambda x: x == fluorophore_donor,
                        value_data_type="readout", value_data_tag=acceptor_channel))
        mean_acceptor_acceptor_channel = np.mean(
            self.filter(condition_data_type="plate_layout", condition_data_tag="layout",
                        condition=lambda x: x == fluorophore_acceptor,
                        value_data_type="readout", value_data_tag=acceptor_channel))
        mean_buffer_acceptor_channel = np.mean(
            self.filter(condition_data_type="plate_layout", condition_data_tag="layout",
                        condition=lambda x: x == buffer,
                        value_data_type="readout", value_data_tag=acceptor_channel))

        for i, value in enumerate([mean_donor_donor_channel, mean_acceptor_donor_channel,
                                   mean_buffer_donor_channel, mean_donor_acceptor_channel,
                                   mean_acceptor_acceptor_channel, mean_buffer_acceptor_channel]):
            if np.isnan(value):
                raise ValueError("Calculation of variable {} resulted in {}. Check whether "
                                 "the plate layout is correctly assigned.".format(i, value))

        p = (mean_donor_acceptor_channel - mean_buffer_acceptor_channel) / \
            (mean_donor_donor_channel - mean_buffer_donor_channel)

        # Calculate the net FRET signal for the entire plate.
        # See TechNote #TNPJ100.04 PROzyme
        # http://prozyme.com/pages/tech-notes
        netfret = self.readout.get_data(acceptor_channel) - mean_acceptor_acceptor_channel - \
            p * (self.readout.get_data(donor_channel) - mean_buffer_donor_channel)
        # ToDo: Add calculations for other values, as described by Eq. 5 or Eq. 6 in the Technote.

        self.readout.add_data(data={net_fret_key: netfret}, tag=net_fret_key)

    def calculate_control_normalized_signal(self, data_tag_readout, negative_control_key,
                                            positive_control_key,
                                            data_tag_normalized_readout=None,
                                            local=True, **kwargs):
        """ Normalize the signal in `data_tag_readout` by `negative_control_key` and
        `positive_control_key`.

        The normalization is calculated as:

        .. math::

            y' = \\frac{y - \\mu_{nc}}{|\\mu_{nc} - \\mu_{pc}|}

        For local == True, \\mu_{nc} and \\mu_{pc} are predicted locally to the well
        (using Gaussian processes). For local == False, \\mu_{nc} and \\mu_{pc} are
        estimated by the average control values across the plate.

        Args:
            data_tag_readout (str): The key for self.readout.data where the readouts are stored.
            negative_control_key (str): The name of the negative control in the plate layout.
            positive_control_key (str): The name of the positive control in the plate layout.
            data_tag_normalized_readout (str): The key for self.readout.data where the
                normalized readouts will be stored.
            local (bool): If True, use Gaussian processes to locally predict the control
                distributions. Else, use plate-wise control distributions.
        """
        if data_tag_normalized_readout is None:
            data_tag_normalized_readout = "{}__control_normalized".format(data_tag_readout)

        all_readouts = self.readout.get_data(data_tag_readout)

        if local != True:
            # Normalize by "global" plate averages of negative and positive controls.
            nc_wells = self.plate_layout.get_wells(data_tag="layout",
                                                   condition=lambda x: x == negative_control_key)
            nc_values = self.readout.get_values(wells=nc_wells, data_tag=data_tag_readout)
            data_nc_mean = np.mean(nc_values)
            data_nc_std = np.std(nc_values)

            pc_wells = self.plate_layout.get_wells(data_tag="layout",
                                                   condition=lambda x: x == positive_control_key)
            pc_values = self.readout.get_values(wells=pc_wells, data_tag=data_tag_readout)
            data_pc_mean = np.mean(pc_values)
            data_pc_std = np.std(pc_values)

            LOG.debug("Normalize globally with mean negative control: {} "
                      "and mean positive control: {}.".format(data_nc_mean, data_pc_mean))
        else:
            # Normalize by "local" predictions of negative and positive control
            # distributions (extracted with Gaussian processes).

            # Calculate predicted values for mean and std: negative control.
            m, data_nc_mean, data_nc_std = self.apply_gaussian_process(
                data_tag_readout=data_tag_readout,
                sample_tag_input=negative_control_key, **kwargs)

            # Calculate predicted values for mean and std: positive control.
            m, data_pc_mean, data_pc_std = self.apply_gaussian_process(
                data_tag_readout=data_tag_readout,
                sample_tag_input=positive_control_key, **kwargs)

        # Calculate the normalised data.
        normalized_data = (all_readouts - data_nc_mean) / (data_pc_mean - data_nc_mean)

        self.readout.add_data(data={data_tag_normalized_readout: normalized_data,
                                    # These are scalars, not arrays x arrays. Does this data need saving this way?
                                    # data_tag_normalized_negative_control: data_nc_mean,
                                    # data_tag_normalized_positive_control: data_pc_mean,
                                    },
                              tag=data_tag_normalized_readout)

    def calculate_significance_compared_to_null_distribution(self, data_tag_readout,
                                                             sample_tag_null_distribution,
                                                             data_tag_standard_score,
                                                             data_tag_p_value,
                                                             is_higher_value_better, **kwargs):
        """ Calculate the standard score and p-value for all data (in `data_tag_readout`)
        compared to the null distribution defined by all data of
        `sample_tag_null_distribution` in `data_tag_readout`. Save them as readouts with
        the tags `data_tag_standard_score` and `data_tag_p_value`.

        Assume that the samples in `sample_tag_null_distribution` follow a Gaussian
        distribution.

        WARNING! For the p-value calculation, we assume that the control, which has lower
        mean values, is also supposed to show lower mean values. [Otherwise, we would have
        to introduce a boolean "pos_control_lower_than_neg_control".]

        Args:
            data_tag_readout (str): The key for self.readout.data where the readouts are stored.
            sample_tag_null_distribution (str): The sample key (defined in the plate layout)
                defining which sample makes up the null distribution that all other samples
                are compared to.
            data_tag_standard_score (str): The key for self.readout.data where the standard
                scores will be stored.
            data_tag_p_value (str): The key for self.readout.data where the p-values will
                be stored.
            **kwargs:

        Returns:
        """
        if data_tag_standard_score is None:
            data_tag_standard_score = "{}__all__vs__{}__standard_score".format(
                data_tag_readout, sample_tag_null_distribution)
        if data_tag_p_value is None:
            data_tag_p_value = "{}__all__vs__{}__pvalue".format(
                data_tag_readout, sample_tag_null_distribution)

        all_readouts = self.readout.get_data(data_tag_readout)
        if type(sample_tag_null_distribution) != list:
            sample_tag_null_distribution = [sample_tag_null_distribution]

        # Extract the null distribution.
        null_distribution_wells = self.plate_layout.get_wells(
            data_tag="layout",
            condition=lambda x: x in sample_tag_null_distribution)
        null_distribution_values = self.readout.get_values(wells=null_distribution_wells,
                                                           data_tag=data_tag_readout)
        null_mean = np.mean(null_distribution_values)
        null_std = np.std(null_distribution_values)
        LOG.debug("Null distribution of sample {} has mean {} and std: {}.".format(
            sample_tag_null_distribution, null_mean, null_std))

        # Compute the z-score (standard score).
        standard_score = (all_readouts - null_mean) / null_std

        # Calculate the p-value of all data points compared to the null distribution.
        # Inspired by:
        # http://stackoverflow.com/questions/17559897/python-p-value-from-t-statistic
        p_value = scipy.stats.norm(null_mean, null_std).cdf(all_readouts)
        # Alternative p-value calculation:
        ## p_value for a one-sided test. For a two-sided test, multiply by 2:
        # p_value = scipy.stats.norm.sf(abs(standard_score))

        # ToDo: Check and understand.
        if is_higher_value_better in [True, "true", "True", "TRUE"]:
            standard_score = -standard_score
            p_value = 1 - p_value

        self.readout.add_data(data={data_tag_standard_score: standard_score,
                                    data_tag_p_value: p_value,
                                    },
                              tag=data_tag_readout)

    def calculate_local_ssmd(self, data_tag_mean_pos, data_tag_mean_neg, data_tag_std_pos,
                             data_tag_std_neg, data_tag_ssmd, **kwargs):
        """ Calculate local SSMD values.

        Args:
            data_tag_mean_pos:
            data_tag_mean_neg:
            data_tag_std_pos:
            data_tag_std_neg:
            data_tag_ssmd:

        Returns:
        """
        mean_pos = self.readout.get_data(data_tag_mean_pos)
        mean_neg = self.readout.get_data(data_tag_mean_neg)
        std_pos = self.readout.get_data(data_tag_std_pos)
        std_neg = self.readout.get_data(data_tag_std_neg)

        ssmd = np.abs(mean_pos - mean_neg)/np.sqrt(std_pos**2 + std_neg**2)

        self.readout.add_data(data={data_tag_ssmd: ssmd}, tag=data_tag_ssmd)

    def classify_by_cutoff(self, data_tag_readout, data_tag_classified_readout, threshold,
                           is_higher_value_better=True, is_twosided=False, **kwargs):
        """ Map a dataset of float values to either binary (`is_twosided == False`) or
        [-1, 0, 1] (`is_twosided == True`), depending on whether values fall below
        `threshold`.

        Args:
            data_tag_readout: The key for self.readout.data where the readouts are stored.
            data_tag_classified_readout: The key for self.readout.data where the True/False
                classification values will be stored.
            threshold:

        Returns:
        """
        all_readouts = self.readout.get_data(data_tag_readout)

        if is_twosided in [True, "true", "True", "TRUE"]:
            is_twosided = True
        if is_higher_value_better in [True, "true", "True", "TRUE"]:
            is_higher_value_better = True
        threshold = float(threshold)

        if is_twosided:
            classified = [[1 if datum > threshold else -1 if abs(datum) > threshold else 0
                           for datum in row] for row in all_readouts]
        elif is_higher_value_better:
            classified = [[True if datum > threshold else False for datum in row]
                          for row in all_readouts]
        else:
            classified = [[True if datum < threshold else False for datum in row]
                          for row in all_readouts]

        data = data_issue.DataIssue(data={data_tag_classified_readout: classified},
                                    name=data_tag_classified_readout)
        self.add_data(data_type="data_issue", data=data)

    def randomize_values(self, data_tag_readout, data_tag_randomized_readout,
                         randomized_samples="s", **kwargs):
        """ Randomize the signal in a readout per plate and for a specific sample.

        The result of this method serves visualization purposes only.

        Args:
            data_tag_readout (str): The key for self.readout.data where the readouts
                are stored.
            data_tag_randomized_readout (str): The key for self.readout.data where the
                randomized data will be stored.
            **kwargs:
        """
        if data_tag_randomized_readout is None:
            data_tag_randomized_readout = "{}__randomized".format(data_tag_readout)

        all_readouts = self.readout.get_data(data_tag_readout)

        # Extract the wells whose values may be shuffled.
        randomizable_wells = self.plate_layout.get_wells(data_tag="layout",
                                                         condition=lambda x: x == randomized_samples)
        randomizable_values = self.readout.get_values(wells=randomizable_wells,
                                                      data_tag=data_tag_readout)
        random.shuffle(randomizable_values)

        randomized_readouts = all_readouts.copy()
        for well, value in zip(randomizable_wells, randomizable_values):
            randomized_readouts[well] = value

        self.readout.add_data(data={data_tag_randomized_readout: randomized_readouts},
                              tag=data_tag_readout)

    #### Prediction functions

    def cross_validate_predictions(self, data_tag_readout, sample_tag, method_name, **kwargs):
        """ Cross validate sample value predictions for sample type `sample_tag` and
        readout `data_tag_readout`, using prediction method `method_name`.

        Args:
            data_tag_readout (str): The key for self.readout.data where the ``Readout``
                instance is stored.
            sample_tag (str): The sample for which the gaussian process will be modeled
                according to the position in self.plate_layout.data. E.g. for positive
                controls "pos".
            method_name (str): The prediction method. E.g. "gp" for Gaussian processes.
        """
        sampled_wells = self.plate_layout.get_wells(data_tag="layout",
                                                    condition=lambda x: x == sample_tag)
        values = self.readout.get_values(wells=sampled_wells,
                                         data_tag=data_tag_readout)  # value_type=float

        if method_name == "gp":
            prediction_method = prediction.predict_with_gaussian_process

        x, y = self.flatten_data(wells=sampled_wells, values=values)

        return prediction.cross_validate_predictions(x, y, prediction_method, **kwargs)

    def evaluate_well_value_prediction(self, data_predictions, data_tag_readout, sample_key=None):
        """ Calculate the mean squared prediction error.

        ToDo: Debug. Better: REWRITE!
        """
        # y_predicted_mean, y_predicted_var = m.predict(X)
        # f_mean, f_var = m._raw_predict(X)  # Difference to m.predict(X)
        # y_predicted_abs = y_predicted_mean * y_std + y_mean
        # y_error = y_norm - y_predicted_mean
        # y_error_abs = y - y_predicted_abs

        wells = self.plate_layout.get_wells(data_tag="layout", condition=lambda x: True)
        values = self.readout.get_values(wells=wells,
                                         data_tag=data_tag_readout)  # value_type=float

        #### This needs to be debugged, as data_predictions now come in a different format.
        raise Exception("Needs debugging.")

        values = np.array(values).reshape((len(values), 1))
        diff = data_predictions - values

        if sample_key:
            specific_wells = self.plate_layout.get_wells(data_tag="layout",
                                                         condition=lambda x: x == sample_key)
            if len(specific_wells) == 0:
                raise Exception("sample_key: {} does not define any wells.".format(sample_key))
            diff = np.array([diff[wells.index(well)] for well in specific_wells])

        return np.linalg.norm(diff)

    #### Prediction functions - Gaussian processes

    def map_coordinates(self, coordinates_list):
        # Map plate coordinates to "standard" coordinates, e.g. switch the axes
        # and flip the x-axis.
        return [(i[1], self.height - i[0] + 1) for i in coordinates_list]

    def flatten_data(self, wells, values):
        return self.flatten_wells(wells), self.flatten_values(values)

    def flatten_wells(self, wells):
        # Structure of X: similar to
        # http://gpy.readthedocs.org/en/master/tuto_GP_regression.html
        # Map plate coordinates to "standard" coordinates, e.g. switch the axes
        # and flip the x-axis.
        sampled_wells = self.map_coordinates(wells)
        x = np.array(sampled_wells)
        return x

    def flatten_values(self, values):
        y = np.array(values)
        y = y.reshape((len(values), 1))
        return y

    def un_flatten_data(self, y):
        y = [i for j in y for i in j]
        y = np.array([y[row * self.width:(row + 1) * self.width] for row in range(self.height)])
        return y

    def get_data_for_gaussian_process(self, data_tag_readout, sample_tags):
        if type(sample_tags) != list:
            sample_tags = [sample_tags]

        sampled_wells = self.plate_layout.get_wells(data_tag="layout",
                                                    condition=lambda x: x in sample_tags)
        values = self.readout.get_values(wells=sampled_wells,
                                         data_tag=data_tag_readout)  # value_type=float

        x, y = self.flatten_data(wells=sampled_wells, values=values)
        return x, y
elkeschaper/hts
hts/plate/plate.py
Python
gpl-2.0
35,353
0.006082
keyBindings = {}

from keyids import KEYIDS
from Components.config import config
from Components.RcModel import rc_model

keyDescriptions = [{
	KEYIDS["BTN_0"]: ("UP", "fp"),
	KEYIDS["BTN_1"]: ("DOWN", "fp"),
	KEYIDS["KEY_OK"]: ("OK", ""),
	KEYIDS["KEY_UP"]: ("UP",),
	KEYIDS["KEY_DOWN"]: ("DOWN",),
	KEYIDS["KEY_POWER"]: ("POWER",),
	KEYIDS["KEY_RED"]: ("RED",),
	KEYIDS["KEY_BLUE"]: ("BLUE",),
	KEYIDS["KEY_GREEN"]: ("GREEN",),
	KEYIDS["KEY_YELLOW"]: ("YELLOW",),
	KEYIDS["KEY_MENU"]: ("MENU",),
	KEYIDS["KEY_LEFT"]: ("LEFT",),
	KEYIDS["KEY_RIGHT"]: ("RIGHT",),
	KEYIDS["KEY_VIDEO"]: ("PVR",),
	KEYIDS["KEY_INFO"]: ("INFO",),
	KEYIDS["KEY_AUDIO"]: ("YELLOW",),
	KEYIDS["KEY_TV"]: ("TV",),
	KEYIDS["KEY_RADIO"]: ("RADIO",),
	KEYIDS["KEY_TEXT"]: ("TEXT",),
	KEYIDS["KEY_NEXT"]: ("ARROWRIGHT",),
	KEYIDS["KEY_PREVIOUS"]: ("ARROWLEFT",),
	KEYIDS["KEY_PREVIOUSSONG"]: ("REWIND",),
	KEYIDS["KEY_PLAYPAUSE"]: ("PLAYPAUSE",),
	KEYIDS["KEY_PLAY"]: ("PLAYPAUSE",),
	KEYIDS["KEY_NEXTSONG"]: ("FASTFORWARD",),
	KEYIDS["KEY_CHANNELUP"]: ("BOUQUET+",),
	KEYIDS["KEY_CHANNELDOWN"]: ("BOUQUET-",),
	KEYIDS["KEY_0"]: ("0",),
	KEYIDS["KEY_1"]: ("1",),
	KEYIDS["KEY_2"]: ("2",),
	KEYIDS["KEY_3"]: ("3",),
	KEYIDS["KEY_4"]: ("4",),
	KEYIDS["KEY_5"]: ("5",),
	KEYIDS["KEY_6"]: ("6",),
	KEYIDS["KEY_7"]: ("7",),
	KEYIDS["KEY_8"]: ("8",),
	KEYIDS["KEY_9"]: ("9",),
	KEYIDS["KEY_EXIT"]: ("EXIT",),
	KEYIDS["KEY_STOP"]: ("STOP",),
	KEYIDS["KEY_RECORD"]: ("RECORD",)
}, {
	KEYIDS["BTN_0"]: ("UP", "fp"),
	KEYIDS["BTN_1"]: ("DOWN", "fp"),
	KEYIDS["KEY_OK"]: ("OK", ""),
	KEYIDS["KEY_UP"]: ("UP",),
	KEYIDS["KEY_DOWN"]: ("DOWN",),
	KEYIDS["KEY_POWER"]: ("POWER",),
	KEYIDS["KEY_RED"]: ("RED",),
	KEYIDS["KEY_BLUE"]: ("BLUE",),
	KEYIDS["KEY_GREEN"]: ("GREEN",),
	KEYIDS["KEY_YELLOW"]: ("YELLOW",),
	KEYIDS["KEY_MENU"]: ("MENU",),
	KEYIDS["KEY_LEFT"]: ("LEFT",),
	KEYIDS["KEY_RIGHT"]: ("RIGHT",),
	KEYIDS["KEY_VIDEO"]: ("VIDEO",),
	KEYIDS["KEY_INFO"]: ("INFO",),
	KEYIDS["KEY_AUDIO"]: ("AUDIO",),
	KEYIDS["KEY_TV"]: ("TV",),
	KEYIDS["KEY_RADIO"]: ("RADIO",),
	KEYIDS["KEY_TEXT"]: ("TEXT",),
	KEYIDS["KEY_NEXT"]: ("ARROWRIGHT",),
	KEYIDS["KEY_PREVIOUS"]: ("ARROWLEFT",),
	KEYIDS["KEY_PREVIOUSSONG"]: ("RED", "SHIFT"),
	KEYIDS["KEY_PLAYPAUSE"]: ("YELLOW", "SHIFT"),
	KEYIDS["KEY_PLAY"]: ("GREEN", "SHIFT"),
	KEYIDS["KEY_NEXTSONG"]: ("BLUE", "SHIFT"),
	KEYIDS["KEY_CHANNELUP"]: ("BOUQUET+",),
	KEYIDS["KEY_CHANNELDOWN"]: ("BOUQUET-",),
	KEYIDS["KEY_0"]: ("0",),
	KEYIDS["KEY_1"]: ("1",),
	KEYIDS["KEY_2"]: ("2",),
	KEYIDS["KEY_3"]: ("3",),
	KEYIDS["KEY_4"]: ("4",),
	KEYIDS["KEY_5"]: ("5",),
	KEYIDS["KEY_6"]: ("6",),
	KEYIDS["KEY_7"]: ("7",),
	KEYIDS["KEY_8"]: ("8",),
	KEYIDS["KEY_9"]: ("9",),
	KEYIDS["KEY_EXIT"]: ("EXIT",),
	KEYIDS["KEY_STOP"]: ("TV", "SHIFT"),
	KEYIDS["KEY_RECORD"]: ("RADIO", "SHIFT")
}, {
	KEYIDS["BTN_0"]: ("UP", "fp"),
	KEYIDS["BTN_1"]: ("DOWN", "fp"),
	KEYIDS["KEY_OK"]: ("OK", ""),
	KEYIDS["KEY_UP"]: ("UP",),
	KEYIDS["KEY_DOWN"]: ("DOWN",),
	KEYIDS["KEY_POWER"]: ("POWER",),
	KEYIDS["KEY_RED"]: ("RED",),
	KEYIDS["KEY_BLUE"]: ("BLUE",),
	KEYIDS["KEY_GREEN"]: ("GREEN",),
	KEYIDS["KEY_YELLOW"]: ("YELLOW",),
	KEYIDS["KEY_MENU"]: ("MENU",),
	KEYIDS["KEY_LEFT"]: ("LEFT",),
	KEYIDS["KEY_RIGHT"]: ("RIGHT",),
	KEYIDS["KEY_VIDEO"]: ("PVR",),
	KEYIDS["KEY_INFO"]: ("INFO",),
	KEYIDS["KEY_AUDIO"]: ("AUDIO",),
	KEYIDS["KEY_TV"]: ("TV",),
	KEYIDS["KEY_RADIO"]: ("RADIO",),
	KEYIDS["KEY_TEXT"]: ("TEXT",),
	KEYIDS["KEY_NEXT"]: ("ARROWRIGHT",),
	KEYIDS["KEY_PREVIOUS"]: ("ARROWLEFT",),
	KEYIDS["KEY_PREVIOUSSONG"]: ("REWIND",),
	KEYIDS["KEY_PLAYPAUSE"]: ("PAUSE",),
	KEYIDS["KEY_PLAY"]: ("PLAY",),
	KEYIDS["KEY_NEXTSONG"]: ("FASTFORWARD",),
	KEYIDS["KEY_CHANNELUP"]: ("BOUQUET+",),
	KEYIDS["KEY_CHANNELDOWN"]: ("BOUQUET-",),
	KEYIDS["KEY_0"]: ("0",),
	KEYIDS["KEY_1"]: ("1",),
	KEYIDS["KEY_2"]: ("2",),
	KEYIDS["KEY_3"]: ("3",),
	KEYIDS["KEY_4"]: ("4",),
	KEYIDS["KEY_5"]: ("5",),
	KEYIDS["KEY_6"]: ("6",),
	KEYIDS["KEY_7"]: ("7",),
	KEYIDS["KEY_8"]: ("8",),
	KEYIDS["KEY_9"]: ("9",),
	KEYIDS["KEY_EXIT"]: ("EXIT",),
	KEYIDS["KEY_STOP"]: ("STOP",),
	KEYIDS["KEY_RECORD"]: ("RECORD",)
}, ]


def addKeyBinding(domain, key, context, action, flags):
	keyBindings.setdefault((context, action), []).append((key, domain, flags))


# Returns a list of (key, flags) for a specified action
def queryKeyBinding(context, action):
	if (context, action) in keyBindings:
		return [(x[0], x[2]) for x in keyBindings[(context, action)]]
	else:
		return []


def getKeyDescription(key):
	if rc_model.rcIsDefault():
		idx = config.misc.rcused.value
	else:
		idx = 2
	if key in keyDescriptions[idx]:
		return keyDescriptions[idx].get(key, [])


def removeKeyBindings(domain):
	# Remove all entries of domain 'domain'
	for x in keyBindings:
		keyBindings[x] = filter(lambda e: e[1] != domain, keyBindings[x])
opendroid-Team/enigma2-4.1
lib/python/Tools/KeyBindings.py
Python
gpl-2.0
4,901
0.030402
# Copyright 2014 PressLabs SRL
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
from errno import EROFS

from fuse import FuseOSError, ENOTSUP

from .view import View


class ReadOnlyView(View):
    def getxattr(self, path, name, *args):
        raise FuseOSError(ENOTSUP)

    def open(self, path, flags):
        write_flags = (os.O_WRONLY | os.O_RDWR | os.O_APPEND |
                       os.O_TRUNC | os.O_CREAT)
        if write_flags & flags:
            raise FuseOSError(EROFS)
        return 0

    def create(self, path, fh):
        raise FuseOSError(EROFS)

    def write(self, path, fh):
        raise FuseOSError(EROFS)

    def opendir(self, path):
        return 0

    def releasedir(self, path, fi):
        return 0

    def flush(self, path, fh):
        return 0

    def release(self, path, fh):
        return 0

    def access(self, path, amode):
        if amode & os.W_OK:
            raise FuseOSError(EROFS)
        return 0

    def mkdir(self, path, mode):
        raise FuseOSError(EROFS)

    def utimens(self, path, times=None):
        raise FuseOSError(EROFS)

    def chown(self, path, uid, gid):
        raise FuseOSError(EROFS)

    def chmod(self, path, mode):
        raise FuseOSError(EROFS)
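# A minimal illustration (hypothetical paths; assumes the parent View class
# can be instantiated without arguments): write-intending calls fail with
# EROFS, while read-only access passes through.
#
#     view = ReadOnlyView()
#     view.open("/some/file", os.O_RDONLY)  # -> 0
#     view.open("/some/file", os.O_WRONLY)  # raises FuseOSError(EROFS)
#     view.access("/some/file", os.R_OK)    # -> 0
#     view.access("/some/file", os.W_OK)    # raises FuseOSError(EROFS)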
ksmaheshkumar/gitfs
gitfs/views/read_only.py
Python
apache-2.0
1,744
0
# set command to set global variables
from lib.utils import *


def _help():
    usage = '''
Usage: set [options] (var) [value]

[options]:
    -h          Print this help.
    -del (var)  Delete variable (var) if defined.

where (var) is a valid global variable
if [value] is not given, current value is returned
'''
    print(usage)


def main(argv):
    if '-h' in argv:
        _help()
        return

    # The shell doesn't send the
    # command name in the arg list
    # so the next line is not needed
    # anymore
    # argv.pop(0)  # remove arg

    # to show all vars
    if len(argv) < 1:
        for i in prop.vars():
            print(i, ' = ', prop.get(i))
        return

    if '-del' in argv:
        try:
            var = argv[1]
            # detect system vars
            if var == 'save_state' or var == 'c_char':
                err(4, add='Can\'t delete system variable "' + var + '"')
                return
            prop.delete(var)
            return
        except IndexError:
            err(4, add='variable name was missing')
            return

    var = argv[0]
    if len(argv) < 2:
        val = prop.get(var)
        if val == NULL:
            err(4, var)
            return
        print(val)
        return

    # remove name of var
    argv.pop(0)
    # make the rest the val
    val = make_s(argv)
    try:
        prop.set(var, val)
    except ValueError:
        err(4, add="can't create this variable")
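# A minimal interactive sketch (the variable name is illustrative; `set` is
# assumed to be invoked through the shell, which strips the command name):
#
#     set myvar hello world   # stores myvar = "hello world"
#     set myvar               # prints "hello world"
#     set -del myvar          # deletes myvar
#     set                     # lists all variables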
nayas360/pyterm
bin/set.py
Python
mit
1,471
0