text | repo_name | path | language | license | size | score
---|---|---|---|---|---|---
ksleg = "http://www.kslegislature.org/li"
url = "%s/api/v11/rev-1/" % ksleg
# These actions are from the KLISS API documentation,
# and are in the same order as that table
# The PDF is linked from this webpage, and changes name
# based on the most recent API version:
# http://www.kslegislature.org/klois/Pages/RESTianAPI.html
action_codes = {
# motion to accede; appointed
"ccac_om_370": None,
"efa_fabc_343": "passage",
"efa_fabc_342": "passage",
"cref_cref_500": "referral-committee",
"gov_avm_336": "veto-override-passage",
# change sequence
"mot_cgo_200": None,
"mot_cgo_201": None,
"mot_cgo_202": None,
"gov_mg_378": "executive-veto-line-item",
"fa_fabc_115": "failure",
"cr_rsc_292": "committee-passage-favorable",
"cr_rsc_276": "committee-passage",
"cr_rsc_274": "committee-passage-unfavorable",
"cr_rsc_275": "committee-passage-unfavorable",
"cr_rsc_273": "committee-passage-unfavorable",
"cr_rsc_270": "substitution",
# untabled/reconsiderations
"mot_tab_402": None,
"mot_tab_403": None,
"mot_tab_401": None,
"mot_tab_404": None,
"mot_rcon_303": None,
"mot_rcon_302": None,
"ee_enrb_149": "executive-receipt",
"cow_jcow_197": ["passage", "substitution"],
"mot_pspn_405": None, # postpone - failed
# other COW actions
"cow_jcow_211": None,
"cow_jcow_210": None,
"cow_jcow_214": None,
"cow_jcow_695": None,
"cow_jcow_694": None,
"cow_jcow_693": None,
"cow_jcow_692": None,
"cow_jcow_690": None,
# withdrawn from consent calendar
"ccal_rcc_233": None,
"efa_fabc_933": "passage", # these 3 are 2/3 emergency clause votes...
"efa_fabc_936": "failure",
"efa_fabc_934": "passage",
"cref_cref_316": ["withdrawal", "referral-committee"],
"cref_cref_315": ["withdrawal", "referral-committee"],
"cur_con_374": None, # non-concur, conf. com. requested
"cr_rsc_801": "committee-passage-unfavorable", # these 3 are appointments..
"cr_rsc_800": "committee-passage-favorable",
"cr_rsc_802": "committee-passage",
"gov_mg_150": "executive-signature",
"gov_mg_151": None, # law w/o signature
"gov_mg_154": "executive-veto",
"cow_jcow_180": "passage", # COW
"ar_adj_605": None, # adjourn
"ee_enrb_888": None, # enrolled and presented to Sec. of State
"cow_jcow_239": "passage", # adopted
"cur_con_875": None, # nonconcurrences
"cur_con_876": None,
"cur_con_873": None,
"fa_fabc_341": "passage",
"fa_fabc_340": "passage",
"ccac_ccr_860": None,
"efa_fabc_115": "failure",
"intro_iopbc_158": "introduction",
"cr_rsc_291": "committee-passage",
"fa_fabc_116": "failure",
"cow_jcow_728": "amendment-withdrawal",
"cow_jcow_727": "amendment-failure",
"cow_jcow_726": "amendment-passage",
"cow_jcow_725": ["substitution", "passage"],
# motions to postpone
"mot_pspn_404": None,
"mot_pspn_403": None,
"mot_pspn_402": None,
"fa_fabc_910": "failure",
# suspend rules
"mot_susp_216": None,
"mot_susp_214": None,
"mot_susp_215": None,
"cr_rsc_289": "committee-passage",
# conference committee
"ccac_ccr_375": None,
"cur_con_337": None,
"cur_con_336": None,
"cur_con_335": None,
"ref_rbc_308": "referral-committee",
"ref_rbc_307": "referral-committee",
"ref_rbc_311": "referral-committee",
"efa_fabc_352": "passage",
"efa_fabc_351": "passage",
"intro_ibc_251": "passage",
# COW recommendations
"cow_jcow_705": ["substitution", "passage"],
"cow_jcow_704": ["substitution", "passage"],
"cow_jcow_707": "amendment-introduction",
"cow_jcow_709": "passage",
"cow_jcow_708": "passage",
# adjourn/recess
"ar_adj_625": None,
"ar_adj_626": None,
"intro_ires_251": "passage",
# engrossed/rengrossed
"ee_eng_225": None,
"ee_eng_227": None,
# referred to COW
"ref_rbc_235": None,
"cur_iopbc_141": "referral-committee",
"mot_wd_126": None, # 'committee:withdrawn',
"mot_wd_127": None, # withdraw from com- failed
"mot_wd_125": None, # withdraw from com- pending
# strike from calendar
"mot_strk_505": None,
"mot_strk_504": None,
"mot_strk_501": None,
# conf. com report adopted
"ccac_om_832": "passage",
"ccac_ccr_862": None, # motion to not adopt conf.com report failed
"ccac_ccr_863": "failure", # failed in conf.com, report not adopted
"ccac_ccr_865": None, # motion to not adopt conf.com report failed
"ccac_ccr_867": None, # agree to disagree on conf. com report
# passed over
"cow_jcow_201": None,
"cow_jcow_202": None,
"cow_jcow_203": None,
"ccac_cc_377": None, # conf committee changed member
"ee_enrb_226": None, # Enrolled
# more COW actions
"cow_jcow_681": None,
"cow_jcow_682": None,
"cow_jcow_683": None,
"cow_jcow_688": None,
"cow_jcow_689": None,
# veto overrides
"gov_avm_885": "veto-override-failure",
"gov_avm_887": "veto-override-passage",
"ref_rsc_312": "referral-committee",
# more COW actions
"cow_jcow_903": None,
"cow_jcow_902": None,
"cow_jcow_901": None,
"cow_jcow_905": None,
# no motion to veto override (count as failure?)
"gov_avm_128": "veto-override-failure",
"gov_avm_129": "veto-override-failure",
"cow_jcow_191": "passage",
"cow_jcow_192": "passage",
"cow_jcow_195": None, # com. report adopted
"cow_jcow_196": ["passage", "substitution"],
"gov_avm_125": "veto-override-failure",
"mot_ref_102": "referral-committee",
"mot_ref_105": None, # not referred to committee
"cref_cref_551": "referral-committee",
"cref_cref_552": "referral-committee",
"mot_apt_301": None, # 20 days in committee, returned to senate
"ccac_om_878": None, # Motion to accede failed
"efa_fabc_925": ["passage", "substitution"],
"efa_fabc_926": ["passage", "substitution"],
"efa_fabc_923": ["passage", "substitution"],
"efa_fabc_922": ["passage", "substitution"],
"fa_fabc_105": ["failure", "substitution"],
"fa_fabc_104": "failure",
"intro_ibc_157": "introduction",
"intro_ibc_156": "filing",
"fa_fabc_905": "passage",
"intro_ires_681": "introduction",
"cref_cref_290": "referral-committee",
"fa_fabc_352": "passage",
"ccac_ccr_145": "failure",
"fa_fabc_351": "passage",
# motion to move to general orders
"mot_adv_303": None,
"mot_adv_302": None,
"mot_adv_301": None,
"efa_fabc_106": ["failure", "substitution"],
"efa_fabc_105": ["failure", "substitution"],
"efa_fabc_104": "failure",
"ccac_ccr_833": "failure",
"ref_rbc_310": "referral-committee",
"cr_rsc_283": "committee-passage-favorable",
"cr_rsc_282": "committee-passage-favorable",
"cr_rsc_281": "committee-passage-favorable",
"cr_rsc_287": "committee-passage-favorable",
"cr_rsc_286": "committee-passage-favorable",
"cr_rsc_285": "committee-passage-favorable",
"ref_rbc_500": "referral-committee",
"cr_rsc_288": "committee-passage",
# Conf. Com. reports
"ccac_ccr_883": None,
"ccac_ccr_880": None,
"ccac_ccr_881": None,
"cow_jcow_712": ["passage", "substitution"],
"cow_jcow_710": ["passage", "substitution"],
"cow_jcow_711": ["passage", "substitution"],
"cow_jcow_716": None,
"fa_fabc_925": "passage",
"fa_fabc_924": "passage",
"fa_fabc_926": "failure",
"fa_fabc_921": ["passage", "substitution"],
"fa_fabc_920": ["passage", "substitution"],
"fa_fabc_923": ["passage", "substitution"],
"fa_fabc_922": ["passage", "substitution"],
"cr_rsc_821": "committee-passage-unfavorable",
"cow_jcow_305": "referral-committee",
"cow_jcow_304": "referral-committee",
"gov_avm_349": "veto-override-failure",
"intro_ibc_681": "introduction",
"dss_627": None,
"mot_susp_203": None,
"mot_susp_202": None,
"mot_susp_206": None,
"cur_con_101": None, # concur. failed
"cur_om_141": "referral-committee",
"misc_he_200": None,
}
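# --- Illustrative usage (not part of the original module) ---
# A minimal sketch of how this lookup table might be consumed: map a raw
# KLISS action code onto Open Civic Data classification(s). The helper name
# `classify_action` is hypothetical; the real scraper logic lives elsewhere
# in the openstates codebase.
def classify_action(code):
    """Return a list of classifications for a KLISS action code."""
    classification = action_codes.get(code)
    if classification is None:
        return []  # unmapped, or deliberately unclassified, action
    if isinstance(classification, list):
        return classification
    return [classification]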
| openstates/openstates | openstates/ks/ksapi.py | Python | gpl-3.0 | 8,104 | 0.000123 |
"""Tests for the forms of the ``event_rsvp`` app."""
from django.test import TestCase
from django.utils import timezone
from django_libs.tests.factories import UserFactory
from event_rsvp.forms import EventForm, GuestForm
from event_rsvp.models import Event, Guest
from event_rsvp.tests.factories import EventFactory
class EventFormTestCase(TestCase):
"""Tests for the ``EventForm`` form class."""
longMessage = True
def test_validates_and_saves_input(self):
self.user = UserFactory()
data = {
'title': 'Foo',
'venue': 'Bar',
'start': timezone.now(),
'end': timezone.now() + timezone.timedelta(days=11),
}
form = EventForm(data=data, created_by=self.user)
self.assertTrue(form.is_valid())
instance = form.save()
self.assertEqual(Event.objects.all().count(), 1)
# Test update
data.update({'street': 'Foostreet'})
form = EventForm(data=data, instance=instance, created_by=self.user)
instance = form.save()
self.assertEqual(instance.street, 'Foostreet')
# Test creating an event from a template
form = EventForm(data=data, instance=instance, created_by=self.user,
create_from_template=True)
self.assertTrue(form.is_valid())
instance = form.save()
self.assertEqual(Event.objects.all().count(), 2)
# Test saving a template
data.update({'template_name': 'Foo'})
form = EventForm(data=data, created_by=self.user)
self.assertTrue(form.is_valid())
form.save()
self.assertEqual(
Event.objects.exclude(template_name__exact='').count(), 1)
# Test updating a template
data.update({'street': 'Barstreet'})
instance = Event.objects.get(template_name='Foo')
form = EventForm(data=data, instance=instance, created_by=self.user)
self.assertTrue(form.is_valid())
instance = form.save()
self.assertEqual(instance.street, 'Barstreet')
class GuestFormTestCase(TestCase):
"""Tests for the ``GuestForm`` form class."""
longMessage = True
def test_validates_and_saves_input(self):
# Test exceeding available seats
self.event = EventFactory(available_seats=1)
form = GuestForm(data={'number_of_seats': 100}, event=self.event,
user=None)
self.assertFalse(form.is_valid())
# Test exceeding available seats (plural error msg)
self.event = EventFactory(available_seats=20, max_seats_per_guest=1)
form = GuestForm(data={'number_of_seats': 100}, event=self.event,
user=None)
self.assertFalse(form.is_valid())
# Test exceeding max amount of seats per booking
form = GuestForm(data={'number_of_seats': 2}, event=self.event,
user=None)
self.assertFalse(form.is_valid())
# Test exceeding max amount of seats per booking (plural error msg)
self.event = EventFactory(max_seats_per_guest=2)
form = GuestForm(data={'number_of_seats': 3}, event=self.event,
user=None)
self.assertFalse(form.is_valid())
# Test missing required fields
self.event = EventFactory(required_fields=['name', 'phone'])
form = GuestForm(data={'name': 'Foo', 'email': 'test@example.com'},
event=self.event, user=None)
self.assertFalse(form.is_valid())
# Test valid form
form = GuestForm(data={'name': 'Foo', 'phone': '+4911111111'},
event=self.event, user=None)
self.assertTrue(form.is_valid())
form.save()
self.assertEqual(Guest.objects.all().count(), 1)
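# --- Illustrative sketch (not part of the original test module) ---
# The assertions above imply that GuestForm compares the requested
# number_of_seats against event.available_seats and
# event.max_seats_per_guest, and that event.required_fields makes the named
# fields mandatory. A rough sketch of such a clean method, under those
# assumptions (the real implementation lives in event_rsvp.forms and may
# differ):
#
#     def clean_number_of_seats(self):
#         seats = self.cleaned_data['number_of_seats']
#         if self.event.available_seats and seats > self.event.available_seats:
#             raise forms.ValidationError('Not enough seats available.')
#         if (self.event.max_seats_per_guest
#                 and seats > self.event.max_seats_per_guest):
#             raise forms.ValidationError('Too many seats per booking.')
#         return seats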
| bitmazk/django-event-rsvp | event_rsvp/tests/forms_tests.py | Python | mit | 3,796 | 0 |
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unit tests for base class NrfMatterDevice."""
from unittest import mock
from absl.testing import parameterized
from gazoo_device import errors
from gazoo_device.base_classes import nrf_matter_device
from gazoo_device.capabilities import device_power_default
from gazoo_device.capabilities import pwrpc_common_default
from gazoo_device.tests.unit_tests.capability_tests.mixins import pwrpc_common_test
from gazoo_device.tests.unit_tests.utils import fake_device_test_case
import immutabledict
_FAKE_DEVICE_ID = "nrfmatterdevicestub-detect"
_FAKE_DEVICE_ADDRESS = "fake-device-address"
_FAKE_VENDOR_ID = "fake-vendor-id"
_FAKE_PRODUCT_ID = "fake-product-id"
_NRF_CONNECT_PERSISTENT_PROPERTIES = immutabledict.immutabledict({
"os": "Zephyr RTOS",
"platform": "nRF Connect",
"serial_number": "FT2BSR6O",
"name": "nrfmatterdevicestub_detect",
"device_type": "nrfmatterdevicestub",
"vendor_id": _FAKE_VENDOR_ID,
"product_id": _FAKE_PRODUCT_ID,
})
class NrfMatterDeviceStub(nrf_matter_device.NrfMatterDevice):
"""Dummy implementation for instantiation of NrfMatterDevice."""
DEVICE_TYPE = "nrfmatterdevicestub"
ENDPOINT_ID_TO_CLASS = {}
class NrfMatterDeviceTest(fake_device_test_case.FakeDeviceTestCase,
pwrpc_common_test.PigweedRpcCommonTestMixin):
"""Unit tests for base class NrfMatterDevice."""
def setUp(self):
super().setUp()
self.setup_fake_device_requirements(_FAKE_DEVICE_ID)
self.device_config["persistent"]["console_port_name"] = _FAKE_DEVICE_ADDRESS
jlink_patcher = mock.patch("pylink.JLink")
jlink_patcher.start()
self.addCleanup(jlink_patcher.stop)
self.uut = NrfMatterDeviceStub(self.mock_manager,
self.device_config,
log_directory=self.artifacts_directory)
@mock.patch.object(nrf_matter_device.os.path, "exists", return_value=True)
def test_is_connected_true(self, mock_exists):
"""Verifies is_connected returns true when console port exists."""
self.assertTrue(
nrf_matter_device.NrfMatterDevice.is_connected(self.device_config))
mock_exists.assert_called_once()
@mock.patch.object(
nrf_matter_device.NrfMatterDevice, "pw_rpc_common")
def test_get_detection_info_on_success(self, mock_rpc_common):
"""Verifies persistent properties are set correctly."""
mock_rpc_common.vendor_id = _FAKE_VENDOR_ID
mock_rpc_common.product_id = _FAKE_PRODUCT_ID
self._test_get_detection_info(
console_port_name=_FAKE_DEVICE_ADDRESS,
device_class=NrfMatterDeviceStub,
persistent_properties=_NRF_CONNECT_PERSISTENT_PROPERTIES)
def test_flash_build_capability(self):
"""Verifies the initialization of flash_build capability."""
self.assertTrue(self.uut.flash_build)
def test_matter_endpoints_capability(self):
"""Verifies the initialization of matter_endpoints capability."""
self.assertIsNotNone(self.uut.matter_endpoints)
def test_device_power_capability(self):
"""Verifies the initialization of device_power capability."""
self.assertIsNotNone(self.uut.device_power)
@mock.patch.object(
device_power_default.DevicePowerDefault, "cycle", autospec=True)
def test_device_reboot_hard(self, reboot_fn):
self.uut.reboot(method="hard")
reboot_fn.assert_called_once()
@parameterized.parameters(dict(method="soft"), dict(method="pw_rpc"))
@mock.patch.object(
pwrpc_common_default.PwRPCCommonDefault, "reboot", autospec=True)
def test_device_reboot(self, reboot_fn, method):
self.uut.reboot(method)
reboot_fn.assert_called_once()
def test_device_reboot_raise_error(self):
"""Test reboot method with invalid method."""
with self.assertRaisesRegex(
errors.DeviceError,
r"ValueError: Method invalid_reboot_method not recognized"):
self.uut.reboot(method="invalid_reboot_method")
if __name__ == "__main__":
fake_device_test_case.main()
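# --- Illustrative sketch (not part of the original test module) ---
# The reboot cases above imply a dispatcher on the base class roughly like
# the following (hypothetical; the real method lives in
# gazoo_device/base_classes/nrf_matter_device.py and may differ):
#
#     def reboot(self, method="pw_rpc"):
#         if method == "hard":
#             self.device_power.cycle()
#         elif method in ("soft", "pw_rpc"):
#             self.pw_rpc_common.reboot()
#         else:
#             raise errors.DeviceError(
#                 "ValueError: Method %s not recognized" % method)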
| google/gazoo-device | gazoo_device/tests/unit_tests/nrf_matter_device_test.py | Python | apache-2.0 | 4,549 | 0.005056 |
from zen import *
import unittest
import os
import os.path as path
import tempfile
class GMLTokenizerCase(unittest.TestCase):
tok = gml_tokenizer.GMLTokenizer()
codec = gml_codec.BasicGMLCodec()
interp = gml_interpreter.GMLInterpreter(codec, tok)
def test_basic_correct(self):
tokens = [
('keyOne', 0, 1), ('"one"', 1, 1),
('keyTwo', 0, 1), ('2', 1, 1)
]
expected_interpretation = {'keyOne': 'one', 'keyTwo': 2}
got_interpretation = self.interp.interpret(tokens)
self.assertEqual(got_interpretation, expected_interpretation)
def test_nested_list_correct(self):
tokens = [
('keyOne', 0, 1), ('[', 2, 1),
('subKeyOne', 0, 2), ('"one"', 1, 2),
('subKeyTwo', 0, 3), ('[', 2, 3),
('subSubKey', 0, 4), ('"one"', 1, 4),
('subSubKey', 0, 5), ('2', 1, 5),
(']', 3, 6),
(']', 3, 7),
('keyTwo', 0, 8), ('"two"', 1, 8)
]
expected_interpretation = {
'keyOne': {
'subKeyOne': 'one',
'subKeyTwo': {
'subSubKey': ['one', 2]
}
},
'keyTwo': 'two'
}
got_interpretation = self.interp.interpret(tokens)
self.assertEqual(got_interpretation, expected_interpretation)
def test_correct_empty_list(self):
tokens = [
('keyOne', 0, 1), ('[', 2, 1),
(']', 3, 2), ('keyTwo', 0, 3), ('"two"', 1, 3)
]
expected_interpretation = {'keyOne': {}, 'keyTwo': 'two'}
got_interpretation = self.interp.interpret(tokens)
self.assertEqual(got_interpretation, expected_interpretation)
def test_incorrect_val_when_key_expected(self):
# VAL_TOK when KEY_TOK expected
tokens = [
('"keyOne"', 1, 1), ('"one"', 1, 1),
('keyTwo', 0, 1), ('2', 1, 1)
]
self.assertRaises(ZenException, self.interp.interpret, tokens)
def test_incorrect_key_when_val_expected(self):
# KEY_TOK when VAL_TOK expected
tokens = [
('keyOne', 1, 1), ('one', 0, 1),
('keyTwo', 0, 1), ('2', 1, 1)
]
self.assertRaises(ZenException, self.interp.interpret, tokens)
def test_incorrect_unexpected_token_type(self):
# unexpected token type
tokens = [
('keyOne', 1, 1), ('"one"', 4, 1),
('keyTwo', 0, 1), ('2', 1, 1)
]
self.assertRaises(ZenException, self.interp.interpret, tokens)
def test_incorrect_eof_when_expecting_value(self):
tokens = [
('keyOne', 0, 1), ('"one"', 1, 1),
('keyTwo', 0, 1)
]
self.assertRaises(ZenException, self.interp.interpret, tokens)
def test_incorrect_eolist_when_expecting_value(self):
tokens = [
('keyOne', 0, 1), ('[', 2, 1),
('subKeyOne', 0, 2), ('"one"', 1, 2),
('subKeyTwo', 0, 3),
(']', 3, 6),
('keyTwo', 0, 8), ('"two"', 1, 8)
]
self.assertRaises(ZenException, self.interp.interpret, tokens)
if __name__ == '__main__':
unittest.main()
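# --- Note on the token format exercised above (inferred from the fixtures,
# not stated in this module): each token is a (string, type_code, line_no)
# tuple, where the type codes appear to be 0 = key, 1 = value,
# 2 = list start ('['), and 3 = list end (']'). For example:
#
#     [('keyOne', 0, 1), ('"one"', 1, 1)]  ->  {'keyOne': 'one'}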
| networkdynamics/zenlib | src/zen/tests/gml_interpreter.py | Python | bsd-3-clause | 2,727 | 0.041437 |
from __future__ import absolute_import, print_function, division
from mitmproxy import exceptions
import pprint
def _get_name(itm):
return getattr(itm, "name", itm.__class__.__name__)
class Addons(object):
def __init__(self, master):
self.chain = []
self.master = master
master.options.changed.connect(self.options_update)
def options_update(self, options, updated):
for i in self.chain:
with self.master.handlecontext():
i.configure(options, updated)
def add(self, options, *addons):
if not addons:
raise ValueError("No addons specified.")
self.chain.extend(addons)
for i in addons:
self.invoke_with_context(i, "start")
self.invoke_with_context(
i,
"configure",
self.master.options,
self.master.options.keys()
)
def remove(self, addon):
self.chain = [i for i in self.chain if i is not addon]
self.invoke_with_context(addon, "done")
def done(self):
for i in self.chain:
self.invoke_with_context(i, "done")
def has_addon(self, name):
"""
Is an addon with this name registered?
"""
for i in self.chain:
if _get_name(i) == name:
return True
def __len__(self):
return len(self.chain)
def __str__(self):
return pprint.pformat([str(i) for i in self.chain])
def invoke_with_context(self, addon, name, *args, **kwargs):
with self.master.handlecontext():
self.invoke(addon, name, *args, **kwargs)
def invoke(self, addon, name, *args, **kwargs):
func = getattr(addon, name, None)
if func:
if not callable(func):
raise exceptions.AddonError(
"Addon handler %s not callable" % name
)
func(*args, **kwargs)
def __call__(self, name, *args, **kwargs):
for i in self.chain:
self.invoke(i, name, *args, **kwargs)
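# --- Illustrative usage (not part of the original module) ---
# A minimal sketch of the addon lifecycle this class drives. A real
# mitmproxy master supplies `options` and `handlecontext()`; the addon
# below is a hypothetical stand-in.
class _ExampleAddon(object):
    name = "example"
    def start(self):
        pass  # called once via Addons.add()
    def configure(self, options, updated):
        pass  # called on add() and on every options change
    def done(self):
        pass  # called via Addons.remove() or Addons.done()
# Usage sketch:
#     addons = Addons(master)
#     addons.add(master.options, _ExampleAddon())  # fires start + configure
#     addons("response", flow)                     # broadcast event down the chain
#     addons.done()                                # fires done on every addon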
| x2Ident/x2Ident_test | mitmproxy/mitmproxy/addons.py | Python | gpl-3.0 | 2,173 | 0 |
# Copyright (c) 2016 HuaWei, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_config import cfg
class Configuration(object):
def __init__(self, conf):
"""Initialize configuration."""
self.local_conf = conf
def register_opts(self, volume_opts, group=None):
self.local_conf.register_opts(volume_opts, group=group)
def set_override(self, name, override, group=None):
self.local_conf.set_override(name, override, group=group)
def safe_get(self, value):
try:
return self.__getattr__(value)
except cfg.NoSuchOptError:
return None
def __contains__(self, key):
"""Return True if key is in local_conf."""
return key in self.local_conf
def __getattr__(self, value):
# Don't use self.local_conf to avoid reentrant call to __getattr__()
local_conf = object.__getattribute__(self, 'local_conf')
return getattr(local_conf, value)
def __getitem__(self, key):
"""Look up an option value and perform string substitution."""
return self.local_conf.__getitem__(key)
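# --- Illustrative usage (not part of the original module) ---
# A minimal sketch of how this wrapper sits on top of oslo.config; the
# option name 'max_queues_per_page' is made up for illustration. The
# function is never called here.
def _example_usage():
    conf = cfg.ConfigOpts()
    config = Configuration(conf)
    config.register_opts([cfg.IntOpt('max_queues_per_page', default=20)])
    config.set_override('max_queues_per_page', 50)
    assert config.safe_get('max_queues_per_page') == 50
    # Unknown options raise cfg.NoSuchOptError internally; safe_get
    # swallows that and returns None.
    assert config.safe_get('missing_option') is None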
| openstack/zaqar | zaqar/storage/configuration.py | Python | apache-2.0 | 1,625 | 0 |
# ***** BEGIN LICENSE BLOCK *****
# Version: MPL 1.1/GPL 2.0/LGPL 2.1
#
# The contents of this file are subject to the Mozilla Public License Version
# 1.1 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
# http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
# for the specific language governing rights and limitations under the
# License.
#
# The Original Code is configman
#
# The Initial Developer of the Original Code is
# Mozilla Foundation
# Portions created by the Initial Developer are Copyright (C) 2011
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# K Lars Lohn, lars@mozilla.com
# Peter Bengtsson, peterbe@mozilla.com
#
# Alternatively, the contents of this file may be used under the terms of
# either the GNU General Public License Version 2 or later (the "GPL"), or
# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
# in which case the provisions of the GPL or the LGPL are applicable instead
# of those above. If you wish to allow use of your version of this file only
# under the terms of either the GPL or the LGPL, and not to allow others to
# use your version of this file under the terms of the MPL, indicate your
# decision by deleting the provisions above and replace them with the notice
# and other provisions required by the GPL or the LGPL. If you do not delete
# the provisions above, a recipient may use your version of this file under
# the terms of any one of the MPL, the GPL or the LGPL.
#
# ***** END LICENSE BLOCK *****
import unittest
import re
import datetime
import configman.converters as conv
import configman.datetime_util as dtu
from configman.option import Option
from configman.config_exceptions import CannotConvertError
class TestCase(unittest.TestCase):
def test_option_constructor_basics(self):
o = Option('name')
self.assertEqual(o.name, 'name')
self.assertEqual(o.default, None)
self.assertEqual(o.doc, None)
self.assertEqual(o.from_string_converter, None)
self.assertEqual(o.value, None)
o = Option('lucy')
self.assertEqual(o.name, 'lucy')
self.assertEqual(o.default, None)
self.assertEqual(o.doc, None)
self.assertEqual(o.from_string_converter, None)
self.assertEqual(o.value, None)
o = Option(u'spa\xa0e')
self.assertEqual(o.name, u'spa\xa0e')
self.assertEqual(o.default, None)
self.assertEqual(o.doc, None)
self.assertEqual(o.from_string_converter, None)
self.assertEqual(o.value, None)
data = {
'name': 'lucy',
'default': 1,
'doc': "lucy's integer"
}
o = Option(**data)
self.assertEqual(o.name, 'lucy')
self.assertEqual(o.default, 1)
self.assertEqual(o.doc, "lucy's integer")
self.assertEqual(o.from_string_converter, int)
self.assertEqual(o.value, 1)
data = {
'name': 'lucy',
'default': 1,
'doc': "lucy's integer",
'value': '1'
}
o = Option(**data)
self.assertEqual(o.name, 'lucy')
self.assertEqual(o.default, 1)
self.assertEqual(o.doc, "lucy's integer")
self.assertEqual(o.from_string_converter, int)
self.assertEqual(o.value, 1)
data = {
'name': 'lucy',
'default': '1',
'doc': "lucy's integer",
'from_string_converter': int
}
o = Option(**data)
self.assertEqual(o.name, 'lucy')
self.assertEqual(o.default, 1) # converted using `int`
self.assertEqual(o.doc, "lucy's integer")
self.assertEqual(o.from_string_converter, int)
self.assertEqual(o.value, 1)
data = {
'name': 'lucy',
'default': '1',
'doc': "lucy's integer",
'from_string_converter': int,
}
o = Option(**data)
self.assertEqual(o.name, 'lucy')
self.assertEqual(o.default, 1)
self.assertEqual(o.doc, "lucy's integer")
self.assertEqual(o.from_string_converter, int)
self.assertEqual(o.value, 1)
data = {
'default': '1',
'doc': "lucy's integer",
'from_string_converter': int,
}
o = Option('now', **data)
self.assertEqual(o.name, 'now')
self.assertEqual(o.default, 1)
self.assertEqual(o.doc, "lucy's integer")
self.assertEqual(o.from_string_converter, int)
self.assertEqual(o.value, 1)
d = datetime.datetime.now()
o = Option('now', default=d)
self.assertEqual(o.name, 'now')
self.assertEqual(o.default, d)
self.assertEqual(o.doc, None)
self.assertEqual(o.from_string_converter,
dtu.datetime_from_ISO_string)
self.assertEqual(o.value, d)
data = {
'default': '1.0',
'doc': "lucy's height",
'from_string_converter': float,
}
o = Option('now', **data)
self.assertEqual(o.name, 'now')
self.assertEqual(o.default, 1.0)
self.assertEqual(o.doc, "lucy's height")
self.assertEqual(o.from_string_converter, float)
self.assertEqual(o.value, 1.0)
def test_option_constructor_more_complex_default_converters(self):
data = {
'default': '2011-12-31',
'doc': "lucy's bday",
'from_string_converter': dtu.date_from_ISO_string,
}
o = Option('now', **data)
self.assertEqual(o.name, 'now')
self.assertEqual(o.default, datetime.date(2011, 12, 31))
self.assertEqual(o.doc, "lucy's bday")
self.assertEqual(o.from_string_converter, dtu.date_from_ISO_string)
self.assertEqual(o.value, datetime.date(2011, 12, 31))
data = {
'default': '2011-12-31',
'doc': "lucy's bday",
'from_string_converter': \
'configman.datetime_util.date_from_ISO_string',
}
o = Option('now', **data)
self.assertEqual(o.name, 'now')
self.assertEqual(o.default, datetime.date(2011, 12, 31))
self.assertEqual(o.doc, "lucy's bday")
self.assertEqual(o.from_string_converter, dtu.date_from_ISO_string)
self.assertEqual(o.value, datetime.date(2011, 12, 31))
def test_setting_known_from_string_converter_onOption(self):
opt = Option('name', default=u'Peter')
self.assertEqual(opt.default, u'Peter')
self.assertEqual(opt.from_string_converter, unicode)
opt = Option('name', default=100)
self.assertEqual(opt.default, 100)
self.assertEqual(opt.from_string_converter, int)
opt = Option('name', default=100L)
self.assertEqual(opt.default, 100L)
self.assertEqual(opt.from_string_converter, long)
opt = Option('name', default=100.0)
self.assertEqual(opt.default, 100.0)
self.assertEqual(opt.from_string_converter, float)
from decimal import Decimal
opt = Option('name', default=Decimal('100.0'))
self.assertEqual(opt.default, Decimal('100.0'))
self.assertEqual(opt.from_string_converter, Decimal)
opt = Option('name', default=False)
self.assertEqual(opt.default, False)
self.assertEqual(opt.from_string_converter,
conv.boolean_converter)
dt = datetime.datetime(2011, 8, 10, 0, 0, 0)
opt = Option('name', default=dt)
self.assertEqual(opt.default, dt)
self.assertEqual(opt.from_string_converter,
dtu.datetime_from_ISO_string)
dt = datetime.date(2011, 8, 10)
opt = Option('name', default=dt)
self.assertEqual(opt.default, dt)
self.assertEqual(opt.from_string_converter,
dtu.date_from_ISO_string)
def test_boolean_converter_inOption(self):
opt = Option('name', default=False)
self.assertEqual(opt.default, False)
self.assertEqual(opt.from_string_converter,
conv.boolean_converter)
opt.set_value('true')
self.assertEqual(opt.value, True)
opt.set_value('false')
self.assertEqual(opt.value, False)
opt.set_value('1')
self.assertEqual(opt.value, True)
opt.set_value('t')
self.assertEqual(opt.value, True)
opt.set_value(True)
self.assertEqual(opt.value, True)
opt.set_value(False)
self.assertEqual(opt.value, False)
opt.set_value('False')
self.assertEqual(opt.value, False)
opt.set_value('True')
self.assertEqual(opt.value, True)
opt.set_value('None')
self.assertEqual(opt.value, False)
opt.set_value('YES')
self.assertEqual(opt.value, True)
opt.set_value(u'1')
self.assertEqual(opt.value, True)
opt.set_value(u'y')
self.assertEqual(opt.value, True)
opt.set_value(u't')
self.assertEqual(opt.value, True)
def test_timedelta_converter_inOption(self):
one_day = datetime.timedelta(days=1)
opt = Option('some name', default=one_day)
self.assertEqual(opt.default, one_day)
self.assertEqual(opt.from_string_converter,
conv.timedelta_converter)
two_days = datetime.timedelta(days=2)
timedelta_as_string = dtu.timedelta_to_str(two_days)
assert isinstance(timedelta_as_string, basestring)
opt.set_value(timedelta_as_string)
self.assertEqual(opt.value, two_days)
opt.set_value(unicode(timedelta_as_string))
self.assertEqual(opt.value, two_days)
opt.set_value(two_days)
self.assertEqual(opt.value, two_days)
self.assertRaises(CannotConvertError,
opt.set_value, 'JUNK')
self.assertRaises(CannotConvertError,
opt.set_value, '0:x:0:0')
def test_regexp_converter_inOption(self):
regex_str = '\w+'
sample_regex = re.compile(regex_str)
opt = Option('name', default=sample_regex)
self.assertEqual(opt.default, sample_regex)
self.assertEqual(opt.from_string_converter,
conv.regex_converter)
opt.set_value(regex_str)
self.assertEqual(opt.value.pattern, sample_regex.pattern)
def test_option_comparison(self):
o1 = Option('name')
o2 = Option('name')
self.assertEqual(o1, o2)
o1 = Option('name', 'Peter')
o2 = Option('name', u'Peter')
self.assertEqual(o1, o2)
o1 = Option('name', 'Peter')
o2 = Option('name', 'Ashley')
self.assertNotEqual(o1, o2)
o1 = Option('name', doc='Aaa')
o2 = Option('name', doc='Bee')
self.assertNotEqual(o1, o2)
o1 = Option('name', doc='Aaa')
o2 = Option('name', doc='Aaa')
self.assertEqual(o1, o2)
o1 = Option('name', doc='Aaa', short_form='n')
o2 = Option('name', doc='Aaa', short_form='N')
self.assertNotEqual(o1, o2)
o1 = Option('name')
o1.set_value('Peter')
o2 = Option('name')
self.assertNotEqual(o1, o2)
def test_set_value_from_other_option(self):
o1 = Option('name')
o1.set_value('Peter')
o2 = Option('name')
o2.set_value(o1)
self.assertEqual(o2.value, None)
o1 = Option('name', default='Your name here')
o1.set_value('Peter')
o2 = Option('name')
o2.set_value(o1)
self.assertEqual(o2.value, 'Your name here')
def test_set_value_from_mapping(self):
o1 = Option('name')
val = {'default': u'Peter'}
o1.set_value(val)
self.assertEqual(o1.value, 'Peter')
val = {'justanother': 'dict!'}
o1.set_value(val)
self.assertEqual(o1.value, val)
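# --- Illustrative usage (not part of the original test module) ---
# The behavior exercised above in brief: an Option infers its
# from_string_converter from the default's type, and set_value() runs
# string input through that converter. The function is never called here.
def _example_option_usage():
    opt = Option('port', default=8080, doc='tcp port to listen on')
    assert opt.from_string_converter is int
    opt.set_value('9090')
    assert opt.value == 9090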
| peterbe/configman | configman/tests/test_option.py | Python | bsd-3-clause | 12,084 | 0.000248 |
""" Test of tracking and detector response. """
# pylint: disable=C0103
from ..detector import LayeredDetector
from ..track import gen_straight_tracks
from matplotlib import pyplot as plt
def main():
"""
Test that detector construction works and propagate tracks through the
detector.
"""
A = LayeredDetector(1, 0, 0.5, 8, 10, 25)
tracks = gen_straight_tracks(20)
x_coords = [0.1 * i for i in xrange(100)]
A.propagate_tracks(tracks)
for track in tracks:
y = [track.get_yintercept(x) for x in x_coords]
plt.plot(x_coords, y)
plt.xlim(0, 10)
plt.ylim(-0.5, 0.5)
A.draw()
if __name__ == "__main__":
main()
| jepio/JKalFilter | test/test_track.py | Python | gpl-2.0 | 673 | 0.001486 |
import warnings
import unittest
import sys
from nose.tools import assert_raises
from gplearn.skutils.testing import (
_assert_less,
_assert_greater,
assert_less_equal,
assert_greater_equal,
assert_warns,
assert_no_warnings,
assert_equal,
set_random_state,
assert_raise_message)
from sklearn.tree import DecisionTreeClassifier
from sklearn.lda import LDA
try:
from nose.tools import assert_less
def test_assert_less():
# Check that the nose implementation of assert_less gives the
# same thing as the scikit's
assert_less(0, 1)
_assert_less(0, 1)
assert_raises(AssertionError, assert_less, 1, 0)
assert_raises(AssertionError, _assert_less, 1, 0)
except ImportError:
pass
try:
from nose.tools import assert_greater
def test_assert_greater():
# Check that the nose implementation of assert_greater gives the
# same result as scikit-learn's
assert_greater(1, 0)
_assert_greater(1, 0)
assert_raises(AssertionError, assert_greater, 0, 1)
assert_raises(AssertionError, _assert_greater, 0, 1)
except ImportError:
pass
def test_assert_less_equal():
assert_less_equal(0, 1)
assert_less_equal(1, 1)
assert_raises(AssertionError, assert_less_equal, 1, 0)
def test_assert_greater_equal():
assert_greater_equal(1, 0)
assert_greater_equal(1, 1)
assert_raises(AssertionError, assert_greater_equal, 0, 1)
def test_set_random_state():
lda = LDA()
tree = DecisionTreeClassifier()
# LDA doesn't have random state: smoke test
set_random_state(lda, 3)
set_random_state(tree, 3)
assert_equal(tree.random_state, 3)
def test_assert_raise_message():
def _raise_ValueError(message):
raise ValueError(message)
assert_raise_message(ValueError, "test",
_raise_ValueError, "test")
assert_raises(AssertionError,
assert_raise_message, ValueError, "something else",
_raise_ValueError, "test")
assert_raises(ValueError,
assert_raise_message, TypeError, "something else",
_raise_ValueError, "test")
# This class is inspired by numpy 1.7, with an alteration that checks
# that the warning filters are reset after calls to assert_warns.
# This assert_warns behavior is specific to scikit-learn because
# `clean_warning_registry()` is called internally by assert_warns
# and clears all previous filters.
class TestWarns(unittest.TestCase):
def test_warn(self):
def f():
warnings.warn("yo")
return 3
# Test that assert_warns is not impacted by externally set
# filters and is reset internally.
# This is because `clean_warning_registry()` is called internally by
# assert_warns and clears all previous filters.
warnings.simplefilter("ignore", UserWarning)
assert_equal(assert_warns(UserWarning, f), 3)
# Test that the warning registry is empty after assert_warns
assert_equal(sys.modules['warnings'].filters, [])
assert_raises(AssertionError, assert_no_warnings, f)
assert_equal(assert_no_warnings(lambda x: x, 1), 1)
def test_warn_wrong_warning(self):
def f():
warnings.warn("yo", DeprecationWarning)
failed = False
filters = sys.modules['warnings'].filters[:]
try:
try:
# Should raise an AssertionError
assert_warns(UserWarning, f)
failed = True
except AssertionError:
pass
finally:
sys.modules['warnings'].filters = filters
if failed:
raise AssertionError("wrong warning caught by assert_warn")
| danbob123/gplearn | gplearn/skutils/tests/test_testing.py | Python | bsd-3-clause | 3,785 | 0.000264 |
# -*- coding: utf-8 -*-
#
# This file is part of PyBuilder
#
# Copyright 2011-2015 PyBuilder Team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pybuilder.core import use_plugin, task, after, init
from pybuilder.utils import assert_can_execute, read_file
from pybuilder.plugins.python.python_plugin_helper import execute_tool_on_source_files
use_plugin("python.core")
@init
def init_pep8_properties(project):
project.build_depends_on("pep8")
@after("prepare")
def check_pep8_available(logger):
logger.debug("Checking availability of pep8")
assert_can_execute(("pep8", ), "pep8", "plugin python.pep8")
@task
def analyze(project, logger):
logger.info("Executing pep8 on project sources")
_, report_file = execute_tool_on_source_files(project, "pep8", ["pep8"])
reports = read_file(report_file)
if len(reports) > 0:
logger.warn("Found %d warning%s produced by pep8",
len(reports), "" if len(reports) == 1 else "s")
| Danielweber7624/pybuilder | src/main/python/pybuilder/plugins/python/pep8_plugin.py | Python | apache-2.0 | 1,506 | 0.000664 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 Nicira Inc.
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from quantumclient.quantum import v2_0 as quantumv20
class ListQoSQueue(quantumv20.ListCommand):
"""List queues that belong to a given tenant."""
resource = 'qos_queue'
log = logging.getLogger(__name__ + '.ListQoSQueue')
_formatters = {}
list_columns = ['id', 'name', 'min', 'max',
'qos_marking', 'dscp', 'default']
class ShowQoSQueue(quantumv20.ShowCommand):
"""Show information of a given queue."""
resource = 'qos_queue'
log = logging.getLogger(__name__ + '.ShowQoSQueue')
allow_names = True
class CreateQoSQueue(quantumv20.CreateCommand):
"""Create a queue."""
resource = 'qos_queue'
log = logging.getLogger(__name__ + '.CreateQoSQueue')
def add_known_arguments(self, parser):
parser.add_argument(
'name', metavar='NAME',
help='Name of queue')
parser.add_argument(
'--min',
help='min-rate')
parser.add_argument(
'--max',
help='max-rate')
parser.add_argument(
'--qos-marking',
help='qos marking untrusted/trusted')
parser.add_argument(
'--default',
default=False,
help=('If true all ports created will be the size of this queue'
' if queue is not specified'))
parser.add_argument(
'--dscp',
help='Differentiated Services Code Point')
def args2body(self, parsed_args):
params = {'name': parsed_args.name,
'default': parsed_args.default}
if parsed_args.min:
params['min'] = parsed_args.min
if parsed_args.max:
params['max'] = parsed_args.max
if parsed_args.qos_marking:
params['qos_marking'] = parsed_args.qos_marking
if parsed_args.dscp:
params['dscp'] = parsed_args.dscp
if parsed_args.tenant_id:
params['tenant_id'] = parsed_args.tenant_id
return {'qos_queue': params}
class DeleteQoSQueue(quantumv20.DeleteCommand):
"""Delete a given queue."""
log = logging.getLogger(__name__ + '.DeleteQoSQueue')
resource = 'qos_queue'
allow_names = True
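# --- Illustrative sketch (not part of the original module) ---
# What CreateQoSQueue.args2body() produces for a typical invocation; falsy
# optional arguments (dscp and tenant_id below) are omitted from the body.
# The shell command that maps to this class is registered elsewhere in the
# client, so only the Python-level behavior is shown:
#
#     parsed = argparse.Namespace(
#         name='web-queue', default=False, min='100', max='1000',
#         qos_marking='trusted', dscp=None, tenant_id=None)
#     CreateQoSQueue(None, None).args2body(parsed)
#     # -> {'qos_queue': {'name': 'web-queue', 'default': False,
#     #                   'min': '100', 'max': '1000',
#     #                   'qos_marking': 'trusted'}}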
| wallnerryan/quantum_migrate | quantumclient/quantum/v2_0/nvp_qos_queue.py | Python | apache-2.0 | 2,899 | 0 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class ExpressRouteCircuitPeeringsOperations:
"""ExpressRouteCircuitPeeringsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2019_07_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def _delete_initial(
self,
resource_group_name: str,
circuit_name: str,
peering_name: str,
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-07-01"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}'} # type: ignore
async def begin_delete(
self,
resource_group_name: str,
circuit_name: str,
peering_name: str,
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Deletes the specified peering from the specified express route circuit.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the express route circuit.
:type circuit_name: str
:param peering_name: The name of the peering.
:type peering_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
circuit_name=circuit_name,
peering_name=peering_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}'} # type: ignore
async def get(
self,
resource_group_name: str,
circuit_name: str,
peering_name: str,
**kwargs: Any
) -> "_models.ExpressRouteCircuitPeering":
"""Gets the specified peering for the express route circuit.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the express route circuit.
:type circuit_name: str
:param peering_name: The name of the peering.
:type peering_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ExpressRouteCircuitPeering, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2019_07_01.models.ExpressRouteCircuitPeering
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteCircuitPeering"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-07-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ExpressRouteCircuitPeering', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}'} # type: ignore
async def _create_or_update_initial(
self,
resource_group_name: str,
circuit_name: str,
peering_name: str,
peering_parameters: "_models.ExpressRouteCircuitPeering",
**kwargs: Any
) -> "_models.ExpressRouteCircuitPeering":
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteCircuitPeering"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-07-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(peering_parameters, 'ExpressRouteCircuitPeering')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('ExpressRouteCircuitPeering', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('ExpressRouteCircuitPeering', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}'} # type: ignore
async def begin_create_or_update(
self,
resource_group_name: str,
circuit_name: str,
peering_name: str,
peering_parameters: "_models.ExpressRouteCircuitPeering",
**kwargs: Any
) -> AsyncLROPoller["_models.ExpressRouteCircuitPeering"]:
"""Creates or updates a peering in the specified express route circuits.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the express route circuit.
:type circuit_name: str
:param peering_name: The name of the peering.
:type peering_name: str
:param peering_parameters: Parameters supplied to the create or update express route circuit
peering operation.
:type peering_parameters: ~azure.mgmt.network.v2019_07_01.models.ExpressRouteCircuitPeering
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either ExpressRouteCircuitPeering or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2019_07_01.models.ExpressRouteCircuitPeering]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteCircuitPeering"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_or_update_initial(
resource_group_name=resource_group_name,
circuit_name=circuit_name,
peering_name=peering_name,
peering_parameters=peering_parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('ExpressRouteCircuitPeering', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}'} # type: ignore
def list(
self,
resource_group_name: str,
circuit_name: str,
**kwargs: Any
) -> AsyncIterable["_models.ExpressRouteCircuitPeeringListResult"]:
"""Gets all peerings in a specified express route circuit.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the express route circuit.
:type circuit_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ExpressRouteCircuitPeeringListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2019_07_01.models.ExpressRouteCircuitPeeringListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteCircuitPeeringListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-07-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('ExpressRouteCircuitPeeringListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings'} # type: ignore
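# Usage sketch (hypothetical resource names; assumes a credential from
# azure-identity and a NetworkManagementClient for this API version):
#
#     client = NetworkManagementClient(credential, subscription_id)
#     async for peering in client.express_route_circuit_peerings.list(
#             "my-rg", "my-circuit"):
#         print(peering.name)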
| Azure/azure-sdk-for-python | sdk/network/azure-mgmt-network/azure/mgmt/network/v2019_07_01/aio/operations/_express_route_circuit_peerings_operations.py | Python | mit | 21,838 | 0.00522 |
__author__ = 'James DeVincentis <james.d@hexhost.net>'
from .ipaddress import Ipaddress
class Ipv6(Ipaddress):
def __init__(self, *args, **kwargs):
self._mask = None
        super(Ipv6, self).__init__(*args, **kwargs)
@property
def mask(self):
return self._mask
@mask.setter
def mask(self, value):
if self._validation and value is not None:
if not isinstance(value, int):
try:
value = int(value)
except Exception as e:
raise TypeError("Mask must be an integer") from e
if value > 128:
raise TypeError("Mask cannot be greater than 128 bits for IPv6")
if value < 0:
raise TypeError("Mask cannot be less than 0 bits")
self._mask = value
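# Usage sketch (hypothetical; assumes the Ipaddress base class sets
# self._validation and accepts the forwarded constructor arguments):
#
#     addr = Ipv6()
#     addr.mask = "64"    # strings that parse as integers are coerced
#     addr.mask = 129     # raises TypeError: more than 128 bits for IPv6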
| Danko90/cifpy3 | lib/cif/types/observables/ipv6.py | Python | gpl-3.0 | 836 | 0.001196 |
#!/usr/bin/python
import argparse
from . import CLI
from .utils import _list_files
from .find import batchy_find
from .update import batchy_update
from .view import batchy_view
def _batchy_find(args):
return batchy_find(args.pattern, args.keys, args.replace, args.files)
def _batchy_update(args):
return batchy_update(args.key, args.value, args.files, add=args.add, append=args.append)
def _batchy_view(args):
return batchy_view(args.files, args.keys)
def _generate_args(args=None):
parser = argparse.ArgumentParser(prog=CLI)
subparser = parser.add_subparsers(title='subcommands')
find = subparser.add_parser('find')
find.set_defaults(func=_batchy_find)
find.add_argument('pattern', help='Pattern to search files for.')
find.add_argument('--keys', nargs='+', help='')
find.add_argument('--replace', nargs='+', help='')
find.add_argument('--files', nargs='+', help='List of files to limit the scope to.', default=[])
find.add_argument('--dirs', nargs='+', help='List of directories to limit the scope to.', default=[])
update = subparser.add_parser('update')
update.set_defaults(func=_batchy_update)
update.add_argument('key', help='')
update.add_argument('value', help='')
group = update.add_mutually_exclusive_group()
group.add_argument('--add', action='store_true')
group.add_argument('--append', action='store_true')
update.add_argument('--files', nargs='+', help='List of files to limit the scope to.', default=[])
update.add_argument('--dirs', nargs='+', help='List of directories to limit the scope to.', default=[])
view = subparser.add_parser('view')
view.set_defaults(func=_batchy_view)
view.add_argument('--keys', nargs='+')
view.add_argument('--files', nargs='+', help='List of files to limit the scope to.', default=[])
view.add_argument('--dirs', nargs='+', help='List of directories to limit the scope to.', default=[])
args = parser.parse_args(args)
args.files = _list_files(args.files, args.dirs)
return args
def main(args=None):
args = _generate_args(args)
return args.func(args)
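# Example invocations (hypothetical file and key names), either through an
# installed console script or by calling main() directly:
#
#     main(['view', '--files', 'post.md', '--keys', 'title', 'tags'])
#     main(['update', 'draft', 'false', '--dirs', 'posts/'])
#     main(['find', 'pattern', '--keys', 'tags', '--replace', 'py'])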
| jkloo/BatchY | batchy/cli.py | Python | mit | 2,138 | 0.003274 |
import os
import re
import locale
locale.setlocale(locale.LC_ALL, '') # decimals according to locale
out_file_name = './logs/output_basic_constants.csv'
sep_char_for_csv = '|'
out_file = open(out_file_name, mode='w')
out_file_full_path = os.path.abspath(out_file_name)
def str_list_to_np_array_str(param):
return 'np.array([' + param + '])'
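# e.g. str_list_to_np_array_str("1.0, 2.0") returns "np.array([1.0, 2.0])"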
#sel_component_numbers = [130, 460, 461, 463, 95]
sel_component_numbers = [461, 455, 460, 463, 465]
# sel_component_numbers = [66, 60, 440, 460]
#sel_component_numbers = [130, 460, 31, 440, 455]
#sel_component_numbers = [66, 438, ]
out_file.write('sep=' + sep_char_for_csv + '\n')
out_file.write("""
# Source of data:
# Poling, Bruce E., John M. Prausnitz,
# and John P. O'connell.
# The properties of gases and liquids.
# Vol. 5. New York: Mcgraw-hill, 2001.
# Basic Constants I: 468 Components in table
""")
out_file.write('\n')
test_exp_descriptors = '(^[0-9]{1,3})\|(.*)\|(.*)\|' + \
'([0-9]{1,7}-[0-9]{1,7}-[0-9]{1,7})'
test_exp_basic_constants_i = test_exp_descriptors + \
'[\||\ ](\-?\ ?[0-9]+\.?[0-9]+)?' * 8 # 8 numeric columns
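# A matched Basic Constants I row yields groups (No., formula, name, CAS)
# followed by up to 8 optional numeric columns (M, Tfp, Tb, Tc, Pc, Vc, Zc,
# omega); columns may be separated by '|' or a space.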
test_exp_basic_constants_ii = test_exp_descriptors + \
'[\||\ ](\-?\ ?[0-9]+\.?[0-9]+)?' * 7 # 7 numeric columns
test_exp_ig_heat_capacities = test_exp_descriptors + \
'\|(—|[0-9]{1,4}-[0-9]{1,4})?' + \
'[\||\ ](\-?\ ?[0-9]+\.?[0-9]+)?' * 7 # 7 numeric columns
file_name_basic_constants_i = os.path.abspath(
'./data/basic_constants_i_properties_of_gases_and_liquids.csv'
)
file_name_basic_constants_ii = os.path.abspath(
'./data/basic_constants_ii_properties_of_gases_and_liquids.csv'
)
file_name_ig_heat_capacities = os.path.abspath(
'./data/ig_l_heat_capacities_properties_of_gases_and_liquids.csv'
)
file = open(file_name_basic_constants_i, mode='r', encoding='utf-8-sig')
matches = []
k = 0
out_file.write('# ======================')
out_file.write('\n')
for line in file:
k += 1
# print header
if k < 4:
out_file.write(line)
out_file.write('\n')
match = re.search(test_exp_basic_constants_i, line)
if match is not None:
matches.append(match.groups())
file.close()
out_file.write('# Basic Constants I: ' + str(len(matches)) +
' Components in table')
out_file.write('\n')
no = [int(item[0]) for item in matches]
formula = [item[1] for item in matches]
name = [item[2] for item in matches]
cas_no = [item[3] for item in matches]
mol_wt = [float(item[4].replace(' ', ''))
if item[4] is not None
else 0.0
for item in matches] # g/mol
tfp = [float(item[5].replace(' ', ''))
if item[5] is not None
else 0.0
for item in matches] # K
tb = [float(item[6].replace(' ', ''))
if item[6] is not None
else 0.0
for item in matches] # K
tc = [float(item[7].replace(' ', ''))
if item[7] is not None
else 0.0
for item in matches] # K
pc = [float(item[8].replace(' ', ''))
if item[8] is not None
else 0.0
for item in matches] # bar
vc = [float(item[9].replace(' ', ''))
if item[9] is not None
else 0.0
for item in matches] # cm^3/mol
zc = [float(item[10].replace(' ', ''))
if item[10] is not None
else 0.0
for item in matches] # []
omega = [float(item[11].replace(' ', ''))
if item[11] is not None
else 0.0
for item in matches] # []
table_indexes_of_comp_nos = [
no.index(comp_no) for comp_no in sel_component_numbers
]
out_file.write('# ======================')
out_file.write('\n')
props = ['no', 'formula', 'name', 'cas_no',
'mol_wt', 'tfp', 'tb', 'tc',
'pc', 'zc', 'omega']
out_file.write('# Values in Table:')
out_file.write('\n')
for prop in props:
is_numeric_prop = not isinstance((globals()[prop])[0], str)
out_file.write(prop)
if is_numeric_prop:
for comp_no in table_indexes_of_comp_nos:
out_file.write(' | ' +
locale.str((globals()[prop])[comp_no])
)
else:
for comp_no in table_indexes_of_comp_nos:
out_file.write(' | ' +
(globals()[prop])[comp_no]
)
out_file.write('\n')
file = open(file_name_basic_constants_ii, mode='r', encoding='utf-8-sig')
matches = []
k = 0
out_file.write('# ======================')
out_file.write('\n')
for line in file:
k += 1
# print header
if k < 9:
out_file.write(line)
out_file.write('\n')
match = re.search(test_exp_basic_constants_ii, line)
if match is not None:
matches.append(match.groups())
file.close()
out_file.write('# Basic Constants II: ' + str(len(matches)) +
' Components in table')
out_file.write('\n')
no = [int(item[0]) for item in matches]
formula = [item[1] for item in matches]
name = [item[2] for item in matches]
cas_no = [item[3] for item in matches]
delHf0 = [float(item[4].replace(' ', ''))
if item[4] is not None
else 0.0
for item in matches] # kJ/mol
delGf0 = [float(item[5].replace(' ', ''))
if item[5] is not None
else 0.0
for item in matches] # kJ/mol
delHb = [float(item[6].replace(' ', ''))
if item[6] is not None
else 0.0
for item in matches] # kJ/mol
delHm = [float(item[7].replace(' ', ''))
if item[7] is not None
else 0.0
for item in matches] # kJ/mol
v_liq = [float(item[8].replace(' ', ''))
if item[8] is not None
else 0.0
for item in matches] # cm^3/mol
t_liq = [float(item[9].replace(' ', ''))
if item[9] is not None
else 0.0
for item in matches] # K
dipole = [float(item[10].replace(' ', ''))
if item[10] is not None
else 0.0
for item in matches] # Debye
table_indexes_of_comp_nos = [
no.index(comp_no) for comp_no in sel_component_numbers
]
out_file.write('# ======================')
out_file.write('\n')
props = ['no', 'formula', 'name', 'cas_no',
'delHf0', 'delGf0', 'delHb', 'delHm',
'v_liq', 't_liq', 'dipole']
out_file.write('# Values in Table:')
out_file.write('\n')
for prop in props:
is_numeric_prop = not isinstance((globals()[prop])[0], str)
out_file.write(prop)
if is_numeric_prop:
for comp_no in table_indexes_of_comp_nos:
out_file.write(' | ' +
locale.str((globals()[prop])[comp_no])
)
else:
for comp_no in table_indexes_of_comp_nos:
out_file.write(' | ' +
(globals()[prop])[comp_no]
)
out_file.write('\n')
file = open(file_name_ig_heat_capacities, mode='r', encoding='utf-8-sig')
matches = []
k = 0
out_file.write('# ======================')
out_file.write('\n')
for line in file:
k += 1
# print header
if k < 5:
out_file.write(line)
out_file.write('\n')
match = re.search(test_exp_ig_heat_capacities, line)
if match is not None:
matches.append(match.groups())
file.close()
out_file.write('# Ideal Gas and Liquid Heat Capacities: ' + str(len(matches)) +
' Components in table')
out_file.write('\n')
no = [int(item[0]) for item in matches]
formula = [item[1] for item in matches]
name = [item[2] for item in matches]
cas_no = [item[3] for item in matches]
trange = [item[4] for item in matches]
a0 = [float(item[5].replace(' ', ''))
if item[5] is not None
else 0.0
for item in matches]
a1 = [1e-3 * float(item[6].replace(' ', ''))
if item[6] is not None
else 0.0
for item in matches]
a2 = [1e-5 * float(item[7].replace(' ', ''))
if item[7] is not None
else 0.0
for item in matches]
a3 = [1e-8 * float(item[8].replace(' ', ''))
if item[8] is not None
else 0.0
for item in matches]
a4 = [1e-11 * float(item[9].replace(' ', ''))
if item[9] is not None
else 0.0
for item in matches]
cpig = [float(item[10].replace(' ', ''))
if item[10] is not None
else 0.0
for item in matches]
cpliq = [float(item[11].replace(' ', ''))
if item[11] is not None
else 0.0
for item in matches] # J/mol/K
cpig_test = [8.3145 * (
a0[i] + a1[i] * 298.15 + a2[i] * 298.15**2 +
a3[i] * 298.15**3 + a4[i] * 298.15**4
) for i in range(len(matches))] # J/mol/K
table_indexes_of_comp_nos = [
no.index(comp_no) for comp_no in sel_component_numbers
]
out_file.write('# ======================')
out_file.write('\n')
props = ['no', 'formula', 'name', 'cas_no',
'trange', 'a0', 'a1', 'a2',
'a3', 'a4', 'cpig', 'cpliq',
'cpig_test']
out_file.write('# Values in Table:')
out_file.write('\n')
for prop in props:
is_numeric_prop = not isinstance((globals()[prop])[0], str)
out_file.write(prop)
if is_numeric_prop:
for comp_no in table_indexes_of_comp_nos:
out_file.write(' | ' +
locale.str((globals()[prop])[comp_no])
)
else:
for comp_no in table_indexes_of_comp_nos:
out_file.write(' | ' +
(globals()[prop])[comp_no]
)
out_file.write('\n')
out_file.close()
locale.setlocale(locale.LC_ALL, 'en_US.UTF-8') # decimals to US
out_file = open(out_file_name, mode='r')
i = 0
for line in out_file:
if i > 1:
line_to_print = line.replace('\n', '')
if len(line_to_print) >= 1 \
and line_to_print[0] != "#" \
and line_to_print.count('|') > 1:
parts = [x.strip().replace(',', '.')
for x in line_to_print.split('|')]
prop = parts[0]
line_to_print = prop + ' = '
if prop not in [
'No.',
'no',
'formula',
'name',
'cas_no',
'trange']:
                for j in range(1, len(parts)):
                    parts[j] = float(parts[j])
line_to_print += str_list_to_np_array_str(str(
parts[1:]).replace('[', '').replace(']', '')
)
print(line_to_print)
i += 1
out_file.close()
print('')
print('saved to ' + out_file_full_path)
| santiago-salas-v/walas | basic_constants_from_the_properties_of_gases_and_liquids.py | Python | mit | 10,361 | 0.002124 |
#!/usr/bin/env python3
import webtools as wt
import os, crypt, cgitb
cgitb.enable()
modes = {"0": "no mode",
"1": "lock",
"2": "sticky",
"3": "stickylock",
"4": "permasage"
}
settings = "./settings.txt"
b_conf = []
cd = {}
with open(settings, "r") as settings:
settings = settings.read().splitlines()
for s in settings:
if len(s) == 0 or s[0] == "#" or ": " not in s:
continue
elif "#" in s:
s = s.split("#")[0]
s = s.split(": ")
if len(s) > 2:
s[1] = ": ".join(s[1:])
try:
s[1] = int(s[1])
except:
pass
b_conf.append(s[1])
cd[s[0]] = s[1]
with open("./admin/op.html", 'r') as op:
op = op.read()
def mode_icons(mo="0"):
micons = ["", "lock.png", "sticky.png",
["lock.png", "sticky.png"], "ghost.png"]
ic = micons[int(mo)]
if len(ic) == 2:
ic = ["./img/" + i for i in ic if len(ic) == 2]
elif len(ic):
ic = ["./img/" + ic]
return ic
def login_admin():
# if wt.get_cookie():
cookies = wt.get_cookie()
if 'pw' in cookies.keys():
if tripcode(cookies['pw']) == b_conf[3]:
return 1
elif wt.get_form('pw') and \
tripcode(wt.get_form('pw')) == b_conf[3]:
print(wt.put_cookie('pw', wt.get_form('pw')))
return 1
else:
if wt.get_form('pw'):
print("Password incorrect.<br>")
print("<h1>Login</h1>")
print("<p>", wt.new_form('admin.py', 'post'))
print("#" + wt.put_form('password', 'pw'))
print(wt.put_form('submit', '', 'Submit'))
print("</form><p>")
return 0
def admin_splash():
print("""<pre>
- change settings
- moderate threads
- modify wordfilter
</pre>""")
if not wt.get_form('mode') or not wt.get_form('thread'):
print("<h2>Settings</h2>")
print("\n".join(["<br> - "+str(i) for i in b_conf]))
for s in cd.keys():
print("<p>",s + ":<br> ", cd[s])
print("<h2>Threads</h2>")
if wt.get_form('more'):
print("<a href='.'>Back</a><br>")
print(wt.get_form('thread'), "<hr>")
# print(load_thread(wt.get_form('thread')))
show_thread(load_thread(wt.get_form('thread')))
else:
mod_threads()
def mod_threads():
print("<pre>")
with open(b_conf[6]) as t_list:
print(t_list.read())
print("</pre>")
ti = thread_index()
for t in ti["ti"]:
# t = filename
# t[0] last reply time, t[1] thread title
# t[2] reply count, t[3] thread mode
mic = mode_icons(ti[t][3])
tm = [f"<img src='{m}'>" for m in mic]
if ti[t][3] in modes:
ti[t][3] = modes[ti[t][3]]
mk = list(modes.keys())
mv = [modes[i] for i in mk]
dropdown = wt.dropdown("mode", mk, mv)
ti[t][3] = dropdown.replace(f">{ti[t][3]}", \
f" selected>{ti[t][3]}")
print(op.format(t, ti[t], " ".join(tm)))
def thread_index():
with open(b_conf[6]) as t_list:
t_list = t_list.read().splitlines()
t = {}
t["ti"] = []
for th in t_list:
th = th.split(" >< ")
t["ti"].append(th[0])
t[th[0]] = th[1:]
return t
def load_thread(thr='0'):
# print(b_conf[5] + thr)
with open(b_conf[5] + thr, 'r') as thr:
thr = thr.read().splitlines()
for n, th in enumerate(thr):
thr[n] = th.split(' >< ')
return thr
def show_thread(thr=[]):
if not thr:
return None
table = ["<table>"]
table.append("<tr><td><td>Name<td>Date<td>Comment")
print("<tr><th colspan='4'>", thr.pop(0)[0])
for n, t in enumerate(thr):
t.pop(2)
t = f"<tr><td>{n+1}.<td>" + "<td>".join(t)
table.append(t)
print("\n".join(table), "</table>")
def tripcode(pw):
pw = pw[:8]
salt = (pw + "H..")[1:3]
trip = crypt.crypt(pw, salt)
return (trip[-10:])
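# Illustration (the exact output is platform-dependent, since crypt() varies):
# tripcode('password') hashes only the first 8 characters of the input with a
# salt derived from characters 2-3 and returns the last 10 characters of the
# DES hash, e.g. a 10-character trip like 'xxxxxxxxxx'.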
def main():
print(wt.head(b_conf[0]))
print("<h2>", b_conf[0], "admin</h2>")
# print(wt.get_cookie())
if login_admin() == 1:
admin_splash()
main()
| 153/wbs | admin.py | Python | cc0-1.0 | 4,183 | 0.005259 |
# from cryptography import *
from salmon import MagicSig
from crypt import strip_whitespaces, b64_to_num, b64_to_str, b64encode, b64decode, generate_rsa_key, export_rsa_key
from activitystreams import salmon, salmon1, salmon2, salmon3
from webfinger import WebfingerClient
from convert import str_to_datetime, datetime_to_rfc3339
| bijanebrahimi/pystatus | pystatus/libs/__init__.py | Python | gpl-3.0 | 331 | 0.003021 |
# This file is part of Maker Keeper Framework.
#
# Copyright (C) 2017-2018 reverendus
# Copyright (C) 2018 bargst
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import math
from functools import total_ordering, reduce
from decimal import *
_context = Context(prec=1000, rounding=ROUND_DOWN)
@total_ordering
class Wad:
"""Represents a number with 18 decimal places.
`Wad` implements comparison, addition, subtraction, multiplication and division operators. Comparison, addition,
subtraction and division only work with other instances of `Wad`. Multiplication works with instances
of `Wad` and `Ray` and also with `int` numbers. The result of multiplication is always a `Wad`.
    `Wad`, along with `Ray`, is one of the two basic numeric types used by Maker contracts.
Notes:
The internal representation of `Wad` is an unbounded integer, the last 18 digits of it being treated
as decimal places. It is similar to the representation used in Maker contracts (`uint128`).
"""
def __init__(self, value):
"""Creates a new Wad number.
Args:
value: an instance of `Wad`, `Ray` or an integer. In case of an integer, the internal representation
of Maker contracts is used which means that passing `1` will create an instance of `Wad`
with a value of `0.000000000000000001'.
"""
if isinstance(value, Wad):
self.value = value.value
elif isinstance(value, Ray):
self.value = int((Decimal(value.value) // (Decimal(10)**Decimal(9))).quantize(1, context=_context))
elif isinstance(value, Rad):
self.value = int((Decimal(value.value) // (Decimal(10)**Decimal(27))).quantize(1, context=_context))
elif isinstance(value, int):
# assert(value >= 0)
self.value = value
else:
raise ArithmeticError
@classmethod
def from_number(cls, number):
# assert(number >= 0)
pwr = Decimal(10) ** 18
dec = Decimal(str(number)) * pwr
return Wad(int(dec.quantize(1, context=_context)))
def __repr__(self):
return "Wad(" + str(self.value) + ")"
def __str__(self):
tmp = str(self.value).zfill(19)
return (tmp[0:len(tmp)-18] + "." + tmp[len(tmp)-18:len(tmp)]).replace("-.", "-0.")
def __add__(self, other):
if isinstance(other, Wad):
return Wad(self.value + other.value)
else:
raise ArithmeticError
def __sub__(self, other):
if isinstance(other, Wad):
return Wad(self.value - other.value)
else:
raise ArithmeticError
def __mod__(self, other):
if isinstance(other, Wad):
return Wad(self.value % other.value)
else:
raise ArithmeticError
# z = cast((uint256(x) * y + WAD / 2) / WAD);
def __mul__(self, other):
if isinstance(other, Wad):
result = Decimal(self.value) * Decimal(other.value) / (Decimal(10) ** Decimal(18))
return Wad(int(result.quantize(1, context=_context)))
elif isinstance(other, Ray):
result = Decimal(self.value) * Decimal(other.value) / (Decimal(10) ** Decimal(27))
return Wad(int(result.quantize(1, context=_context)))
elif isinstance(other, Rad):
result = Decimal(self.value) * Decimal(other.value) / (Decimal(10) ** Decimal(45))
return Wad(int(result.quantize(1, context=_context)))
elif isinstance(other, int):
return Wad(int((Decimal(self.value) * Decimal(other)).quantize(1, context=_context)))
else:
raise ArithmeticError
def __truediv__(self, other):
if isinstance(other, Wad):
return Wad(int((Decimal(self.value) * (Decimal(10) ** Decimal(18)) / Decimal(other.value)).quantize(1, context=_context)))
else:
raise ArithmeticError
def __abs__(self):
return Wad(abs(self.value))
def __eq__(self, other):
if isinstance(other, Wad):
return self.value == other.value
else:
raise ArithmeticError
def __hash__(self):
return hash(self.value)
def __lt__(self, other):
if isinstance(other, Wad):
return self.value < other.value
else:
raise ArithmeticError
def __int__(self):
return int(self.value / 10**18)
def __float__(self):
return self.value / 10**18
def __round__(self, ndigits: int = 0):
return Wad(round(self.value, -18 + ndigits))
def __sqrt__(self):
return Wad.from_number(math.sqrt(self.__float__()))
@staticmethod
def min(*args):
"""Returns the lower of the Wad values"""
return reduce(lambda x, y: x if x < y else y, args[1:], args[0])
@staticmethod
def max(*args):
"""Returns the higher of the Wad values"""
return reduce(lambda x, y: x if x > y else y, args[1:], args[0])
@total_ordering
class Ray:
"""Represents a number with 27 decimal places.
`Ray` implements comparison, addition, subtraction, multiplication and division operators. Comparison, addition,
subtraction and division only work with other instances of `Ray`. Multiplication works with instances
of `Ray` and `Wad` and also with `int` numbers. The result of multiplication is always a `Ray`.
    `Ray`, along with `Wad`, is one of the two basic numeric types used by Maker contracts.
Notes:
The internal representation of `Ray` is an unbounded integer, the last 27 digits of it being treated
as decimal places. It is similar to the representation used in Maker contracts (`uint128`).
"""
def __init__(self, value):
"""Creates a new Ray number.
Args:
value: an instance of `Ray`, `Wad` or an integer. In case of an integer, the internal representation
of Maker contracts is used which means that passing `1` will create an instance of `Ray`
with a value of `0.000000000000000000000000001'.
"""
if isinstance(value, Ray):
self.value = value.value
elif isinstance(value, Wad):
self.value = int((Decimal(value.value) * (Decimal(10)**Decimal(9))).quantize(1, context=_context))
elif isinstance(value, Rad):
self.value = int((Decimal(value.value) / (Decimal(10)**Decimal(18))).quantize(1, context=_context))
elif isinstance(value, int):
# assert(value >= 0)
self.value = value
else:
raise ArithmeticError
@classmethod
def from_number(cls, number):
# assert(number >= 0)
pwr = Decimal(10) ** 27
dec = Decimal(str(number)) * pwr
return Ray(int(dec.quantize(1, context=_context)))
def __repr__(self):
return "Ray(" + str(self.value) + ")"
def __str__(self):
tmp = str(self.value).zfill(28)
return (tmp[0:len(tmp)-27] + "." + tmp[len(tmp)-27:len(tmp)]).replace("-.", "-0.")
def __add__(self, other):
if isinstance(other, Ray):
return Ray(self.value + other.value)
else:
raise ArithmeticError
def __sub__(self, other):
if isinstance(other, Ray):
return Ray(self.value - other.value)
else:
raise ArithmeticError
def __mod__(self, other):
if isinstance(other, Ray):
return Ray(self.value % other.value)
else:
raise ArithmeticError
def __mul__(self, other):
if isinstance(other, Ray):
result = Decimal(self.value) * Decimal(other.value) / (Decimal(10) ** Decimal(27))
return Ray(int(result.quantize(1, context=_context)))
elif isinstance(other, Wad):
result = Decimal(self.value) * Decimal(other.value) / (Decimal(10) ** Decimal(18))
return Ray(int(result.quantize(1, context=_context)))
elif isinstance(other, Rad):
result = Decimal(self.value) * Decimal(other.value) / (Decimal(10) ** Decimal(45))
return Ray(int(result.quantize(1, context=_context)))
elif isinstance(other, int):
return Ray(int((Decimal(self.value) * Decimal(other)).quantize(1, context=_context)))
else:
raise ArithmeticError
def __truediv__(self, other):
if isinstance(other, Ray):
return Ray(int((Decimal(self.value) * (Decimal(10) ** Decimal(27)) / Decimal(other.value)).quantize(1, context=_context)))
else:
raise ArithmeticError
def __abs__(self):
return Ray(abs(self.value))
def __eq__(self, other):
if isinstance(other, Ray):
return self.value == other.value
else:
raise ArithmeticError
def __hash__(self):
return hash(self.value)
def __lt__(self, other):
if isinstance(other, Ray):
return self.value < other.value
else:
raise ArithmeticError
def __int__(self):
return int(self.value / 10**27)
def __float__(self):
return self.value / 10**27
def __round__(self, ndigits: int = 0):
return Ray(round(self.value, -27 + ndigits))
def __sqrt__(self):
return Ray.from_number(math.sqrt(self.__float__()))
@staticmethod
def min(*args):
"""Returns the lower of the Ray values"""
return reduce(lambda x, y: x if x < y else y, args[1:], args[0])
@staticmethod
def max(*args):
"""Returns the higher of the Ray values"""
return reduce(lambda x, y: x if x > y else y, args[1:], args[0])
@total_ordering
class Rad:
"""Represents a number with 45 decimal places.
`Rad` implements comparison, addition, subtraction, multiplication and division operators. Comparison, addition,
subtraction and division only work with other instances of `Rad`. Multiplication works with instances
of `Rad`, `Ray and `Wad` and also with `int` numbers. The result of multiplication is always a `Rad`.
    `Rad` is a new unit that exists to prevent precision loss in the core CDP engine of MCD.
Notes:
The internal representation of `Rad` is an unbounded integer, the last 45 digits of it being treated
as decimal places.
"""
def __init__(self, value):
"""Creates a new Rad number.
Args:
value: an instance of `Rad`, `Ray`, `Wad` or an integer. In case of an integer, the internal representation
of Maker contracts is used which means that passing `1` will create an instance of `Rad`
with a value of `0.000000000000000000000000000000000000000000001'.
"""
if isinstance(value, Rad):
self.value = value.value
elif isinstance(value, Ray):
self.value = int((Decimal(value.value) * (Decimal(10)**Decimal(18))).quantize(1, context=_context))
elif isinstance(value, Wad):
self.value = int((Decimal(value.value) * (Decimal(10)**Decimal(27))).quantize(1, context=_context))
elif isinstance(value, int):
# assert(value >= 0)
self.value = value
else:
raise ArithmeticError
@classmethod
def from_number(cls, number):
# assert(number >= 0)
pwr = Decimal(10) ** 45
dec = Decimal(str(number)) * pwr
return Rad(int(dec.quantize(1, context=_context)))
def __repr__(self):
return "Rad(" + str(self.value) + ")"
def __str__(self):
tmp = str(self.value).zfill(46)
return (tmp[0:len(tmp)-45] + "." + tmp[len(tmp)-45:len(tmp)]).replace("-.", "-0.")
def __add__(self, other):
if isinstance(other, Rad):
return Rad(self.value + other.value)
else:
raise ArithmeticError
def __sub__(self, other):
if isinstance(other, Rad):
return Rad(self.value - other.value)
else:
raise ArithmeticError
def __mod__(self, other):
if isinstance(other, Rad):
return Rad(self.value % other.value)
else:
raise ArithmeticError
def __mul__(self, other):
if isinstance(other, Rad):
result = Decimal(self.value) * Decimal(other.value) / (Decimal(10) ** Decimal(45))
return Rad(int(result.quantize(1, context=_context)))
elif isinstance(other, Ray):
result = Decimal(self.value) * Decimal(other.value) / (Decimal(10) ** Decimal(27))
return Rad(int(result.quantize(1, context=_context)))
elif isinstance(other, Wad):
result = Decimal(self.value) * Decimal(other.value) / (Decimal(10) ** Decimal(18))
return Rad(int(result.quantize(1, context=_context)))
elif isinstance(other, int):
return Rad(int((Decimal(self.value) * Decimal(other)).quantize(1, context=_context)))
else:
raise ArithmeticError
def __truediv__(self, other):
if isinstance(other, Rad):
return Rad(int((Decimal(self.value) * (Decimal(10) ** Decimal(45)) / Decimal(other.value)).quantize(1, context=_context)))
else:
raise ArithmeticError
def __abs__(self):
return Rad(abs(self.value))
def __eq__(self, other):
if isinstance(other, Rad):
return self.value == other.value
else:
raise ArithmeticError
def __hash__(self):
return hash(self.value)
def __lt__(self, other):
if isinstance(other, Rad):
return self.value < other.value
else:
raise ArithmeticError
def __int__(self):
return int(self.value / 10**45)
def __float__(self):
return self.value / 10**45
def __round__(self, ndigits: int = 0):
return Rad(round(self.value, -45 + ndigits))
def __sqrt__(self):
return Rad.from_number(math.sqrt(self.__float__()))
@staticmethod
def min(*args):
"""Returns the lower of the Rad values"""
return reduce(lambda x, y: x if x < y else y, args[1:], args[0])
@staticmethod
def max(*args):
"""Returns the higher of the Rad values"""
return reduce(lambda x, y: x if x > y else y, args[1:], args[0])
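# Minimal sketch (not part of the library) showing how the fixed-point types
# interoperate; numbers are for illustration only.
if __name__ == '__main__':
    w = Wad.from_number(1.5)            # 18 decimal places
    r = Ray.from_number(0.000000001)    # 27 decimal places
    print(w + Wad.from_number(0.5))     # 2.000000000000000000
    print(w * r)                        # Wad * Ray stays a Wad
    print(Rad(w))                       # widen to 45 decimal places
    assert Wad(Ray(w)) == w             # Wad -> Ray -> Wad is lossless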
| makerdao/keeper | pymaker/numeric.py | Python | agpl-3.0 | 14,935 | 0.003348 |
#!/usr/bin/python
'''Scrape a website using urllib2 (a library for fetching URLs) and BeautifulSoup (a library for parsing HTML)'''
from bs4 import BeautifulSoup
import urllib2
import time
import sys
import socket
start_time = time.time()
#Open files
rfile = open("input.csv","r").read().splitlines()
wfile = open("translations.csv","w")
sepComma = ","
newline = "\n"
counter = 0
tcounter = 0
#Start processing
for pEword in rfile:
retry = 0
# print pEword
while True:
try:
counter += 1
tcounter += 1
url = "http://www.example.com/"
print url
req = urllib2.Request(url)
req.add_header("Connection", "keep-alive")
req.add_header("User-Agent", "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:24.0) Gecko/20100101 Firefox/24.0")
# req.add_header("Accept-Encoding", "gzip")
req.add_header("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8")
# req.add_header("Cache-Control", "no-cache")
req.add_header("Accept-Language", "en-US,en;q=0.8")
req.add_header("Host", "filesdownloader.com")
# req.add_header("If-Modified-Since", "Thu, 30 Jan 2014 17:24:29 GMT")
# req.add_header("Cache-Control", "max-age=0")
# req.add_header("If-Modified-Since", "Fri, 31 Jan 2014 21:52:35 GMT")
req.add_header("Cookie", "s=6a11e5h6sald4faibrkcp5bm85; __unam=7639673-143e40de47e-4218148d-4; __utma=127728666.207454719.1391100551.1391208734.1391434591.6; __utmb=127728666.2.10.1391434591; __utmc=127728666; __utmz=127728666.1391197253.2.2.utmcsr=prx.centrump2p.com|utmccn=(referral)|utmcmd=referral|utmcct=/english/")
page = urllib2.urlopen(req,timeout=4)
except urllib2.HTTPError, e:
if retry > 2:
raise e
print e.code
retry += 1
time.sleep(10)
except urllib2.URLError, e:
if retry > 2:
raise e
print e.args
retry += 1
time.sleep(10)
except socket.timeout as e:
if retry > 2:
raise e
print "Request timed out!"
retry += 1
time.sleep(10)
except:
etype, value, tb = sys.exc_info()
response = "%s" % value.message
print etype,response,tb
raise
else:
soup = BeautifulSoup(page.read())
orderedlists = soup.find_all("ol", class_="eirol")
wfile.write(pEword);wfile.write(sepComma)
#Looping <li> tags
for thelist in orderedlists:
for listitems in thelist:
pHword = listitems.next_element.next_sibling.string.encode('utf-8')
print pHword
wfile.write(pHword);wfile.write(sepComma);
# print pHword
wfile.write(newline)
# if counter > 2:
# time.sleep(3)
# counter = 0
            if tcounter % 1000 == 0 and tcounter < 15000:
print "{words} words completed".format(words = tcounter)
# if tcounter%300 == 0:
# print "Waiting for 10 mins"
# time.sleep(600)
break
wfile.close()
print time.time() - start_time, "seconds"
print "Successfully created dictionary."
| joelthe1/web-scraping | scrape-website-example-1.py | Python | mit | 3,431 | 0.01195 |
"""Support for Modbus covers."""
from __future__ import annotations
from datetime import timedelta
from typing import Any
from pymodbus.exceptions import ConnectionException, ModbusException
from pymodbus.pdu import ExceptionResponse
from homeassistant.components.cover import SUPPORT_CLOSE, SUPPORT_OPEN, CoverEntity
from homeassistant.const import (
CONF_COVERS,
CONF_DEVICE_CLASS,
CONF_NAME,
CONF_SCAN_INTERVAL,
CONF_SLAVE,
)
from homeassistant.helpers.event import async_track_time_interval
from homeassistant.helpers.restore_state import RestoreEntity
from homeassistant.helpers.typing import (
ConfigType,
DiscoveryInfoType,
HomeAssistantType,
)
from .const import (
CALL_TYPE_COIL,
CALL_TYPE_REGISTER_HOLDING,
CALL_TYPE_REGISTER_INPUT,
CONF_REGISTER,
CONF_STATE_CLOSED,
CONF_STATE_CLOSING,
CONF_STATE_OPEN,
CONF_STATE_OPENING,
CONF_STATUS_REGISTER,
CONF_STATUS_REGISTER_TYPE,
MODBUS_DOMAIN,
)
from .modbus import ModbusHub
async def async_setup_platform(
hass: HomeAssistantType,
config: ConfigType,
async_add_entities,
discovery_info: DiscoveryInfoType | None = None,
):
"""Read configuration and create Modbus cover."""
if discovery_info is None:
return
covers = []
for cover in discovery_info[CONF_COVERS]:
hub: ModbusHub = hass.data[MODBUS_DOMAIN][discovery_info[CONF_NAME]]
covers.append(ModbusCover(hub, cover))
async_add_entities(covers)
class ModbusCover(CoverEntity, RestoreEntity):
"""Representation of a Modbus cover."""
def __init__(
self,
hub: ModbusHub,
config: dict[str, Any],
):
"""Initialize the modbus cover."""
self._hub: ModbusHub = hub
self._coil = config.get(CALL_TYPE_COIL)
self._device_class = config.get(CONF_DEVICE_CLASS)
self._name = config[CONF_NAME]
self._register = config.get(CONF_REGISTER)
self._slave = config.get(CONF_SLAVE)
self._state_closed = config[CONF_STATE_CLOSED]
self._state_closing = config[CONF_STATE_CLOSING]
self._state_open = config[CONF_STATE_OPEN]
self._state_opening = config[CONF_STATE_OPENING]
self._status_register = config.get(CONF_STATUS_REGISTER)
self._status_register_type = config[CONF_STATUS_REGISTER_TYPE]
self._scan_interval = timedelta(seconds=config[CONF_SCAN_INTERVAL])
self._value = None
self._available = True
# If we read cover status from coil, and not from optional status register,
# we interpret boolean value False as closed cover, and value True as open cover.
# Intermediate states are not supported in such a setup.
if self._coil is not None and self._status_register is None:
self._state_closed = False
self._state_open = True
self._state_closing = None
self._state_opening = None
# If we read cover status from the main register (i.e., an optional
# status register is not specified), we need to make sure the register_type
# is set to "holding".
if self._register is not None and self._status_register is None:
self._status_register = self._register
self._status_register_type = CALL_TYPE_REGISTER_HOLDING
async def async_added_to_hass(self):
"""Handle entity which will be added."""
state = await self.async_get_last_state()
if not state:
return
self._value = state.state
async_track_time_interval(
self.hass, lambda arg: self._update(), self._scan_interval
)
@property
def device_class(self) -> str | None:
"""Return the device class of the sensor."""
return self._device_class
@property
def name(self):
"""Return the name of the switch."""
return self._name
@property
def supported_features(self):
"""Flag supported features."""
return SUPPORT_OPEN | SUPPORT_CLOSE
@property
def available(self) -> bool:
"""Return True if entity is available."""
return self._available
@property
def is_opening(self):
"""Return if the cover is opening or not."""
return self._value == self._state_opening
@property
def is_closing(self):
"""Return if the cover is closing or not."""
return self._value == self._state_closing
@property
def is_closed(self):
"""Return if the cover is closed or not."""
return self._value == self._state_closed
@property
def should_poll(self):
"""Return True if entity has to be polled for state.
False if entity pushes its state to HA.
"""
# Handle polling directly in this entity
return False
def open_cover(self, **kwargs: Any) -> None:
"""Open cover."""
if self._coil is not None:
self._write_coil(True)
else:
self._write_register(self._state_open)
self._update()
def close_cover(self, **kwargs: Any) -> None:
"""Close cover."""
if self._coil is not None:
self._write_coil(False)
else:
self._write_register(self._state_closed)
self._update()
def _update(self):
"""Update the state of the cover."""
if self._coil is not None and self._status_register is None:
self._value = self._read_coil()
else:
self._value = self._read_status_register()
self.schedule_update_ha_state()
def _read_status_register(self) -> int | None:
"""Read status register using the Modbus hub slave."""
try:
if self._status_register_type == CALL_TYPE_REGISTER_INPUT:
result = self._hub.read_input_registers(
self._slave, self._status_register, 1
)
else:
result = self._hub.read_holding_registers(
self._slave, self._status_register, 1
)
except ConnectionException:
self._available = False
return
if isinstance(result, (ModbusException, ExceptionResponse)):
self._available = False
return
value = int(result.registers[0])
self._available = True
return value
def _write_register(self, value):
"""Write holding register using the Modbus hub slave."""
try:
self._hub.write_register(self._slave, self._register, value)
except ConnectionException:
self._available = False
return
self._available = True
def _read_coil(self) -> bool | None:
"""Read coil using the Modbus hub slave."""
try:
result = self._hub.read_coils(self._slave, self._coil, 1)
except ConnectionException:
self._available = False
return
if isinstance(result, (ModbusException, ExceptionResponse)):
self._available = False
return
value = bool(result.bits[0] & 1)
self._available = True
return value
def _write_coil(self, value):
"""Write coil using the Modbus hub slave."""
try:
self._hub.write_coil(self._slave, self._coil, value)
except ConnectionException:
self._available = False
return
self._available = True
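# Configuration sketch (hypothetical values; the modbus integration docs have
# the authoritative schema). A cover driven by a holding register with a
# separate input-type status register might be declared as:
#
#   modbus:
#     - name: hub1
#       type: tcp
#       host: 192.168.1.10
#       port: 502
#       covers:
#         - name: garage_door
#           slave: 1
#           register: 16
#           status_register: 17
#           status_register_type: input
#           state_open: 1
#           state_opening: 2
#           state_closed: 0
#           state_closing: 3
#           scan_interval: 10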
| w1ll1am23/home-assistant | homeassistant/components/modbus/cover.py | Python | apache-2.0 | 7,501 | 0.000533 |
# coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Data related functions for packing emnist dataset.
parse_data: extract client data from emnist and add ids to tuples of numpy
arrays.
pack_dataset: pack the numpy arrays into tf.data.Dataset.
"""
import numpy as np
import tensorflow as tf
NUM_EMNIST_CLASSES = 62
MAX_DATA_SIZE = 700000
SPLIT_SIZE = 0.1
SHUFFLE_SIZE = 10000
PARSE_DATA_BATCH_SIZE = 256
def pack_dataset(data_tuple, mode, batch_size=256, with_dist=False):
"""Packs the arrays into tf.data.Dataset.
Args:
    data_tuple: tuple of numpy arrays returned from parse_data() as inputs.
      It follows this order:
      When with_dist is True:
        Input images, client ids, label distributions, labels
      When with_dist is False:
        Input images, client ids, labels
mode: training mode of test mode.
batch_size: batch size for the dataset.
with_dist: using label distributions as inputs.
Returns:
A tf.data.Dataset
"""
if with_dist:
x, idx, dist, y = data_tuple
dataset = tf.data.Dataset.from_tensor_slices(({
'input_x': x,
'input_id': idx,
'input_dist': dist,
}, y))
else:
x, idx, y = data_tuple
dataset = tf.data.Dataset.from_tensor_slices(({
'input_x': x,
'input_id': idx
}, y))
if mode == 'train':
dataset = dataset.shuffle(SHUFFLE_SIZE)
dataset = dataset.batch(batch_size, drop_remainder=True)
return dataset
def count_dataset(dataset):
cnt = 0
for _ in iter(dataset):
cnt = cnt + 1
return int(cnt)
def get_local_y_dist(client_dataset):
  dist = np.zeros(NUM_EMNIST_CLASSES)  # 1-D label histogram
for x in client_dataset:
y = x['label'].numpy()
dist[y] += 1
return np.array(dist).reshape((1, -1)) / np.sum(dist)
def parse_data(emnist_train,
emnist_test,
client_ids,
cliend_encodings,
with_dist=False):
"""Packs the client dataset into tuples of arrays with client ids.
Args:
emnist_train: the tff clientdata object of the training sets.
emnist_test: the tff clientdata object of the test sets.
client_ids: client ids to extract.
cliend_encodings: a dictionary encoding client string id to number.
with_dist: using label distributions as inputs or not.
Returns:
Three tuples of numpy arrays:
The training set for fine-tuning, the smaller split of the training set,
the test set, each is a tuple of the following np.array:
Input images, input ids, label distributions, labels if with_dist is True
Input images, input ids, labels if with_dist is False
"""
def merge_clients(emnist, split_size=1):
# Cache in the memory for faster training iterations
train_x, train_id, train_y = np.zeros((MAX_DATA_SIZE, 28, 28, 1)), np.zeros(
(MAX_DATA_SIZE)), np.zeros((MAX_DATA_SIZE))
cnt = 0
if with_dist:
train_dist = np.zeros((MAX_DATA_SIZE, NUM_EMNIST_CLASSES))
client_num_list = []
for client_id in client_ids:
ds = emnist.create_tf_dataset_for_client(client_id)
client_id = cliend_encodings[client_id]
ds_np = ds.batch(PARSE_DATA_BATCH_SIZE)
if with_dist:
y_dist = get_local_y_dist(ds)
client_num = 0
for x in ds_np:
y = x['label']
x = tf.expand_dims(x['pixels'], axis=-1)
if split_size < 1:
split_num = int(len(y)*split_size)
ids = np.random.choice(np.arange(len(y)), split_num)
y = tf.gather(y, ids)
x = tf.gather(x, ids)
num = len(y)
idx = np.array([client_id]*num)
train_x[cnt:cnt+num] = x
train_y[cnt:cnt+num] = y
train_id[cnt:cnt+num] = idx
if with_dist:
train_dist[cnt:cnt+num] = np.tile(y_dist, [num, 1])
cnt += num
client_num += num
client_num_list.append(client_num)
train_x = train_x[:cnt]
train_y = train_y[:cnt]
train_id = train_id[:cnt]
if with_dist:
train_dist = train_dist[:cnt]
return train_x, train_id, train_dist, train_y
else:
return train_x, train_id, train_y
return merge_clients(emnist_train), merge_clients(
emnist_train, split_size=SPLIT_SIZE), merge_clients(emnist_test)
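# Small self-contained sketch (synthetic arrays, not real EMNIST clients)
# showing the tuple layout pack_dataset() expects when with_dist=True:
if __name__ == '__main__':
  n = 512
  x = np.random.rand(n, 28, 28, 1).astype(np.float32)
  idx = np.random.randint(0, 100, size=n)
  dist = np.ones((n, NUM_EMNIST_CLASSES), dtype=np.float32) / NUM_EMNIST_CLASSES
  y = np.random.randint(0, NUM_EMNIST_CLASSES, size=n)
  ds = pack_dataset((x, idx, dist, y), mode='train', with_dist=True)
  print(count_dataset(ds))  # 2 batches of 256; the remainder is dropped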
| google-research/google-research | basisnet/personalization/centralized_emnist/data_processing.py | Python | apache-2.0 | 4,786 | 0.006686 |
from __future__ import absolute_import
import torch
import torch.nn.functional as F
from torch import nn, autograd
class OIM(autograd.Function):
def __init__(self, lut, momentum=0.5):
super(OIM, self).__init__()
self.lut = lut
self.momentum = momentum
def forward(self, inputs, targets):
self.save_for_backward(inputs, targets)
outputs = inputs.mm(self.lut.t())
return outputs
def backward(self, grad_outputs):
inputs, targets = self.saved_tensors
grad_inputs = None
if self.needs_input_grad[0]:
grad_inputs = grad_outputs.mm(self.lut)
for x, y in zip(inputs, targets):
self.lut[y] = self.momentum * self.lut[y] + (1. - self.momentum) * x
self.lut[y] /= self.lut[y].norm()
return grad_inputs, None
def oim(inputs, targets, lut, momentum=0.5):
return OIM(lut, momentum=momentum)(inputs, targets)
class OIMLoss(nn.Module):
def __init__(self, num_features, num_classes, scalar=1.0, momentum=0.5,
weight=None, size_average=True):
super(OIMLoss, self).__init__()
self.num_features = num_features
self.num_classes = num_classes
self.momentum = momentum
self.scalar = scalar
self.weight = weight
self.size_average = size_average
self.register_buffer('lut', torch.zeros(num_classes, num_features))
def forward(self, inputs, targets):
inputs = oim(inputs, targets, self.lut, momentum=self.momentum)
inputs *= self.scalar
loss = F.cross_entropy(inputs, targets, weight=self.weight,
size_average=self.size_average)
return loss, inputs
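# Usage sketch (an assumption: a legacy PyTorch release, around 0.4, where
# class-style autograd Functions with an __init__ still execute):
if __name__ == '__main__':
    criterion = OIMLoss(num_features=128, num_classes=10, scalar=10.0)
    feats = F.normalize(torch.randn(4, 128), dim=1)  # unit-norm features
    labels = torch.arange(4).long()
    loss, logits = criterion(feats, labels)
    print(loss.item(), logits.shape)  # scalar loss and (4, 10) logits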
| dapengchen123/code_v1 | reid/loss/oim.py | Python | mit | 1,727 | 0.000579 |
#!/usr/bin/env python
# For python 2.6-2.7
from __future__ import print_function
from os.path import *
import re
# from parseBrackets import parseBrackets
from parseDirectiveArgs import parseDirectiveArguments
class MyError(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
assertVariants = 'Fail|Equal|True|False|LessThan|LessThanOrEqual|GreaterThan|GreaterThanOrEqual'
assertVariants += '|IsMemberOf|Contains|Any|All|NotAll|None|IsPermutationOf'
assertVariants += '|ExceptionRaised|SameShape|IsNaN|IsFinite'
def cppSetLineAndFile(line, file):
return "#line " + str(line) + ' "' + file + '"\n'
def getSubroutineName(line):
try:
m = re.match('\s*subroutine\s+(\w*)\s*(\\([\w\s,]*\\))?\s*(!.*)*$', line, re.IGNORECASE)
return m.groups()[0]
except:
raise MyError('Improper format in declaration of test procedure.')
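# Example (hypothetical input): getSubroutineName('subroutine testFoo(this)')
# returns 'testFoo'; a line that does not parse as a subroutine declaration
# raises MyError.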
def parseArgsFirstRest(directiveName,line):
"""If the @-directive has more than one argument, parse into first and rest strings.
Added for assertAssociated.
"""
if directiveName != '':
m = re.match('\s*'+directiveName+'\s*\\((.*\w.*)\\)\s*$',line,re.IGNORECASE)
if m:
argStr = m.groups()[0]
else:
return None
else:
argStr = line
args = parseDirectiveArguments(argStr)
if args == []:
returnArgs = None
elif len(args) == 1:
returnArgs = [args[0]]
else:
returnArgs = [args[0],','.join(args[1:])]
return returnArgs
def parseArgsFirstSecondRest(directiveName,line):
"""If the @-directive must have at least two arguments, parse into first, second,
and rest strings. Added for assertAssociated.
"""
args1 = parseArgsFirstRest(directiveName,line)
returnArgs = None
if args1 != None:
if len(args1) == 1:
returnArgs = args1
elif len(args1) == 2:
args2 = parseArgsFirstRest('',args1[1])
returnArgs = [args1[0]] + args2
elif len(args1) == 3:
print(-999,'parseArgsFirstSecondRest::error!')
returnArgs = None
return returnArgs
def getSelfObjectName(line):
m = re.match('\s*subroutine\s+\w*\s*\\(\s*(\w+)\s*(,\s*\w+\s*)*\\)\s*$', line, re.IGNORECASE)
if m:
return m.groups()[0]
else:
return m
def getTypeName(line):
m = re.match('\s*type(.*::\s*|\s+)(\w*)\s*$', line, re.IGNORECASE)
return m.groups()[1]
class Action():
def apply(self, line):
m = self.match(line)
if m: self.action(m, line)
return m
class AtTest(Action):
def __init__(self, parser):
self.parser = parser
self.keyword = '@test'
def match(self, line):
m = re.match('\s*'+self.keyword+'(\s*(\\(.*\\))?\s*$)', line, re.IGNORECASE)
return m
def action(self, m, line):
options = re.match('\s*'+self.keyword+'\s*\\((.*)\\)\s*$', line, re.IGNORECASE)
method = {}
if options:
npesOption = re.search('npes\s*=\s*\\[([0-9,\s]+)\\]', options.groups()[0], re.IGNORECASE)
if npesOption:
npesString = npesOption.groups()[0]
npes = map(int, npesString.split(','))
method['npRequests'] = npes
#ifdef is optional
matchIfdef = re.match('.*ifdef\s*=\s*(\w+)', options.groups()[0], re.IGNORECASE)
if matchIfdef:
ifdef = matchIfdef.groups()[0]
method['ifdef'] = ifdef
matchIfndef = re.match('.*ifndef\s*=\s*(\w+)', options.groups()[0], re.IGNORECASE)
if matchIfndef:
ifndef = matchIfndef.groups()[0]
method['ifndef'] = ifndef
matchType = re.match('.*type\s*=\s*(\w+)', options.groups()[0], re.IGNORECASE)
if matchType:
print ('Type', matchType.groups()[0])
method['type'] = matchType.groups()[0]
paramOption = re.search('testParameters\s*=\s*[{](.*)[}]', options.groups()[0], re.IGNORECASE)
if paramOption:
paramExpr = paramOption.groups()[0]
method['testParameters'] = paramExpr
casesOption = re.search('cases\s*=\s*(\\[[0-9,\s]+\\])', options.groups()[0], re.IGNORECASE)
if casesOption:
method['cases'] = casesOption.groups()[0]
nextLine = self.parser.nextLine()
method['name'] = getSubroutineName(nextLine)
# save "self" name for use with @mpiAssert
self.parser.currentSelfObjectName = getSelfObjectName(nextLine)
# save "self" name for use with @mpiAssert
dummyArgument = getSelfObjectName(nextLine)
if dummyArgument:
method['selfObjectName'] = dummyArgument
self.parser.userTestMethods.append(method)
self.parser.commentLine(line)
self.parser.outputFile.write(nextLine)
# deprecated - should now just use @test
class AtMpiTest(AtTest):
def __init__(self, parser):
self.parser = parser
self.keyword = '@mpitest'
class AtTestCase(Action):
def __init__(self, parser):
self.parser = parser
def match(self, line):
m = re.match('\s*@testcase\s*(|\\(.*\\))\s*$', line, re.IGNORECASE)
return m
def action(self, m, line):
options = re.match('\s*@testcase\s*\\((.*)\\)\s*$', line, re.IGNORECASE)
if options:
value = re.search('constructor\s*=\s*(\w*)', options.groups()[0], re.IGNORECASE)
if value:
self.parser.userTestCase['constructor'] = value.groups()[0]
value = re.search('npes\s*=\s*\\[([0-9,\s]+)\\]', options.groups()[0], re.IGNORECASE)
if value:
npesString = value.groups()[0]
npes = map(int,npesString.split(','))
self.parser.userTestCase['npRequests'] = npes
value = re.search('cases\s*=\s*(\\[[0-9,\s]+\\])', options.groups()[0], re.IGNORECASE)
if value:
cases = value.groups()[0]
self.parser.userTestCase['cases'] = cases
value = re.search('testParameters\s*=\s*[{](.*)[}]', options.groups()[0], re.IGNORECASE)
if value:
paramExpr = value.groups()[0]
self.parser.userTestCase['testParameters'] = paramExpr
nextLine = self.parser.nextLine()
self.parser.userTestCase['type']=getTypeName(nextLine)
self.parser.commentLine(line)
self.parser.outputFile.write(nextLine)
class AtSuite(Action):
def __init__(self, parser):
self.parser = parser
def match(self, line):
nameRe = "'\w+'|" + """\w+"""
m = re.match("\s*@suite\s*\\(\s*name\s*=\s*("+nameRe+")\s*\\)\s*$", line, re.IGNORECASE)
return m
def action(self, m, line):
self.parser.suiteName=m.groups()[0][1:-1]
self.parser.wrapModuleName = 'Wrap' + self.parser.suiteName
class AtBegin(Action):
def __init__(self, parser):
self.parser = parser
def match(self, line):
m = re.match('\s*module\s+(\w*)\s*$', line, re.IGNORECASE)
return m
def action(self, m, line):
self.parser.userModuleName = m.groups()[0]
self.parser.wrapModuleName = 'Wrap' + self.parser.userModuleName
if not self.parser.suiteName:
self.parser.suiteName = self.parser.userModuleName + "_suite"
self.parser.outputFile.write(line)
class AtAssert(Action):
def __init__(self, parser):
self.parser = parser
def match(self, line):
m = re.match('\s*@assert('+assertVariants+')\s*\\((.*\w.*)\\)\s*$', line, re.IGNORECASE)
return m
def appendSourceLocation(self, fileHandle, fileName, lineNumber):
fileHandle.write(" & location=SourceLocation( &\n")
fileHandle.write(" & '" + str(basename(fileName)) + "', &\n")
fileHandle.write(" & " + str(lineNumber) + ")")
def action(self, m, line):
p = self.parser
p.outputFile.write(cppSetLineAndFile(p.currentLineNumber, p.fileName))
p.outputFile.write(" call assert"+m.groups()[0]+"(" + m.groups()[1] + ", &\n")
self.appendSourceLocation(p.outputFile, p.fileName, p.currentLineNumber)
p.outputFile.write(" )\n")
p.outputFile.write(" if (anyExceptions()) return\n")
p.outputFile.write(cppSetLineAndFile(p.currentLineNumber+1, p.fileName))
class AtAssertAssociated(Action):
def __init__(self,parser):
self.parser = parser
def match(self, line):
m = re.match('\s*@assertassociated\s*\\((.*\w.*)\\)\s*$', line, re.IGNORECASE)
if not m:
m = re.match( \
'\s*@assertassociated\s*\\((\s*([^,]*\w.*),\s*([^,]*\w.*),(.*\w*.*))\\)\s*$', \
line, re.IGNORECASE)
# How to get both (a,b) and (a,b,c) to match?
if not m:
m = re.match( \
'\s*@assertassociated\s*\\((\s*([^,]*\w.*),\s*([^,]*\w.*))\\)\s*$', \
line, re.IGNORECASE)
return m
def appendSourceLocation(self, fileHandle, fileName, lineNumber):
fileHandle.write(" & location=SourceLocation( &\n")
fileHandle.write(" & '" + str(basename(fileName)) + "', &\n")
fileHandle.write(" & " + str(lineNumber) + ")")
def action(self, m, line):
p = self.parser
# args = parseArgsFirstRest('@assertassociated',line)
args = parseArgsFirstSecondRest('@assertassociated',line)
# print(9000,line)
# print(9001,args)
p.outputFile.write(cppSetLineAndFile(p.currentLineNumber, p.fileName))
if len(args) > 1:
if re.match('.*message=.*',args[1],re.IGNORECASE):
p.outputFile.write(" call assertTrue(associated(" + args[0] + "), " + args[1] + ", &\n")
elif len(args) > 2:
p.outputFile.write(" call assertTrue(associated(" + args[0] + "," + args[1] + "), " + args[2] + ", &\n")
else:
p.outputFile.write(" call assertTrue(associated(" + args[0] + "," + args[1] + "), &\n")
else:
p.outputFile.write(" call assertTrue(associated(" + args[0] + "), &\n")
self.appendSourceLocation(p.outputFile, p.fileName, p.currentLineNumber)
p.outputFile.write(" )\n")
p.outputFile.write(" if (anyExceptions()) return\n")
p.outputFile.write(cppSetLineAndFile(p.currentLineNumber+1, p.fileName))
class AtAssertNotAssociated(Action):
def __init__(self,parser):
self.parser = parser
self.name='@assertnotassociated'
def match(self, line):
m = re.match('\s*@assert(not|un)associated\s*\\((.*\w.*)\\)\s*$', line, re.IGNORECASE)
if m:
self.name='@assert'+m.groups()[0]+'associated'
else:
self.name='@assertnotassociated'
if not m:
m = re.match( \
'\s*@assert(not|un)associated\s*\\((\s*([^,]*\w.*),\s*([^,]*\w.*),(.*\w*.*))\\)\s*$', \
line, re.IGNORECASE)
# How to get both (a,b) and (a,b,c) to match?
if not m:
m = re.match( \
'\s*@assert(not|un)associated\s*\\((\s*([^,]*\w.*),\s*([^,]*\w.*))\\)\s*$', \
line, re.IGNORECASE)
if m:
self.name='@assert'+m.groups()[0]+'associated'
else:
self.name='@assertnotassociated'
return m
def appendSourceLocation(self, fileHandle, fileName, lineNumber):
fileHandle.write(" & location=SourceLocation( &\n")
fileHandle.write(" & '" + str(basename(fileName)) + "', &\n")
fileHandle.write(" & " + str(lineNumber) + ")")
def action(self, m, line):
p = self.parser
#-- args = parseArgsFirstRest('@assertassociated',line)
#ok args = parseArgsFirstSecondRest('@assertassociated',line)
args = parseArgsFirstSecondRest(self.name,line)
# print(9000,line)
# print(9001,args)
p.outputFile.write(cppSetLineAndFile(p.currentLineNumber, p.fileName))
if len(args) > 1:
if re.match('.*message=.*',args[1],re.IGNORECASE):
p.outputFile.write(" call assertFalse(associated(" + args[0] + "), " + args[1] + ", &\n")
elif len(args) > 2:
p.outputFile.write(" call assertFalse(associated(" + args[0] + "," + args[1] + "), " + args[2] + ", &\n")
else:
p.outputFile.write(" call assertFalse(associated(" + args[0] + "," + args[1] + "), &\n")
else:
p.outputFile.write(" call assertFalse(associated(" + args[0] + "), &\n")
self.appendSourceLocation(p.outputFile, p.fileName, p.currentLineNumber)
p.outputFile.write(" )\n")
p.outputFile.write(" if (anyExceptions()) return\n")
p.outputFile.write(cppSetLineAndFile(p.currentLineNumber+1, p.fileName))
class AtAssertEqualUserDefined(Action):
"""Convenience directive replacing (a,b) with a call to assertTrue(a==b)
and an error message, if none is provided when invoked.
"""
def __init__(self,parser):
self.parser = parser
def match(self, line):
m = re.match( \
'\s*@assertequaluserdefined\s*\\((\s*([^,]*\w.*),\s*([^,]*\w.*),(.*\w*.*))\\)\s*$', \
line, re.IGNORECASE)
# How to get both (a,b) and (a,b,c) to match?
if not m:
m = re.match( \
'\s*@assertequaluserdefined\s*\\((\s*([^,]*\w.*),\s*([^,]*\w.*))\\)\s*$', \
line, re.IGNORECASE)
return m
def appendSourceLocation(self, fileHandle, fileName, lineNumber):
fileHandle.write(" & location=SourceLocation( &\n")
fileHandle.write(" & '" + str(basename(fileName)) + "', &\n")
fileHandle.write(" & " + str(lineNumber) + ")")
def action(self, m, line):
p = self.parser
args = parseArgsFirstSecondRest('@assertequaluserdefined',line)
p.outputFile.write(cppSetLineAndFile(p.currentLineNumber, p.fileName))
if len(args) > 2:
p.outputFile.write(" call assertTrue(" \
+ args[0] + "==" + args[1] + ", " + args[2] + ", &\n")
else:
p.outputFile.write(" call assertTrue(" \
+ args[0] + "==" + args[1] + ", &\n")
if not re.match('.*message=.*',line,re.IGNORECASE):
p.outputFile.write(" & message='<" + args[0] + "> not equal to <" + args[1] + ">', &\n")
self.appendSourceLocation(p.outputFile, p.fileName, p.currentLineNumber)
p.outputFile.write(" )\n")
p.outputFile.write(" if (anyExceptions()) return\n")
p.outputFile.write(cppSetLineAndFile(p.currentLineNumber+1, p.fileName))
class AtAssertEquivalent(Action):
"""Convenience directive replacing (a,b) with a call to assertTrue(a.eqv.b)
and an error message, if none is provided when invoked.
"""
def __init__(self,parser):
self.parser = parser
def match(self, line):
m = re.match( \
'\s*@assertequivalent\s*\\((\s*([^,]*\w.*),\s*([^,]*\w.*),(.*\w*.*))\\)\s*$', \
line, re.IGNORECASE)
# How to get both (a,b) and (a,b,c) to match?
if not m:
m = re.match( \
'\s*@assertequivalent\s*\\((\s*([^,]*\w.*),\s*([^,]*\w.*))\\)\s*$', \
line, re.IGNORECASE)
return m
def appendSourceLocation(self, fileHandle, fileName, lineNumber):
fileHandle.write(" & location=SourceLocation( &\n")
fileHandle.write(" & '" + str(basename(fileName)) + "', &\n")
fileHandle.write(" & " + str(lineNumber) + ")")
def action(self, m, line):
p = self.parser
args = parseArgsFirstSecondRest('@assertequivalent',line)
p.outputFile.write(cppSetLineAndFile(p.currentLineNumber, p.fileName))
if len(args) > 2:
p.outputFile.write(" call assertTrue(" \
+ args[0] + ".eqv." + args[1] + ", " + args[2] + ", &\n")
else:
p.outputFile.write(" call assertTrue(" \
+ args[0] + ".eqv." + args[1] + ", &\n")
if not re.match('.*message=.*',line,re.IGNORECASE):
p.outputFile.write(" & message='<" + args[0] + "> not equal to <" + args[1] + ">', &\n")
self.appendSourceLocation(p.outputFile, p.fileName, p.currentLineNumber)
p.outputFile.write(" )\n")
p.outputFile.write(" if (anyExceptions()) return\n")
p.outputFile.write(cppSetLineAndFile(p.currentLineNumber+1, p.fileName))
class AtMpiAssert(Action):
def __init__(self, parser):
self.parser = parser
def match(self, line):
m = re.match('\s*@mpiassert('+assertVariants+')\s*\\((.*\w.*)\\)\s*$', line, re.IGNORECASE)
return m
def appendSourceLocation(self, fileHandle, fileName, lineNumber):
fileHandle.write(" & location=SourceLocation( &\n")
fileHandle.write(" & '" + str(basename(fileName)) + "', &\n")
fileHandle.write(" & " + str(lineNumber) + ")")
def action(self, m, line):
p = self.parser
p.outputFile.write(cppSetLineAndFile(p.currentLineNumber, p.fileName))
p.outputFile.write(" call assert"+m.groups()[0]+"(" + m.groups()[1] + ", &\n")
self.appendSourceLocation(p.outputFile, p.fileName, p.currentLineNumber)
p.outputFile.write(" )\n")
# 'this' object may not exist if test is commented out.
if hasattr(p,'currentSelfObjectName'):
p.outputFile.write(" if (anyExceptions("+p.currentSelfObjectName+"%context)) return\n")
p.outputFile.write(cppSetLineAndFile(p.currentLineNumber+1, p.fileName))
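# Note on the guarded 'return' above: for MPI tests the generated check is
#     if (anyExceptions(this%context)) return
# (assuming the enclosing test object is named 'this'), so exception state
# is queried per MPI context rather than via the serial anyExceptions().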
class AtBefore(Action):
def __init__(self, parser):
self.parser = parser
def match(self, line):
        m = re.match(r'\s*@before\s*$', line, re.IGNORECASE)
return m
def action(self, m, line):
nextLine = self.parser.nextLine()
self.parser.userTestCase['setUp'] = getSubroutineName(nextLine)
self.parser.commentLine(line)
self.parser.outputFile.write(nextLine)
class AtAfter(Action):
def __init__(self, parser):
self.parser = parser
def match(self, line):
        m = re.match(r'\s*@after\s*$', line, re.IGNORECASE)
return m
def action(self, m, line):
nextLine = self.parser.nextLine()
self.parser.userTestCase['tearDown'] = getSubroutineName(nextLine)
self.parser.commentLine(line)
self.parser.outputFile.write(nextLine)
class AtTestParameter(Action):
def __init__(self, parser):
self.parser = parser
def match(self, line):
        m = re.match(r'\s*@testParameter\s*(|.*)$', line, re.IGNORECASE)
return m
def action(self, m, line):
        options = re.match(r'\s*@testParameter\s*\((.*)\)\s*$', line, re.IGNORECASE)
self.parser.commentLine(line)
nextLine = self.parser.nextLine()
        if 'testParameterType' not in self.parser.userTestCase:
self.parser.userTestCase['testParameterType'] = getTypeName(nextLine)
self.parser.outputFile.write(nextLine)
if options:
            value = re.search(r'constructor\s*=\s*(\w*)', options.groups()[0], re.IGNORECASE)
if value:
self.parser.userTestCase['testParameterConstructor'] = value.groups()[0]
else:
self.parser.userTestCase['testParameterConstructor'] = self.parser.userTestCase['testParameterType']
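# Assumed directive shape, per the regexes above: '@testParameter' optionally
# followed by '(constructor = <name>)'. The type declared on the next source
# line becomes the test parameter type, and the named constructor (or, by
# default, the type name itself) is recorded for later code generation.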
class Parser():
def __init__(self, inputFileName, outputFileName):
def getBaseName(fileName):
from os.path import basename, splitext
base = basename(fileName)
return splitext(base)[0]
self.fileName = inputFileName
self.inputFile = open(inputFileName, 'r')
self.outputFile = open(outputFileName, 'w')
self.defaultSuiteName = getBaseName(inputFileName) + "_suite"
self.suiteName = ''
self.currentLineNumber = 0
self.userModuleName = '' # if any
self.userTestCase = {}
self.userTestCase['setUpMethod'] = ''
self.userTestCase['tearDownMethod'] = ''
self.userTestCase['defaultTestParameterNpes'] = [] # is MPI if not empty
self.userTestCase['defaultTestParametersExpr'] = ''
self.userTestCase['defaultTestParameterCases'] = []
self.userTestMethods = [] # each entry is a dictionary
self.wrapModuleName = "Wrap" + getBaseName(inputFileName)
self.currentLineNumber = 0
self.actions=[]
self.actions.append(AtTest(self))
self.actions.append(AtMpiTest(self))
self.actions.append(AtTestCase(self))
self.actions.append(AtSuite(self))
self.actions.append(AtBegin(self))
self.actions.append(AtAssert(self))
self.actions.append(AtAssertAssociated(self))
# self.actions.append(AtAssertAssociatedWith(self))
self.actions.append(AtAssertNotAssociated(self))
# self.actions.append(AtAssertNotAssociatedWith(self))
self.actions.append(AtAssertEqualUserDefined(self))
self.actions.append(AtAssertEquivalent(self))
self.actions.append(AtMpiAssert(self))
self.actions.append(AtBefore(self))
self.actions.append(AtAfter(self))
self.actions.append(AtTestParameter(self))
def commentLine(self, line):
self.outputFile.write(re.sub('@','!@',line))
def run(self):
def parse(line):
for action in self.actions:
if (action.apply(line)): return
self.outputFile.write(line)
while True:
line = self.nextLine()
if not line: break
parse(line)
if (not self.suiteName): self.suiteName = self.defaultSuiteName
        if 'testParameterType' in self.userTestCase and 'constructor' not in self.userTestCase:
self.userTestCase['constructor'] = self.userTestCase['testParameterType']
self.makeWrapperModule()
def isComment(self, line):
        return re.match(r'\s*(!.*|)$', line)
def nextLine(self):
while True:
self.currentLineNumber += 1
line = self.inputFile.readline()
if not line: break
            if (self.isComment(line)):
                self.outputFile.write(line)
else:
break
return line
def printHeader(self):
self.outputFile.write('\n')
self.outputFile.write('module ' + self.wrapModuleName + '\n')
self.outputFile.write(' use pFUnit_mod\n')
if (self.userModuleName): self.outputFile.write(' use ' + self.userModuleName + '\n')
self.outputFile.write(' implicit none\n')
self.outputFile.write(' private\n\n')
def printTail(self):
self.outputFile.write('\n')
self.outputFile.write('end module ' + self.wrapModuleName + '\n\n')
def printWrapUserTestCase(self):
self.outputFile.write(' public :: WrapUserTestCase\n')
self.outputFile.write(' public :: makeCustomTest\n')
self.outputFile.write(' type, extends(' + self.userTestCase['type'] + ') :: WrapUserTestCase\n')
self.outputFile.write(' procedure(userTestMethod), nopass, pointer :: testMethodPtr\n')
self.outputFile.write(' contains\n')
self.outputFile.write(' procedure :: runMethod\n')
self.outputFile.write(' end type WrapUserTestCase\n\n')
self.outputFile.write(' abstract interface\n')
self.outputFile.write(' subroutine userTestMethod(this)\n')
if self.userModuleName:
self.outputFile.write(' use ' + self.userModuleName + '\n')
if 'type' in self.userTestCase:
self.outputFile.write(' class (' + self.userTestCase['type'] + '), intent(inout) :: this\n')
self.outputFile.write(' end subroutine userTestMethod\n')
self.outputFile.write(' end interface\n\n')
def printRunMethod(self):
self.outputFile.write(' subroutine runMethod(this)\n')
self.outputFile.write(' class (WrapUserTestCase), intent(inout) :: this\n\n')
self.outputFile.write(' call this%testMethodPtr(this)\n')
self.outputFile.write(' end subroutine runMethod\n\n')
def printParameterHeader(self, type):
self.outputFile.write(' type (' + type + '), allocatable :: testParameters(:)\n')
self.outputFile.write(' type (' + type + ') :: testParameter\n')
self.outputFile.write(' integer :: iParam \n')
self.outputFile.write(' integer, allocatable :: cases(:) \n')
self.outputFile.write(' \n')
def printMakeSuite(self):
self.outputFile.write('function ' + self.suiteName + '() result(suite)\n')
self.outputFile.write(' use pFUnit_mod\n')
if (self.userModuleName): self.outputFile.write(' use ' + self.userModuleName + '\n')
self.outputFile.write(' use '+ self.wrapModuleName + '\n')
self.outputFile.write(' type (TestSuite) :: suite\n\n')
if not self.userModuleName:
for testMethod in self.userTestMethods:
if ('ifdef' in testMethod):
self.outputFile.write('#ifdef ' + testMethod['ifdef'] + '\n')
elif ('ifndef' in testMethod):
self.outputFile.write('#ifndef ' + testMethod['ifndef'] + '\n')
self.outputFile.write(' external ' + testMethod['name'] + '\n')
if ('ifdef' in testMethod or 'ifndef' in testMethod):
self.outputFile.write('#endif\n')
self.outputFile.write('\n')
if 'setUp' in self.userTestCase:
self.outputFile.write(' external ' + self.userTestCase['setUp'] + '\n')
if 'tearDown' in self.userTestCase:
self.outputFile.write(' external ' + self.userTestCase['tearDown'] + '\n')
self.outputFile.write('\n')
if 'testParameterType' in self.userTestCase:
type = self.userTestCase['testParameterType']
self.printParameterHeader(type)
self.outputFile.write(" suite = newTestSuite('" + self.suiteName + "')\n\n")
for testMethod in self.userTestMethods:
if ('ifdef' in testMethod):
self.outputFile.write('#ifdef ' + testMethod['ifdef'] + '\n')
elif ('ifndef' in testMethod):
self.outputFile.write('#ifndef ' + testMethod['ifndef'] + '\n')
if 'type' in self.userTestCase:
self.addUserTestMethod(testMethod)
else:
if 'npRequests' in testMethod:
self.addMpiTestMethod(testMethod)
else: # vanilla
self.addSimpleTestMethod(testMethod)
self.outputFile.write('\n')
if ('ifdef' in testMethod or 'ifndef' in testMethod):
self.outputFile.write('#endif\n')
self.outputFile.write('\nend function ' + self.suiteName + '\n\n')
def addSimpleTestMethod(self, testMethod):
args = "'" + testMethod['name'] + "', " + testMethod['name']
if 'setUp' in testMethod:
args += ', ' + testMethod['setUp']
elif 'setUp' in self.userTestCase:
args += ', ' + self.userTestCase['setUp']
if 'tearDown' in testMethod:
args += ', ' + testMethod['tearDown']
elif 'tearDown' in self.userTestCase:
args += ', ' + self.userTestCase['tearDown']
if 'type' in testMethod:
type = testMethod['type']
else:
type = 'newTestMethod'
self.outputFile.write(' call suite%addTest(' + type + '(' + args + '))\n')
def addMpiTestMethod(self, testMethod):
for npes in testMethod['npRequests']:
args = "'" + testMethod['name'] + "', " + testMethod['name'] + ", " + str(npes)
if 'setUp' in testMethod:
args += ', ' + testMethod['setUp']
elif 'setUp' in self.userTestCase:
args += ', ' + self.userTestCase['setUp']
if 'tearDown' in testMethod:
args += ', ' + testMethod['tearDown']
elif 'tearDown' in self.userTestCase:
args += ', ' + self.userTestCase['tearDown']
if 'type' in testMethod:
type = testMethod['type']
else:
type = 'newMpiTestMethod'
self.outputFile.write(' call suite%addTest(' + type + '(' + args + '))\n')
def addUserTestMethod(self, testMethod):
args = "'" + testMethod['name'] + "', " + testMethod['name']
if 'npRequests' in testMethod:
npRequests = testMethod['npRequests']
else:
if 'npRequests' in self.userTestCase:
npRequests = self.userTestCase['npRequests']
else:
npRequests = [1]
if 'cases' in testMethod:
cases = testMethod['cases']
elif 'cases' in self.userTestCase:
cases = self.userTestCase['cases']
testParameterArg = '' # unless
if 'cases' in locals():
testParameterArg = ', testParameter'
self.outputFile.write(' cases = ' + testMethod['cases'] + '\n')
self.outputFile.write(' testParameters = [(' +
self.userTestCase['testParameterConstructor'] +
'(cases(iCase)), iCase = 1, size(cases))]\n\n')
if 'testParameterType' in self.userTestCase:
if 'testParameters' in testMethod:
testParameters = testMethod['testParameters']
elif 'testParameters' in self.userTestCase:
testParameters = self.userTestCase['testParameters']
isMpiTestCase = 'npRequests' in self.userTestCase
isMpiTestCase = isMpiTestCase or any('npRequests' in testMethod for testMethod in self.userTestMethods)
if 'testParameters' in locals():
testParameterArg = ', testParameter'
self.outputFile.write(' testParameters = ' + testParameters + '\n\n')
elif isMpiTestCase:
testParameterArg = ', testParameter'
for npes in npRequests:
if 'testParameters' in locals() or 'cases' in locals():
self.outputFile.write(' do iParam = 1, size(testParameters)\n')
self.outputFile.write(' testParameter = testParameters(iParam)\n')
if isMpiTestCase:
self.outputFile.write(' call testParameter%setNumProcessesRequested(' + str(npes) + ')\n')
self.outputFile.write(' call suite%addTest(makeCustomTest(' +
args + testParameterArg + '))\n')
if 'cases' in locals() or 'testParameters' in locals():
self.outputFile.write(' end do\n')
def printMakeCustomTest(self, isMpiTestCase):
args = 'methodName, testMethod'
declareArgs = '#ifdef INTEL_13\n'
declareArgs += ' use pfunit_mod, only: testCase\n'
declareArgs += '#endif\n'
declareArgs += ' type (WrapUserTestCase) :: aTest\n'
declareArgs += '#ifdef INTEL_13\n'
declareArgs += ' target :: aTest\n'
declareArgs += ' class (WrapUserTestCase), pointer :: p\n'
declareArgs += '#endif\n'
declareArgs += ' character(len=*), intent(in) :: methodName\n'
declareArgs += ' procedure(userTestMethod) :: testMethod\n'
if 'testParameterType' in self.userTestCase:
args += ', testParameter'
declareArgs += ' type (' + self.userTestCase['testParameterType'] + '), intent(in) :: testParameter\n'
self.outputFile.write(' function makeCustomTest(' + args + ') result(aTest)\n')
self.outputFile.write(declareArgs)
if 'constructor' in self.userTestCase:
if 'testParameterType' in self.userTestCase:
constructor = self.userTestCase['constructor'] + '(testParameter)'
else:
constructor = self.userTestCase['constructor'] + '()'
self.outputFile.write(' aTest%' + self.userTestCase['type'] + ' = ' + constructor + '\n\n')
self.outputFile.write(' aTest%testMethodPtr => testMethod\n')
self.outputFile.write('#ifdef INTEL_13\n')
self.outputFile.write(' p => aTest\n')
self.outputFile.write(' call p%setName(methodName)\n')
self.outputFile.write('#else\n')
self.outputFile.write(' call aTest%setName(methodName)\n')
self.outputFile.write('#endif\n')
if 'testParameterType' in self.userTestCase:
self.outputFile.write(' call aTest%setTestParameter(testParameter)\n')
self.outputFile.write(' end function makeCustomTest\n')
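    # The INTEL_13 blocks written above are a generated-code workaround: the
    # guards suggest Intel Fortran 13 needed setName() routed through a class
    # pointer ('p => aTest; call p%setName(...)') instead of being invoked
    # directly on the function result.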
def makeWrapperModule(self):
# ! Start here
self.printHeader()
if 'type' in self.userTestCase:
self.printWrapUserTestCase()
self.outputFile.write('contains\n\n')
if 'type' in self.userTestCase:
self.printRunMethod()
if 'type' in self.userTestCase:
isMpiTestCase = 'npRequests' in self.userTestCase
isMpiTestCase = isMpiTestCase or any('npRequests' in testMethod for testMethod in self.userTestMethods)
if isMpiTestCase and not 'testParameterType' in self.userTestCase:
self.userTestCase['testParameterType'] = 'MpiTestParameter'
self.printMakeCustomTest(isMpiTestCase)
self.printTail()
self.printMakeSuite()
def final(self):
self.inputFile.close()
self.outputFile.close()
if __name__ == "__main__":
import sys
print("Processing file", sys.argv[1])
p = Parser(sys.argv[1], sys.argv[2])
p.run()
p.final()
print(" ... Done. Results in", sys.argv[2])
| LungNoodle/lungsim | tests/pFUnit-3.2.9/bin/pFUnitParser.py | Python | apache-2.0 | 34,426 | 0.011735 |
"""
Copyright 2008-2016 Free Software Foundation, Inc.
This file is part of GNU Radio
GNU Radio Companion is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
GNU Radio Companion is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
"""
import os
import sys
from . import ParseXML, Messages, Constants
from .Config import Config
from .Element import Element
from .generator import Generator
from .FlowGraph import FlowGraph
from .Connection import Connection
from .Block import Block
from .Port import Port
from .Param import Param
from .utils import odict, extract_docs
class Platform(Element):
Config = Config
Generator = Generator
FlowGraph = FlowGraph
Connection = Connection
Block = Block
Port = Port
Param = Param
is_platform = True
def __init__(self, *args, **kwargs):
""" Make a platform for GNU Radio """
Element.__init__(self)
self.config = self.Config(*args, **kwargs)
self.block_docstrings = {}
self.block_docstrings_loaded_callback = lambda: None # dummy to be replaced by BlockTreeWindow
self._docstring_extractor = extract_docs.SubprocessLoader(
callback_query_result=self._save_docstring_extraction_result,
callback_finished=lambda: self.block_docstrings_loaded_callback()
)
# Create a dummy flow graph for the blocks
self._flow_graph = Element(self)
self._flow_graph.connections = []
self.blocks = odict()
self._blocks_n = odict()
self._block_categories = {}
self.domains = {}
self.connection_templates = {}
self._auto_hier_block_generate_chain = set()
self.build_block_library()
def __str__(self):
return 'Platform - {}({})'.format(self.config.key, self.config.name)
@staticmethod
def find_file_in_paths(filename, paths, cwd):
"""Checks the provided paths relative to cwd for a certain filename"""
if not os.path.isdir(cwd):
cwd = os.path.dirname(cwd)
if isinstance(paths, str):
paths = (p for p in paths.split(':') if p)
for path in paths:
path = os.path.expanduser(path)
if not os.path.isabs(path):
path = os.path.normpath(os.path.join(cwd, path))
file_path = os.path.join(path, filename)
if os.path.exists(os.path.normpath(file_path)):
return file_path
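    # Illustrative lookup (hypothetical paths): with cwd='/home/u/flow.grc'
    # and paths='~/blocks:./xml', find_file_in_paths('foo.xml', paths, cwd)
    # probes '/home/u/blocks/foo.xml' then '/home/u/xml/foo.xml', returning
    # the first existing hit and None (implicitly) otherwise.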
def load_and_generate_flow_graph(self, file_path):
"""Loads a flow graph from file and generates it"""
Messages.set_indent(len(self._auto_hier_block_generate_chain))
Messages.send('>>> Loading: %r\n' % file_path)
if file_path in self._auto_hier_block_generate_chain:
Messages.send(' >>> Warning: cyclic hier_block dependency\n')
return False
self._auto_hier_block_generate_chain.add(file_path)
try:
flow_graph = self.get_new_flow_graph()
flow_graph.grc_file_path = file_path
            # Other, nested hier_blocks might be auto-loaded here
flow_graph.import_data(self.parse_flow_graph(file_path))
flow_graph.rewrite()
flow_graph.validate()
if not flow_graph.is_valid():
raise Exception('Flowgraph invalid')
if not flow_graph.get_option('generate_options').startswith('hb'):
raise Exception('Not a hier block')
except Exception as e:
Messages.send('>>> Load Error: {}: {}\n'.format(file_path, str(e)))
return False
finally:
self._auto_hier_block_generate_chain.discard(file_path)
Messages.set_indent(len(self._auto_hier_block_generate_chain))
try:
Messages.send('>>> Generating: {}\n'.format(file_path))
generator = self.Generator(flow_graph, file_path)
generator.write()
except Exception as e:
Messages.send('>>> Generate Error: {}: {}\n'.format(file_path, str(e)))
return False
self.load_block_xml(generator.get_file_path_xml())
return True
def build_block_library(self):
"""load the blocks and block tree from the search paths"""
self._docstring_extractor.start()
# Reset
self.blocks.clear()
self._blocks_n.clear()
self._block_categories.clear()
self.domains.clear()
self.connection_templates.clear()
ParseXML.xml_failures.clear()
# Try to parse and load blocks
for xml_file in self.iter_xml_files():
try:
if xml_file.endswith("block_tree.xml"):
self.load_category_tree_xml(xml_file)
elif xml_file.endswith('domain.xml'):
self.load_domain_xml(xml_file)
else:
self.load_block_xml(xml_file)
except ParseXML.XMLSyntaxError as e:
# print >> sys.stderr, 'Warning: Block validation failed:\n\t%s\n\tIgnoring: %s' % (e, xml_file)
pass
except Exception as e:
print >> sys.stderr, 'Warning: XML parsing failed:\n\t%r\n\tIgnoring: %s' % (e, xml_file)
# Add blocks to block tree
for key, block in self.blocks.iteritems():
category = self._block_categories.get(key, block.category)
# Blocks with empty categories are hidden
if not category:
continue
root = category[0]
if root.startswith('[') and root.endswith(']'):
category[0] = root[1:-1]
else:
category.insert(0, Constants.DEFAULT_BLOCK_MODULE_NAME)
block.category = category
self._docstring_extractor.finish()
# self._docstring_extractor.wait()
def iter_xml_files(self):
"""Iterator for block descriptions and category trees"""
for block_path in self.config.block_paths:
if os.path.isfile(block_path):
yield block_path
elif os.path.isdir(block_path):
for dirpath, dirnames, filenames in os.walk(block_path):
for filename in sorted(filter(lambda f: f.endswith('.xml'), filenames)):
yield os.path.join(dirpath, filename)
def load_block_xml(self, xml_file):
"""Load block description from xml file"""
# Validate and import
ParseXML.validate_dtd(xml_file, Constants.BLOCK_DTD)
n = ParseXML.from_file(xml_file).find('block')
n['block_wrapper_path'] = xml_file # inject block wrapper path
# Get block instance and add it to the list of blocks
block = self.Block(self._flow_graph, n)
key = block.get_key()
if key in self.blocks:
print >> sys.stderr, 'Warning: Block with key "{}" already exists.\n\tIgnoring: {}'.format(key, xml_file)
else: # Store the block
self.blocks[key] = block
self._blocks_n[key] = n
self._docstring_extractor.query(
block.get_key(),
block.get_imports(raw=True),
block.get_make(raw=True)
)
def load_category_tree_xml(self, xml_file):
"""Validate and parse category tree file and add it to list"""
ParseXML.validate_dtd(xml_file, Constants.BLOCK_TREE_DTD)
xml = ParseXML.from_file(xml_file)
path = []
def load_category(cat_n):
path.append(cat_n.find('name').strip())
for block_key in cat_n.findall('block'):
if block_key not in self._block_categories:
self._block_categories[block_key] = list(path)
for sub_cat_n in cat_n.findall('cat'):
load_category(sub_cat_n)
path.pop()
load_category(xml.find('cat'))
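    # Assumed *block_tree.xml shape, inferred from the traversal above:
    # nested <cat> elements, each holding a <name> plus any number of
    # <block> keys, e.g.
    #     <cat><name>Core</name>
    #       <cat><name>Math</name><block>blocks_add_xx</block></cat>
    #     </cat>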
def load_domain_xml(self, xml_file):
"""Load a domain properties and connection templates from XML"""
ParseXML.validate_dtd(xml_file, Constants.DOMAIN_DTD)
n = ParseXML.from_file(xml_file).find('domain')
key = n.find('key')
if not key:
            print >> sys.stderr, 'Warning: Domain with empty key.\n\tIgnoring: {}'.format(xml_file)
return
if key in self.domains: # test against repeated keys
print >> sys.stderr, 'Warning: Domain with key "{}" already exists.\n\tIgnoring: {}'.format(key, xml_file)
return
#to_bool = lambda s, d: d if s is None else s.lower() not in ('false', 'off', '0', '')
def to_bool(s, d):
if s is not None:
return s.lower() not in ('false', 'off', '0', '')
return d
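        # to_bool('Off', True) -> False; to_bool(None, True) -> True: only
        # 'false'/'off'/'0'/'' (case-insensitive) count as False, and the
        # default d applies when the tag is absent.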
color = n.find('color') or ''
try:
import gtk # ugly but handy
gtk.gdk.color_parse(color)
except (ValueError, ImportError):
if color: # no color is okay, default set in GUI
print >> sys.stderr, 'Warning: Can\'t parse color code "{}" for domain "{}" '.format(color, key)
color = None
self.domains[key] = dict(
name=n.find('name') or key,
multiple_sinks=to_bool(n.find('multiple_sinks'), True),
multiple_sources=to_bool(n.find('multiple_sources'), False),
color=color
)
for connection_n in n.findall('connection'):
key = (connection_n.find('source_domain'), connection_n.find('sink_domain'))
if not all(key):
print >> sys.stderr, 'Warning: Empty domain key(s) in connection template.\n\t{}'.format(xml_file)
elif key in self.connection_templates:
print >> sys.stderr, 'Warning: Connection template "{}" already exists.\n\t{}'.format(key, xml_file)
else:
self.connection_templates[key] = connection_n.find('make') or ''
def _save_docstring_extraction_result(self, key, docstrings):
docs = {}
for match, docstring in docstrings.iteritems():
if not docstring or match.endswith('_sptr'):
continue
docstring = docstring.replace('\n\n', '\n').strip()
docs[match] = docstring
self.block_docstrings[key] = docs
##############################################
# Access
##############################################
def parse_flow_graph(self, flow_graph_file):
"""
Parse a saved flow graph file.
Ensure that the file exists, and passes the dtd check.
Args:
flow_graph_file: the flow graph file
Returns:
nested data
@throws exception if the validation fails
"""
flow_graph_file = flow_graph_file or self.config.default_flow_graph
open(flow_graph_file, 'r').close() # Test open
ParseXML.validate_dtd(flow_graph_file, Constants.FLOW_GRAPH_DTD)
return ParseXML.from_file(flow_graph_file)
def get_new_flow_graph(self):
return self.FlowGraph(platform=self)
def get_blocks(self):
return self.blocks.values()
def get_new_block(self, flow_graph, key):
return self.Block(flow_graph, n=self._blocks_n[key])
def get_colors(self):
return [(name, color) for name, key, sizeof, color in Constants.CORE_TYPES]
| stwunsch/gnuradio | grc/core/Platform.py | Python | gpl-3.0 | 11,876 | 0.001347 |
#CHIPSEC: Platform Security Assessment Framework
#Copyright (c) 2010-2016, Intel Corporation
#
#This program is free software; you can redistribute it and/or
#modify it under the terms of the GNU General Public License
#as published by the Free Software Foundation; Version 2.
#
#This program is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#GNU General Public License for more details.
#
#You should have received a copy of the GNU General Public License
#along with this program; if not, write to the Free Software
#Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
#Contact information:
#chipsec@intel.com
#
## \defgroup config Platform Configuration
# chipsec/cfg/\<platform\>.py - configuration for a specific \<platform\>
| chipsecintel/chipsec | source/tool/chipsec/cfg/__init__.py | Python | gpl-2.0 | 894 | 0.020134 |
# -*- coding: utf-8 -*-
"""Tests for distutils.archive_util."""
import unittest
import os
import sys
import tarfile
from os.path import splitdrive
import warnings
from distutils import archive_util
from distutils.archive_util import (check_archive_formats, make_tarball,
make_zipfile, make_archive,
ARCHIVE_FORMATS)
from distutils.spawn import find_executable, spawn
from distutils.tests import support
from test.support import check_warnings, run_unittest, patch
try:
import grp
import pwd
UID_GID_SUPPORT = True
except ImportError:
UID_GID_SUPPORT = False
try:
import zipfile
ZIP_SUPPORT = True
except ImportError:
ZIP_SUPPORT = find_executable('zip')
try:
import zlib
ZLIB_SUPPORT = True
except ImportError:
ZLIB_SUPPORT = False
def can_fs_encode(filename):
"""
Return True if the filename can be saved in the file system.
"""
if os.path.supports_unicode_filenames:
return True
try:
filename.encode(sys.getfilesystemencoding())
except UnicodeEncodeError:
return False
return True
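# Quick sanity check (illustrative): on a UTF-8 filesystem
# can_fs_encode('årchiv') returns True, while on an ASCII-only filesystem
# encode() raises UnicodeEncodeError and the helper returns False.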
class ArchiveUtilTestCase(support.TempdirManager,
support.LoggingSilencer,
unittest.TestCase):
@unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run')
def test_make_tarball(self):
self._make_tarball('archive')
@unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run')
@unittest.skipUnless(can_fs_encode('årchiv'),
'File system cannot handle this filename')
def test_make_tarball_latin1(self):
"""
Mirror test_make_tarball, except filename contains latin characters.
"""
self._make_tarball('årchiv') # note this isn't a real word
@unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run')
@unittest.skipUnless(can_fs_encode('のアーカイブ'),
'File system cannot handle this filename')
def test_make_tarball_extended(self):
"""
Mirror test_make_tarball, except filename contains extended
characters outside the latin charset.
"""
self._make_tarball('のアーカイブ') # japanese for archive
def _make_tarball(self, target_name):
# creating something to tar
tmpdir = self.mkdtemp()
self.write_file([tmpdir, 'file1'], 'xxx')
self.write_file([tmpdir, 'file2'], 'xxx')
os.mkdir(os.path.join(tmpdir, 'sub'))
self.write_file([tmpdir, 'sub', 'file3'], 'xxx')
tmpdir2 = self.mkdtemp()
        if splitdrive(tmpdir)[0] != splitdrive(tmpdir2)[0]:
            raise unittest.SkipTest("source and target should be on same drive")
base_name = os.path.join(tmpdir2, target_name)
# working with relative paths to avoid tar warnings
old_dir = os.getcwd()
os.chdir(tmpdir)
try:
make_tarball(splitdrive(base_name)[1], '.')
finally:
os.chdir(old_dir)
# check if the compressed tarball was created
tarball = base_name + '.tar.gz'
self.assertTrue(os.path.exists(tarball))
# trying an uncompressed one
base_name = os.path.join(tmpdir2, target_name)
old_dir = os.getcwd()
os.chdir(tmpdir)
try:
make_tarball(splitdrive(base_name)[1], '.', compress=None)
finally:
os.chdir(old_dir)
tarball = base_name + '.tar'
self.assertTrue(os.path.exists(tarball))
def _tarinfo(self, path):
tar = tarfile.open(path)
try:
names = tar.getnames()
names.sort()
return tuple(names)
finally:
tar.close()
def _create_files(self):
# creating something to tar
tmpdir = self.mkdtemp()
dist = os.path.join(tmpdir, 'dist')
os.mkdir(dist)
self.write_file([dist, 'file1'], 'xxx')
self.write_file([dist, 'file2'], 'xxx')
os.mkdir(os.path.join(dist, 'sub'))
self.write_file([dist, 'sub', 'file3'], 'xxx')
os.mkdir(os.path.join(dist, 'sub2'))
tmpdir2 = self.mkdtemp()
base_name = os.path.join(tmpdir2, 'archive')
return tmpdir, tmpdir2, base_name
@unittest.skipUnless(find_executable('tar') and find_executable('gzip')
and ZLIB_SUPPORT,
'Need the tar, gzip and zlib command to run')
def test_tarfile_vs_tar(self):
tmpdir, tmpdir2, base_name = self._create_files()
old_dir = os.getcwd()
os.chdir(tmpdir)
try:
make_tarball(base_name, 'dist')
finally:
os.chdir(old_dir)
# check if the compressed tarball was created
tarball = base_name + '.tar.gz'
self.assertTrue(os.path.exists(tarball))
# now create another tarball using `tar`
tarball2 = os.path.join(tmpdir, 'archive2.tar.gz')
tar_cmd = ['tar', '-cf', 'archive2.tar', 'dist']
gzip_cmd = ['gzip', '-f9', 'archive2.tar']
old_dir = os.getcwd()
os.chdir(tmpdir)
try:
spawn(tar_cmd)
spawn(gzip_cmd)
finally:
os.chdir(old_dir)
self.assertTrue(os.path.exists(tarball2))
# let's compare both tarballs
self.assertEqual(self._tarinfo(tarball), self._tarinfo(tarball2))
# trying an uncompressed one
base_name = os.path.join(tmpdir2, 'archive')
old_dir = os.getcwd()
os.chdir(tmpdir)
try:
make_tarball(base_name, 'dist', compress=None)
finally:
os.chdir(old_dir)
tarball = base_name + '.tar'
self.assertTrue(os.path.exists(tarball))
# now for a dry_run
base_name = os.path.join(tmpdir2, 'archive')
old_dir = os.getcwd()
os.chdir(tmpdir)
try:
make_tarball(base_name, 'dist', compress=None, dry_run=True)
finally:
os.chdir(old_dir)
tarball = base_name + '.tar'
self.assertTrue(os.path.exists(tarball))
@unittest.skipUnless(find_executable('compress'),
'The compress program is required')
def test_compress_deprecated(self):
tmpdir, tmpdir2, base_name = self._create_files()
# using compress and testing the PendingDeprecationWarning
old_dir = os.getcwd()
os.chdir(tmpdir)
try:
with check_warnings() as w:
warnings.simplefilter("always")
make_tarball(base_name, 'dist', compress='compress')
finally:
os.chdir(old_dir)
tarball = base_name + '.tar.Z'
self.assertTrue(os.path.exists(tarball))
self.assertEqual(len(w.warnings), 1)
# same test with dry_run
os.remove(tarball)
old_dir = os.getcwd()
os.chdir(tmpdir)
try:
with check_warnings() as w:
warnings.simplefilter("always")
make_tarball(base_name, 'dist', compress='compress',
dry_run=True)
finally:
os.chdir(old_dir)
self.assertFalse(os.path.exists(tarball))
self.assertEqual(len(w.warnings), 1)
@unittest.skipUnless(ZIP_SUPPORT and ZLIB_SUPPORT,
'Need zip and zlib support to run')
def test_make_zipfile(self):
        # creating something to zip
tmpdir = self.mkdtemp()
self.write_file([tmpdir, 'file1'], 'xxx')
self.write_file([tmpdir, 'file2'], 'xxx')
tmpdir2 = self.mkdtemp()
base_name = os.path.join(tmpdir2, 'archive')
make_zipfile(base_name, tmpdir)
        # check if the zip file was created
tarball = base_name + '.zip'
self.assertTrue(os.path.exists(tarball))
@unittest.skipUnless(ZIP_SUPPORT, 'Need zip support to run')
def test_make_zipfile_no_zlib(self):
patch(self, archive_util.zipfile, 'zlib', None) # force zlib ImportError
called = []
zipfile_class = zipfile.ZipFile
def fake_zipfile(*a, **kw):
if kw.get('compression', None) == zipfile.ZIP_STORED:
called.append((a, kw))
return zipfile_class(*a, **kw)
patch(self, archive_util.zipfile, 'ZipFile', fake_zipfile)
        # create something to compress
tmpdir, tmpdir2, base_name = self._create_files()
make_zipfile(base_name, tmpdir)
tarball = base_name + '.zip'
self.assertEqual(called,
[((tarball, "w"), {'compression': zipfile.ZIP_STORED})])
self.assertTrue(os.path.exists(tarball))
def test_check_archive_formats(self):
self.assertEqual(check_archive_formats(['gztar', 'xxx', 'zip']),
'xxx')
self.assertEqual(check_archive_formats(['gztar', 'zip']), None)
def test_make_archive(self):
tmpdir = self.mkdtemp()
base_name = os.path.join(tmpdir, 'archive')
self.assertRaises(ValueError, make_archive, base_name, 'xxx')
def test_make_archive_cwd(self):
current_dir = os.getcwd()
def _breaks(*args, **kw):
raise RuntimeError()
ARCHIVE_FORMATS['xxx'] = (_breaks, [], 'xxx file')
try:
try:
make_archive('xxx', 'xxx', root_dir=self.mkdtemp())
            except RuntimeError:
                pass
self.assertEqual(os.getcwd(), current_dir)
finally:
del ARCHIVE_FORMATS['xxx']
def test_make_archive_owner_group(self):
# testing make_archive with owner and group, with various combinations
        # this works even if there's no gid/uid support
if UID_GID_SUPPORT:
group = grp.getgrgid(0)[0]
owner = pwd.getpwuid(0)[0]
else:
group = owner = 'root'
base_dir, root_dir, base_name = self._create_files()
        base_name = os.path.join(self.mkdtemp(), 'archive')
res = make_archive(base_name, 'zip', root_dir, base_dir, owner=owner,
group=group)
self.assertTrue(os.path.exists(res))
res = make_archive(base_name, 'zip', root_dir, base_dir)
self.assertTrue(os.path.exists(res))
res = make_archive(base_name, 'tar', root_dir, base_dir,
owner=owner, group=group)
self.assertTrue(os.path.exists(res))
res = make_archive(base_name, 'tar', root_dir, base_dir,
owner='kjhkjhkjg', group='oihohoh')
self.assertTrue(os.path.exists(res))
@unittest.skipUnless(ZLIB_SUPPORT, "Requires zlib")
@unittest.skipUnless(UID_GID_SUPPORT, "Requires grp and pwd support")
def test_tarfile_root_owner(self):
tmpdir, tmpdir2, base_name = self._create_files()
old_dir = os.getcwd()
os.chdir(tmpdir)
group = grp.getgrgid(0)[0]
owner = pwd.getpwuid(0)[0]
try:
archive_name = make_tarball(base_name, 'dist', compress=None,
owner=owner, group=group)
finally:
os.chdir(old_dir)
# check if the compressed tarball was created
self.assertTrue(os.path.exists(archive_name))
# now checks the rights
archive = tarfile.open(archive_name)
try:
for member in archive.getmembers():
self.assertEqual(member.uid, 0)
self.assertEqual(member.gid, 0)
finally:
archive.close()
def test_suite():
return unittest.makeSuite(ArchiveUtilTestCase)
if __name__ == "__main__":
run_unittest(test_suite())
| ArcherSys/ArcherSys | Lib/distutils/tests/test_archive_util.py | Python | mit | 35,333 | 0.002723 |
from tests import unittest
from tests import mock
from unbound_ec2 import server
from tests import attrs
class TestServer(server.Server):
HANDLE_FORWARD_RESULT = 'dummy_handle_forward'
HANDLE_PASS_RESULT = True
DNSMSG = mock.MagicMock()
def handle_request(self, _id, event, qstate, qdata, request_type):
return self.HANDLE_FORWARD_RESULT
def new_dns_msg(self, qname):
return self.DNSMSG
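# TestServer stubs the two abstract hooks so the cases below can assert on
# sentinel values (HANDLE_FORWARD_RESULT, DNSMSG) instead of exercising real
# DNS handling; the operate() logic under test runs unmodified.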
class TestAbstractServer(unittest.TestCase):
def setUp(self):
server.log_info = mock.Mock()
lookup_mock = mock.MagicMock()
self.zone = '.bogus.tld'
self.reverse_zone = '127.in-addr.arpa'
self.ttl = 'bogus_ttl'
self.ip_order = 'bogus_ip_order'
self.forwarded_zones = ''
self.srv = TestServer(self.zone, self.reverse_zone, self.ttl, lookup_mock, self.ip_order, self.forwarded_zones)
def tearDown(self):
self.srv = None
def test_operate_event_new(self):
id = 'bogus_id'
event = attrs['MODULE_EVENT_NEW']
qstate = mock.MagicMock()
qdata = mock.MagicMock()
qstate.qinfo.qname_str = "fqdn.not-bogus.tld"
self.assertTrue(self.srv.operate(id, event, qstate, qdata))
qstate.ext_state.__setitem__.assert_called_with(id, attrs['MODULE_WAIT_MODULE'])
def test_operate_event_pass(self):
id = 'bogus_id'
event = attrs['MODULE_EVENT_PASS']
qstate = mock.MagicMock()
qdata = mock.MagicMock()
qstate.qinfo.qname_str = "fqdn.not-bogus.tld"
self.assertTrue(self.srv.operate(id, event, qstate, qdata))
qstate.ext_state.__setitem__.assert_called_with(id, attrs['MODULE_WAIT_MODULE'])
def test_operate_event_moddone(self):
id = 'bogus_id'
event = attrs['MODULE_EVENT_MODDONE']
qstate = mock.MagicMock()
qdata = mock.MagicMock()
self.assertTrue(self.srv.operate(id, event, qstate, qdata))
qstate.ext_state.__setitem__.assert_called_with(id, attrs['MODULE_FINISHED'])
def test_operate_forward(self):
id = 'bogus_id'
event = attrs['MODULE_EVENT_NEW']
qstate = mock.MagicMock()
qstate.qinfo.qtype = attrs['RR_TYPE_A']
qstate.qinfo.qname_str = 'bogus-name%s.' % self.zone
qdata = mock.MagicMock()
self.assertEqual(self.srv.operate(id, event, qstate, qdata), TestServer.HANDLE_FORWARD_RESULT)
qstate.qinfo.qtype = attrs['RR_TYPE_ANY']
self.assertEqual(self.srv.operate(id, event, qstate, qdata), TestServer.HANDLE_FORWARD_RESULT)
def test_forwarded_zones(self):
server.log_info = mock.Mock()
lookup_mock = mock.MagicMock()
forwarded_zones = '.subdomain%s' % self.zone
self.srv2 = TestServer(self.zone, self.reverse_zone, self.ttl, lookup_mock, self.ip_order, forwarded_zones)
id = 'bogus_id'
event = attrs['MODULE_EVENT_NEW']
qstate = mock.MagicMock()
qstate.qinfo.qtype = attrs['RR_TYPE_A']
        qstate.qinfo.qname_str = 'bogus-name%s' % forwarded_zones
        qdata = mock.MagicMock()
        self.assertEqual(self.srv2.operate(id, event, qstate, qdata), TestServer.HANDLE_PASS_RESULT)
qstate.ext_state.__setitem__.assert_called_with(id, attrs['MODULE_WAIT_MODULE'])
class TestAuthoritativeServer(unittest.TestCase):
def setUp(self):
server.log_info = mock.Mock()
lookup_mock = mock.MagicMock()
self.zone = '.bogus.tld'
self.reverse_zone = '127.in-addr.arpa'
self.ttl = 'bogus_ttl'
self.ip_order = 'bogus_ip_order'
self.forwarded_zones = ''
self.srv = server.Authoritative(self.zone, self.reverse_zone, self.ttl, lookup_mock, self.ip_order,
self.forwarded_zones)
def tearDown(self):
self.srv = None
def test_handle_forward(self):
id = 'bogus_id'
event = attrs['MODULE_EVENT_NEW']
qstate = mock.MagicMock()
qstate.qinfo.qtype = attrs['RR_TYPE_A']
qstate.qinfo.qname_str = 'bogus-name%s.' % self.zone
qdata = mock.MagicMock()
server.DNSMessage = mock.MagicMock()
self.assertTrue(self.srv.operate(id, event, qstate, qdata))
def test_handle_empty(self):
id = 'bogus_id'
event = attrs['MODULE_EVENT_NEW']
qstate = mock.MagicMock()
qstate.qinfo.qtype = attrs['RR_TYPE_TXT']
qstate.qinfo.qname_str = 'bogus-name%s.' % self.zone
qdata = mock.MagicMock()
server.DNSMessage = mock.MagicMock()
self.assertTrue(self.srv.operate(id, event, qstate, qdata))
class TestCachingServer(unittest.TestCase):
def setUp(self):
server.log_info = mock.Mock()
self.lookup_mock = mock.MagicMock()
self.zone = '.bogus.tld'
self.reverse_zone = '127.in-addr.arpa'
self.ttl = 88888881
self.ip_order = 'bogus_ip_order'
self.forwarded_zones = ''
self.srv = server.Caching(self.zone, self.reverse_zone, self.ttl, self.lookup_mock, self.ip_order,
self.forwarded_zones)
def tearDown(self):
self.srv = None
def test_handle_forward(self):
server.storeQueryInCache = mock.Mock()
server.DNSMessage = mock.MagicMock()
instances_mock = mock.MagicMock()
instances_mock.tags = {'Address': 'bogus_ip_address'}
self.lookup_mock.lookup.return_value = [instances_mock]
id = 'bogus_id'
event = attrs['MODULE_EVENT_NEW']
qstate = mock.MagicMock()
qstate.qinfo.qtype = attrs['RR_TYPE_A']
qstate.qinfo.qname_str = 'bogus-name%s.' % self.zone
qdata = mock.MagicMock()
self.assertTrue(self.srv.operate(id, event, qstate, qdata))
qstate.ext_state.__setitem__.assert_called_with(id, attrs['MODULE_FINISHED'])
self.assertEqual(qstate.return_msg.rep.security, 2)
server.DNSMessage.return_value.answer.append.assert_called_with(
'%s %d IN A %s' % (qstate.qinfo.qname_str, self.ttl, 'bogus_ip_address'))
| unibet/unbound-ec2 | tests/unit/test_server.py | Python | isc | 6,080 | 0.002138 |
__author__ = 'jmoran'
from Asteroids import Object
class MovingObject(Object):
def __init__(self, window, game, init_point, slope):
Object.__init__(self, window, game)
self.point = init_point
self.slope = slope
| waddedMeat/asteroids-ish | Asteroids/MovingObject.py | Python | mit | 242 | 0 |
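# Computes per-attribute accuracy for CelebA multi-label predictions: each
# saved feature vector is thresholded at 0 to a +1/-1 label and compared
# against the ground-truth labels in test.txt (40 attributes per image).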
import os
import numpy as np
import sys
label_file = open('/home/hypan/data/celebA/test.txt', 'r')
lines = label_file.readlines()
label_file.close()
acc = np.zeros(40)
cou = 0
for line in lines:
info = line.strip('\r\n').split()
name = info[0].split('.')[0]
gt_labels = info[1: ]
feat_path = '/home/hypan/data/celebA/result/' + sys.argv[1] + '/test_feature/' + name + '.npy'
if not os.path.exists(feat_path):
        print '{} has no predicted feature.'.format(name)
        continue
    pd_labels = np.load(feat_path)
cnt = len(pd_labels)
for i in range(cnt):
gt_label = int(gt_labels[i])
pd_label = pd_labels[i]
if pd_label >= 0:
pd_label = 1
else:
pd_label = -1
if gt_label == pd_label:
acc[i] += 1
cou += 1
for i in range(40):
print i, acc[i] * 1.0 / cou
| last-one/tools | caffe/result/celeba_multilabel_acc.py | Python | bsd-2-clause | 857 | 0.002334 |
from __future__ import absolute_import, print_function
import petname
import six
import re
from bitfield import BitField
from uuid import uuid4
from django.conf import settings
from django.core.urlresolvers import reverse
from django.db import models
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from six.moves.urllib.parse import urlparse
from sentry import options, features
from sentry.db.models import (
Model,
BaseManager,
BoundedPositiveIntegerField,
FlexibleForeignKey,
JSONField,
sane_repr,
)
from sentry.tasks.relay import schedule_update_config_cache
_uuid4_re = re.compile(r"^[a-f0-9]{32}$")
# TODO(dcramer): pull in enum library
class ProjectKeyStatus(object):
ACTIVE = 0
INACTIVE = 1
class ProjectKeyManager(BaseManager):
def post_save(self, instance, **kwargs):
schedule_update_config_cache(
project_id=instance.project_id, generate=True, update_reason="projectkey.post_save"
)
def post_delete(self, instance, **kwargs):
schedule_update_config_cache(
project_id=instance.project_id, generate=True, update_reason="projectkey.post_delete"
)
class ProjectKey(Model):
__core__ = True
project = FlexibleForeignKey("sentry.Project", related_name="key_set")
label = models.CharField(max_length=64, blank=True, null=True)
public_key = models.CharField(max_length=32, unique=True, null=True)
secret_key = models.CharField(max_length=32, unique=True, null=True)
roles = BitField(
flags=(
# access to post events to the store endpoint
(u"store", u"Event API access"),
# read/write access to rest API
(u"api", u"Web API access"),
),
default=["store"],
)
status = BoundedPositiveIntegerField(
default=0,
choices=(
(ProjectKeyStatus.ACTIVE, _("Active")),
(ProjectKeyStatus.INACTIVE, _("Inactive")),
),
db_index=True,
)
date_added = models.DateTimeField(default=timezone.now, null=True)
rate_limit_count = BoundedPositiveIntegerField(null=True)
rate_limit_window = BoundedPositiveIntegerField(null=True)
objects = ProjectKeyManager(
cache_fields=("public_key", "secret_key"),
# store projectkeys in memcached for longer than other models,
# specifically to make the relay_projectconfig endpoint faster.
cache_ttl=60 * 30,
)
data = JSONField()
# support legacy project keys in API
scopes = (
"project:read",
"project:write",
"project:admin",
"project:releases",
"event:read",
"event:write",
"event:admin",
)
class Meta:
app_label = "sentry"
db_table = "sentry_projectkey"
__repr__ = sane_repr("project_id", "public_key")
def __unicode__(self):
return six.text_type(self.public_key)
@classmethod
def generate_api_key(cls):
return uuid4().hex
@classmethod
def looks_like_api_key(cls, key):
return bool(_uuid4_re.match(key))
@classmethod
def from_dsn(cls, dsn):
urlparts = urlparse(dsn)
public_key = urlparts.username
project_id = urlparts.path.rsplit("/", 1)[-1]
try:
return ProjectKey.objects.get(public_key=public_key, project=project_id)
except ValueError:
# ValueError would come from a non-integer project_id,
# which is obviously a DoesNotExist. We catch and rethrow this
# so anything downstream expecting DoesNotExist works fine
raise ProjectKey.DoesNotExist("ProjectKey matching query does not exist.")
@classmethod
def get_default(cls, project):
return cls.objects.filter(
project=project,
roles=models.F("roles").bitor(cls.roles.store),
status=ProjectKeyStatus.ACTIVE,
).first()
@property
def is_active(self):
return self.status == ProjectKeyStatus.ACTIVE
@property
def rate_limit(self):
if self.rate_limit_count and self.rate_limit_window:
return (self.rate_limit_count, self.rate_limit_window)
return (0, 0)
def save(self, *args, **kwargs):
if not self.public_key:
self.public_key = ProjectKey.generate_api_key()
if not self.secret_key:
self.secret_key = ProjectKey.generate_api_key()
if not self.label:
self.label = petname.Generate(2, " ", letters=10).title()
super(ProjectKey, self).save(*args, **kwargs)
def get_dsn(self, domain=None, secure=True, public=False):
urlparts = urlparse(self.get_endpoint(public=public))
if not public:
key = "%s:%s" % (self.public_key, self.secret_key)
else:
key = self.public_key
# If we do not have a scheme or domain/hostname, dsn is never valid
if not urlparts.netloc or not urlparts.scheme:
return ""
return "%s://%s@%s/%s" % (
urlparts.scheme,
key,
urlparts.netloc + urlparts.path,
self.project_id,
)
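    # Example (illustrative values): with an endpoint of
    # "https://sentry.example.com", public key "abc" and secret key "def",
    # get_dsn(public=False) yields "https://abc:def@sentry.example.com/<project_id>"
    # and get_dsn(public=True) omits the ":def" secret part.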
@property
def organization_id(self):
return self.project.organization_id
@property
def organization(self):
return self.project.organization
@property
def dsn_private(self):
return self.get_dsn(public=False)
@property
def dsn_public(self):
return self.get_dsn(public=True)
@property
def csp_endpoint(self):
endpoint = self.get_endpoint()
return "%s/api/%s/csp-report/?sentry_key=%s" % (endpoint, self.project_id, self.public_key)
@property
def security_endpoint(self):
endpoint = self.get_endpoint()
return "%s/api/%s/security/?sentry_key=%s" % (endpoint, self.project_id, self.public_key)
@property
def minidump_endpoint(self):
endpoint = self.get_endpoint()
return "%s/api/%s/minidump/?sentry_key=%s" % (endpoint, self.project_id, self.public_key)
@property
def unreal_endpoint(self):
return "%s/api/%s/unreal/%s/" % (self.get_endpoint(), self.project_id, self.public_key)
@property
def js_sdk_loader_cdn_url(self):
if settings.JS_SDK_LOADER_CDN_URL:
return "%s%s.min.js" % (settings.JS_SDK_LOADER_CDN_URL, self.public_key)
else:
endpoint = self.get_endpoint()
return "%s%s" % (
endpoint,
reverse("sentry-js-sdk-loader", args=[self.public_key, ".min"]),
)
def get_endpoint(self, public=True):
if public:
endpoint = settings.SENTRY_PUBLIC_ENDPOINT or settings.SENTRY_ENDPOINT
else:
endpoint = settings.SENTRY_ENDPOINT
if not endpoint:
endpoint = options.get("system.url-prefix")
if features.has("organizations:org-subdomains", self.project.organization):
urlparts = urlparse(endpoint)
if urlparts.scheme and urlparts.netloc:
endpoint = "%s://%s.%s%s" % (
urlparts.scheme,
settings.SENTRY_ORG_SUBDOMAIN_TEMPLATE.format(
organization_id=self.project.organization_id
),
urlparts.netloc,
urlparts.path,
)
return endpoint
def get_allowed_origins(self):
from sentry.utils.http import get_origins
return get_origins(self.project)
def get_audit_log_data(self):
return {
"label": self.label,
"public_key": self.public_key,
"secret_key": self.secret_key,
"roles": int(self.roles),
"status": self.status,
"rate_limit_count": self.rate_limit_count,
"rate_limit_window": self.rate_limit_window,
}
def get_scopes(self):
return self.scopes
| beeftornado/sentry | src/sentry/models/projectkey.py | Python | bsd-3-clause | 8,014 | 0.001497 |
# -*- coding: utf-8 -*-
__author__ = 'eveliotc'
__license__ = 'See LICENSE'
import alfred
from alfred import Item
import sys
from subprocess import Popen, PIPE
def json_to_obj(x):
if isinstance(x, dict):
return type('X', (), {k: json_to_obj(v) for k, v in x.iteritems()})
else:
return x
def join_query(dic):
return ' '.join(dic)
def le_result(r, exit = True):
alfred.write(r)
if exit:
sys.exit()
def xml_result(r, exit = True):
if len(r) < 1:
empty_result(exit)
else:
le_result(alfred.xml(r), exit)
def empty_result(exit = True):
empty = Item(
attributes={'uid': alfred.uid('empty'), 'arg': ''},
title='Gradle Please',
subtitle=u':( Nothing found.',
icon=u'icon.png')
xml_result([empty], exit)
def apple_script(scpt, args=[]):
p = Popen(['osascript', '-'] + args, stdin=PIPE, stdout=PIPE, stderr=PIPE)
stdout, stderr = p.communicate(scpt)
return stdout
def tell_alfred(what):
apple_script('tell application "Alfred 2" to search "%s"' % what)
# TODO refactor gp.py to use this instead of dynamic obj
class Pom(object):
a = ''
g = ''
p = ''
latestVersion = ''
source = ''
@property
def id(self):
return self.g + ':' + self.a
def __repr__(self):
#notjson #justdebugginthings
return '{id:%s a:%s g:%s p:%s v:%s}' % (self.id, self.a, self.g, self.p, self.latestVersion)
| eveliotc/gradleplease-workflow | common.py | Python | apache-2.0 | 1,475 | 0.013559 |
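# Toy RSA key generation: p and q are built as k * pi * 2**400 + t, where pi
# is the product of the primes below 443 (a primorial) and t is coprime to
# pi, then checked with 40 rounds of Miller-Rabin. For illustration only:
# randrange() is not a cryptographically secure source of randomness.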
from random import randrange
import fractions
def get_primes(n):
numbers = set(range(n, 1, -1))
primes = []
while numbers:
p = numbers.pop()
primes.append(p)
numbers.difference_update(set(range(p*2, n+1, p)))
return primes
def egcd(a, b):
if a == 0:
return (b, 0, 1)
else:
g, y, x = egcd(b % a, a)
return (g, x - (b // a) * y, y)
def modinv(a, m):
g, x, y = egcd(a, m)
if g != 1:
raise Exception('modular inverse does not exist')
else:
return x % m
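# e.g. modinv(17, 3120) == 2753, since 17 * 2753 == 46801 == 15 * 3120 + 1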
def miller_rabin(n, k):
r, s = 0, n - 1
while s % 2 == 0:
r += 1
s //= 2
for _ in range(k):
a = randrange(2, n - 1)
x = pow(a, s, n)
if x == 1 or x == n - 1:
continue
for _ in range(r - 1):
x = pow(x, 2, n)
if x == n - 1:
break
else:
return False
return True
### main #################
primes = get_primes(443)
primes.sort()
del primes[0]
#print primes
pi = 1;
for x in primes:
pi *= x
print "pi=%X" % pi
while True:
kp = randrange(1, 2**12) + 2**12 + 2**13 + 2**14 + \
2**15 + 2**16 + 2**17 + 2**18 + 2**19
print "kp=%X" % kp
tp = 0
while fractions.gcd(tp, pi) != 1:
print "trying..."
tp = randrange(1, 2**399);
print "tp=%X" % tp
p = kp * pi * 2**400 + tp
print "p=%X" % p
print "bitlength(p)=", len(bin(p))-2
if miller_rabin(p, 40) == True:
break
while True:
kq = randrange(1, 2**12) + 2**12 + 2**13 + 2**14 + \
2**15 + 2**16 + 2**17 + 2**18 + 2**19
print "kq=%X" % kq
tq = 0
while fractions.gcd(tq, pi) != 1:
print "trying..."
tq = randrange(1, 2**399);
print "tq=%X" % tq
q = kq * pi * 2**400 + tq
print "q=%X" % q
print "bitlength(q)=", len(bin(q))-2
if miller_rabin(q, 40) == True:
break
print "p=%X" % p
print "q=%X" % q
n = p * q
print "n=%X" % n
print "bitlength(n)=", len(bin(n))-2
e = 2**16 + 1
print "e=%X" % e
#print "bitlength(e)=", len(bin(e))-2
d = modinv(e, (p-1)*(q-1))
print "d=%X" % d
#print "bitlength(d)=", len(bin(d))-2
m = 12354178254918274687189741234123412398461982374619827346981756309845712384198076
print "m=%X" % m
print "bitlength(m)=", len(bin(m))-2
c = pow(m, e, n)
print "c=%X" % c
print "bitlength(c)=", len(bin(c))-2
m2 = pow(c, d, n)
print "m2=%X" % m2
print "bitlength(m2)=", len(bin(m2))-2
| Qwaz/solved-hacking-problem | SharifCTF/2016/RSA-Keygen/generate-key.py | Python | gpl-2.0 | 2,346 | 0.020887 |
from django.utils.translation import ugettext_lazy as _
from crystal_dashboard.dashboards.crystal import dashboard
import horizon
class Controllers(horizon.Panel):
name = _("Controllers")
slug = 'controllers'
dashboard.CrystalController.register(Controllers)
| Crystal-SDS/dashboard | crystal_dashboard/dashboards/crystal/controllers/panel.py | Python | gpl-3.0 | 271 | 0 |
"""
Abstract base for a specific IP transports (TCP or UDP).
* It starts and stops a socket
* It handles callbacks for incoming frame service types
"""
from __future__ import annotations
from abc import ABC, abstractmethod
import asyncio
import logging
from typing import Callable, cast
from xknx.exceptions import CommunicationError
from xknx.knxip import HPAI, KNXIPFrame, KNXIPServiceType
TransportCallbackType = Callable[[KNXIPFrame, HPAI, "KNXIPTransport"], None]
knx_logger = logging.getLogger("xknx.knx")
class KNXIPTransport(ABC):
"""Abstract base class for KNX/IP transports."""
callbacks: list[KNXIPTransport.Callback]
local_hpai: HPAI
remote_addr: tuple[str, int]
transport: asyncio.BaseTransport | None
class Callback:
"""Callback class for handling callbacks for different 'KNX service types' of received packets."""
def __init__(
self,
callback: TransportCallbackType,
service_types: list[KNXIPServiceType] | None = None,
):
"""Initialize Callback class."""
self.callback = callback
self.service_types = service_types or []
def has_service(self, service_type: KNXIPServiceType) -> bool:
"""Test if callback is listening for given service type."""
return not self.service_types or service_type in self.service_types
def register_callback(
self,
callback: TransportCallbackType,
service_types: list[KNXIPServiceType] | None = None,
) -> KNXIPTransport.Callback:
"""Register callback."""
if service_types is None:
service_types = []
callb = KNXIPTransport.Callback(callback, service_types)
self.callbacks.append(callb)
return callb
def unregister_callback(self, callb: KNXIPTransport.Callback) -> None:
"""Unregister callback."""
self.callbacks.remove(callb)
def handle_knxipframe(self, knxipframe: KNXIPFrame, source: HPAI) -> None:
"""Handle KNXIP Frame and call all callbacks matching the service type ident."""
handled = False
for callback in self.callbacks:
if callback.has_service(knxipframe.header.service_type_ident):
callback.callback(knxipframe, source, self)
handled = True
if not handled:
knx_logger.debug(
"Unhandled: %s from: %s",
knxipframe.header.service_type_ident,
source,
)
@abstractmethod
async def connect(self) -> None:
"""Connect transport."""
@abstractmethod
def send(self, knxipframe: KNXIPFrame, addr: tuple[str, int] | None = None) -> None:
"""Send KNXIPFrame via transport."""
def getsockname(self) -> tuple[str, int]:
"""Return socket IP and port."""
if self.transport is None:
raise CommunicationError(
"No transport defined. Socket information not resolveable"
)
return cast(tuple[str, int], self.transport.get_extra_info("sockname"))
def getremote(self) -> str | None:
"""Return peername."""
return (
self.transport.get_extra_info("peername")
if self.transport is not None
else None
)
def stop(self) -> None:
"""Stop socket."""
if self.transport is not None:
self.transport.close()
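# Usage sketch (illustrative, not part of the library): a concrete transport
# only has to provide connect() and send(); callback registration and
# dispatch via handle_knxipframe() are inherited. The names below
# (EchoTransport, on_frame) are invented for the example.
#
#     class EchoTransport(KNXIPTransport):
#         def __init__(self) -> None:
#             self.callbacks = []
#             self.transport = None
#
#         async def connect(self) -> None:
#             ...  # open the socket here
#
#         def send(self, knxipframe, addr=None):
#             ...  # serialize knxipframe and write it to the socket
#
#     transport = EchoTransport()
#     transport.register_callback(on_frame)  # on_frame(frame, source, transport)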
| XKNX/xknx | xknx/io/transport/ip_transport.py | Python | mit | 3,449 | 0.00087 |
import re
from copy import copy
from random import randint
class Server(object):
def __init__(self, ip, port, hostname):
self.ip = ip
self.port = port
self.hostname = hostname
self.weight = 500
self.maxconn = None
def __cmp__(self, other):
if not isinstance(other, Server):
return -1
return cmp((self.ip, self.port, self.weight, self.maxconn), (other.ip, other.port, other.weight, other.maxconn))
def __hash__(self):
return hash((self.ip, self.port, self.weight, self.maxconn))
def __str__(self):
extra = []
if self.weight != 500:
extra.append("weight=%d" % self.weight)
if self.maxconn:
extra.append("maxconn=%d" % self.maxconn)
result = '%s:%s' % (self.ip, self.port)
if extra:
result += '(%s)' % ','.join(extra)
return result
def __repr__(self):
return 'Server(%s, %s, %s, %s)' % (repr(self.ip), repr(self.port), repr(self.weight), repr(self.maxconn))
def clone(self):
return copy(self)
def setWeight(self, weight):
clone = self.clone()
clone.weight = weight
return clone
def setMaxconn(self, maxconn):
clone = self.clone()
clone.maxconn = maxconn
return clone
class Service(object):
def __init__(self, name, source, port, protocol, application='binary', healthcheck=False, healthcheckurl='/', timeoutclient=None, timeoutserver=None):
self.name = name
self.source = source
self.port = port
self.protocol = protocol
self.application = application
self.healthcheck = healthcheck
self.healthcheckurl = healthcheckurl
self.timeoutclient = timeoutclient
self.timeoutserver = timeoutserver
self.servers = set()
self.slots = []
# Check if there's a port override
        match = re.search(r'.@(\d+)$', self.name)
if match:
self.name = self.name[0:-(len(match.group(1))+1)]
self.port = int(match.group(1))
def clone(self):
clone = Service(self.name, self.source, self.port, self.protocol, self.application, self.healthcheck, self.healthcheckurl, self.timeoutclient,
self.timeoutserver)
clone.servers = set(self.servers)
clone.slots = list(self.slots)
return clone
def __str__(self):
# Represent misc. service attributes as k=v pairs, but only if their value is not None
service_attributes = ['timeoutclient', 'timeoutserver']
service_options = ['%s=%s' % (attr, getattr(self, attr)) for attr in service_attributes if getattr(self, attr) is not None]
        # Only use healthcheckurl if healthcheck has a meaningful value
        if self.healthcheck:
            service_options.append('healthcheck=%s' % self.healthcheck)
            service_options.append('healthcheckurl=%s' % self.healthcheckurl)
return '%s:%s/%s%s -> [%s]' % (
self.name, self.port, self.application if self.application != 'binary' else self.protocol,
'(%s)' % ','.join(service_options) if service_options else '',
', '.join([str(s) for s in sorted(self.servers)]))
def __repr__(self):
return 'Service(%s, %s, %s, %s, %s)' % (repr(self.name), repr(self.port), repr(self.protocol), repr(self.application), repr(sorted(self.servers)))
def __cmp__(self, other):
if not isinstance(other, Service):
return -1
return cmp((self.name, self.port, self.protocol, self.servers), (other.name, other.port, other.protocol, other.servers))
def __hash__(self):
return hash((self.name, self.port, self.protocol, self.servers))
@property
def portname(self):
return re.sub('[^a-zA-Z0-9]', '_', str(self.port))
@property
def marathonpath(self):
ret = ''
for s in self.name.split('.'):
            if ret != '':
ret = s + '.' + ret
else:
ret = s
return ret
def update(self, other):
"""
        Returns a new, updated Service object
"""
clone = self.clone()
clone.name = other.name
clone.source = other.source
clone.port = other.port
clone.protocol = other.protocol
clone.timeoutclient = other.timeoutclient
clone.timeoutserver = other.timeoutserver
for server in clone.servers - other.servers:
clone._remove(server)
for server in other.servers - clone.servers:
clone._add(server)
return clone
def addServer(self, server):
clone = self.clone()
clone._add(server)
return clone
def setApplication(self, application):
clone = self.clone()
clone.application = application
return clone
def _add(self, server):
self.servers.add(server)
# Keep servers in the same index when they're added
for i in range(len(self.slots)):
if not self.slots[i]:
self.slots[i] = server
return
# Not present in list, just insert randomly
self.slots.insert(randint(0, len(self.slots)), server)
def _remove(self, server):
self.servers.remove(server)
# Set the server slot to None
for i in range(len(self.slots)):
if self.slots[i] == server:
del self.slots[i]
return
raise KeyError(str(server))
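# Usage sketch (illustrative values):
#
#     svc = Service('web.example', 'marathon', 80, 'tcp', application='http')
#     svc = svc.addServer(Server('10.0.0.1', 8080, 'host-1'))
#     print(svc)   # web.example:80/http -> [10.0.0.1:8080]
#
# Note that addServer() and update() return modified clones, so Service and
# Server behave as copy-on-write values.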
| meltwater/proxymatic | src/proxymatic/services.py | Python | mit | 5,559 | 0.002159 |
'''
Created on Mar 28, 2017
@author: J001684
'''
from math import hypot
class Vector:
    '''
    A simple 2-D Euclidean vector demonstrating the Python data model.
    '''
def __init__(self, x=0, y=0):
        '''
        Store the x and y components.
        '''
self.x = x
self.y = y
    def __repr__(self):
        return 'Vector({x}, {y})'.format(x=self.x, y=self.y)
    def __abs__(self):
        return hypot(self.x, self.y)
    def __bool__(self):
        return bool(abs(self))
    def __add__(self, other):
        x = self.x + other.x
        y = self.y + other.y
        return Vector(x, y)
    def __mul__(self, scalar):
        return Vector(self.x * scalar, self.y * scalar)
v1 = Vector(2, 4)
print(v1)
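# A few more of the special methods in action:
v2 = Vector(3, 1)
print(v1 + v2)   # Vector(5, 5)
print(v1 * 2)    # Vector(4, 8)
print(abs(v1))   # about 4.472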
| domchoi/fluentPython | dataModel/vector.py | Python | gpl-3.0 | 743 | 0.014805 |
#!/usr/bin/env python
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Copyright (c) 2015 Mozilla Corporation
#
# Contributors:
# Aaron Meihm <ameihm@mozilla.com>
from lib.alerttask import AlertTask
import pyes
import json
import re
from configlib import getConfig, OptionParser
# Note: this plugin requires a configuration file (unauth_ssh_pyes.conf)
# to exist in the same directory as the plugin.
#
# It should contain content such as:
# [options]
# hostfilter <ES compatible regexp>
# user username
# skiphosts 1.2.3.4 2.3.4.5
class AlertUnauthSSH(AlertTask):
def main(self):
date_timedelta = dict(minutes=30)
self.config_file = './unauth_ssh_pyes.conf'
self.config = None
self.initConfiguration()
must = [
pyes.TermFilter('_type', 'event'),
pyes.TermFilter('category', 'syslog'),
pyes.TermFilter('details.program', 'sshd'),
pyes.QueryFilter(pyes.QueryStringQuery('details.hostname: /{}/'.format(self.config.hostfilter))),
pyes.QueryFilter(pyes.MatchQuery('summary', 'Accepted publickey {}'.format(self.config.user), operator='and'))
]
must_not = []
for x in self.config.skiphosts:
must_not.append(pyes.QueryFilter(pyes.MatchQuery('summary', x)))
self.filtersManual(date_timedelta, must=must, must_not=must_not)
self.searchEventsSimple()
self.walkEvents()
def initConfiguration(self):
myparser = OptionParser()
(self.config, args) = myparser.parse_args([])
self.config.hostfilter = getConfig('hostfilter', '', self.config_file)
self.config.user = getConfig('user', '', self.config_file)
self.config.skiphosts = getConfig('skiphosts', '', self.config_file).split()
# Set alert properties
def onEvent(self, event):
category = 'unauthaccess'
tags = ['ssh']
severity = 'WARNING'
targethost = 'unknown'
sourceipaddress = 'unknown'
x = event['_source']
if 'details' in x:
if 'hostname' in x['details']:
targethost = x['details']['hostname']
if 'sourceipaddress' in x['details']:
sourceipaddress = x['details']['sourceipaddress']
        targetuser = 'unknown'
        expr = re.compile('Accepted publickey for ([A-Za-z0-9]+) from')
        m = expr.match(event['_source']['summary'])
        if m is not None:
            groups = m.groups()
            if len(groups) > 0:
                targetuser = groups[0]
summary = 'Unauthorized SSH account usage by {0} on {1} user {2}'.format(sourceipaddress, targethost, targetuser)
return self.createAlertDict(summary, category, tags, [event], severity)
| serbyy/MozDef | alerts/unauth_ssh_pyes.py | Python | mpl-2.0 | 2,867 | 0.002093 |
#
# Copyright (c) 2015 Juniper Networks, Inc. All rights reserved.
#
from gevent import monkey
monkey.patch_all()
import os
import sys
import socket
import subprocess
import json
import time
import datetime
import platform
import gevent
import ConfigParser
from nodemgr.common.event_manager import EventManager
from pysandesh.sandesh_base import *
from sandesh_common.vns.ttypes import Module, NodeType
from sandesh_common.vns.constants import ModuleNames, NodeTypeNames,\
Module2NodeType
from subprocess import Popen, PIPE
from nodemgr.common.sandesh.nodeinfo.ttypes import *
from nodemgr.common.sandesh.nodeinfo.cpuinfo.ttypes import *
from nodemgr.common.sandesh.nodeinfo.process_info.ttypes import *
from nodemgr.common.sandesh.nodeinfo.process_info.constants import *
from pysandesh.connection_info import ConnectionState
class AnalyticsEventManager(EventManager):
def __init__(self, rule_file, discovery_server,
discovery_port, collector_addr):
EventManager.__init__(
self, rule_file, discovery_server,
discovery_port, collector_addr, sandesh_global)
self.node_type = 'contrail-analytics'
self.table = "ObjectCollectorInfo"
self.module = Module.ANALYTICS_NODE_MGR
self.module_id = ModuleNames[self.module]
self.supervisor_serverurl = "unix:///var/run/supervisord_analytics.sock"
self.add_current_process()
node_type = Module2NodeType[self.module]
node_type_name = NodeTypeNames[node_type]
_disc = self.get_discovery_client()
sandesh_global.init_generator(
self.module_id, socket.gethostname(),
node_type_name, self.instance_id, self.collector_addr,
self.module_id, 8104, ['nodemgr.common.sandesh'], _disc)
sandesh_global.set_logging_params(enable_local_log=True)
ConnectionState.init(sandesh_global, socket.gethostname(), self.module_id,
self.instance_id,
staticmethod(ConnectionState.get_process_state_cb),
NodeStatusUVE, NodeStatus, self.table)
self.send_system_cpu_info()
self.third_party_process_dict = {}
# end __init__
def process(self):
        if self.rule_file == '':
self.rule_file = "/etc/contrail/" + \
"supervisord_analytics_files/contrail-analytics.rules"
json_file = open(self.rule_file)
self.rules_data = json.load(json_file)
def send_process_state_db(self, group_names):
self.send_process_state_db_base(
group_names, ProcessInfo)
def send_nodemgr_process_status(self):
self.send_nodemgr_process_status_base(
ProcessStateNames, ProcessState, ProcessStatus)
def get_node_third_party_process_dict(self):
return self.third_party_process_dict
def get_process_state(self, fail_status_bits):
return self.get_process_state_base(
fail_status_bits, ProcessStateNames, ProcessState)
| tcpcloud/contrail-controller | src/nodemgr/analytics_nodemgr/analytics_event_manager.py | Python | apache-2.0 | 2,984 | 0.008378 |
"""distutils.command.build_clib
Implements the Distutils 'build_clib' command, to build a C/C++ library
that is included in the module distribution and needed by an extension
module."""
__revision__ = "$Id$"
# XXX this module has *lots* of code ripped-off quite transparently from
# build_ext.py -- not surprisingly really, as the work required to build
# a static library from a collection of C source files is not really all
# that different from what's required to build a shared object file from
# a collection of C source files. Nevertheless, I haven't done the
# necessary refactoring to account for the overlap in code between the
# two modules, mainly because a number of subtle details changed in the
# cut 'n paste. Sigh.
import os
from distutils.core import Command
from distutils.errors import DistutilsSetupError
from distutils.ccompiler import customize_compiler
from distutils import log
def show_compilers():
from distutils.ccompiler import show_compilers
show_compilers()
class build_clib(Command):
description = "build C/C++ libraries used by Python extensions"
user_options = [
('build-clib=', 'b',
"directory to build C/C++ libraries to"),
('build-temp=', 't',
"directory to put temporary build by-products"),
('debug', 'g',
"compile with debugging information"),
('force', 'f',
"forcibly build everything (ignore file timestamps)"),
('compiler=', 'c',
"specify the compiler type"),
]
boolean_options = ['debug', 'force']
help_options = [
('help-compiler', None,
"list available compilers", show_compilers),
]
def initialize_options(self):
self.build_clib = None
self.build_temp = None
# List of libraries to build
self.libraries = None
# Compilation options for all libraries
self.include_dirs = None
self.define = None
self.undef = None
self.debug = None
self.force = 0
self.compiler = None
def finalize_options(self):
# This might be confusing: both build-clib and build-temp default
# to build-temp as defined by the "build" command. This is because
# I think that C libraries are really just temporary build
# by-products, at least from the point of view of building Python
# extensions -- but I want to keep my options open.
self.set_undefined_options('build',
('build_temp', 'build_clib'),
('build_temp', 'build_temp'),
('compiler', 'compiler'),
('debug', 'debug'),
('force', 'force'))
self.libraries = self.distribution.libraries
if self.libraries:
self.check_library_list(self.libraries)
if self.include_dirs is None:
self.include_dirs = self.distribution.include_dirs or []
if isinstance(self.include_dirs, str):
self.include_dirs = self.include_dirs.split(os.pathsep)
# XXX same as for build_ext -- what about 'self.define' and
# 'self.undef' ?
def run(self):
if not self.libraries:
return
# Yech -- this is cut 'n pasted from build_ext.py!
from distutils.ccompiler import new_compiler
self.compiler = new_compiler(compiler=self.compiler,
dry_run=self.dry_run,
force=self.force)
customize_compiler(self.compiler)
if self.include_dirs is not None:
self.compiler.set_include_dirs(self.include_dirs)
if self.define is not None:
# 'define' option is a list of (name,value) tuples
for (name,value) in self.define:
self.compiler.define_macro(name, value)
if self.undef is not None:
for macro in self.undef:
self.compiler.undefine_macro(macro)
self.build_libraries(self.libraries)
def check_library_list(self, libraries):
"""Ensure that the list of libraries is valid.
`library` is presumably provided as a command option 'libraries'.
This method checks that it is a list of 2-tuples, where the tuples
are (library_name, build_info_dict).
Raise DistutilsSetupError if the structure is invalid anywhere;
just returns otherwise.
"""
if not isinstance(libraries, list):
raise DistutilsSetupError, \
"'libraries' option must be a list of tuples"
for lib in libraries:
            if not isinstance(lib, tuple) or len(lib) != 2:
                raise DistutilsSetupError, \
                      "each element of 'libraries' must be a 2-tuple"
name, build_info = lib
if not isinstance(name, str):
raise DistutilsSetupError, \
"first element of each tuple in 'libraries' " + \
"must be a string (the library name)"
if '/' in name or (os.sep != '/' and os.sep in name):
raise DistutilsSetupError, \
("bad library name '%s': " +
"may not contain directory separators") % \
lib[0]
if not isinstance(build_info, dict):
raise DistutilsSetupError, \
"second element of each tuple in 'libraries' " + \
"must be a dictionary (build info)"
def get_library_names(self):
# Assume the library list is valid -- 'check_library_list()' is
# called from 'finalize_options()', so it should be!
if not self.libraries:
return None
lib_names = []
for (lib_name, build_info) in self.libraries:
lib_names.append(lib_name)
return lib_names
def get_source_files(self):
self.check_library_list(self.libraries)
filenames = []
for (lib_name, build_info) in self.libraries:
sources = build_info.get('sources')
if sources is None or not isinstance(sources, (list, tuple)):
raise DistutilsSetupError, \
("in 'libraries' option (library '%s'), "
"'sources' must be present and must be "
"a list of source filenames") % lib_name
filenames.extend(sources)
return filenames
def build_libraries(self, libraries):
for (lib_name, build_info) in libraries:
sources = build_info.get('sources')
if sources is None or not isinstance(sources, (list, tuple)):
raise DistutilsSetupError, \
("in 'libraries' option (library '%s'), " +
"'sources' must be present and must be " +
"a list of source filenames") % lib_name
sources = list(sources)
log.info("building '%s' library", lib_name)
# First, compile the source code to object files in the library
# directory. (This should probably change to putting object
# files in a temporary build directory.)
macros = build_info.get('macros')
include_dirs = build_info.get('include_dirs')
objects = self.compiler.compile(sources,
output_dir=self.build_temp,
macros=macros,
include_dirs=include_dirs,
debug=self.debug)
# Now "link" the object files together into a static library.
# (On Unix at least, this isn't really linking -- it just
# builds an archive. Whatever.)
self.compiler.create_static_lib(objects, lib_name,
output_dir=self.build_clib,
debug=self.debug)
| ktan2020/legacy-automation | win/Lib/distutils/command/build_clib.py | Python | mit | 8,340 | 0.001439 |
import json
import logging
from functools import wraps
logger = logging.getLogger(__name__)
class PandaError(Exception):
pass
def error_check(func):
@wraps(func)
def check(*args, **kwargs):
try:
res = func(*args, **kwargs)
if "error" in res:
logger.error(res["message"])
raise PandaError(res["message"])
except Exception as e:
logger.error(e)
raise
return res
return check
class Retriever(object):
def __init__(self, panda, model_type, path = None):
self.panda = panda
self.model_type = model_type
if path:
self.path = path
else:
self.path = model_type.path
class GroupRetriever(Retriever):
@error_check
def _all(self, **kwargs):
json_data = self.panda.get("{0}.json".format(self.path), kwargs)
return json.loads(json_data)
@error_check
def new(self, *args, **kwargs):
return self.model_type(self.panda, *args, **kwargs)
@error_check
def create(self, *args, **kwargs):
return self.new(*args, **kwargs).create(**kwargs)
@error_check
def find(self, val, **kwargs):
json_data = self.panda.get("{0}/{1}.json".format(self.path, val), **kwargs)
return self.model_type(self.panda, **json.loads(json_data))
def all(self, **kwargs):
return [self.model_type(self.panda, **json_attr) for json_attr in self._all(**kwargs)]
def where(self, pred, **kwargs):
return [self.model_type(self.panda, **json_attr) for json_attr in self._all(**kwargs) if pred(json_attr)]
class SingleRetriever(Retriever):
@error_check
def get(self, **kwargs):
json_data = self.panda.get("{0}.json".format(self.path), **kwargs)
return self.model_type(self.panda, json.loads(json_data))
@error_check
def post(self, **kwargs):
json_data = self.panda.post("{0}.json".format(self.path), **kwargs)
return self.model_type(self.panda, json.loads(json_data))
class PandaDict(dict):
def __init__(self, panda, *arg, **kwarg):
self.panda = panda
super(PandaDict, self).__init__(*arg, **kwarg)
def to_json(self, *args, **kwargs):
return json.dumps(self, *args, **kwargs)
class PandaModel(PandaDict):
    def dup(self):
        """Return a copy of this model with its "id" removed."""
        copy = self.copy()
        if "id" in copy:
            del copy["id"]
        return copy
def reload(self):
json_data = self.panda.get("{0}/{1}.json".format(self.path, self["id"]))
self.clear()
parsed = json.loads(json_data)
self.update(parsed)
@error_check
def create(self, **kwargs):
json_data = self.panda.post("{0}.json".format(self.path), kwargs)
return self.__class__(self.panda, json.loads(json_data))
@error_check
def delete(self, **kwargs):
json_data = self.panda.delete("{0}/{1}.json".format(self.path, self["id"]), kwargs)
return self.__class__(self.panda, json.loads(json_data))
class UpdatablePandaModel(PandaModel):
    def __init__(self, *args, **kwargs):
        super(UpdatablePandaModel, self).__init__(*args, **kwargs)
        # Per-instance change tracking (a class-level dict would be shared
        # by every instance and leak changes between models).
        self.changed_values = {}
@error_check
def save(self):
put_path = "{0}/{1}.json".format(self.path, self["id"])
ret = type(self)(self.panda, json.loads(self.panda.put(put_path, self.changed_values)))
if "error" not in ret:
self.changed_values = {}
return ret
def __setitem__(self, key, val):
self.changed_values[key] = val
super(UpdatablePandaModel, self).__setitem__(key, val)
# http://stackoverflow.com/a/2588648/1542900
def update(self, *args, **kwargs):
if args:
if len(args) > 1:
raise TypeError("update expected at most 1 arguments, got %d" % len(args))
other = dict(args[0])
for key in other:
self[key] = other[key]
for key in kwargs:
self[key] = kwargs[key]
# http://stackoverflow.com/a/2588648/1542900
def setdefault(self, key, value=None):
if key not in self:
self[key] = value
return self[key]
class Video(PandaModel):
path = "/videos"
def encodings(self):
return GroupRetriever(self.panda, Encoding, "/videos/{0}/encodings".format(self["id"])).all()
def metadata(self):
return SingleRetriever(self.panda, Metadata, "/videos/{0}/metadata".format(self["id"])).get()
class Cloud(UpdatablePandaModel):
path = "/clouds"
class Encoding(PandaModel):
path = "/encodings"
def video(self):
return SingleRetriever(self.panda, Video, "/videos/{0}".format(self["video_id"])).get()
def profile(self):
key = self["profile_name"] or self["profile_id"]
return SingleRetriever(self.panda, Video, "/profiles/{0}".format(key)).get()
def cancel(self):
return SingleRetriever(self.panda, PandaDict, "/encodings/{0}/cancel.json".format(self["id"])).post()
def retry(self):
return SingleRetriever(self.panda, PandaDict, "/encodings/{0}/retry.json".format(self["id"])).post()
class Profile(UpdatablePandaModel):
path = "/profiles"
class Notifications(UpdatablePandaModel):
path = "/notifications"
@error_check
def save(self):
tmp = dict(self)
for event in tmp["events"]:
tmp["events"][event] = str(tmp["events"][event]).lower()
return Notifications(self.panda, json.loads(self.panda.put("/notifications.json", tmp)))
def delete(self):
raise AttributeError("Notification instance has no attribute 'delete'")
def reload(self):
json_data = self.panda.get("/notifications.json")
self.clear()
self.update(json.loads(json_data))
class Metadata(PandaDict):
pass
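# Usage sketch (illustrative; `panda` stands for a client object exposing
# get/post/put/delete methods that return JSON strings, which is what the
# retrievers above expect):
#
#     videos = GroupRetriever(panda, Video)
#     video = videos.find('some-video-id')
#     for encoding in video.encodings():
#         print(encoding.to_json())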
| pandastream/panda_client_python | panda/models.py | Python | mit | 5,728 | 0.005237 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.9 on 2018-02-22 20:39
from __future__ import unicode_literals
import time
import logging
import progressbar
from django.db import connection, migrations
from django.db.models import Q
from django.contrib.contenttypes.models import ContentType
from bulk_update.helper import bulk_update
from addons.wiki.models import WikiPage, WikiVersion
from osf.models import Comment, Guid
logger = logging.getLogger(__name__)
def reverse_func(state, schema):
"""
Reverses NodeWikiPage migration. Repoints guids back to each NodeWikiPage,
repoints comment_targets, comments_viewed_timestamps, and deletes all WikiVersions and WikiPages
"""
NodeWikiPage = state.get_model('addons_wiki', 'nodewikipage')
AbstractNode = state.get_model('osf', 'AbstractNode')
nwp_content_type_id = ContentType.objects.get_for_model(NodeWikiPage).id
nodes = AbstractNode.objects.exclude(wiki_pages_versions={})
progress_bar = progressbar.ProgressBar(maxval=nodes.count() or 100).start()
for i, node in enumerate(nodes, 1):
progress_bar.update(i)
for wiki_key, version_list in node.wiki_pages_versions.items():
if version_list:
for index, version in enumerate(version_list):
nwp = NodeWikiPage.objects.filter(former_guid=version).include(None)[0]
# All NodeWikiPages associated with a certain wiki key on a node point to the same WikiPage.
wp = WikiPage.load(version)
guid = migrate_guid_referent(Guid.load(version), nwp, nwp_content_type_id)
guid.save()
nwp = guid.referent
                    # Only move for the last item in the wiki_pages_versions array for each page_name: NWP->WP is many-to-one, while NWP->WV is one-to-one.
move_comment_target(Guid.load(wp._id), nwp)
update_comments_viewed_timestamp(node, wp._id, nwp)
progress_bar.finish()
WikiVersion.objects.all().delete()
WikiPage.objects.all().delete()
logger.info('NodeWikiPages restored and WikiVersions and WikiPages removed.')
def move_comment_target(current_guid, desired_target):
"""
Move the comment's target from the current target to the desired target
Specifically for repointing WikiPage comments -> NodeWikiPage comments
"""
desired_target_guid_id = Guid.load(desired_target.former_guid).id
if Comment.objects.filter(Q(root_target=current_guid) | Q(target=current_guid)).exists():
Comment.objects.filter(root_target=current_guid).update(root_target_id=desired_target_guid_id)
Comment.objects.filter(target=current_guid).update(target_id=desired_target_guid_id)
return
def update_comments_viewed_timestamp(node, current_wiki_guid, desired_wiki_object):
"""Replace the current_wiki_object keys in the comments_viewed_timestamp dict with the desired wiki_object_id """
users_pending_save = []
# We iterate over .contributor_set instead of .contributors in order
# to take advantage of .include('contributor__user')
for contrib in node.contributor_set.all():
user = contrib.user
if user.comments_viewed_timestamp.get(current_wiki_guid, None):
timestamp = user.comments_viewed_timestamp[current_wiki_guid]
user.comments_viewed_timestamp[desired_wiki_object._id] = timestamp
del user.comments_viewed_timestamp[current_wiki_guid]
users_pending_save.append(user)
if users_pending_save:
bulk_update(users_pending_save, update_fields=['comments_viewed_timestamp'])
return users_pending_save
def migrate_guid_referent(guid, desired_referent, content_type_id):
"""
Point the guid towards the desired_referent.
Pointing the NodeWikiPage guid towards the WikiPage will still allow links to work.
"""
guid.content_type_id = content_type_id
guid.object_id = desired_referent.id
return guid
def migrate_node_wiki_pages(state, schema):
create_wiki_pages_sql(state, schema)
create_guids(state, schema)
create_wiki_versions_and_repoint_comments_sql(state, schema)
migrate_comments_viewed_timestamp_sql(state, schema)
migrate_guid_referent_sql(state, schema)
def create_wiki_pages_sql(state, schema):
NodeWikiPage = state.get_model('addons_wiki', 'nodewikipage')
then = time.time()
logger.info('Starting migration of WikiPages [SQL]:')
wikipage_content_type_id = ContentType.objects.get_for_model(WikiPage).id
nodewikipage_content_type_id = ContentType.objects.get_for_model(NodeWikiPage).id
with connection.cursor() as cursor:
cursor.execute(
"""
CREATE TEMPORARY TABLE temp_wikipages
(
node_id INTEGER,
user_id INTEGER,
page_name_key TEXT,
latest_page_name_guid TEXT,
first_page_name_guid TEXT,
page_name_display TEXT,
created TIMESTAMP,
modified TIMESTAMP
)
ON COMMIT DROP;
-- Flatten out the wiki_page_versions json keys
INSERT INTO temp_wikipages (node_id, page_name_key)
SELECT
oan.id AS node_id
, jsonb_object_keys(oan.wiki_pages_versions) as page_name_key
FROM osf_abstractnode AS oan;
-- Retrieve the latest guid for the json key
UPDATE temp_wikipages AS twp
SET
latest_page_name_guid = (
SELECT trim(v::text, '"')
FROM osf_abstractnode ioan
, jsonb_array_elements(oan.wiki_pages_versions->twp.page_name_key) WITH ORDINALITY v(v, rn)
WHERE ioan.id = oan.id
ORDER BY v.rn DESC
LIMIT 1
)
FROM osf_abstractnode AS oan
WHERE oan.id = twp.node_id;
-- Retrieve the first guid for the json key
UPDATE temp_wikipages AS twp
SET
first_page_name_guid = (
SELECT trim(v::text, '"')
FROM osf_abstractnode ioan
, jsonb_array_elements(oan.wiki_pages_versions->twp.page_name_key) WITH ORDINALITY v(v, rn)
WHERE ioan.id = oan.id
ORDER BY v.rn ASC
LIMIT 1
)
FROM osf_abstractnode AS oan
WHERE oan.id = twp.node_id;
-- Remove any json keys that reference empty arrays (bad data? e.g. abstract_node id=232092)
DELETE FROM temp_wikipages AS twp
WHERE twp.latest_page_name_guid IS NULL;
-- Retrieve page_name nodewikipage field for the latest wiki page guid
UPDATE temp_wikipages AS twp
SET
page_name_display = anwp.page_name
FROM osf_guid AS og INNER JOIN addons_wiki_nodewikipage AS anwp ON (og.object_id = anwp.id AND og.content_type_id = %s)
WHERE og._id = twp.latest_page_name_guid;
-- Retrieve user_id, created, and modified nodewikipage field for the first wiki page guid
UPDATE temp_wikipages AS twp
SET
user_id = anwp.user_id
, created = anwp.date
, modified = anwp.modified
FROM osf_guid AS og INNER JOIN addons_wiki_nodewikipage AS anwp ON (og.object_id = anwp.id AND og.content_type_id = %s)
WHERE og._id = twp.first_page_name_guid;
-- Populate the wikipage table
INSERT INTO addons_wiki_wikipage (node_id, user_id, content_type_pk, page_name, created, modified)
SELECT
twp.node_id
, twp.user_id
, %s
, twp.page_name_display
, twp.created
, twp.modified
FROM temp_wikipages AS twp;
""", [nodewikipage_content_type_id, nodewikipage_content_type_id, wikipage_content_type_id]
)
now = time.time()
logger.info('Finished migration of WikiPages [SQL]: {:.5} seconds'.format(now - then))
def create_guids(state, schema):
then = time.time()
content_type = ContentType.objects.get_for_model(WikiPage)
progress_bar = progressbar.ProgressBar(maxval=WikiPage.objects.count() or 100).start()
logger.info('Creating new guids for all WikiPages:')
for i, wiki_page_id in enumerate(WikiPage.objects.values_list('id', flat=True), 1):
# looping instead of bulk_create, so _id's are not the same
progress_bar.update(i)
Guid.objects.create(object_id=wiki_page_id, content_type_id=content_type.id)
progress_bar.finish()
now = time.time()
logger.info('WikiPage guids created: {:.5} seconds'.format(now - then))
return
def create_wiki_versions_and_repoint_comments_sql(state, schema):
NodeWikiPage = state.get_model('addons_wiki', 'nodewikipage')
then = time.time()
logger.info('Starting migration of WikiVersions [SQL]:')
nodewikipage_content_type_id = ContentType.objects.get_for_model(NodeWikiPage).id
wikipage_content_type_id = ContentType.objects.get_for_model(WikiPage).id
with connection.cursor() as cursor:
cursor.execute(
"""
CREATE TEMPORARY TABLE temp_wikiversions
(
node_id INTEGER,
user_id INTEGER,
page_name_key TEXT,
wiki_page_id INTEGER,
content TEXT,
identifier INTEGER,
created TIMESTAMP,
modified TIMESTAMP,
nwp_guid TEXT,
latest_page_name_guid TEXT,
page_name_display TEXT
)
ON COMMIT DROP;
CREATE INDEX ON temp_wikiversions (nwp_guid ASC);
CREATE INDEX ON temp_wikiversions (wiki_page_id ASC);
-- Flatten out the wiki_page_versions arrays for each key
INSERT INTO temp_wikiversions (node_id, page_name_key, nwp_guid, content, user_id, modified, identifier, created)
SELECT
oan.id as node_id,
wiki_pages_versions.key,
trim(nwp_guid::text, '"') as node_wiki_page_guid,
nwp.content,
nwp.user_id,
nwp.modified,
nwp.version as identifier,
nwp.date as created
FROM osf_abstractnode as oan,
jsonb_each(oan.wiki_pages_versions) as wiki_pages_versions,
jsonb_array_elements(wiki_pages_versions->wiki_pages_versions.key) as nwp_guid
INNER JOIN addons_wiki_nodewikipage as nwp ON nwp.former_guid = trim(nwp_guid::text, '"');
-- Retrieve the latest guid for the json key
UPDATE temp_wikiversions AS twp
SET
latest_page_name_guid = (
SELECT trim(v::text, '"')
FROM osf_abstractnode ioan
, jsonb_array_elements(oan.wiki_pages_versions->twp.page_name_key) WITH ORDINALITY v(v, rn)
WHERE ioan.id = oan.id
ORDER BY v.rn DESC
LIMIT 1
)
FROM osf_abstractnode AS oan
WHERE oan.id = twp.node_id;
-- Retrieve page_name nodewikipage field for the latest wiki page guid
UPDATE temp_wikiversions AS twb
SET
page_name_display = anwp.page_name
FROM osf_guid AS og INNER JOIN addons_wiki_nodewikipage AS anwp ON (og.object_id = anwp.id AND og.content_type_id = %s)
WHERE og._id = twb.latest_page_name_guid;
-- Retrieve wiki page id
UPDATE temp_wikiversions AS twc
SET
wiki_page_id = (
SELECT awp.id
FROM addons_wiki_wikipage as awp
WHERE (awp.node_id = twc.node_id
AND awp.page_name = twc.page_name_display)
LIMIT 1
);
-- Borrowed from https://gist.github.com/jamarparris/6100413
CREATE OR REPLACE FUNCTION generate_object_id() RETURNS varchar AS $$
DECLARE
time_component bigint;
machine_id bigint := FLOOR(random() * 16777215);
process_id bigint;
seq_id bigint := FLOOR(random() * 16777215);
result varchar:= '';
BEGIN
SELECT FLOOR(EXTRACT(EPOCH FROM clock_timestamp())) INTO time_component;
SELECT pg_backend_pid() INTO process_id;
result := result || lpad(to_hex(time_component), 8, '0');
result := result || lpad(to_hex(machine_id), 6, '0');
result := result || lpad(to_hex(process_id), 4, '0');
result := result || lpad(to_hex(seq_id), 6, '0');
RETURN result;
END;
$$ LANGUAGE PLPGSQL;
-- Populate the wiki_version table
INSERT INTO addons_wiki_wikiversion (user_id, wiki_page_id, content, identifier, created, modified, _id)
SELECT
twv.user_id
, twv.wiki_page_id
, twv.content
, twv.identifier
, twv.created
, twv.modified
, generate_object_id()
FROM temp_wikiversions AS twv;
-- Migrate Comments on NodeWikiPages to point to WikiPages
-- Create temporary view to store mapping of NodeWikiPage's Guid.pk => WikiPage.id
CREATE OR REPLACE TEMPORARY VIEW nwp_guids_to_wp_id AS (
SELECT
osf_guid.id as nwp_guid_id,
twv.wiki_page_id
FROM osf_guid
INNER JOIN temp_wikiversions twv ON (osf_guid._id = twv.nwp_guid)
WHERE osf_guid._id = twv.nwp_guid
);
-- Use above view to construct a mapping between NodeWikiPage GUID pk => WikiPage GUID pk
CREATE OR REPLACE TEMPORARY VIEW nwp_guids_to_wiki_page_guids as (
SELECT
nwp_guids_to_wp_id.nwp_guid_id as nwp_guid_id,
osf_guid.id as wiki_page_guid_id
FROM osf_guid
INNER JOIN nwp_guids_to_wp_id ON (osf_guid.object_id = nwp_guids_to_wp_id.wiki_page_id)
WHERE osf_guid.object_id = nwp_guids_to_wp_id.wiki_page_id AND osf_guid.content_type_id = %s
);
-- Change Comment.root_target from NodeWikiPages to their corresponding WikiPage
UPDATE osf_comment
SET
root_target_id = (
SELECT nwp_guids_to_wiki_page_guids.wiki_page_guid_id
FROM nwp_guids_to_wiki_page_guids
WHERE osf_comment.root_target_id = nwp_guids_to_wiki_page_guids.nwp_guid_id
LIMIT 1
)
WHERE root_target_id IN (SELECT nwp_guid_id FROM nwp_guids_to_wiki_page_guids);
-- Change Comment.target from NodeWikiPages to their corresponding WikiPage
UPDATE osf_comment
SET
target_id = (
SELECT nwp_guids_to_wiki_page_guids.wiki_page_guid_id
FROM nwp_guids_to_wiki_page_guids
WHERE osf_comment.target_id = nwp_guids_to_wiki_page_guids.nwp_guid_id
LIMIT 1
)
WHERE target_id IN (SELECT nwp_guid_id FROM nwp_guids_to_wiki_page_guids);
""", [nodewikipage_content_type_id, wikipage_content_type_id]
)
now = time.time()
logger.info('Finished migration of WikiVersions [SQL]: {:.5} seconds'.format(now - then))
def migrate_comments_viewed_timestamp_sql(state, schema):
then = time.time()
logger.info('Starting migration of user comments_viewed_timestamp [SQL]:')
wikipage_content_type_id = ContentType.objects.get_for_model(WikiPage).id
with connection.cursor() as cursor:
cursor.execute(
"""
CREATE FUNCTION key_exists(json_field json, dictionary_key text)
RETURNS boolean AS $$
BEGIN
RETURN (json_field->dictionary_key) IS NOT NULL;
END;
$$ LANGUAGE plpgsql;
-- Defining a temporary table that has every update that needs to happen to users.
-- Obsolete NodeWikiPage guids in comments_viewed_timestamp need to be replaced with
-- corresponding new WikiPage guid
-- Table has node_id, user_id, nwp_guid (old NodeWikiPage guid) and wp_guid (WikiPage guid)
CREATE OR REPLACE FUNCTION update_comments_viewed_timestamp_sql()
RETURNS SETOF varchar AS
$func$
DECLARE
rec record;
BEGIN
FOR rec IN
SELECT
oan.id as node_id,
osf_contributor.user_id as user_id,
(SELECT U0._id
FROM osf_guid AS U0
WHERE U0.object_id=wp.id AND U0.content_type_id = %s) AS wp_guid,
nwp_guid
FROM osf_abstractnode as oan
-- Joins contributor to node on contributor.node_id
JOIN osf_contributor ON (oan.id = osf_contributor.node_id)
JOIN osf_osfuser ON (osf_osfuser.id = user_id)
-- Joins each of the wiki page key/version list from wiki_pages_versions
LEFT JOIN LATERAL jsonb_each(oan.wiki_pages_versions) AS wiki_pages_versions ON TRUE
-- Adds the last NWP id
LEFT JOIN LATERAL cast(
(
SELECT trim(v::text, '"')
FROM osf_abstractnode ioan, jsonb_array_elements(wiki_pages_versions->wiki_pages_versions.key) WITH ORDINALITY v(v, rn)
WHERE ioan.id = oan.id
ORDER BY v.rn DESC
LIMIT 1
) AS text) AS nwp_guid ON TRUE
-- Joins the wiki page object, by finding the wiki page object on the node that has a page name similar to the key stored on wiki-pages versions
-- Should work most of the time, there is some bad data though
JOIN addons_wiki_wikipage AS wp ON (wp.node_id = oan.id) AND UPPER(wp.page_name::text) LIKE UPPER(wiki_pages_versions.key::text)
WHERE oan.wiki_pages_versions != '{}' AND osf_osfuser.comments_viewed_timestamp != '{}' AND key_exists(osf_osfuser.comments_viewed_timestamp::json, nwp_guid)
LOOP
-- Loops through every row in temporary table above, and deletes old nwp_guid key and replaces with wp_guid key.
-- Looping instead of joining to osf_user table because temporary table above has multiple rows with the same user
UPDATE osf_osfuser
SET comments_viewed_timestamp = comments_viewed_timestamp - rec.nwp_guid || jsonb_build_object(rec.wp_guid, comments_viewed_timestamp->rec.nwp_guid)
WHERE osf_osfuser.id = rec.user_id;
END LOOP;
END
$func$ LANGUAGE plpgsql;
SELECT update_comments_viewed_timestamp_sql();
""", [wikipage_content_type_id]
)
now = time.time()
logger.info('Finished migration of comments_viewed_timestamp [SQL]: {:.5} seconds'.format(now - then))
def migrate_guid_referent_sql(state, schema):
NodeWikiPage = state.get_model('addons_wiki', 'nodewikipage')
then = time.time()
logger.info('Starting migration of Node Wiki Page guids, repointing them to Wiki Page guids [SQL]:')
wikipage_content_type_id = ContentType.objects.get_for_model(WikiPage).id
nodewikipage_content_type_id = ContentType.objects.get_for_model(NodeWikiPage).id
with connection.cursor() as cursor:
cursor.execute(
"""
CREATE TEMPORARY TABLE repoint_guids
(
node_id INTEGER,
page_name_key TEXT,
nwp_guid TEXT,
latest_page_name_guid TEXT,
wiki_page_id INTEGER,
page_name_display TEXT
)
ON COMMIT DROP;
-- Flatten out the wiki_page_versions arrays for each key
INSERT INTO repoint_guids (node_id, page_name_key, nwp_guid)
SELECT
oan.id as node_id,
wiki_pages_versions.key,
trim(nwp_guid::text, '"') as node_wiki_page_guid
FROM osf_abstractnode as oan,
jsonb_each(oan.wiki_pages_versions) as wiki_pages_versions,
jsonb_array_elements(wiki_pages_versions->wiki_pages_versions.key) as nwp_guid
INNER JOIN addons_wiki_nodewikipage as nwp ON nwp.former_guid = trim(nwp_guid::text, '"');
-- Retrieve the latest guid for the json key
-- For example, if you have {'home': ['abcde', '12345', 'zyxwv']}, I need to preserve 'zyxwv'
UPDATE repoint_guids AS twp
SET
latest_page_name_guid = (
SELECT trim(v::text, '"')
FROM osf_abstractnode ioan
, jsonb_array_elements(oan.wiki_pages_versions->twp.page_name_key) WITH ORDINALITY v(v, rn)
WHERE ioan.id = oan.id
ORDER BY v.rn DESC
LIMIT 1
)
FROM osf_abstractnode AS oan
WHERE oan.id = twp.node_id;
-- Retrieve page_name nodewikipage field for the latest wiki page guid (The latest one is most current because wikis can be renamed)
UPDATE repoint_guids AS twb
SET
page_name_display = anwp.page_name
FROM osf_guid AS og INNER JOIN addons_wiki_nodewikipage AS anwp ON (og.object_id = anwp.id AND og.content_type_id = %s)
WHERE og._id = twb.latest_page_name_guid;
-- Retrieve wiki page id using the node id and page name
UPDATE repoint_guids AS twc
SET
wiki_page_id = (
SELECT awp.id
FROM addons_wiki_wikipage as awp
WHERE (awp.node_id = twc.node_id
AND awp.page_name = twc.page_name_display)
LIMIT 1
);
-- Update osf_guid by joining with temporary table repoint_guids.
UPDATE osf_guid
SET content_type_id = %s, object_id = wiki_page_id
FROM repoint_guids
WHERE repoint_guids.nwp_guid = osf_guid._id;
""", [nodewikipage_content_type_id, wikipage_content_type_id]
)
now = time.time()
logger.info('Finished repointing Node Wiki Page guids to Wiki Pages [SQL]: {:.5} seconds'.format(now - then))
class Migration(migrations.Migration):
dependencies = [
('addons_wiki', '0009_auto_20180302_1404'),
]
operations = [
migrations.RunPython(migrate_node_wiki_pages, reverse_func),
]
| caseyrollins/osf.io | addons/wiki/migrations/0010_migrate_node_wiki_pages.py | Python | apache-2.0 | 23,077 | 0.003467 |
# ~*~ coding: utf-8 ~*~
from __future__ import unicode_literals
from django.conf.urls import url
from rest_framework.routers import DefaultRouter
from .. import api
app_name = "audits"
router = DefaultRouter()
router.register(r'ftp-log', api.FTPLogViewSet, 'ftp-log')
urlpatterns = [
]
urlpatterns += router.urls
| eli261/jumpserver | apps/audits/urls/api_urls.py | Python | gpl-2.0 | 319 | 0 |
# -*- coding: utf-8 -*-
import datetime
import unittest
import clowder
import mock
# import psutil
class BaseClowderTestCase(unittest.TestCase):
"""Base class for all clowder test cases."""
def assert_send_contains_data(self, send_mock, key, value):
"""Assert that the given send mock was called with the given key and
value pair.
:param send_mock: A mock
:type send_mock: mock.MagicMock
:param key: A key
:type key: hashable
:param value: The expected value
:type value: mixed
"""
self.assertIn(key, send_mock.call_args[0][0])
self.assertEqual(value, send_mock.call_args[0][0][key])
class TestCleanFrequency(unittest.TestCase):
def test_should_return_value_if_int_given(self):
self.assertEqual(100, clowder._clean_frequency(100))
def test_should_return_total_seconds_if_timedelta_given(self):
fixture = datetime.timedelta(hours=1)
self.assertEqual(
fixture.total_seconds(), clowder._clean_frequency(fixture)
)
def test_should_raise_error_if_any_other_type_value_given(self):
self.assertRaisesRegexp(
ValueError,
"Invalid frequency 'hello'",
clowder._clean_frequency,
"hello"
)
class TestValidateData(unittest.TestCase):
def test_should_succeed_if_only_valid_keys_given(self):
clowder._validate_data({
'name': 'my-test',
'url': clowder.CLOWDER_API_URL,
'value': 123,
'status': 1,
'frequency': 1098123098
})
def test_should_raise_error_if_invalid_data_given(self):
self.assertRaisesRegexp(
ValueError,
"Invalid data keys 'herp, derp'",
clowder._validate_data,
{'name': 'Hey', 'status': 1, 'herp': 123, 'derp': 456}
)
def test_should_raise_error_if_missing_keys(self):
self.assertRaisesRegexp(
ValueError,
"Missing keys 'name'",
clowder._validate_data,
{'value': 1}
)
class TestFail(BaseClowderTestCase):
def test_should_raise_error_if_status_given(self):
self.assertRaisesRegexp(
AttributeError,
"Status should not be provided to fail",
clowder.fail,
{'status': 'should fail'}
)
@mock.patch('clowder._send')
def test_should_send_value_provided_along(self, send):
clowder.fail({'name': 'Invalid stuff'})
send.assert_called_once()
self.assert_send_contains_data(send, 'name', 'Invalid stuff')
@mock.patch('clowder._send')
def test_should_send_status_of_negative_one(self, send):
clowder.fail({'value': "Invalid stuff"})
send.assert_called_once()
self.assert_send_contains_data(send, 'status', -1)
class TestOk(BaseClowderTestCase):
def test_should_raise_error_if_status_given(self):
self.assertRaisesRegexp(
AttributeError,
"Status should not be provided to ok",
clowder.ok,
{'name': 'Test', 'status': 'should fail'}
)
@mock.patch('clowder._send')
def test_should_send_value_provided_along(self, send):
clowder.ok({'value': 'Invalid stuff'})
send.assert_called_once()
self.assert_send_contains_data(send, 'value', 'Invalid stuff')
@mock.patch('clowder._send')
def test_should_send_status_of_one(self, send):
clowder.ok({'value': "Invalid stuff"})
send.assert_called_once()
self.assert_send_contains_data(send, 'status', 1)
class TestDelete(BaseClowderTestCase):
@mock.patch('clowder._send')
def test_should_use_correct_delete_url(self, send):
clowder.delete('test')
send.assert_called_once()
self.assert_send_contains_data(send, 'url', clowder.CLOWDER_DELETE_URL)
class TestSubmit(BaseClowderTestCase):
def test_should_raise_error_if_alert_not_given(self):
self.assertRaisesRegexp(
ValueError,
"Alert required",
clowder.submit,
name='Hello',
value=123
)
def test_should_raise_error_if_value_not_given(self):
self.assertRaisesRegexp(
ValueError,
"Value required",
clowder.submit,
name='Test',
alert=lambda x: (x > 10)
)
@mock.patch('clowder.fail')
def test_should_call_fail_if_predicate_returns_true(self, fail):
clowder.submit(alert=lambda x: x > 10, value=15)
fail.assert_called_once()
@mock.patch('clowder.ok')
def test_should_call_ok_if_predicate_returns_false(self, ok):
clowder.submit(alert=lambda x: x > 10, value=10)
ok.assert_called_once()
class TestSend(BaseClowderTestCase):
def setUp(self):
super(TestSend, self).setUp()
self.fixture = {'name': 'hello', 'status': 1}
@mock.patch('requests.post')
def test_should_use_default_clowder_api_url(self, post):
clowder._send(self.fixture)
post.assert_called_once()
args = post.call_args[0]
url = args[0]
self.assertEqual(url, clowder.CLOWDER_API_URL)
@mock.patch('requests.post')
def test_should_contain_provided_data(self, post):
clowder._send(self.fixture)
post.assert_called_once()
kwargs = post.call_args[1]
self.assertIn('data', kwargs)
self.assertEqual(kwargs['data'], self.fixture)
def test_should_raise_error_if_invalid_data_given(self):
self.assertRaisesRegexp(
ValueError,
"Invalid data keys 'herp'",
clowder._send,
{'name': 'Test', 'herp': 123}
)
def test_should_raise_error_if_missing_keys(self):
self.assertRaisesRegexp(
ValueError,
"Missing keys 'name'",
clowder._send,
{'value': 1}
)
# clowder.ok({
# 'name': 'CPU Percent',
# 'value': psutil.cpu_percent(interval=1),
# 'frequency': datetime.timedelta(minutes=0.5)
# })
# clowder.ok({
# 'name': 'Memory Utilization',
# 'value': psutil.phymem_usage().percent
# })
| keithhackbarth/clowder_python_client | tests.py | Python | gpl-2.0 | 6,224 | 0 |
# Copyright (c) 2013 Jose Cruz-Toledo
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""
Command-line application for creating NTriples representation of a ligand neighbourhood of a pdb file.
This program takes as input a directory of pdb files and searches those structures for any residues found in a known dictionary of ligands:
(https://docs.google.com/spreadsheet/pub?key=0AnGgKfZdJasrdC00bUxHcVRXaFloSnJYb3VmYkwyVnc&single=true&gid=0&output=csv). If a ligand is found
in a structure, an NTriples file is generated that includes details about the neighbourhood members and their relationship to the ligand.
Usage:
 $ python ligandneighbours.py -dir /path/to/local/dir/with/pdb/files --radius 4.8 -out /path/to/output
"""
import os
import sys
import argparse
import urllib2
import csv
import re
import hashlib
import random
from Bio.PDB import *
from collections import defaultdict
#parser for command-line arguments
parser = argparse.ArgumentParser(
description=__doc__,
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument('-dir','--input_dir', help='a local direcory containing PDB structures (in the .pdb format) for which you wish to find the ligand neighbourhood', required=True)
parser.add_argument('-out', '--output_file', help='the file where the output will be stored as CSV', required=True)
parser.add_argument('--radius', nargs='?', const=5.0, type=float, default=5.0)
pdb_to_ligand_list_url = 'https://docs.google.com/spreadsheet/pub?key=0AnGgKfZdJasrdC00bUxHcVRXaFloSnJYb3VmYkwyVnc&single=true&gid=0&output=csv'
base_uri = 'http://bio2rdf.org'
rdfs = 'http://www.w3.org/2000/01/rdf-schema#'
rdf = 'http://www.w3.org/1999/02/22-rdf-syntax-ns#'
def main(argv):
#parse the command-line flags.
flags = parser.parse_args(argv[1:])
local_dir = flags.input_dir
output_dir = flags.output_file
radius = flags.radius
#fetch a list of all the pdb files in the input directory
filepaths = fetchPdbFilePaths(local_dir)
#fetch the ligand list
ligands = fetchLigandList(pdb_to_ligand_list_url)
for fp in filepaths:
#get the file name and extension of the pdb file
fn, fe = os.path.splitext(fp)
pdbId = fn.rsplit('/')[-1]
# dict of ligands (PDB.Residue) to residues (PDB.Residue)
ln = findNeighbours(fp, ligands, radius)
if ln:
#now we can generate a list of uris for each ligand neighbor
luri = makeURIHood(ln)
hoodNTriples = makeHoodNTriplesAnnotation(luri, pdbId, radius)
#write an N3 file as output
writeN3Hood(hoodNTriples, pdbId, output_dir)
#Creates the NTriples statements describing a ligand neighbourhood
def makeHoodNTriplesAnnotation(ligand_uri_dict, aPdbId, aRadius):
rm = ''
#make a hood uri
hood_uri = base_uri+'/lighood_resource:'+hashlib.sha224(str(aPdbId)+str(aRadius)+str(random.random())).hexdigest()
#type the hood
rm += "<"+hood_uri+"> <"+rdf+"type> <"+base_uri+"/lighood_vocabulary:ligand_neighbourhood> .\n"
rm += "<"+base_uri+"/lighood_vocabulary:ligand_neighbourhood> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://www.w3.org/2000/01/rdf-schema#Class> .\n"
rm += "<"+hood_uri+"> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://www.w3.org/2002/07/owl#NamedIndividual> .\n"
#link it to the pdb structure
rm += "<"+base_uri+"/pdb:"+aPdbId+"> <"+base_uri+"/lighood_vocabulary:has_neighborhood> <"+hood_uri+"> .\n"
#add the radius
radius_uri = base_uri+'/lighood_resource:'+hashlib.sha224(str(aRadius)+str(random.random())).hexdigest()
rm += "<"+hood_uri+"> <"+base_uri+"/lighood_vocabulary:has_attribute> <"+radius_uri+">. \n"
rm += "<"+radius_uri+"> <"+rdf+"type> <"+base_uri+"/lighood_vocabulary:radius> .\n"
rm += "<"+base_uri+"/lighood_vocabulary:radius> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://www.w3.org/2000/01/rdf-schema#Class> .\n"
rm += "<"+radius_uri+"> <"+"<http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://www.w3.org/2002/07/owl#NamedIndividual> .\n"
rm += "<"+radius_uri+"> <"+base_uri+"/lighood_vocabulary:has_value> \""+str(aRadius)+"\". \n"
for (ligand_uri, res_uri) in ligand_uri_dict.items():
#add ligand
rm += "<"+hood_uri+"> <"+base_uri+"/lighood_vocabulary:has_member> <"+ligand_uri+"> .\n"
#type the ligand
rm += "<"+ligand_uri+"> <"+rdf+"type> <"+base_uri+"/lighood_vocabulary:ligand> .\n"
rm += "<"+base_uri+"/lighood_vocabulary:ligand> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://www.w3.org/2000/01/rdf-schema#Class> .\n"
rm += "<"+ligand_uri+"> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://www.w3.org/2002/07/owl#NamedIndividual> .\n"
for aru in res_uri:
#add parts
rm += "<"+hood_uri+"> <"+base_uri+"/lighood_vocabulary:has_member> <"+aru+"> .\n"
#link ligand to neighbors
rm += "<"+ligand_uri+"> <"+base_uri+"/lighood_vocabulary:has_neighbor> <"+aru+"> .\n"
#type the neighbors
rm += "<"+aru+"> <"+rdf+"type> <"+base_uri+"/lighood_vocabulary:neighbor> .\n"
rm += "<"+base_uri+"/lighood_vocabulary:neighbor> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://www.w3.org/2000/01/rdf-schema#Class> .\n"
rm += "<"+aru+"> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://www.w3.org/2002/07/owl#NamedIndividual> .\n"
return rm
#creates an N3 file with aPdbId in the specified anOutputDirectory
# by parsing the ligand_uri_dict
def writeN3Hood(someNTriples, aPdbId, anOutputDirectory):
if someNTriples:
f = open(anOutputDirectory+'/'+aPdbId+'-ligand-neighborhood.nt','w')
f.write(someNTriples)
f.close()
#returns a defaultdict(list) where the key is the Bio2RDF URI of
# the ligand residue in this structure and the value is a list of
# residue URIs that are in the radius of the given ligand
def makeURIHood(aLigandDictList):
rm = defaultdict(list)
for (ligand, hood) in aLigandDictList.items():
ligfi = ligand.get_full_id()
#build ligand uri
ligand_uri = base_uri+'/pdb_resource:'+ligfi[0]+'/chemicalComponent_'+ligfi[2]+str(ligfi[3][1])
residue_uris = []
for aResidue in hood:
fi = aResidue.get_full_id()
res_pdbid = fi[0]
res_chain = fi[2]
res_position = fi[3][1]
res_uri = base_uri+'/pdb_resource:'+res_pdbid+'/chemicalComponent_'+res_chain+str(res_position)
residue_uris.append(res_uri)
        if ligand_uri not in rm:
rm[ligand_uri] = residue_uris
return rm
# compute the ligand neighbourhood for the given pdb file.
# someVerifiedLigands is the contents of the spreadsheet with known ligands
# aRadius is the threshold under which the comparison will be made
# this method returns a defaultdict(list) where the key is a ligand
# and the value is a list of PDB.Residue that exist within aRadius of the given
# ligand
def findNeighbours(aPdbFilePath, someVerifiedLigands, aRadius):
rm = defaultdict(list)
fn, fe = os.path.splitext(aPdbFilePath)
pdbId = fn.rsplit('/')[-1]
match = re.match('^\w{4}$', pdbId)
if match:
p = PDBParser(PERMISSIVE=1, QUIET=1)
structure = p.get_structure(pdbId, aPdbFilePath)
models = structure.get_list()
#iterate over the models
for aModel in models:
chains = aModel.get_list()
#get all the atoms ('A') in this model
model_atoms = Selection.unfold_entities(aModel,'A')
#create a neighbor search
ns = NeighborSearch(model_atoms)
#search the chains for any known ligands
for aChain in chains:
                #get the residues in this chain
residues = aChain.get_list()
#iterate over the residues
for aR in residues:
if findLigandIdInListOfLigands(someVerifiedLigands,aR.get_resname()) != None:
#found a ligand!
ligand = aR
neighborset = computeNeighborSet(ns, ligand, aRadius)
rm[ligand] = neighborset
return rm
#given a BioPython.NeighborSearch object a ligand residue and a radius
# this method iterates over all the atoms in a ligand and finds the
# residue neighbors of every atom and stores them in a set
# this method then returns a set of all the residues that exist
# within the specified threshold distance (aRadius) of aLigand
def computeNeighborSet(aNeighborSearch, aLigand, aRadius):
rm = set()
#get the atoms ('A') of this residue
ligand_atom_list = Selection.unfold_entities(aLigand, 'A')
#iterate over the list of atoms
for anAtom in ligand_atom_list:
#set a center
center = anAtom.get_coord()
neighbor_atoms = aNeighborSearch.search(center, aRadius)
if neighbor_atoms:
#get the residues that those atoms correspond to
nr = Selection.unfold_entities(neighbor_atoms, 'R')
#now add these residues to the rm set
for ar in nr:
#do not add the residue that may have the ligand residue name in it
pos = ar.get_resname().find(aLigand.get_resname())
if pos == -1:
rm.add(ar)
return rm
#downloads the latest version of the google doc
def fetchLigandList(aUrl):
rm = []
print "fetching latest ligand list ..."
    r = urllib2.urlopen(aUrl)
reader = csv.reader(r)
rownum = 0;
for row in reader:
if rownum != 0:
ligand_check = row[6]
if ligand_check == 'Y':
rm.append(row)
rownum += 1
if len(rm) == 0:
raise Exception ("No valid ligands found in list !")
sys.exit()
return rm
# search aListOfLigands for a given ligand id
# aListOfLigands is derived from the google spreadsheet
# returns the ligid or None if not found
def findLigandIdInListOfLigands(aListOfLigands, aLigId):
for al in aListOfLigands:
anid = al[0]
if anid == aLigId:
return anid
return None
#retrieve a list of files with .pdb as extension from the given local directory (alocaldir)
def fetchPdbFilePaths(alocaldir):
rm = []
for fn in os.listdir(alocaldir):
fileName, fileExtension = os.path.splitext(alocaldir+'/'+fn)
if fileExtension == '.pdb':
rm.append(fileName+fileExtension)
if len(rm) == 0:
raise Exception("No pdb files found in provided folder!")
return rm
#start the program
if __name__ == '__main__':
main(sys.argv) | jctoledo/ligandneighbours | ligandneighbours.py | Python | mit | 10,769 | 0.023865 |
# -*- coding: utf-8 -*-
# Future
from __future__ import absolute_import, division, print_function, \
unicode_literals, with_statement
# Standard Library
from datetime import datetime
# Third Party
import matplotlib.pyplot as plt
from matplotlib import cm
from mpl_toolkits.mplot3d import Axes3D # Load 3d plots capabilities
# First Party
from metaopt.plugin.plugin import Plugin
NUMBER_OF_SAMPLES = 200
COLORMAP = cm.jet
REVERSED_COLORMAP = cm.jet_r
class VisualizeBestFitnessPlugin(Plugin):
"""Visualize optimization progess"""
def __init__(self):
self.best_fitnesses = []
self.timestamps = []
self.start_time = None
self.current_best = None
self.return_spec = None
def setup(self, f, param_spec, return_spec):
del f, param_spec
self.return_spec = return_spec
if not self.start_time:
self.start_time = datetime.now()
def on_result(self, invocation):
fitness = invocation.current_result
if self.current_best is None or fitness < self.current_best:
self.current_best = fitness
self.best_fitnesses.append(self.current_best.raw_values)
time_delta = datetime.now() - self.start_time
self.timestamps.append(time_delta.total_seconds())
def show_fitness_invocations_plot(self):
"""Show a fitness--invocations plot"""
fig = plt.figure()
ax = fig.add_subplot(111)
ax.set_xlabel("Number of Invocations")
ax.set_ylabel(self.get_y_label())
ax.plot(self.best_fitnesses)
plt.show()
def show_fitness_time_plot(self):
"""Show a fitness--time plot"""
fig = plt.figure()
ax = fig.add_subplot(111)
ax.set_xlabel("Time")
ax.set_ylabel(self.get_y_label())
ax.plot(self.timestamps, self.best_fitnesses)
plt.show()
def get_y_label(self):
return self.return_spec.return_values[0]["name"]
| cigroup-ol/metaopt | metaopt/plugin/visualization/best_fitness.py | Python | bsd-3-clause | 1,969 | 0 |
#!/usr/bin/env python3
import fileinput
import string
import sys
DELETE = ''
REPLACE = {'“': '``',
'”': '\'\'',
'’': '\'',
'\\': '\\textbackslash ',
'*': '\\textasteriskcentered ',
'_': '\\_',
'#': '\\#',
'$': '\\$',
'%': '\\%',
'{': '\\{',
'}': '\\}',
'&': '\\&',
'…': '\\dots ',
'~': '\\~{}',
'^': '\\^{}'}
def main():
all_deleted, all_replaced, all_specials = set(), set(), set()
for line in fileinput.input():
line, deleted = delete(line, DELETE)
all_deleted.update(deleted)
line, replaced = replace(line, REPLACE)
all_replaced.update(replaced)
specials = special_characters(line)
all_specials.update(specials)
sys.stdout.write(line)
print('Deleted characters: {}'.format(' '.join(sorted(all_deleted))),
file=sys.stderr)
print('Replaced characters: {}'.format(' '.join(sorted(all_replaced))),
file=sys.stderr)
prtxt = 'Remaining special characters: {}'
print(prtxt.format(' '.join(sorted(all_specials))),
file=sys.stderr)
def delete(text, illegals):
deleted = {char for char in illegals if char in text}
table = {char: None for char in illegals}
text = text.translate(str.maketrans(table))
return text, deleted
def replace(text, table):
replaced = {char for char in table if char in text}
text = text.translate(str.maketrans(table))
return text, replaced
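# Illustrative behaviour of delete/replace given the tables above (the
# trailing space in several REPLACE values is intentional):
#   replace('100% done', REPLACE) -> ('100\\% done', {'%'})
#   delete('abc', 'b')            -> ('ac', {'b'})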
def special_characters(text):
return {char for char in text if char not in string.printable}
if __name__ == '__main__':
main()
| st3f4n/latex-sanitizer | sanitize.py | Python | gpl-3.0 | 1,706 | 0 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-06-05 13:59
from __future__ import unicode_literals
import django.core.validators
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('marketplace', '0012_auto_20170604_1335'),
]
operations = [
migrations.AlterField(
model_name='package',
name='name',
field=models.CharField(max_length=255, unique=True, validators=[django.core.validators.RegexValidator('^[a-z]*$', 'Only lowercase letters are allowed.')]),
),
]
| MOOCworkbench/MOOCworkbench | marketplace/migrations/0013_auto_20170605_1359.py | Python | mit | 601 | 0.001664 |
import Base
import VS
import GUI
import XGUITypes
import XGUIDebug
XGUIRootSingleton = None
XGUIPythonScriptAPISingleton = None
"""----------------------------------------------------------------"""
""" """
""" XGUIRoot - root management interface for the XML-GUI framework."""
""" """
"""----------------------------------------------------------------"""
class XGUIRoot:
def __init__(self):
self.templates = {}
def getTemplate(self,type,name):
if type in self.templates and name in self.templates[type]:
return self.templates[type][name]
else:
return None
def addTemplate(self,tpl):
type = tpl.getType()
name = tpl.getName()
        if type not in self.templates:
XGUIDebug.trace(1,"XGUI: Initializing template category \"" + str(type) + "\"\n")
self.templates[type] = {}
XGUIDebug.trace(2,"XGUI: Loading template \"" + str(name) + "\" into category \"" + str(type) + "\"\n")
self.templates[type][name] = tpl
class XGUIPythonScript:
def __init__(self,code,filename):
code = code.replace("\r\n","\n")
code += "\n"
self.code = compile(code,filename,'exec')
def execute(self,context):
exec(self.code, context)
return context
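# Illustrative use of XGUIPythonScript (a sketch, not part of the original
# framework wiring): compile a snippet once, then run it against a context.
#   script = XGUIPythonScript("result = 6 * 7", "<demo>")
#   ctx = script.execute({})
#   assert ctx['result'] == 42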
"""----------------------------------------------------------------"""
""" """
""" XGUIPythonScriptAPI - through this class, all PythonScript """
""" API calls are routed. """
""" """
"""----------------------------------------------------------------"""
class XGUIPythonScriptAPI:
def __init__(self,layout,room):
self.layout = layout
self.room = room
"""----------------------------------------------------------------"""
""" """
""" XGUI global initialization """
""" """
"""----------------------------------------------------------------"""
def XGUIInit():
    global XGUIRootSingleton
    XGUIRootSingleton = XGUIRoot()
| vegastrike/Assets-Production | modules/XGUI.py | Python | gpl-2.0 | 2,379 | 0.008407 |
# -*- coding: utf-8 -*-
#
# test_quantal_stp_synapse.py
#
# This file is part of NEST.
#
# Copyright (C) 2004 The NEST Initiative
#
# NEST is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# NEST is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NEST. If not, see <http://www.gnu.org/licenses/>.
# This script compares the two variants of the Tsodyks/Markram synapse in NEST.
import nest
import numpy
import unittest
@nest.check_stack
class QuantalSTPSynapseTestCase(unittest.TestCase):
"""Compare quantal_stp_synapse with its deterministic equivalent."""
def test_QuantalSTPSynapse(self):
"""Compare quantal_stp_synapse with its deterministic equivalent"""
nest.ResetKernel()
nest.set_verbosity(100)
n_syn = 12 # number of synapses in a connection
n_trials = 50 # number of measurement trials
# parameter set for facilitation
fac_params = {"U": 0.03, "u": 0.03,
"tau_fac": 500., "tau_rec": 200., "weight": 1.}
dep_params = {"U": 0.5, "u": 0.5, "tau_fac": 15.,
"tau_rec": 670., "weight": 1.}
lin_params = {"U": 0.3, "u": 0.3, "tau_fac": 330.,
"tau_rec": 330., "weight": 1.}
# Here we assign the parameter set to the synapse models
t1_params = fac_params # for tsodyks2_synapse
t2_params = t1_params.copy() # for furhmann_synapse
t2_params['n'] = n_syn
t2_params['weight'] = 1. / n_syn
nest.SetDefaults("tsodyks2_synapse", t1_params)
nest.SetDefaults("quantal_stp_synapse", t2_params)
nest.SetDefaults("iaf_psc_exp", {"tau_syn_ex": 3., 'tau_m': 70.})
source = nest.Create('spike_generator')
nest.SetStatus(
source,
{
'spike_times': [
30., 60., 90., 120., 150., 180., 210., 240.,
270., 300., 330., 360., 390., 900.]
}
)
parrot = nest.Create('parrot_neuron')
neuron = nest.Create("iaf_psc_exp", 2)
# We must send spikes via parrot because devices cannot
# connect through plastic synapses
# See #478.
nest.Connect(source, parrot)
nest.Connect(parrot, neuron[:1], syn_spec="tsodyks2_synapse")
nest.Connect(parrot, neuron[1:], syn_spec="quantal_stp_synapse")
voltmeter = nest.Create("voltmeter", 2)
nest.SetStatus(voltmeter, {"withgid": False, "withtime": True})
        t_plot = 1000
        t_tot = 1500
        # the following is a dry run trial so that the synapse dynamics is
        # identical in all subsequent trials.
nest.Simulate(t_tot)
# Now we connect the voltmeters
nest.Connect([voltmeter[0]], [neuron[0]])
nest.Connect([voltmeter[1]], [neuron[1]])
for t in range(n_trials):
t_net = nest.GetKernelStatus('time')
nest.SetStatus(source, {'origin': t_net})
nest.Simulate(t_tot)
nest.Simulate(.1) # flush the last voltmeter events from the queue
vm = numpy.array(nest.GetStatus([voltmeter[1]], 'events')[0]['V_m'])
vm_reference = numpy.array(nest.GetStatus(
[voltmeter[0]], 'events')[0]['V_m'])
vm.shape = (n_trials, t_tot)
vm_reference.shape = (n_trials, t_tot)
vm_mean = numpy.array([numpy.mean(vm[:, i])
for i in range(int(t_tot))])
vm_ref_mean = numpy.array(
[numpy.mean(vm_reference[:, i]) for i in range(int(t_tot))])
error = numpy.sqrt((vm_ref_mean[:t_plot] - vm_mean[:t_plot])**2)
self.assertTrue(numpy.max(error) < 4.0e-4)
def suite():
suite = unittest.makeSuite(QuantalSTPSynapseTestCase, 'test')
return suite
def run():
runner = unittest.TextTestRunner(verbosity=2)
runner.run(suite())
if __name__ == "__main__":
run()
| HBPNeurorobotics/nest-simulator | pynest/nest/tests/test_quantal_stp_synapse.py | Python | gpl-2.0 | 4,353 | 0 |
import clr
clr.AddReference('RevitAPI')
from Autodesk.Revit.DB import *
clr.AddReference("RevitServices")
import RevitServices
from RevitServices.Persistence import DocumentManager
from RevitServices.Transactions import TransactionManager
doc = DocumentManager.Instance.CurrentDBDocument
faminstances = UnwrapElement(IN[0])
booleans = []
TransactionManager.Instance.EnsureInTransaction(doc)
for item in faminstances:
try:
item.FlipFromToRoom()
booleans.append(True)
except:
booleans.append(False)
TransactionManager.Instance.TransactionTaskDone()
OUT = (faminstances,booleans) | andydandy74/ClockworkForDynamo | nodes/2.x/python/FamilyInstance.FlipFromToRoom.py | Python | mit | 612 | 0.011438 |
import tests.periodicities.period_test as per
per.buildModel((7 , 'T' , 1600));
| antoinecarme/pyaf | tests/periodicities/Minute/Cycle_Minute_1600_T_7.py | Python | bsd-3-clause | 82 | 0.04878 |
"""Rules are based on Brunot & Bruneau (1949)."""
import re
estre_replace = [('^sereient$|^fussions$|^fussiens$|^sereies$|^sereiet$|^serïens$|^seriiez$|^fussiez$|^fussent$|^ierent$|^fustes$|^furent$|^ierent$|^sereie$|^seroie$|^sereit$|^seiens$|^seient$|^fusses$|^fussez$|^estant$|^seiens$|^somes$|^estes$|^ieres$|^ieres$|^eiens$|^eriez$|^erent$|^fumes$|^irmes$|^ertes$|^seies$|^seiet$|^seiez$|^fusse$|^seies$|^seiez$|^suis$|^sont$|^iere$|^eres$|^eret$|^iers$|^iert$|^seie$|^seit$|^fust$|^esté$|^ies$|^est$|^ere$|^ert$|^fui$|^fus$|^ier$|^ert$|^es$|^fu$', 'estre')]
avoir_replace = [('^avreient$|^avroient$|^eüssions$|^eüssiens$|^avrarai$|^avreies$|'
'^avroies$|^avreiet$|^avroiet$|^avrïens$|^avrïons$|^avriiez$|'
'^eüssiez$|^eüssent$|^eüstes$|^óurent$|^avrons$|^avront$|^avreie$|'
'^avrïez$|^eüsses$|^eüssez$|^avons$|^eümes$|^orent$|^avrai$|'
'^avras$|^avrez$|^aiens$|^ayons$|^aient$|^eüsse$|^avez$|^avra$|'
'^arai$|^aies$|^aiet$|^aiez$|^ayez$|^eüst$|^ont$|^eüs$|'
'^oüs$|^óut$|^oiz$|^aie$|^ait$|^ai$|^as$|^at$|^oi$|'
'^ot$|^oü$|^eü$|^a$', 'avoir')]
auxiliary_rules = estre_replace+avoir_replace
first_conj_rules = [('es$|e$|ons$|ez$|ent$|z$|(e)ai$|(e)as$|(e)a$|(e)at$|(e)ames$|(e)astes$|(e)erent$|(e)asse$|é$', 'er')]
i_type_rules = [('i$|is$|it$|imes$|istes$|irent$|isse$', 'ir')]
u_type_rules = [('ui$|us$|ut$|umes$|ustes$|urent$|usse$', 'oir')]
verbal_rules = u_type_rules+i_type_rules+first_conj_rules
regime_rules = [('on$|ain$', 'e')]
plural_rules = [('ales$|aux$|aus$', 'al'),
('s$', '')]
masc_to_fem_rules = [('se$', 'x'),
('ive$', 'if'),
('ee$', 'e')]
french_nominal_rules = regime_rules+plural_rules+masc_to_fem_rules
misc_rules = [('x$', 'l'),
('z$', 't'),
('un$', 'on'),
('eus$', 'os'),
('^e$', 'et')]
determiner_rules= [('^li$|^lo$|^le$|^la$|^les$', 'le'),
('^del$|^du$', 'de le'),
('^al$|^au$', 'a le'),
('^as$|^aus$|^aux$', "a les"),
('^uns$|^une$|^unes$', 'un')]
reduction_rules = [("d'", 'de'),
("m'", 'me'),
("t'", 'te'),
("l'", 'le'),
("qu'", "que")]
patterns = determiner_rules+misc_rules+auxiliary_rules+verbal_rules+french_nominal_rules+reduction_rules
def build_match_and_apply_functions(pattern, replace):
def matches_rule(word):
return re.search(pattern, word)
def apply_rule(word):
return re.sub(pattern, replace, word)
return (matches_rule, apply_rule)
rules = [build_match_and_apply_functions(pattern, replace)
for (pattern, replace) in patterns]
def regex(token):
for matches_rule, apply_rule in rules:
if matches_rule(token):
return apply_rule(token)
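# Illustrative results given the rule tables above (regex() returns None
# when no pattern matches the token):
#   regex('chantent') -> 'chanter'   (first conjugation, 'ent$' -> 'er')
#   regex('fussent')  -> 'estre'     (auxiliary form from estre_replace)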
| LBenzahia/cltk | cltk/lemmatize/french/french.py | Python | mit | 2,931 | 0.006188 |
# -*- coding: utf-8 -*-
#===============================================================================
#
# Copyright 2013 Horacio Guillermo de Oro <hgdeoro@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#===============================================================================
import datetime
import os
import wx
from gmp.garnisher import do_garnish, BORDER_SIZE_BOTTOM
from gmp.utils import GMP_OUTPUT_DIR, GMP_AUTHOR, GMP_FONT, \
GMP_DEFAULT_FONT_SIZE, GMP_OUTPUT_QUALITY, GMP_BORDER, GMP_COLOR, \
GMP_DEFAULT_MAX_SIZE, GMP_TITLE, GMP_TITLE_IMAGE, GMP_EXIF_COPYRIGHT
class MyFileDropTarget(wx.FileDropTarget):
def __init__(self, window):
wx.FileDropTarget.__init__(self)
self.window = window
def OnDropFiles(self, x, y, filenames):
"""
When files are dropped, write where they were dropped and then
the file paths themselves
"""
self.window.SetInsertionPointEnd()
self.window.clearText()
for filepath in filenames:
self.window.updateText(filepath + '\n')
self.window.updateText(" + Procesing " + os.path.normpath(os.path.abspath(filepath)) + "...")
self.window.refreshWindow()
exit_status = do_garnish(filepath, GMP_OUTPUT_DIR,
author=GMP_AUTHOR,
overwrite=True,
font_file=GMP_FONT,
font_size=GMP_DEFAULT_FONT_SIZE,
output_quality=GMP_OUTPUT_QUALITY,
border_size=GMP_BORDER,
border_color=GMP_COLOR,
border_size_bottom=BORDER_SIZE_BOTTOM,
max_size=[int(x) for x in GMP_DEFAULT_MAX_SIZE.split('x')],
title=GMP_TITLE,
title_img=GMP_TITLE_IMAGE,
year=datetime.date.today().year,
technical_info=True,
exif_copyright=GMP_EXIF_COPYRIGHT,
rotate=0,
)
self.window.updateText(" OK\n")
self.window.refreshWindow()
self.window.updateText("\nFinished!\n")
class DnDPanel(wx.Panel):
def __init__(self, parent):
wx.Panel.__init__(self, parent=parent)
file_drop_target = MyFileDropTarget(self)
lbl = wx.StaticText(self, label="Drag file to process here:")
self.fileTextCtrl = wx.TextCtrl(self,
style=wx.TE_MULTILINE | wx.HSCROLL | wx.TE_READONLY)
self.fileTextCtrl.SetDropTarget(file_drop_target)
sizer = wx.BoxSizer(wx.VERTICAL)
sizer.Add(lbl, 0, wx.ALL, 5)
sizer.Add(self.fileTextCtrl, 1, wx.EXPAND | wx.ALL, 5)
self.SetSizer(sizer)
def SetInsertionPointEnd(self):
"""
Put insertion point at end of text control to prevent overwriting
"""
self.fileTextCtrl.SetInsertionPointEnd()
def updateText(self, text):
"""
Write text to the text control
"""
self.fileTextCtrl.WriteText(text)
def clearText(self):
self.fileTextCtrl.Clear()
def refreshWindow(self):
self.Refresh()
self.Update()
self.UpdateWindowUI()
class DnDFrame(wx.Frame):
def __init__(self):
wx.Frame.__init__(self, parent=None, title="DnD for GMP")
panel = DnDPanel(self)
self.Show()
if __name__ == "__main__":
app = wx.App(False)
frame = DnDFrame()
app.MainLoop()
| hgdeoro/GarnishMyPic | gmp/dnd.py | Python | gpl-3.0 | 4,062 | 0.003693 |
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
from openerp.tools.translate import _
class account_change_currency(osv.osv_memory):
_name = 'account.change.currency'
_description = 'Change Currency'
_columns = {
'currency_id': fields.many2one('res.currency', 'Change to', required=True, help="Select a currency to apply on the invoice"),
}
    def view_init(self, cr, uid, fields_list, context=None):
obj_inv = self.pool.get('account.invoice')
if context is None:
context = {}
if context.get('active_id',False):
if obj_inv.browse(cr, uid, context['active_id']).state != 'draft':
raise osv.except_osv(_('Error!'), _('You can only change currency for Draft Invoice.'))
pass
def change_currency(self, cr, uid, ids, context=None):
obj_inv = self.pool.get('account.invoice')
obj_inv_line = self.pool.get('account.invoice.line')
obj_currency = self.pool.get('res.currency')
if context is None:
context = {}
data = self.browse(cr, uid, ids, context=context)[0]
new_currency = data.currency_id.id
invoice = obj_inv.browse(cr, uid, context['active_id'], context=context)
if invoice.currency_id.id == new_currency:
return {}
rate = obj_currency.browse(cr, uid, new_currency, context=context).rate
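        # Worked example, assuming the usual convention that a currency rate
        # is expressed per unit of company currency: with company and invoice
        # currency EUR and a new currency USD whose rate is 1.10, a line of
        # 100.0 becomes 100.0 * 1.10 = 110.0 in the first branch below.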
for line in invoice.invoice_line:
new_price = 0
if invoice.company_id.currency_id.id == invoice.currency_id.id:
new_price = line.price_unit * rate
if new_price <= 0:
raise osv.except_osv(_('Error!'), _('New currency is not configured properly.'))
if invoice.company_id.currency_id.id != invoice.currency_id.id and invoice.company_id.currency_id.id == new_currency:
old_rate = invoice.currency_id.rate
if old_rate <= 0:
raise osv.except_osv(_('Error!'), _('Current currency is not configured properly.'))
new_price = line.price_unit / old_rate
if invoice.company_id.currency_id.id != invoice.currency_id.id and invoice.company_id.currency_id.id != new_currency:
old_rate = invoice.currency_id.rate
if old_rate <= 0:
raise osv.except_osv(_('Error!'), _('Current currency is not configured properly.'))
new_price = (line.price_unit / old_rate ) * rate
obj_inv_line.write(cr, uid, [line.id], {'price_unit': new_price})
obj_inv.write(cr, uid, [invoice.id], {'currency_id': new_currency}, context=context)
return {'type': 'ir.actions.act_window_close'}
| OpusVL/odoo | addons/account/wizard/account_change_currency.py | Python | agpl-3.0 | 3,683 | 0.003801 |
## www.pubnub.com - PubNub Real-time push service in the cloud.
# coding=utf8
## PubNub Real-time Push APIs and Notifications Framework
## Copyright (c) 2010 Stephen Blum
## http://www.pubnub.com/
import sys
from pubnub import PubnubTornado as Pubnub
publish_key = len(sys.argv) > 1 and sys.argv[1] or 'demo'
subscribe_key = len(sys.argv) > 2 and sys.argv[2] or 'demo'
secret_key = len(sys.argv) > 3 and sys.argv[3] or 'demo'
cipher_key = len(sys.argv) > 4 and sys.argv[4] or ''
ssl_on = len(sys.argv) > 5 and bool(sys.argv[5]) or False
## -----------------------------------------------------------------------
## Initiate Pubnub State
## -----------------------------------------------------------------------
pubnub = Pubnub(publish_key=publish_key, subscribe_key=subscribe_key,
secret_key=secret_key, cipher_key=cipher_key, ssl_on=ssl_on)
channel = 'hello_world'
# Asynchronous usage
def callback(message):
print(message)
pubnub.here_now(channel, callback=callback, error=callback)
pubnub.start()
| teddywing/pubnub-python | python-tornado/examples/here-now.py | Python | mit | 1,031 | 0.007759 |
import string
import random
import webserver.views.api.exceptions
def generate_string(length):
"""Generates random string with a specified length."""
return ''.join([random.SystemRandom().choice(
string.ascii_letters + string.digits
) for _ in range(length)])
def reformat_date(value, fmt="%b %d, %Y"):
return value.strftime(fmt)
def reformat_datetime(value, fmt="%b %d, %Y, %H:%M %Z"):
return value.strftime(fmt)
def validate_offset(offset):
"""Validate the offset.
If the offset is None, return 0, otherwise interpret it as a number. If it is
not a number, raise 400.
"""
if offset:
try:
offset = int(offset)
except ValueError:
raise webserver.views.api.exceptions.APIBadRequest("Offset must be an integer value")
else:
offset = 0
return offset
| metabrainz/acousticbrainz-server | webserver/utils.py | Python | gpl-2.0 | 860 | 0.002326 |
# coding: utf-8
# ## Plot velocity from non-CF HOPS dataset
# In[5]:
get_ipython().magic(u'matplotlib inline')
import netCDF4
import matplotlib.pyplot as plt
# In[6]:
url='http://geoport.whoi.edu/thredds/dodsC/usgs/data2/rsignell/gdrive/nsf-alpha/Data/MIT_MSEAS/MSEAS_Tides_20160317/mseas_tides_2015071612_2015081612_01h.nc'
# In[8]:
nc = netCDF4.Dataset(url)
# In[9]:
ncv = nc.variables
# In[ ]:
# extract lon,lat variables from vgrid2 variable
lon = ncv['vgrid2'][:,:,0]
lat = ncv['vgrid2'][:,:,1]
# In[20]:
# extract u,v variables from vbaro variable
itime = -1
u = ncv['vbaro'][itime,:,:,0]
v = ncv['vbaro'][itime,:,:,1]
# In[30]:
n=10
fig = plt.figure(figsize=(12,8))
plt.quiver(lon[::n,::n],lat[::n,::n],u[::n,::n],v[::n,::n])
#plt.axis([-70.6,-70.4,41.2,41.4])
# In[ ]:
# In[ ]:
# In[ ]:
| rsignell-usgs/notebook | HOPS/hops_velocity.py | Python | mit | 830 | 0.03253 |
# SConsBuildFramework - Copyright (C) 2013, Nicolas Papier.
# Distributed under the terms of the GNU General Public License (GPL)
# as published by the Free Software Foundation.
# Author Nicolas Papier
import os
from src.sbfRsync import createRsyncAction
from src.SConsBuildFramework import stringFormatter
# To be able to use SConsBuildFramework.py without SCons
import __builtin__
try:
from SCons.Script import *
except ImportError as e:
if not hasattr(__builtin__, 'SConsBuildFrameworkQuietImport'):
print ('sbfWarning: unable to import SCons.[Environment,Options,Script]')
### special doxygen related targets : dox_build dox_install dox dox_clean dox_mrproper ###
def printDoxygenBuild( target, source, localenv ) :
return '\n' + stringFormatter( localenv, "Build documentation with doxygen" )
def printDoxygenInstall( target, source, localenv ) :
return '\n' + stringFormatter( localenv, "Install doxygen documentation" )
# Creates a custom doxyfile
def doxyfileAction( target, source, env ) :
sbf = env.sbf
# Compute inputList, examplePath and imagePath parameters of doxyfile
inputList = ''
examplePath = ''
imagePath = ''
for projectName in sbf.myParsedProjects :
localenv = sbf.myParsedProjects[projectName]
projectPathName = localenv['sbf_projectPathName']
newPathEntry = os.path.join(projectPathName, 'include') + ' '
if os.path.exists( newPathEntry ) :
inputList += newPathEntry
newPathEntry = os.path.join(projectPathName, 'src') + ' '
if os.path.exists( newPathEntry ) :
inputList += newPathEntry
newPathEntry = os.path.join(projectPathName, 'doc') + ' '
if os.path.exists( newPathEntry ) :
inputList += newPathEntry
newPathEntry = os.path.join(projectPathName, 'doc', 'example') + ' '
if os.path.exists( newPathEntry ) :
examplePath += newPathEntry
newPathEntry = os.path.join(projectPathName, 'doc', 'image') + ' '
if os.path.exists( newPathEntry ) :
imagePath += newPathEntry
# Create a custom doxyfile
import shutil
targetName = str(target[0])
sourceName = str(source[0])
print 'Generating {}'.format( targetName )
shutil.copyfile(sourceName, targetName) # or env.Execute( Copy(targetName, sourceName) )
with open( targetName, 'a' ) as file:
file.write( '\n### Added by SConsBuildFramework\n' )
file.write( 'LAYOUT_FILE = "%s"\n' % os.path.join(sbf.mySCONS_BUILD_FRAMEWORK, 'DoxygenLayout.xml') )
file.write( 'PROJECT_NAME = "%s"\n' % sbf.myProject )
file.write( 'PROJECT_NUMBER = "%s generated at %s"\n' % (sbf.myVersion, sbf.myDateTime) )
file.write( 'OUTPUT_DIRECTORY = "%s"\n' % (targetName + '_build') )
file.write( 'INPUT = %s\n' % inputList )
#FIXME: FILE_PATTERNS, EXCLUDE, EXCLUDE_PATTERNS
file.write( 'EXAMPLE_PATH = %s\n' % examplePath )
file.write( 'IMAGE_PATH = %s\n' % imagePath )
file.write( 'ENABLED_SECTIONS = %s\n' % sbf.myProject )
# Synchronizes files from source to target.
# target should be yourDestinationPath/dummy.out
# Recursively copy the entire directory tree rooted at source to the destination directory (named by os.path.dirname(target)).
# Remark: the destination directory will be removed before the copying occurs (even if not empty, so be careful).
def syncAction( target, source, env ) :
import shutil
sourcePath = str(source[0])
destinationPath = os.path.dirname(str(target[0]))
print 'Copying %s at %s' % (sourcePath, destinationPath)
    if ( os.path.ismount(destinationPath) ) :
        print 'sbfError: Trying to use %s as an installation/deinstallation directory. Stopping action to prevent any unwanted file destruction.' % destinationPath
        return None
shutil.rmtree( destinationPath, True )
if ( os.path.isdir( os.path.dirname(destinationPath) ) == False ):
os.makedirs( os.path.dirname(destinationPath) )
shutil.copytree( sourcePath, destinationPath )
def configureDoxTarget( env ):
# @todo improves output message
sbf = env.sbf
if ( ('dox_build' in sbf.myBuildTargets) or
('dox_install' in sbf.myBuildTargets) or
('dox' in sbf.myBuildTargets) or
('dox_clean' in sbf.myBuildTargets) or
('dox_mrproper' in sbf.myBuildTargets) ):
if ( ('dox_clean' in sbf.myBuildTargets) or
('dox_mrproper' in sbf.myBuildTargets) ):
env.SetOption('clean', 1)
#@todo use other doxyfile(s). see doxInputDoxyfile
doxInputDoxyfile = os.path.join(sbf.mySCONS_BUILD_FRAMEWORK, 'doxyfile')
doxOutputPath = os.path.join(sbf.myBuildPath, 'doxygen', sbf.myProject, sbf.myVersion )
doxOutputCustomDoxyfile = os.path.join(doxOutputPath, 'doxyfile.sbf')
doxBuildPath = os.path.join(doxOutputPath, 'doxyfile.sbf_build')
doxInstallPath = os.path.join(sbf.myInstallDirectory, 'doc', sbf.myProject, sbf.myVersion)
# target dox_build
commandGenerateDoxyfile = env.Command( doxOutputCustomDoxyfile, doxInputDoxyfile, Action(doxyfileAction, printDoxygenBuild) )
env.Alias( 'dox_build', commandGenerateDoxyfile )
commandCompileDoxygen = env.Command( 'dox_build.out', 'dummy.in', 'doxygen ' + doxOutputCustomDoxyfile )
env.Alias( 'dox_build', commandCompileDoxygen )
env.AlwaysBuild( [commandGenerateDoxyfile, commandCompileDoxygen] )
env.Depends( commandCompileDoxygen, commandGenerateDoxyfile )
# target dox_install
dox_install_cmd = env.Command( os.path.join(doxInstallPath,'dummy.out'), Dir(os.path.join(doxBuildPath, 'html')), Action(syncAction, printDoxygenInstall) )
env.Alias( 'dox_install', [ 'dox_build', dox_install_cmd ] )
env.AlwaysBuild( dox_install_cmd )
env.Depends( dox_install_cmd, 'dox_build' )
# target dox
env.Alias( 'dox', 'dox_install' )
if env['publishOn'] :
rsyncAction = createRsyncAction( env, 'doc_%s_%s' % (sbf.myProject, sbf.myVersion), Dir(os.path.join(doxBuildPath, 'html')), 'dox' )
env.Depends( rsyncAction, 'dox_install' )
# target dox_clean
env.Alias( 'dox_clean', 'dox' )
env.Clean( 'dox_clean', doxOutputPath )
# target dox_mrproper
env.Alias( 'dox_mrproper', 'dox_clean' )
env.Clean( 'dox_mrproper', doxInstallPath )
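# Typical invocations (illustrative; the target names are the aliases
# registered above in configureDoxTarget):
#   scons dox_build     # generate doxyfile.sbf and run doxygen
#   scons dox           # build, then install the HTML documentation
#   scons dox_mrproper  # clean the build and the installed documentation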
| npapier/sbf | src/sbfDoxygen.py | Python | gpl-3.0 | 6,169 | 0.041984 |
import os
import re
import asyncio
import logging
from collections import OrderedDict
from pypeman.message import Message
from pypeman.errors import PypemanConfigError
logger = logging.getLogger("pypeman.store")
DATE_FORMAT = '%Y%m%d_%H%M'
class MessageStoreFactory():
""" Message store factory class can generate Message store instance for specific store_id. """
def get_store(self, store_id):
"""
:param store_id: identifier of corresponding message store.
:return: A MessageStore corresponding to correct store_id.
"""
class MessageStore():
""" A MessageStore keep an history of processed messages. Mainly used in channels. """
async def start(self):
"""
Called at startup to initialize store.
"""
async def store(self, msg):
"""
Store a message in the store.
:param msg: The message to store.
:return: Id for this specific message.
"""
async def change_message_state(self, id, new_state):
"""
Change the `id` message state.
:param id: Message specific store id.
:param new_state: Target state.
"""
async def get(self, id):
"""
Return one message corresponding to given `id` with his status.
        :param id: Message id. Message store dependent.
:return: A dict `{'id':<message_id>, 'state': <message_state>, 'message': <message_object>}`.
"""
async def search(self, start=0, count=10, order_by='timestamp'):
"""
        Return a list of messages with store-specific `id` and processed status.
:param start: First element.
:param count: Count of elements since first element.
:param order_by: Message order. Allowed values : ['timestamp', 'status'].
:return: A list of dict `{'id':<message_id>, 'state': <message_state>, 'message': <message_object>}`.
"""
async def total(self):
"""
:return: total count of messages
"""
class NullMessageStoreFactory(MessageStoreFactory):
""" Return an NullMessageStore that do nothing at all. """
def get_store(self, store_id):
return NullMessageStore()
class NullMessageStore(MessageStore):
""" For testing purpose """
async def store(self, msg):
return None
async def get(self, id):
return None
async def search(self, **kwargs):
return None
async def total(self):
return 0
class FakeMessageStoreFactory(MessageStoreFactory):
""" Return an Fake message store """
def get_store(self, store_id):
return FakeMessageStore()
class FakeMessageStore(MessageStore):
""" For testing purpose """
async def store(self, msg):
logger.debug("Should store message %s", msg)
return 'fake_id'
async def get(self, id):
return {'id':id, 'state': 'processed', 'message': None}
async def search(self, **kwargs):
return []
async def total(self):
return 0
class MemoryMessageStoreFactory(MessageStoreFactory):
""" Return a Memory message store. All message are lost at pypeman stop. """
def __init__(self):
self.base_dict = {}
def get_store(self, store_id):
return MemoryMessageStore(self.base_dict, store_id)
class MemoryMessageStore(MessageStore):
""" Store messages in memory """
def __init__(self, base_dict, store_id):
super().__init__()
self.messages = base_dict.setdefault(store_id, OrderedDict())
async def store(self, msg):
msg_id = msg.uuid
self.messages[msg_id] = {'id': msg_id, 'state': Message.PENDING, 'timestamp': msg.timestamp, 'message': msg.to_dict()}
return msg_id
async def change_message_state(self, id, new_state):
self.messages[id]['state'] = new_state
async def get(self, id):
resp = dict(self.messages[id])
resp['message'] = Message.from_dict(resp['message'])
return resp
async def search(self, start=0, count=10, order_by='timestamp'):
if order_by.startswith('-'):
reverse = True
sort_key = order_by[1:]
else:
reverse = False
sort_key = order_by
result = []
for value in sorted(self.messages.values(), key=lambda x: x[sort_key], reverse=reverse):
resp = dict(value)
resp['message'] = Message.from_dict(resp['message'])
result.append(resp)
return result[start: start + count]
async def total(self):
return len(self.messages)
class FileMessageStoreFactory(MessageStoreFactory):
"""
Generate a FileMessageStore message store instance.
    Store each file in a `<base_path>/<store_id>/<year>/<month>/<day>/` hierarchy.
"""
# TODO add an option to reguraly archive old file or delete them
def __init__(self, path):
super().__init__()
if path is None:
raise PypemanConfigError('file message store requires a path')
self.base_path = path
def get_store(self, store_id):
return FileMessageStore(self.base_path, store_id)
class FileMessageStore(MessageStore):
""" Store a file in `<base_path>/<store_id>/<month>/<day>/` hierachy."""
# TODO file access should be done in another thread. Waiting for file backend.
def __init__(self, path, store_id):
super().__init__()
self.base_path = os.path.join(path, store_id)
# Match msg file name
self.msg_re = re.compile(r'^([0-9]{8})_([0-9]{2})([0-9]{2})_[0-9abcdef]*$')
try:
# Try to make dirs if necessary
os.makedirs(os.path.join(self.base_path))
except FileExistsError:
pass
self._total = 0
async def start(self):
self._total = await self.count_msgs()
async def store(self, msg):
""" Store a file in `<base_path>/<store_id>/<month>/<day>/` hierachy."""
# TODO implement a safer store to avoid broken messages
# The filename is the file id
filename = "{}_{}".format(msg.timestamp.strftime(DATE_FORMAT), msg.uuid)
dirs = os.path.join(str(msg.timestamp.year), "%02d" % msg.timestamp.month, "%02d" % msg.timestamp.day)
try:
# Try to make dirs if necessary
os.makedirs(os.path.join(self.base_path, dirs))
except FileExistsError:
pass
file_path = os.path.join(dirs, filename)
# Write message to file
with open(os.path.join(self.base_path, file_path), "w") as f:
f.write(msg.to_json())
await self.change_message_state(file_path, Message.PENDING)
self._total += 1
return file_path
async def change_message_state(self, id, new_state):
with open(os.path.join(self.base_path, id + '.meta'), "w") as f:
f.write(new_state)
async def get_message_state(self, id):
with open(os.path.join(self.base_path, id + '.meta'), "r") as f:
state = f.read()
return state
async def get(self, id):
if not os.path.exists(os.path.join(self.base_path, id)):
raise IndexError
with open(os.path.join(self.base_path, id), "rb") as f:
msg = Message.from_json(f.read().decode('utf-8'))
return {'id': id, 'state': await self.get_message_state(id), 'message': msg}
async def sorted_list_directories(self, path, reverse=True):
"""
:param path: Base path
:param reverse: reverse order
:return: List of directories in specified path ordered
"""
return sorted([d for d in os.listdir(path) if os.path.isdir(os.path.join(path, d))], reverse=reverse)
async def count_msgs(self):
"""
Count message by listing all directories. To be used at startup.
"""
count = 0
for year in await self.sorted_list_directories(os.path.join(self.base_path)):
for month in await self.sorted_list_directories(os.path.join(self.base_path, year)):
for day in await self.sorted_list_directories(os.path.join(self.base_path, year, month)):
for msg_name in sorted(os.listdir(os.path.join(self.base_path, year, month, day))):
found = self.msg_re.match(msg_name)
if found:
count +=1
return count
async def search(self, start=0, count=10, order_by='timestamp'):
# TODO better performance for slicing by counting file in dirs ?
if order_by.startswith('-'):
reverse = True
sort_key = order_by[1:]
else:
reverse = False
sort_key = order_by
# TODO handle sort_key
result = []
end = start + count
position = 0
for year in await self.sorted_list_directories(os.path.join(self.base_path), reverse=reverse):
for month in await self.sorted_list_directories(os.path.join(self.base_path, year), reverse=reverse):
for day in await self.sorted_list_directories(os.path.join(self.base_path, year, month), reverse=reverse):
for msg_name in sorted(os.listdir(os.path.join(self.base_path, year, month, day)), reverse=reverse):
found = self.msg_re.match(msg_name)
if found:
if start <= position < end:
mid = os.path.join(year, month, day, msg_name)
result.append(await self.get(mid))
position += 1
return result
async def total(self):
return self._total
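# Minimal usage sketch, not part of the original module: exercises the
# MemoryMessageStore API defined above. Assumes the Message constructor
# accepts a ``payload`` keyword; the 'processed' state string mirrors the
# one used by FakeMessageStore.get().
if __name__ == '__main__':
    import asyncio
    async def _demo():
        store = MemoryMessageStoreFactory().get_store('demo')
        msg_id = await store.store(Message(payload='hello'))
        await store.change_message_state(msg_id, 'processed')
        entry = await store.get(msg_id)
        print(entry['id'], entry['state'], entry['message'].payload)
    asyncio.get_event_loop().run_until_complete(_demo())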
| jrmi/pypeman | pypeman/msgstore.py | Python | apache-2.0 | 9,722 | 0.00288 |
# val for type checking (literal or ENUM style)
from pyrser import fmt
from pyrser.type_system.signature import *
from pyrser.type_system.type_name import *
class Val(Signature):
"""
Describe a value signature for the language
"""
nvalues = 0
valuniq = dict()
def __init__(self, value, tret: str):
if not isinstance(value, str):
value = str(value)
self.value = value
if not isinstance(tret, TypeName):
tret = TypeName(tret)
self.tret = tret
k = self.value + "$" + tret
idx = 0
if k not in Val.valuniq:
Val.nvalues += 1
Val.valuniq[k] = Val.nvalues
idx = Val.nvalues
else:
idx = Val.valuniq[k]
super().__init__('$' + str(idx))
def internal_name(self):
"""
Return the unique internal name
"""
unq = super().internal_name()
if self.tret is not None:
unq += "_" + self.tret
return unq
| payet-s/pyrser | pyrser/type_system/val.py | Python | gpl-3.0 | 1,017 | 0 |
while True:
    n = input()
    if n == 42:
        break
    else:
        print n | aqfaridi/Code-Online-Judge | web/env/Main1145/Main1145.py | Python | mit | 90 | 0.011111 |
import pandas as pd
import numpy as np
from swiftnav.ephemeris import *
from swiftnav.single_diff import SingleDiff
from swiftnav.gpstime import *
def construct_pyobj_eph(eph):
return Ephemeris(
eph.tgd,
eph.crs, eph.crc, eph.cuc, eph.cus, eph.cic, eph.cis,
eph.dn, eph.m0, eph.ecc, eph.sqrta, eph.omega0, eph.omegadot, eph.w, eph.inc, eph.inc_dot,
eph.af0, eph.af1, eph.af2,
GpsTime(eph.toe_wn, eph.toe_tow), GpsTime(eph.toc_wn, eph.toc_tow),
eph['valid'], # this syntax is needed because the method .valid takes precedence to the field
eph.healthy,
eph.prn+1) # +1 temporarily, until i get the next dataset where this is fixed
def separate_ephs(ephs):
"""
Return a dictionary of prn to dataframe, where each dataframe is
the unique ephemerides (unique and first, as in fst . groupby) over
the time period the data was taken.
"""
sep_ephs_tuples = [(int(prn),ephs[ephs['prn'] == prn]) for prn in ephs['prn'].unique()]
sep_ephs = {}
for sep_eph_tuple in sep_ephs_tuples:
prn = sep_eph_tuple[0]+1 #temporarily, just for the dataset before i started storing them correctly TODO FIX
frame = pd.DataFrame(sep_eph_tuple[1].drop_duplicates().apply(construct_pyobj_eph, axis=1), columns=['ephemeris'])
# frame = pd.DataFrame(sep_eph_tuple[1].apply(construct_pyobj_eph, axis=1), columns=['ephemeris'])
frame['time'] = frame.index
sep_ephs[prn] = frame
return sep_ephs
def merge_into_sdiffs(ephs, sd):
"""
Taking ephemerides and observation data, this will merge them
together into a panel whose index is a sat, major axis is time,
and minor axis is everything needed for an sdiff struct.
It's super slow, so I left it all in pandas format, so we can
save it out in hdf5 and get it back all nicely processed.
"""
sep_ephs = separate_ephs(ephs)
sats = sd.items
num_sats = map(lambda x: int(x[1:]),sats)
sdiff_dict = {}
for sat in sats:
# sat = sats[0]
sat_ephs = sep_ephs[int(sat[1:])]
fst_eph = sat_ephs.ix[0].ephemeris
obs = sd[sat]
obs['time'] = obs.index
def make_single_diff(x):
if np.isnan(x.C1) or np.isnan(x.L1) or np.isnan(x.S1_1) or np.isnan(x.S1_2):
return pd.Series([np.nan]*11,
index=['C1', 'L1', 'D1', 'sat_pos_x', 'sat_pos_y', 'sat_pos_z',
'sat_vel_x', 'sat_vel_y', 'sat_vel_z', 'min_snr', 'prn'])
c1 = x.C1
l1 = x.L1
snr = min(x.S1_1, x.S1_2)
timestamp = x.time
earlier_ephs = sat_ephs[sat_ephs['time'] <= timestamp]
if earlier_ephs.shape[0] >= 1:
eph = earlier_ephs.ix[-1].ephemeris
else:
eph = fst_eph
gpstime = datetime2gpst(timestamp)
pos, vel, clock_err, clock_rate_err = calc_sat_pos(eph, gpstime)
return pd.Series([c1, l1, np.nan, pos[0], pos[1], pos[2], vel[0], vel[1], vel[2], snr, int(sat[1:])],
index=['C1', 'L1', 'D1', 'sat_pos_x', 'sat_pos_y', 'sat_pos_z',
'sat_vel_x', 'sat_vel_y', 'sat_vel_z', 'min_snr', 'prn'])
sdiffs = obs.apply(make_single_diff,axis=1).dropna(how='all',axis=0)
sdiff_dict[sat] = sdiffs
return pd.Panel(sdiff_dict)
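# Illustrative access into the returned panel (satellite labels such as
# 'G13' are an assumption here, consistent with the int(sat[1:]) parsing):
#   sdiffs = merge_into_sdiffs(eph, sd_table)
#   sdiffs['G13']  # DataFrame over time: C1, L1, D1, sat_pos_*, sat_vel_*, ...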
def main():
import sys
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("ephemeris",
help="the ephemeris file to process")
parser.add_argument("input",
help="the HDF5 file to process")
parser.add_argument("base_name", default=False,
help="the marker name of the base station")
parser.add_argument("rover_name", default=False,
help="the marker name of the rover")
args = parser.parse_args()
eph_file = pd.HDFStore(args.ephemeris)
eph = eph_file['eph']
h5 = pd.HDFStore(args.input)
sd_table = h5['sd_%s_%s' % (args.rover_name, args.base_name)]
output_table_name = 'sdiff_%s_%s' % (args.rover_name, args.base_name)
h5[output_table_name] = merge_into_sdiffs(eph, sd_table)
h5.close()
if __name__ == '__main__':
main()
| imh/gnss-analysis | gnss_analysis/mk_sdiffs.py | Python | lgpl-3.0 | 4,406 | 0.006809 |
#!/usr/bin/env python3
# 556A_zeroes.py - Codeforces.com/problemset/problem/556/A Zeroes quiz by Sergey 2015
# Standard modules
import unittest
import sys
import re
# Additional modules
###############################################################################
# Zeroes Class
###############################################################################
class Zeroes:
""" Zeroes representation """
def __init__(self, args):
""" Default constructor """
self.list = args
def calculate(self):
""" Main calcualtion function of the class """
result = 0
for n in self.list:
result += 1 if n else -1
return str(abs(result))
###############################################################################
# Executable code
###############################################################################
def get_inputs(test_inputs=None):
it = iter(test_inputs.split("\n")) if test_inputs else None
def uinput():
""" Unit-testable input function wrapper """
if it:
return next(it)
else:
return input()
# Getting string inputs
num = int(uinput())
ints = [int(n) for n in uinput()]
return ints
def calculate(test_inputs=None):
""" Base class calculate method wrapper """
return Zeroes(get_inputs(test_inputs)).calculate()
###############################################################################
# Unit Tests
###############################################################################
class unitTests(unittest.TestCase):
def test_sample_tests(self):
""" Quiz sample tests. Add \n to separate lines """
self.assertEqual(calculate("4\n1100"), "0")
self.assertEqual(calculate("5\n01010"), "1")
self.assertEqual(calculate("8\n11101111"), "6")
str = "1\n"
for i in range(2*pow(10, 5)):
str += "0"
self.assertEqual(calculate(str), "200000")
def test_get_inputs(self):
""" Input string decoding testing """
self.assertEqual(get_inputs("4\n1100"), [1, 1, 0, 0])
def test_Zeroes_class__basic_functions(self):
""" Zeroes class basic functions testing """
# Constructor test
d = Zeroes([1, 0, 0, 1])
self.assertEqual(d.list[0], 1)
self.assertEqual(d.calculate(), "0")
d.list = [1, 0, 0, 0]
self.assertEqual(d.calculate(), "2")
if __name__ == "__main__":
    # Avoiding recursion limitations
sys.setrecursionlimit(100000)
if sys.argv[-1] == "-ut":
unittest.main(argv=[" "])
# Print the result string
print(calculate())
| snsokolov/contests | codeforces/556A_zeroes.py | Python | unlicense | 2,674 | 0.000748 |
from __future__ import division
from math import sqrt, cos, sin, acos, degrees, radians, log
from collections import MutableSequence
# This file contains classes for the different types of SVG path segments as
# well as a Path object that contains a sequence of path segments.
MIN_DEPTH = 5
ERROR = 1e-12
def segment_length(curve, start, end, start_point, end_point, error, min_depth, depth):
"""Recursively approximates the length by straight lines"""
mid = (start + end) / 2
mid_point = curve.point(mid)
length = abs(end_point - start_point)
first_half = abs(mid_point - start_point)
second_half = abs(end_point - mid_point)
length2 = first_half + second_half
if (length2 - length > error) or (depth < min_depth):
# Calculate the length of each segment:
depth += 1
return (segment_length(curve, start, mid, start_point, mid_point,
error, min_depth, depth) +
segment_length(curve, mid, end, mid_point, end_point,
error, min_depth, depth))
# This is accurate enough.
return length2
class Line(object):
def __init__(self, start, end):
self.start = start
self.end = end
def __repr__(self):
return 'Line(start=%s, end=%s)' % (self.start, self.end)
def __eq__(self, other):
if not isinstance(other, Line):
return NotImplemented
return self.start == other.start and self.end == other.end
def __ne__(self, other):
if not isinstance(other, Line):
return NotImplemented
return not self == other
def point(self, pos):
distance = self.end - self.start
return self.start + distance * pos
def length(self, error=None, min_depth=None):
distance = (self.end - self.start)
return sqrt(distance.real ** 2 + distance.imag ** 2)
class CubicBezier(object):
def __init__(self, start, control1, control2, end):
self.start = start
self.control1 = control1
self.control2 = control2
self.end = end
def __repr__(self):
return 'CubicBezier(start=%s, control1=%s, control2=%s, end=%s)' % (
self.start, self.control1, self.control2, self.end)
def __eq__(self, other):
if not isinstance(other, CubicBezier):
return NotImplemented
return self.start == other.start and self.end == other.end and \
self.control1 == other.control1 and self.control2 == other.control2
def __ne__(self, other):
if not isinstance(other, CubicBezier):
return NotImplemented
return not self == other
def is_smooth_from(self, previous):
"""Checks if this segment would be a smooth segment following the previous"""
if isinstance(previous, CubicBezier):
return (self.start == previous.end and
(self.control1 - self.start) == (previous.end - previous.control2))
else:
return self.control1 == self.start
def point(self, pos):
"""Calculate the x,y position at a certain position of the path"""
return ((1 - pos) ** 3 * self.start) + \
(3 * (1 - pos) ** 2 * pos * self.control1) + \
(3 * (1 - pos) * pos ** 2 * self.control2) + \
(pos ** 3 * self.end)
def length(self, error=ERROR, min_depth=MIN_DEPTH):
"""Calculate the length of the path up to a certain position"""
start_point = self.point(0)
end_point = self.point(1)
return segment_length(self, 0, 1, start_point, end_point, error, min_depth, 0)
class QuadraticBezier(object):
def __init__(self, start, control, end):
self.start = start
self.end = end
self.control = control
def __repr__(self):
return 'QuadraticBezier(start=%s, control=%s, end=%s)' % (
self.start, self.control, self.end)
def __eq__(self, other):
if not isinstance(other, QuadraticBezier):
return NotImplemented
return self.start == other.start and self.end == other.end and \
self.control == other.control
def __ne__(self, other):
if not isinstance(other, QuadraticBezier):
return NotImplemented
return not self == other
def is_smooth_from(self, previous):
"""Checks if this segment would be a smooth segment following the previous"""
if isinstance(previous, QuadraticBezier):
return (self.start == previous.end and
(self.control - self.start) == (previous.end - previous.control))
else:
return self.control == self.start
def point(self, pos):
return (1 - pos) ** 2 * self.start + 2 * (1 - pos) * pos * self.control + \
pos ** 2 * self.end
def length(self, error=None, min_depth=None):
# http://www.malczak.info/blog/quadratic-bezier-curve-length/
        a = self.start - 2 * self.control + self.end
        if a == 0:
            # Degenerate case: the control point is the midpoint of the
            # chord, so the curve is a straight line and the closed form
            # below would divide by zero.
            return abs(self.end - self.start)
        b = 2 * (self.control - self.start)
A = 4 * (a.real ** 2 + a.imag ** 2)
B = 4 * (a.real * b.real + a.imag * b.imag)
C = b.real ** 2 + b.imag ** 2
Sabc = 2 * sqrt(A + B + C)
A2 = sqrt(A)
A32 = 2 * A * A2
C2 = 2 * sqrt(C)
BA = B / A2
return (A32 * Sabc + A2 * B * (Sabc - C2) + (4 * C * A - B ** 2) *
log((2 * A2 + BA + Sabc) / (BA + C2))) / (4 * A32)
class Arc(object):
def __init__(self, start, radius, rotation, arc, sweep, end):
"""radius is complex, rotation is in degrees,
        arc (the SVG large-arc flag) and sweep are 1 or 0 (True/False also work)"""
self.start = start
self.radius = radius
self.rotation = rotation
self.arc = bool(arc)
self.sweep = bool(sweep)
self.end = end
self._parameterize()
def __repr__(self):
return 'Arc(start=%s, radius=%s, rotation=%s, arc=%s, sweep=%s, end=%s)' % (
self.start, self.radius, self.rotation, self.arc, self.sweep, self.end)
def __eq__(self, other):
if not isinstance(other, Arc):
return NotImplemented
return self.start == other.start and self.end == other.end and \
self.radius == other.radius and self.rotation == other.rotation and \
self.arc == other.arc and self.sweep == other.sweep
def __ne__(self, other):
if not isinstance(other, Arc):
return NotImplemented
return not self == other
def _parameterize(self):
# Conversion from endpoint to center parameterization
# http://www.w3.org/TR/SVG/implnote.html#ArcImplementationNotes
cosr = cos(radians(self.rotation))
sinr = sin(radians(self.rotation))
dx = (self.start.real - self.end.real) / 2
dy = (self.start.imag - self.end.imag) / 2
x1prim = cosr * dx + sinr * dy
x1prim_sq = x1prim * x1prim
y1prim = -sinr * dx + cosr * dy
y1prim_sq = y1prim * y1prim
rx = self.radius.real
rx_sq = rx * rx
ry = self.radius.imag
ry_sq = ry * ry
# Correct out of range radii
radius_check = (x1prim_sq / rx_sq) + (y1prim_sq / ry_sq)
if radius_check > 1:
rx *= sqrt(radius_check)
ry *= sqrt(radius_check)
rx_sq = rx * rx
ry_sq = ry * ry
t1 = rx_sq * y1prim_sq
t2 = ry_sq * x1prim_sq
c = sqrt(abs((rx_sq * ry_sq - t1 - t2) / (t1 + t2)))
if self.arc == self.sweep:
c = -c
cxprim = c * rx * y1prim / ry
cyprim = -c * ry * x1prim / rx
self.center = complex((cosr * cxprim - sinr * cyprim) +
((self.start.real + self.end.real) / 2),
(sinr * cxprim + cosr * cyprim) +
((self.start.imag + self.end.imag) / 2))
ux = (x1prim - cxprim) / rx
uy = (y1prim - cyprim) / ry
vx = (-x1prim - cxprim) / rx
vy = (-y1prim - cyprim) / ry
n = sqrt(ux * ux + uy * uy)
p = ux
theta = degrees(acos(p / n))
if uy < 0:
theta = -theta
self.theta = theta % 360
n = sqrt((ux * ux + uy * uy) * (vx * vx + vy * vy))
p = ux * vx + uy * vy
if p == 0:
delta = degrees(acos(0))
else:
delta = degrees(acos(p / n))
if (ux * vy - uy * vx) < 0:
delta = -delta
self.delta = delta % 360
if not self.sweep:
self.delta -= 360
def point(self, pos):
angle = radians(self.theta + (self.delta * pos))
cosr = cos(radians(self.rotation))
sinr = sin(radians(self.rotation))
x = (cosr * cos(angle) * self.radius.real - sinr * sin(angle) *
self.radius.imag + self.center.real)
y = (sinr * cos(angle) * self.radius.real + cosr * sin(angle) *
self.radius.imag + self.center.imag)
return complex(x, y)
def length(self, error=ERROR, min_depth=MIN_DEPTH):
"""The length of an elliptical arc segment requires numerical
integration, and in that case it's simpler to just do a geometric
approximation, as for cubic bezier curves.
"""
start_point = self.point(0)
end_point = self.point(1)
return segment_length(self, 0, 1, start_point, end_point, error, min_depth, 0)
class Path(MutableSequence):
"""A Path is a sequence of path segments"""
# Put it here, so there is a default if unpickled.
_closed = False
def __init__(self, *segments, **kw):
self._segments = list(segments)
self._length = None
self._lengths = None
if 'closed' in kw:
self.closed = kw['closed']
def __getitem__(self, index):
return self._segments[index]
def __setitem__(self, index, value):
self._segments[index] = value
self._length = None
def __delitem__(self, index):
del self._segments[index]
self._length = None
def insert(self, index, value):
self._segments.insert(index, value)
self._length = None
def reverse(self):
# Reversing the order of a path would require reversing each element
# as well. That's not implemented.
raise NotImplementedError
def __len__(self):
return len(self._segments)
def __repr__(self):
return 'Path(%s, closed=%s)' % (
', '.join(repr(x) for x in self._segments), self.closed)
def __eq__(self, other):
if not isinstance(other, Path):
return NotImplemented
if len(self) != len(other):
return False
for s, o in zip(self._segments, other._segments):
if not s == o:
return False
return True
def __ne__(self, other):
if not isinstance(other, Path):
return NotImplemented
return not self == other
def _calc_lengths(self, error=ERROR, min_depth=MIN_DEPTH):
if self._length is not None:
return
lengths = [each.length(error=error, min_depth=min_depth) for each in self._segments]
self._length = sum(lengths)
self._lengths = [each / self._length for each in lengths]
def point(self, pos):
# Shortcuts
if pos == 0.0:
return self._segments[0].point(pos)
if pos == 1.0:
return self._segments[-1].point(pos)
self._calc_lengths()
# Find which segment the point we search for is located on:
segment_start = 0
for index, segment in enumerate(self._segments):
segment_end = segment_start + self._lengths[index]
if segment_end >= pos:
# This is the segment! How far in on the segment is the point?
segment_pos = (pos - segment_start) / (segment_end - segment_start)
break
segment_start = segment_end
return segment.point(segment_pos)
def length(self, error=ERROR, min_depth=MIN_DEPTH):
self._calc_lengths(error, min_depth)
return self._length
def _is_closable(self):
"""Returns true if the end is on the start of a segment"""
end = self[-1].end
for segment in self:
if segment.start == end:
return True
return False
@property
def closed(self):
"""Checks that the end point is the same as the start point"""
return self._closed and self._is_closable()
@closed.setter
def closed(self, value):
value = bool(value)
if value and not self._is_closable():
raise ValueError("End does not coincide with a segment start.")
self._closed = value
def d(self):
if self.closed:
segments = self[:-1]
else:
segments = self[:]
current_pos = None
parts = []
previous_segment = None
end = self[-1].end
for segment in segments:
start = segment.start
# If the start of this segment does not coincide with the end of
# the last segment or if this segment is actually the close point
# of a closed path, then we should start a new subpath here.
if current_pos != start or (self.closed and start == end):
parts.append('M {0:G},{1:G}'.format(start.real, start.imag))
if isinstance(segment, Line):
parts.append('L {0:G},{1:G}'.format(
segment.end.real, segment.end.imag)
)
elif isinstance(segment, CubicBezier):
if segment.is_smooth_from(previous_segment):
parts.append('S {0:G},{1:G} {2:G},{3:G}'.format(
segment.control2.real, segment.control2.imag,
segment.end.real, segment.end.imag)
)
else:
parts.append('C {0:G},{1:G} {2:G},{3:G} {4:G},{5:G}'.format(
segment.control1.real, segment.control1.imag,
segment.control2.real, segment.control2.imag,
segment.end.real, segment.end.imag)
)
elif isinstance(segment, QuadraticBezier):
if segment.is_smooth_from(previous_segment):
parts.append('T {0:G},{1:G}'.format(
segment.end.real, segment.end.imag)
)
else:
parts.append('Q {0:G},{1:G} {2:G},{3:G}'.format(
segment.control.real, segment.control.imag,
segment.end.real, segment.end.imag)
)
elif isinstance(segment, Arc):
parts.append('A {0:G},{1:G} {2:G} {3:d},{4:d} {5:G},{6:G}'.format(
segment.radius.real, segment.radius.imag, segment.rotation,
int(segment.arc), int(segment.sweep),
segment.end.real, segment.end.imag)
)
current_pos = segment.end
previous_segment = segment
if self.closed:
parts.append('Z')
return ' '.join(parts)
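# A minimal usage sketch (illustrative coordinates): build a path from a line
# and a cubic segment, then query its SVG 'd' string, total length and midpoint.
#   path = Path(Line(0 + 0j, 100 + 0j),
#               CubicBezier(100 + 0j, 150 + 50j, 200 - 50j, 250 + 0j))
#   path.d()         # -> 'M 0,0 L 100,0 C 150,50 200,-50 250,0'
#   path.length()    # total length, approximated numerically
#   path.point(0.5)  # complex point halfway along the path (by arc length)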
| sqaxomonophonen/worldmapsvg | svg/path/path.py | Python | cc0-1.0 | 15,350 | 0.001303 |
# -*- coding: utf-8 -*-
from django.db import models
from tweets.models import Tweet
class Tag(models.Model):
name = models.CharField(max_length=255, unique=True, db_index=True)
is_hashtag = models.BooleanField(default=False)
tweets = models.ManyToManyField(Tweet, related_name='tags')
class Meta:
db_table = 'tags'
| kk6/onedraw | onedraw/tags/models.py | Python | mit | 345 | 0 |
""" A simple module to get the links of first
10 images displayed on google image search
"""
from googleapiclient.discovery import build
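# Note: 'googleapiclient' is provided by the google-api-python-client package;
# a Google API key and a Custom Search Engine (CSE) id are required to use it.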
class GoogleImageSearch:
    def __init__(self, api_key, cse_id):
        self.my_api_key = api_key
        self.my_cse_id = cse_id
    def search(self, search_term, **kwargs):
google_service = build("customsearch", "v1",
developerKey=self.my_api_key)
result = google_service.cse().list(q=search_term,
cx=self.my_cse_id, **kwargs).execute()
return result['items']
    def get_image_links(self, search_term):
results = self.search(search_term , searchType='image')
links = [result['link'] for result in results]
        return links
| shravan97/WordHunter | ImageSearch/image_searcher.py | Python | mit | 775 | 0.023226 |
"""
Given a range [m, n] where 0 <= m <= n <= 2147483647, return the bitwise AND of all numbers in this range, inclusive.
For example, given the range [5, 7], you should return 4.
Credits:
Special thanks to @amrsaqr for adding this problem and creating all test cases.
"""
class Solution(object):
def rangeBitwiseAnd(self, m, n):
"""
:type m: int
:type n: int
:rtype: int
"""
ans = 0
gap = n-m
max_i = n
min_i = m
bits = [0 for _ in xrange(32)]
for i in xrange(32):
            # take the i-th bit of max_i and min_i
a = max_i & 1
b = min_i & 1
max_i >>= 1
min_i >>= 1
if a == 0 or b == 0:
bits[i] = 0
else:
if gap >>i > 0:
bits[i] = 0
else:
bits[i] = 1
        # Assemble the result from the most significant bit down;
        # shift first, then OR in the current bit.
        for each in bits[::-1]:
            ans = (ans << 1) | each
        return ans
| urashima9616/Leetcode_Python | Leet201_BitwiswAndRange.py | Python | gpl-3.0 | 1,001 | 0.006993 |
# -*- coding: utf-8 -*-
class GetText():
_file_path = None
_body_list = None
_target = None
def __init__(self, file_path):
#self._file_path = open(file_path, "r+").read().replace("<br","\n<br")
self._file_path = file_path.replace("<br />", "<br />\n")
#self._file_path = (self._file_path.replace("\n",";;")).split(";;")
#self._file_path = file_path
#print(self._file_path)
self._file_path = ((self._file_path.replace("\n", "123")).replace(" ", "")).replace("> <", "")
self._file_path = (self._file_path).replace("<p", "\n<p")
#print (self._file_path)
self._body_list = self.get_body().split("123")
self.set_target_content(self.get_body())
self.get_beginning()
self.remove_tags()
#super(leGetText, self).__init__()
def get_html(self):
return self._file_path
def get_body(self):
return self.get_html().split("</head>", -1)[1]
def get_first_br_line(self):
br_index = 0
for i in self._body_list:
if(i.find("<br") > -1):
return (self._body_list.index(i))
else:
                br_index += 1
return br_index
def get_since_first_br(self):
since_first_br = self._body_list
del since_first_br[0:self.get_first_br_line()]
self.set_target_content(since_first_br)
return since_first_br
def set_target_content(self, content):
self._target = content
def get_target(self):
return self._target
def br_check(self, info):
if(info == "<br>"):
return True
elif(info == "<br />"):
return True
elif(info == "</ br>"):
return True
elif(info == "< br>"):
return True
else:
return False
def get_beginning(self):
# verifying whether there's any br in the next index
since_last_br = self.get_since_first_br()
#getting beginning of the lyrics
#print(since_last_br)
i = 0
for j in since_last_br:
if (
j.find("<br") > -1 and
since_last_br[i+1].find("<br") > -1 and
since_last_br[i+2].find("<br") > -1 and
since_last_br[i+3].find("<br") > -1 and
self.br_check(since_last_br[i]) == False and
self.br_check(since_last_br[i+1]) == False and
self.br_check(since_last_br[i+2]) == False and
self.br_check(since_last_br[i+3]) == False
):
del since_last_br[0:i]
break
else:
i = i +1
if (since_last_br[i].find("<br") > -1 and i+3< len(since_last_br) and self.br_check(since_last_br[i+3]) == False):
#print("i + 1 contains <br>")
#print(since_last_br[i])
del since_last_br[0:i]
# print (since_last_br)
break
self.set_target_content(since_last_br[0:200])
def remove_tags(self):
#removing tags
#removing b
html_file = "ç\n".join(self.get_target())
while(html_file.find("<b>") > -1):
#print("occur")
op = html_file.index("<b>")
cl = html_file.index("/b>")+3
html_file = list(html_file)
#for i in range(op, cl):
del html_file[op:cl]
html_file = "".join(html_file)
#removing [] (brackets) => from "[" to "\n"
while(html_file.find("[") > -1):
op = html_file.index("[")
cl = html_file.find("]")+1
bracket_line = html_file.split("ç")
l = 0
for k in bracket_line:
if(k.find("[") > -1):
break
l = l +1
del bracket_line[l]
html_file = ""
for k in bracket_line:
html_file = html_file + k+"ç"
'''
html_file = list(html_file)
#for i in range(op, cl):
del html_file[op:cl]
html_file = "".join(html_file)'''
self.set_target_content(html_file.split("ç"))
def get_end(self):
#getting the end of the lyrics (check if the next tag
        #being opened is the same as the one being closed)
broken_since = "".join(self.get_target())
broken_since = broken_since.split("\n")
new_broken_since = [] #turning <p> into <br>
for x in broken_since:
la = x.replace("<p", "<br")
la = la.replace("</p>", "")
new_broken_since.append(la)
broken_since = new_broken_since
#checking if we are still in the lyrics block
until_the_end = []
l = 0
for k in broken_since:
kk = list(k)
if len(k) > 0:
'''
print("\n\n")
print(broken_since[l+1].find("<br"))
print(broken_since[l+1])
print("< l1 \n l2 >")
print(broken_since[l + 2].find("<br"))
print("\n\n")'''
if(l < 3 or k[0] != "<" or k[1] == "b"
or (broken_since[l+1].find("<br") > -1 and broken_since[l+2].find("<br"))
):
if (k.find("><br") == -1):
#print(k)
until_the_end.append("\n"+k)
else:
break
else:
#print("\n\n")
break
l = l + 1
#removing tags
final = ""
block = False
for j in until_the_end:
i = 0
moral = list(j)
for i in range(0, len(moral)):
if(moral[i] == "<"):
block = True
elif(moral[i] == ">"):
block = False
if(block==False and moral[i]!="<" and moral[i]!=">"):
final=final+moral[i]
return final
'''
oo = open("../../tmp/lrc", "r").read()
#print(oo)
get_text = _GetText(oo)
#print(get_text.get_target())
final = get_text.get_end()
final = final.encode("latin1").decode("utf-8")
'''
#print(final)
'''
#rs = open("../../tmp/lrc", "w+")
#rs.write(final)'
'''
| henriquesouza/toply | src/objects/GetText.py | Python | gpl-3.0 | 6,417 | 0.009512 |
#!/usr/local/munkireport/munkireport-python2
# encoding: utf-8
from . import display
from . import prefs
from . import constants
from . import FoundationPlist
from munkilib.purl import Purl
from munkilib.phpserialize import *
import subprocess
import pwd
import sys
import hashlib
import platform
from urllib import urlencode
import re
import time
import os
# PyLint cannot properly find names inside Cocoa libraries, so issues bogus
# No name 'Foo' in module 'Bar' warnings. Disable them.
# pylint: disable=E0611
from Foundation import NSArray, NSDate, NSMetadataQuery, NSPredicate
from Foundation import CFPreferencesAppSynchronize
from Foundation import CFPreferencesCopyAppValue
from Foundation import CFPreferencesCopyKeyList
from Foundation import CFPreferencesSetValue
from Foundation import kCFPreferencesAnyUser
from Foundation import kCFPreferencesCurrentUser
from Foundation import kCFPreferencesCurrentHost
from Foundation import NSHTTPURLResponse
from SystemConfiguration import SCDynamicStoreCopyConsoleUser
# pylint: enable=E0611
# our preferences "bundle_id"
BUNDLE_ID = "MunkiReport"
class CurlError(Exception):
def __init__(self, status, message):
display_error(message)
finish_run()
def set_verbosity(level):
"""Set verbosity level."""
display.verbose = int(level)
def display_error(msg, *args):
"""Call display error msg handler."""
display.display_error("%s" % msg, *args)
def display_warning(msg, *args):
"""Call display warning msg handler."""
display.display_warning("%s" % msg, *args)
def display_detail(msg, *args):
"""Call display detail msg handler."""
display.display_detail("%s" % msg, *args)
def finish_run():
remove_run_file()
display_detail("## Finished run")
exit(0)
def remove_run_file():
touchfile = '/Users/Shared/.com.github.munkireport.run'
if os.path.exists(touchfile):
os.remove(touchfile)
def curl(url, values):
options = dict()
options["url"] = url
options["method"] = "POST"
options["content_type"] = "application/x-www-form-urlencoded"
options["body"] = urlencode(values)
options["logging_function"] = display_detail
options["connection_timeout"] = 60
if pref("UseMunkiAdditionalHttpHeaders"):
custom_headers = prefs.pref(constants.ADDITIONAL_HTTP_HEADERS_KEY)
if custom_headers:
options["additional_headers"] = dict()
for header in custom_headers:
m = re.search(r"^(?P<header_name>.*?): (?P<header_value>.*?)$", header)
if m:
options["additional_headers"][m.group("header_name")] = m.group(
"header_value"
)
else:
raise CurlError(
-1,
"UseMunkiAdditionalHttpHeaders defined, "
"but not found in Munki preferences",
)
# Build Purl with initial settings
connection = Purl.alloc().initWithOptions_(options)
connection.start()
try:
while True:
# if we did `while not connection.isDone()` we'd miss printing
# messages if we exit the loop first
if connection.isDone():
break
except (KeyboardInterrupt, SystemExit):
# safely kill the connection then re-raise
connection.cancel()
raise
except Exception, err: # too general, I know
# Let us out! ... Safely! Unexpectedly quit dialogs are annoying...
connection.cancel()
# Re-raise the error as a GurlError
raise CurlError(-1, str(err))
if connection.error != None:
# Gurl returned an error
display.display_detail(
"Download error %s: %s",
connection.error.code(),
connection.error.localizedDescription(),
)
if connection.SSLerror:
display_detail("SSL error detail: %s", str(connection.SSLerror))
display_detail("Headers: %s", connection.headers)
raise CurlError(
connection.error.code(), connection.error.localizedDescription()
)
if connection.response != None and connection.status != 200:
display.display_detail("Status: %s", connection.status)
display.display_detail("Headers: %s", connection.headers)
if connection.redirection != []:
display.display_detail("Redirection: %s", connection.redirection)
connection.headers["http_result_code"] = str(connection.status)
description = NSHTTPURLResponse.localizedStringForStatusCode_(connection.status)
connection.headers["http_result_description"] = description
if str(connection.status).startswith("2"):
return connection.get_response_data()
else:
# there was an HTTP error of some sort.
raise CurlError(
connection.status,
"%s failed, HTTP returncode %s (%s)"
% (
url,
connection.status,
connection.headers.get("http_result_description", "Failed"),
),
)
def get_hardware_info():
"""Uses system profiler to get hardware info for this machine."""
cmd = ["/usr/sbin/system_profiler", "SPHardwareDataType", "-xml"]
proc = subprocess.Popen(
cmd,
shell=False,
bufsize=-1,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
(output, dummy_error) = proc.communicate()
try:
plist = FoundationPlist.readPlistFromString(output)
# system_profiler xml is an array
sp_dict = plist[0]
items = sp_dict["_items"]
sp_hardware_dict = items[0]
return sp_hardware_dict
except BaseException:
return {}
def get_long_username(username):
try:
long_name = pwd.getpwnam(username)[4]
except:
long_name = ""
return long_name.decode("utf-8")
def get_uid(username):
try:
uid = pwd.getpwnam(username)[2]
except:
uid = ""
return uid
def get_computername():
cmd = ["/usr/sbin/scutil", "--get", "ComputerName"]
proc = subprocess.Popen(
cmd,
shell=False,
bufsize=-1,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
(output, unused_error) = proc.communicate()
output = output.strip()
return output.decode("utf-8")
def get_cpuinfo():
cmd = ["/usr/sbin/sysctl", "-n", "machdep.cpu.brand_string"]
proc = subprocess.Popen(
cmd,
shell=False,
bufsize=-1,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
(output, unused_error) = proc.communicate()
output = output.strip()
return output.decode("utf-8")
def get_buildversion():
cmd = ["/usr/bin/sw_vers", "-buildVersion"]
proc = subprocess.Popen(
cmd,
shell=False,
bufsize=-1,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
(output, unused_error) = proc.communicate()
output = output.strip()
return output.decode("utf-8")
def get_uptime():
cmd = ["/usr/sbin/sysctl", "-n", "kern.boottime"]
proc = subprocess.Popen(
cmd,
shell=False,
bufsize=-1,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
(output, unused_error) = proc.communicate()
    sec = int(re.sub(r".*sec = (\d+),.*", r"\1", output))
up = int(time.time() - sec)
return up if up > 0 else -1
def set_pref(pref_name, pref_value):
"""Sets a preference, See prefs.py for details."""
CFPreferencesSetValue(
pref_name,
pref_value,
BUNDLE_ID,
kCFPreferencesAnyUser,
kCFPreferencesCurrentHost,
)
CFPreferencesAppSynchronize(BUNDLE_ID)
print "set pref"
try:
CFPreferencesSetValue(
pref_name,
pref_value,
BUNDLE_ID,
kCFPreferencesAnyUser,
kCFPreferencesCurrentHost,
)
CFPreferencesAppSynchronize(BUNDLE_ID)
except Exception:
pass
def pref(pref_name):
"""Return a preference.
See prefs.py for details
"""
pref_value = CFPreferencesCopyAppValue(pref_name, BUNDLE_ID)
return pref_value
def process(serial, items):
"""Process receives a list of items, checks if they need updating and
updates them if necessary."""
# Sanitize serial
serial = "".join([c for c in serial if c.isalnum()])
# Get prefs
baseurl = pref("BaseUrl") or prefs.pref("SoftwareRepoURL") + "/report/"
hashurl = baseurl + "index.php?/report/hash_check"
checkurl = baseurl + "index.php?/report/check_in"
# Get passphrase
passphrase = pref("Passphrase")
# Get hashes for all scripts
for key, i in items.items():
if i.get("path"):
i["hash"] = getmd5hash(i.get("path"))
# Check dict
check = {}
for key, i in items.items():
if i.get("hash"):
check[key] = {"hash": i.get("hash")}
# Send hashes to server
values = {"serial": serial, "items": serialize(check), "passphrase": passphrase}
server_data = curl(hashurl, values)
# Decode response
try:
result = unserialize(server_data)
except Exception, e:
display_error("Could not unserialize server data: %s" % str(e))
display_error("Request: %s" % str(values))
display_error("Response: %s" % str(server_data))
return -1
if result.get("error") != "":
display_error("Server error: %s" % result["error"])
return -1
if result.get("info") != "":
display_detail("Server info: %s" % result["info"])
# Retrieve hashes that need updating
total_size = 0
for i in items.keys():
if i in result:
if items[i].get("path"):
try:
f = open(items[i]["path"], "r")
items[i]["data"] = f.read()
except:
display_warning("Can't open %s" % items[i]["path"])
del items[i]
continue
size = len(items[i]["data"])
display_detail("Need to update %s (%s)" % (i, sizeof_fmt(size)))
total_size = total_size + size
else: # delete items that don't have to be uploaded
del items[i]
# Send new files with hashes
if len(items):
display_detail("Sending items (%s)" % sizeof_fmt(total_size))
response = curl(
checkurl,
{"serial": serial, "items": serialize(items), "passphrase": passphrase},
)
display_detail(response)
else:
display_detail("No changes")
def runExternalScriptWithTimeout(
script, allow_insecure=False, script_args=(), timeout=30
):
"""Run a script (e.g. preflight/postflight) and return its exit status.
Args:
script: string path to the script to execute.
allow_insecure: bool skip the permissions check of executable.
args: args to pass to the script.
Returns:
Tuple. (integer exit status from script, str stdout, str stderr).
Raises:
ScriptNotFoundError: the script was not found at the given path.
RunExternalScriptError: there was an error running the script.
"""
from munkilib import utils
if not os.path.exists(script):
raise ScriptNotFoundError("script does not exist: %s" % script)
if not allow_insecure:
try:
utils.verifyFileOnlyWritableByMunkiAndRoot(script)
except utils.VerifyFilePermissionsError, e:
msg = (
"Skipping execution due to failed file permissions "
"verification: %s\n%s" % (script, str(e))
)
raise utils.RunExternalScriptError(msg)
if os.access(script, os.X_OK):
cmd = [script]
if script_args:
cmd.extend(script_args)
proc = subprocess.Popen(
cmd,
shell=False,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
while timeout > 0:
if proc.poll() is not None:
(stdout, stderr) = proc.communicate()
return (
proc.returncode,
stdout.decode("UTF-8", "replace"),
stderr.decode("UTF-8", "replace"),
)
time.sleep(0.1)
timeout -= 0.1
else:
try:
proc.kill()
except OSError, e:
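                # errno 3 (ESRCH): the process already exited, so a
                # failed kill is not an error here.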
if e.errno != 3:
raise
raise utils.RunExternalScriptError("%s timed out" % script)
return (0, None, None)
else:
raise utils.RunExternalScriptError("%s not executable" % script)
def rundir(scriptdir, runtype, abort=False, submitscript=""):
"""Run scripts in directory scriptdir runtype is passed to the script if
abort is True, a non-zero exit status will abort munki submitscript is put
at the end of the scriptlist."""
if os.path.exists(scriptdir):
from munkilib import utils
# Get timeout for scripts
scriptTimeOut = 30
if pref("scriptTimeOut"):
scriptTimeOut = int(pref("scriptTimeOut"))
display_detail("# Set custom script timeout to %s seconds" % scriptTimeOut)
# Directory containing the scripts
parentdir = os.path.basename(scriptdir)
display_detail("# Executing scripts in %s" % parentdir)
# Get all files in scriptdir
files = os.listdir(scriptdir)
# Sort files
files.sort()
# Find submit script and stick it on the end of the list
if submitscript:
try:
sub = files.pop(files.index(submitscript))
files.append(sub)
except Exception, e:
display_error("%s not found in %s" % (submitscript, parentdir))
for script in files:
# Skip files that start with a period
if script.startswith("."):
continue
# Concatenate dir and filename
scriptpath = os.path.join(scriptdir, script)
# Skip directories
if os.path.isdir(scriptpath):
continue
try:
# Attempt to execute script
display_detail("Running %s" % script)
result, stdout, stderr = runExternalScriptWithTimeout(
scriptpath,
allow_insecure=False,
script_args=[runtype],
timeout=scriptTimeOut,
)
if stdout:
display_detail(stdout)
if stderr:
display_detail("%s Error: %s" % (script, stderr))
if result:
if abort:
display_detail("Aborted by %s" % script)
exit(1)
else:
display_warning("%s return code: %d" % (script, result))
except utils.ScriptNotFoundError:
pass # Script has disappeared - pass.
except Exception, e:
display_warning("%s: %s" % (script, str(e)))
def sizeof_fmt(num):
for unit in ["B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB"]:
if abs(num) < 1000.0:
return "%.0f%s" % (num, unit)
num /= 1000.0
return "%.1f%s" % (num, "YB")
def gethash(filename, hash_function):
"""Calculates the hashvalue of the given file with the given hash_function.
Args:
filename: The file name to calculate the hash value of.
hash_function: The hash function object to use, which was instantiated
before calling this function, e.g. hashlib.md5().
Returns:
The hashvalue of the given file as hex string.
"""
if not os.path.isfile(filename):
return "NOT A FILE"
fileref = open(filename, "rb")
while 1:
chunk = fileref.read(2 ** 16)
if not chunk:
break
hash_function.update(chunk)
fileref.close()
return hash_function.hexdigest()
def getmd5hash(filename):
"""Returns hex of MD5 checksum of a file."""
hash_function = hashlib.md5()
return gethash(filename, hash_function)
def getOsVersion(only_major_minor=True, as_tuple=False):
"""Returns an OS version.
Args:
only_major_minor: Boolean. If True, only include major/minor versions.
as_tuple: Boolean. If True, return a tuple of ints, otherwise a string.
"""
os.environ["SYSTEM_VERSION_COMPAT"] = '0'
cmd = ["/usr/bin/sw_vers -productVersion"]
proc = subprocess.Popen(
cmd,
shell=True,
bufsize=-1,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
(output, unused_error) = proc.communicate()
output = output.strip()
os_version_tuple = output.split(".")
if only_major_minor:
os_version_tuple = os_version_tuple[0:2]
if as_tuple:
return tuple(map(int, os_version_tuple))
else:
return ".".join(os_version_tuple)
def getconsoleuser():
"""Return console user."""
cfuser = SCDynamicStoreCopyConsoleUser(None, None, None)
return cfuser[0]
# End of reportcommon
| munkireport/munkireport-php | public/assets/client_installer/payload/usr/local/munkireport/munkilib/reportcommon.py | Python | mit | 17,507 | 0.000914 |
# -*- coding: utf-8 -*-
# @Author: Marco Benzi <marco.benzi@alumnos.usm.cl>
# @Date: 2015-06-07 19:44:12
# @Last Modified 2015-06-09
# @Last Modified time: 2015-06-09 16:07:05
# ==========================================================================
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# ==========================================================================
import math
"""
Speed of light constant
"""
c = 3E8
"""
Vacuum permittivity
"""
e0 = 8.8541E-12
"""
Vacuum permeability
"""
u0 = 4E-7*math.pi
def getEffectivePermitivity(WHratio, er):
"""
    Returns the effective permittivity for a given W/H ratio.
    This function assumes that the thickness of conductors is insignificant.
    Parameters:
    - `WHratio` : W/H ratio.
    - `er` : Relative permittivity of the dielectric.
"""
if WHratio <= 1:
return (er + 1)/2 + ((1 + 12/WHratio)**(-0.5) + 0.04*(1-WHratio)**2)*(er -1)/2
else:
return (er + 1)/2 + ((1 + 12/WHratio)**(-0.5))*(er -1)/2
def getAuxVarA(Zo,er):
"""
Returns the auxiliary variable
A = (Zo)/60 * math.sqrt((er + 1)/2) + (er-1)/(er+1)*(0.23+0.11/er)
    This function assumes that the thickness of conductors is insignificant.
    Parameters:
    - `Zo` : Real impedance of the line.
    - `er` : Relative permittivity of the dielectric.
"""
return (Zo)/60 * math.sqrt((er + 1)/2) + (er-1)/(er+1)*(0.23+0.11/er)
def getAuxVarB(Zo,er):
"""
Returns the auxiliary variable
B = (377*math.pi)/(2*Zo*math.sqrt(er))
    This function assumes that the thickness of conductors is insignificant.
    Parameters:
    - `Zo` : Real impedance of the line.
    - `er` : Relative permittivity of the dielectric.
"""
return (377*math.pi)/(2*Zo*math.sqrt(er))
def getWHRatioA(Zo,er):
"""
Returns the W/H ratio for W/H < 2. If the result is > 2, then other method
should be used.
This function assumes that the thickenss of conductors is insignificant.
Parameters:
- `Zo` : Real impedance of the line.
- `er` : Relative permitivity of the dielectric.
"""
A = getAuxVarA(Zo,er)
return (8*math.e**A)/(math.e**(2*A) - 2)
def getWHRatioB(Zo,er):
"""
Returns the W/H ratio for W/H > 2. If the result is < 2, then other method
should be used.
This function assumes that the thickenss of conductors is insignificant.
Parameters:
- `Zo` : Real impedance of the line.
- `er` : Relative permitivity of the dielectric.
"""
B = getAuxVarB(Zo,er)
return (2/math.pi)*(B-1 - math.log(2*B - 1) + (er - 1)*(math.log(B-1) + 0.39 - 0.61/er)/(2*er))
def getCharacteristicImpedance(WHratio, ef):
"""
    Returns the characteristic impedance of the medium, based on the effective
    permittivity and W/H ratio.
    This function assumes that the thickness of conductors is insignificant.
    Parameters:
    - `WHratio` : W/H ratio.
    - `ef` : Effective permittivity of the dielectric.
"""
if WHratio <= 1:
return (60/math.sqrt(ef))*math.log(8/WHratio + WHratio/4)
else:
return (120*math.pi/math.sqrt(ef))/(WHratio + 1.393 + 0.667*math.log(WHratio +1.444))
def getWHRatio(Zo,er):
"""
    Returns the W/H ratio, after trying the two possible sets of solutions
    (for W/H < 2 and otherwise). When there is no solution, returns zero.
    This function assumes that the thickness of conductors is insignificant.
    Parameters:
    - `Zo` : Real impedance of the line.
    - `er` : Relative permittivity of the dielectric.
"""
efa = er
efb = er
Zoa = Zo
Zob = Zo
while 1:
rA = getWHRatioA(Zoa,efa)
rB = getWHRatioB(Zob,efb)
if rA < 2:
return rA
if rB > 2:
return rB
Zoa = math.sqrt(efa)*Zoa
Zob = math.sqrt(efb)*Zob
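# Usage sketch (illustrative values): synthesize a 50 ohm line on a substrate
# with er = 4.4, then verify the result; W follows from the ratio as W = ratio*H.
#   ratio = getWHRatio(50, 4.4)
#   ef = getEffectivePermitivity(ratio, 4.4)
#   Zo = getCharacteristicImpedance(ratio, ef)  # should come out close to 50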
def getCorrectedWidth(W,H,t):
"""
For significant conductor thickness, this returns the corrected width.
    Parameters:
- `W` : Width
- `H` : Height
- `t` : Conductor thickness
"""
if t < H and t < W/2:
if W/H <= math.pi/2:
return W + (1 + math.log(2*H/t))*(t/math.pi)
else:
return W + (1 + math.log(4*math.pi*H/t))*(t/math.pi)
else:
print "The conductor is too thick!!"
def getConductorLoss(W,H,t,sigma,f,Zo):
"""
Returns the conductor loss in [Np/m].
Parameters:
- `W` : Width
- `H` : Height
- `t` : Conductor thickness
- `sigma` : Conductance of medium
- `f` : Operating frequency
- `Zo` : Characteristic impedance
"""
We = getCorrectedWidth(W,H,t)
P = 1 - (We/4/H)**2
Rs = math.sqrt((math.pi*f*u0)/sigma)
Q = 1 + H/We + (math.log((2*H)/t)-t/W)*H/(We*math.pi)
if W/H <= 1/(2*math.pi):
        return (1 + H/We + (math.log(4*math.pi*W/t) + t/W)*H/(math.pi*We))*(8.68*Rs*P)/(2*math.pi*Zo*H)
elif W/H <= 2:
return (8.68*Rs*P*Q)/(2*math.pi*Zo*H)
else:
return ((8.68*Rs*Q)/(Zo*H))*(We/H + (We/math.pi/H)/(We/2/H)+0.94)*((H/We + 2*math.log(We/2/H + 0.94)/math.pi)**(-2))
def getDielectricLoss(er,ef,tanD,f):
"""
Returns the dielectric loss in [dB/cm].
Paramenters:
- `er` : Relative permitivity of the dielectric
- `ef` : Effective permitivity
- `tanD` : tan \delta
- `f` : Operating frequency
"""
lam = c/math.sqrt(ef)/f
    return 27.3*(er*(ef-1)*tanD)/(lam*math.sqrt(er)*(er-1))
| Lisergishnu/LTXKit | uStripDesign.py | Python | gpl-2.0 | 5,581 | 0.03064 |
from hiveplotter import HivePlot
import networkx as nx
import random
from unittest import TestCase
SEED = 1
NTYPES = ['A', 'B', 'C']
class SimpleCase(TestCase):
def make_graph(self):
G = nx.fast_gnp_random_graph(30, 0.2, seed=SEED)
for node, data in G.nodes_iter(data=True):
data['ntype'] = random.choice(NTYPES)
for src, tgt, data in G.edges_iter(data=True):
data['weight'] = random.random()
return G
def test_simple(self):
G = self.make_graph()
H = HivePlot(G, node_class_attribute='ntype')
H.draw()
H.save_plot('./output/main.pdf')
def test_dump_cfg(self):
G = self.make_graph()
H = HivePlot(G, node_class_attribute='ntype')
H.draw()
print(H.dump_config())
if __name__ == '__main__':
tests = SimpleCase()
tests.test_simple()
| clbarnes/hiveplotter | test/simple_tests.py | Python | bsd-3-clause | 882 | 0 |
import json
import logging
from django.contrib.auth.models import User
from django.contrib.auth import login, authenticate, logout
from django.contrib.auth.decorators import login_required
from django.shortcuts import render, redirect, get_object_or_404
from django.http import HttpResponse
from django.core.mail import send_mail
from django.template import RequestContext
from django.template.loader import render_to_string
from django.conf import settings
from django.views.decorators.csrf import csrf_exempt
from django.contrib.admin.views.decorators import staff_member_required
from settings import LOGIN_URL
from xbrowse_server.base.forms import LoginForm, SetUpAccountForm
from xbrowse_server.base.models import UserProfile
from xbrowse_server.base.utils import get_projects_for_user
from xbrowse_server.decorators import log_request
def landing_page(request):
return render(request, 'landing_page.html', {})
@csrf_exempt
def errorlog(request):
logger = logging.getLogger(__name__)
logger.error('xbrowse JS error', extra={'request': request})
return HttpResponse(json.dumps({'success': True}))
@log_request('home')
def home(request):
if request.user.is_anonymous():
return landing_page(request)
projects = get_projects_for_user(request.user)
return render(request, 'home.html', {
'user': request.user,
'projects': projects,
'new_page_url': '/dashboard',
})
@log_request('login')
def login_view(request):
logout(request)
next = request.GET.get('next')
if request.method == 'POST':
form = LoginForm(request.POST)
if form.is_valid():
user = form.user
login(request, user)
if next and '.wsgi' not in next:
return redirect(next)
else:
return redirect('/')
else:
form = LoginForm()
return render(request, 'login.html', {
'form': form,
'next': next,
})
def logout_view(request):
logout(request)
return redirect('home')
@log_request('set_password')
def set_password(request):
error = None
token = request.GET.get('token')
if not token or len(token) < 1:
return HttpResponse('Invalid')
profile = get_object_or_404(UserProfile, set_password_token=token)
if request.method == 'POST':
form = SetUpAccountForm(request.POST)
if form.is_valid():
user = profile.user
user.set_password(form.cleaned_data['password1'])
user.save()
profile.set_password_token = ''
profile.display_name = form.cleaned_data['name']
profile.save()
u = authenticate(username=profile.user.username, password=form.cleaned_data['password1'])
login(request, u)
return redirect('home')
else:
form = SetUpAccountForm()
return render(request, 'set_password.html', {
'form': form,
'error': error,
})
def forgot_password(request):
error = None
if request.method == 'POST':
email = request.POST.get('email').lower()
if email is None or email == "":
error = "Please enter an email."
elif not User.objects.filter(email__iexact=email).exists():
error = "This email address is not valid."
else:
user = User.objects.get(email__iexact=email)
profile = user.profile
profile.set_password_token = User.objects.make_random_password(length=30)
profile.save()
email_content = render_to_string(
'emails/reset_password.txt',
{'user': user, 'BASE_URL': settings.BASE_URL },
)
            send_mail('Reset your xBrowse password', email_content, settings.FROM_EMAIL, [email], fail_silently=False)
return redirect('forgot_password_sent')
return render(request, 'forgot_password.html', {
'error': error,
'new_page_url': '/users/forgot_password',
})
def forgot_password_sent(request):
return render(request, 'forgot_password_sent.html', {
})
def style_css(request):
return render(request, 'css/style.css', {
}, content_type="text/css")
@log_request('user_summary')
@staff_member_required(login_url=LOGIN_URL)
def user_summary(request, username):
user = User.objects.get(username=username)
return render(request, 'user_summary.html', {
'user': user,
'projects': get_projects_for_user(user),
})
| macarthur-lab/xbrowse | xbrowse_server/base/views/account_views.py | Python | agpl-3.0 | 4,556 | 0.004829 |
import sys
import math
import wave
import struct
import curses
import pyaudio
import numpy as np
import matplotlib.pyplot as plt
# 'curses' configuration
stdscr = curses.initscr()
stdscr.nodelay(True)
curses.noecho()
curses.cbreak()
# PyAudio object variable
pa = pyaudio.PyAudio()
# The mode the user chose with a script argument
MODE = sys.argv[1]
# Size of each read-in chunk
CHUNK = 1
# Amount of channels of the live recording
CHANNELS = 2
# Sample width of the live recording
WIDTH = 2
# Sample rate in Hz of the live recording
SAMPLE_RATE = 44100
# Set how often data for the result will be saved (every nth CHUNK)
if MODE != '-p' and MODE != '--playback':
try:
NTH_ITERATION = int(sys.argv[3])
except (ValueError, IndexError):
        print('The third argument has to be a number')
sys.exit()
def main():
# Execute the chosen mode
if MODE == '--file' or MODE == '-f':
file_mode()
elif MODE == '--live' or MODE == '-l':
live_mode()
elif MODE == '--playback' or MODE == '-p':
playback_mode()
else:
print('Please either choose file-mode, live-mode or playback-mode with the first argument')
def file_mode():
# Read in the given file
(waveform, stream) = readin(sys.argv[4])
# Give some feedback
stdscr.addstr('Now noise-cancelling the file')
# Collecting the volume levels in decibels in a list
decibel_levels = []
# Collecting the waves into lists
total_original = []
total_inverted = []
total_difference = []
# Counting the iterations of the while-loop
iteration = 0
# Determines the ratio of the mix
ratio = 1.0
# Determines if the noise-cancellation is active
active = True
# Read a first chunk and continue to do so for as long as there is a stream to read in
original = waveform.readframes(CHUNK)
while original != b'':
try:
# Capture if a key was pressed
pressed_key = stdscr.getch()
# If the 'o' key was pressed toggle the 'active' variable
if pressed_key == 111:
active = not active
            # While the noise-cancellation is deactivated, the ratio is set fully towards the original audio
if not active:
ratio = 2.0
else:
ratio = 1.0
# Increase the ratio of the mix
elif pressed_key == 43:
ratio += 0.01
# Decrease the ratio of the mix
elif pressed_key == 45:
ratio -= 0.01
# If the 'x' key was pressed abort the loop
elif pressed_key == 120:
break
# Invert the original audio
inverted = invert(original)
# Play back a mixed audio stream of both, original source and the inverted one
if active:
mix = mix_samples(original, inverted, ratio)
stream.write(mix)
            # If the noise-cancellation is temporarily turned off, only play the original audio source
else:
stream.write(original)
# On every nth iteration append the difference between the level of the source audio and the inverted one
if iteration % NTH_ITERATION == 0:
# Clear the terminal before outputting the new value
stdscr.clear()
# Calculate the difference of the source and the inverted audio
difference = calculate_difference(original, inverted)
# Print the current difference
stdscr.addstr('Difference (in dB): {}\n'.format(difference))
# Append the difference to the list used for the plot
decibel_levels.append(difference)
# Calculate the waves for the graph
int_original, int_inverted, int_difference = calculate_wave(original, inverted, ratio)
total_original.append(int_original)
total_inverted.append(int_inverted)
total_difference.append(int_difference)
# Read in the next chunk of data
original = waveform.readframes(CHUNK)
# Add up one to the iterations
iteration += 1
except (KeyboardInterrupt, SystemExit):
break
# Stop the stream after there is no more data to read
stream.stop_stream()
stream.close()
# Outputting feedback regarding the end of the file
print('Finished noise-cancelling the file')
# Plot the results
if sys.argv[2] == '--decibel' or sys.argv[2] == '-db':
plot_results(decibel_levels, NTH_ITERATION)
elif sys.argv[2] == '--waves' or sys.argv[2] == '-wv':
plot_wave_results(total_original, total_inverted, total_difference, NTH_ITERATION)
# Revert the changes from 'curses'
curses.endwin()
# Terminate PyAudio as well as the program
pa.terminate()
sys.exit()
def live_mode():
# Start live recording
stdscr.addstr('Now noise-cancelling live')
# Create a new PyAudio object using the preset constants
stream = pa.open(
format=pa.get_format_from_width(WIDTH),
channels=CHANNELS,
rate=SAMPLE_RATE,
frames_per_buffer=CHUNK,
input=True,
output=True
)
# Collecting the volume levels in decibels in a list
decibel_levels = []
# Collecting the waves into lists
total_original = []
total_inverted = []
total_difference = []
# Determines if the noise-cancellation is active
active = True
# Grab a chunk of data in iterations according to the preset constants
try:
for i in range(0, int(SAMPLE_RATE / CHUNK * sys.maxunicode)):
# Capture if a key was pressed
pressed_key = stdscr.getch()
# If the 'o' key was pressed toggle the 'active' variable
if pressed_key == 111:
active = not active
# If the 'x' key was pressed abort the loop
if pressed_key == 120:
break
# Read in a chunk of live audio on each iteration
original = stream.read(CHUNK)
# Invert the original audio
inverted = invert(original)
# Play back the inverted audio
stream.write(inverted, CHUNK)
# On every nth iteration append the difference between the level of the source audio and the inverted one
if i % NTH_ITERATION == 0:
# Clear the terminal before outputting the new value
stdscr.clear()
# Calculate the difference of the source and the inverted audio
difference = calculate_difference(original, inverted)
# Print the current difference
stdscr.addstr('Difference (in dB): {}'.format(difference))
# Append the difference to the list used for the plot
decibel_levels.append(difference)
# Calculate the waves for the graph
                # A fixed even mix (ratio 1.0) is passed here, since live mode has no ratio control
                int_original, int_inverted, int_difference = calculate_wave(original, inverted, 1.0)
total_original.append(int_original)
total_inverted.append(int_inverted)
total_difference.append(int_difference)
except (KeyboardInterrupt, SystemExit):
# Outputting feedback regarding the end of the file
print('Finished noise-cancelling the file')
# Plot the results
if sys.argv[2] == '--decibel' or sys.argv[2] == '-db':
plot_results(decibel_levels, NTH_ITERATION)
elif sys.argv[2] == '--waves' or sys.argv[2] == '-wv':
plot_wave_results(total_original, total_inverted, total_difference, NTH_ITERATION)
# Revert the changes from 'curses'
curses.endwin()
# Terminate the program
stream.stop_stream()
stream.close()
pa.terminate()
sys.exit()
def playback_mode():
# Read in the given file
(waveform, stream) = readin(sys.argv[2])
# Give some feedback
print('Now playing back the file')
# Read a first chunk and continue to do so for as long as there is a stream to read in
original = waveform.readframes(CHUNK)
while original != b'':
try:
# Play back the audio
stream.write(original)
# Read in the next chunk of data
original = waveform.readframes(CHUNK)
except (KeyboardInterrupt, SystemExit):
break
# Stop the stream after there is no more data to read
stream.stop_stream()
stream.close()
# Outputting feedback regarding the end of the file
print('Finished playing back the file')
# Terminate PyAudio as well as the program
pa.terminate()
sys.exit()
def readin(file):
"""
Reads in the given wave file and returns a new PyAudio stream object from it.
:param file: The path to the file to read in
:return (waveform, stream): (The actual audio data as a waveform, the PyAudio object for said data)
"""
# Open the waveform from the command argument
try:
waveform = wave.open(file, 'r')
except wave.Error:
print('The program can only process wave audio files (.wav)')
sys.exit()
except FileNotFoundError:
print('The chosen file does not exist')
sys.exit()
# Load PyAudio and create a useable waveform object
stream = pa.open(
format=pa.get_format_from_width(waveform.getsampwidth()),
channels=waveform.getnchannels(),
rate=waveform.getframerate(),
output=True
)
# Return the waveform as well as the generated PyAudio stream object
return waveform, stream
def invert(data):
"""
    Inverts the byte data it received utilizing a bitwise NOT operation.
:param data: A chunk of byte data
:return inverted: The same size of chunked data inverted bitwise
"""
# Convert the bytestring into an integer
intwave = np.fromstring(data, np.int32)
# Invert the integer
intwave = np.invert(intwave)
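    # Bitwise NOT of a two's-complement sample equals (-x - 1), which closely
    # approximates negating the waveform, i.e. a 180 degree phase inversion.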
# Convert the integer back into a bytestring
inverted = np.frombuffer(intwave, np.byte)
# Return the inverted audio data
return inverted
def mix_samples(sample_1, sample_2, ratio):
"""
Mixes two samples into each other
:param sample_1: A bytestring containing the first audio source
:param sample_2: A bytestring containing the second audio source
:param ratio: A float which determines the mix-ratio of the two samples (the higher, the louder the first sample)
:return mix: A bytestring containing the two samples mixed together
"""
# Calculate the actual ratios based on the float the function received
(ratio_1, ratio_2) = get_ratios(ratio)
# Convert the two samples to integers
intwave_sample_1 = np.fromstring(sample_1, np.int16)
intwave_sample_2 = np.fromstring(sample_2, np.int16)
# Mix the two samples together based on the calculated ratios
intwave_mix = (intwave_sample_1 * ratio_1 + intwave_sample_2 * ratio_2).astype(np.int16)
# Convert the new mix back to a playable bytestring
mix = np.frombuffer(intwave_mix, np.byte)
return mix
def get_ratios(ratio):
"""
Calculates the ratios using a received float
    :param ratio: A float between 0 and 2 resembling the ratio between two things
:return ratio_1, ratio_2: The two calculated actual ratios
"""
ratio = float(ratio)
ratio_1 = ratio / 2
ratio_2 = (2 - ratio) / 2
return ratio_1, ratio_2
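# For example, ratio=1.0 yields (0.5, 0.5) -- an even mix -- while ratio=2.0
# yields (1.0, 0.0), i.e. only the first sample remains audible.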
def calculate_decibel(data):
"""
Calculates the volume level in decibel of the given data
:param data: A bytestring used to calculate the decibel level
:return db: The calculated volume level in decibel
"""
count = len(data) / 2
form = "%dh" % count
shorts = struct.unpack(form, data)
sum_squares = 0.0
for sample in shorts:
n = sample * (1.0 / 32768)
sum_squares += n * n
rms = math.sqrt(sum_squares / count) + 0.0001
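    # The small epsilon keeps log10 from receiving zero on silent chunks.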
db = 20 * math.log10(rms)
return db
def calculate_difference(data_1, data_2):
"""
Calculates the difference level in decibel between the received binary inputs
:param data_1: The first binary digit
:param data_2: The second binary digit
:return difference: The calculated difference level (in dB)
"""
difference = calculate_decibel(data_1) - calculate_decibel(data_2)
return difference
def calculate_wave(original, inverted, ratio):
"""
Converts the bytestrings it receives into plottable integers and calculates the difference between both
:param original: A bytestring of sound
:param inverted: A bytestring of sound
:param ratio: A float which determines the mix-ratio of the two samples
:return int_original, int_inverted, int_difference: A tupel of the three calculated integers
"""
# Calculate the actual ratios based on the float the function received
(ratio_1, ratio_2) = get_ratios(ratio)
# Convert the two samples to integers to be able to add them together
int_original = np.fromstring(original, np.int16)[0] * ratio_1
int_inverted = np.fromstring(inverted, np.int16)[0] * ratio_2
# Calculate the difference between the two samples
int_difference = (int_original + int_inverted)
return int_original, int_inverted, int_difference
def plot_results(data, nth_iteration):
"""
Plots the list it receives and cuts off the first ten entries to circumvent the plotting of initial silence
:param data: A list of data to be plotted
:param nth_iteration: Used for the label of the x axis
"""
# Plot the data
plt.plot(data[10:])
# Label the axes
plt.xlabel('Time (every {}th {} byte)'.format(nth_iteration, CHUNK))
plt.ylabel('Volume level difference (in dB)')
# Calculate and output the absolute median difference level
plt.suptitle('Difference - Median (in dB): {}'.format(np.round(np.fabs(np.median(data)), decimals=5)), fontsize=14)
# Display the plotted graph
plt.show()
def plot_wave_results(total_original, total_inverted, total_difference, nth_iteration):
"""
Plots the three waves of the original sound, the inverted one and their difference
:param total_original: A list of the original wave data
:param total_inverted: A list of the inverted wave data
:param total_difference: A list of the difference of 'total_original' and 'total_inverted'
:param nth_iteration: Used for the label of the x axis
"""
# Plot the three waves
plt.plot(total_original, 'b')
plt.plot(total_inverted, 'r')
plt.plot(total_difference, 'g')
# Label the axes
plt.xlabel('Time (per {}th {} byte chunk)'.format(nth_iteration, CHUNK))
    plt.ylabel('Amplitude (integer representation of each {} byte chunk)'.format(CHUNK))
# Calculate and output the absolute median difference level
plt.suptitle('Waves: original (blue), inverted (red), output (green)', fontsize=14)
# Display the plotted graph
plt.show()
# Execute the main function to start the script
main()
| loehnertz/rattlesnake | rattlesnake.py | Python | mit | 15,227 | 0.001773 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# sorteddict.py
# Sorted dictionary (implementation for Python 2.x)
#
# Copyright (c) 2010 Jan Kaliszewski (zuo)
#
# The MIT License:
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from bisect import bisect_left, insort
from itertools import izip, repeat
def dictdoc(method):
"A decorator making reuse of the ordinary dict's docstrings more concise."
dict_method = getattr(dict, method.__name__)
if hasattr(dict_method, '__doc__'):
method.__doc__ = dict_method.__doc__
return method
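# Usage sketch:
#   sd = SortedDict({'b': 2, 'a': 1, 'c': 3})
#   sd.keys()               # -> ['a', 'b', 'c'] (always sorted)
#   sd.items(reverse=True)  # -> [('c', 3), ('b', 2), ('a', 1)]
#   sd.popitem()            # -> ('c', 3), removes the pair with the largest key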
class SortedDict(dict):
'''Dictionary with sorted keys.
The interface is similar to the ordinary dict's one, but:
* methods: __repr__(), __str__(), __iter__(), iterkeys(), itervalues(),
iteritems(), keys(), values(), items() and popitem() -- return results
taking into consideration sorted keys order;
* new methods: largest_key(), largest_item(), smallest_key(),
smallest_item() added.
'''
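    # Minimal usage sketch (illustrative, Python 2.x):
    #
    #     d = SortedDict(b=2, a=1)
    #     d['c'] = 3
    #     d.keys()        # ['a', 'b', 'c'] -- always in sorted key order
    #     d.popitem()     # ('c', 3) -- the pair with the largest key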
def __init__(self, *args, **kwargs):
'''Like with the ordinary dict: from a mapping, from an iterable
of (key, value) pairs, or from keyword arguments.'''
dict.__init__(self, *args, **kwargs)
self._sorted_keys = sorted(dict.iterkeys(self))
@dictdoc
def __repr__(self):
return 'SortedDict({%s})' % ', '.join('%r: %r' % item
for item in self.iteritems())
@dictdoc
def __str__(self):
return repr(self)
@dictdoc
def __setitem__(self, key, value):
key_is_new = key not in self
dict.__setitem__(self, key, value)
if key_is_new:
insort(self._sorted_keys, key)
@dictdoc
def __delitem__(self, key):
dict.__delitem__(self, key)
del self._sorted_keys[bisect_left(self._sorted_keys, key)]
def __iter__(self, reverse=False):
'''D.__iter__() <==> iter(D) <==> D.iterkeys() -> an iterator over
sorted keys (add reverse=True for reverse ordering).'''
if reverse:
return reversed(self._sorted_keys)
else:
return iter(self._sorted_keys)
iterkeys = __iter__
def itervalues(self, reverse=False):
'''D.itervalues() -> an iterator over values sorted by keys
(add reverse=True for reverse ordering).'''
return (self[key] for key in self.iterkeys(reverse))
def iteritems(self, reverse=False):
'''D.iteritems() -> an iterator over (key, value) pairs sorted by keys
(add reverse=True for reverse ordering).'''
return ((key, self[key]) for key in self.iterkeys(reverse))
def keys(self, reverse=False):
'''D.keys() -> a sorted list of keys
(add reverse=True for reverse ordering).'''
return list(self.iterkeys(reverse))
def values(self, reverse=False):
'''D.values() -> a list of values sorted by keys
(add reverse=True for reverse ordering).'''
return list(self.itervalues(reverse))
def items(self, reverse=False):
'''D.items() -> a list of (key, value) pairs sorted by keys
(add reverse=True for reverse ordering).'''
return list(self.iteritems(reverse))
@dictdoc
def clear(self):
dict.clear(self)
del self._sorted_keys[:]
def copy(self):
'''D.copy() -> a shallow copy of D (still as a SortedDict).'''
return self.__class__(self)
@classmethod
@dictdoc
def fromkeys(cls, seq, value=None):
return cls(izip(seq, repeat(value)))
@dictdoc
def pop(self, key, *args, **kwargs):
if key in self:
del self._sorted_keys[bisect_left(self._sorted_keys, key)]
return dict.pop(self, key, *args, **kwargs)
def popitem(self):
'''D.popitem() -> (k, v). Remove and return a (key, value) pair with
the largest key; raise KeyError if D is empty.'''
try:
key = self._sorted_keys.pop()
except IndexError:
raise KeyError('popitem(): dictionary is empty')
else:
return key, dict.pop(self, key)
@dictdoc
def setdefault(self, key, default=None):
if key not in self:
insort(self._sorted_keys, key)
return dict.setdefault(self, key, default)
@dictdoc
def update(self, other=()):
if hasattr(other, 'keys') and hasattr(other, 'values'):
# mapping
newkeys = [key for key in other if key not in self]
else:
# iterator/sequence of pairs
other = list(other)
newkeys = [key for key, _ in other if key not in self]
dict.update(self, other)
for key in newkeys:
insort(self._sorted_keys, key)
def largest_key(self):
'''D.largest_key() -> the largest key; raise KeyError if D is empty.'''
try:
return self._sorted_keys[-1]
except IndexError:
raise KeyError('largest_key(): dictionary is empty')
def largest_item(self):
'''D.largest_item() -> a (key, value) pair with the largest key;
raise KeyError if D is empty.'''
key = self.largest_key()
return key, self[key]
def smallest_key(self):
'''D.smallest_key() -> the smallest key; raise KeyError if D is empty.'''
try:
return self._sorted_keys[0]
except IndexError:
raise KeyError('smallest_key(): dictionary is empty')
def smallest_item(self):
'''D.smallest_item() -> a (key, value) pair with the smallest key;
raise KeyError if D is empty.'''
key = self.smallest_key()
return key, self[key]
| ddurieux/alignak | alignak/sorteddict.py | Python | agpl-3.0 | 6,690 | 0.000149 |
import lxml.html
from .bills import NHBillScraper
from .legislators import NHLegislatorScraper
from .committees import NHCommitteeScraper
metadata = {
'abbreviation': 'nh',
'name': 'New Hampshire',
'capitol_timezone': 'America/New_York',
'legislature_name': 'New Hampshire General Court',
'legislature_url': 'http://www.gencourt.state.nh.us/',
'chambers': {
'upper': {'name': 'Senate', 'title': 'Senator'},
'lower': {'name': 'House', 'title': 'Representative'},
},
'terms': [
{'name': '2011-2012', 'sessions': ['2011', '2012'],
'start_year': 2011, 'end_year': 2012},
{'name': '2013-2014', 'sessions': ['2013', '2014'],
'start_year': 2013, 'end_year': 2014},
{'name': '2015-2016', 'sessions': ['2015', '2016'],
'start_year': 2015, 'end_year': 2016},
{'name': '2017-2018', 'sessions': ['2017'],
'start_year': 2017, 'end_year': 2018}
],
'session_details': {
'2011': {'display_name': '2011 Regular Session',
'zip_url': 'http://gencourt.state.nh.us/downloads/2011%20Session%20Bill%20Status%20Tables.zip',
'_scraped_name': '2011 Session',
},
'2012': {'display_name': '2012 Regular Session',
'zip_url': 'http://gencourt.state.nh.us/downloads/2012%20Session%20Bill%20Status%20Tables.zip',
'_scraped_name': '2012 Session',
},
'2013': {'display_name': '2013 Regular Session',
'zip_url': 'http://gencourt.state.nh.us/downloads/2013%20Session%20Bill%20Status%20Tables.zip',
# Their dump filename changed, probably just a hiccup.
'_scraped_name': '2013',
# '_scraped_name': '2013 Session',
},
'2014': {'display_name': '2014 Regular Session',
'zip_url': 'http://gencourt.state.nh.us/downloads/2014%20Session%20Bill%20Status%20Tables.zip',
'_scraped_name': '2014 Session',
},
'2015': {'display_name': '2015 Regular Session',
'zip_url': 'http://gencourt.state.nh.us/downloads/2015%20Session%20Bill%20Status%20Tables.zip',
'_scraped_name': '2015 Session',
},
'2016': {'display_name': '2016 Regular Session',
'zip_url': 'http://gencourt.state.nh.us/downloads/2016%20Session%20Bill%20Status%20Tables.zip',
'_scraped_name': '2016 Session',
},
'2017': {'display_name': '2017 Regular Session',
'_scraped_name': '2017 Session',
},
},
'feature_flags': ['subjects', 'influenceexplorer'],
    '_ignored_scraped_sessions': ['2013 Session', '2017 Session Bill Status Tables Link.txt'],
}
def session_list():
from billy.scrape.utils import url_xpath
zips = url_xpath('http://gencourt.state.nh.us/downloads/',
'//a[contains(@href, "Bill%20Status%20Tables")]/text()')
    return [z.replace(' Bill Status Tables.zip', '') for z in zips]
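# Illustrative return value (hypothetical): ['2011 Session', '2012 Session'],
# i.e. each scraped link text with its ' Bill Status Tables.zip' suffix stripped.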
def extract_text(doc, data):
doc = lxml.html.fromstring(data)
return doc.xpath('//html')[0].text_content()
| cliftonmcintosh/openstates | openstates/nh/__init__.py | Python | gpl-3.0 | 3,210 | 0.005296 |
#!/usr/bin/env python
import numpy as np
import matplotlib.pyplot as plt
origin = 'lower'
#origin = 'upper'
delta = 0.025
x = y = np.arange(-3.0, 3.01, delta)
X, Y = np.meshgrid(x, y)
Z1 = plt.mlab.bivariate_normal(X, Y, 1.0, 1.0, 0.0, 0.0)
Z2 = plt.mlab.bivariate_normal(X, Y, 1.5, 0.5, 1, 1)
Z = 10 * (Z1 - Z2)
nr, nc = Z.shape
# put NaNs in one corner:
Z[-nr//6:, -nc//6:] = np.nan
# contourf will convert these to masked
Z = np.ma.array(Z)
# mask another corner:
Z[:nr//6, :nc//6] = np.ma.masked
# mask a circle in the middle:
interior = np.sqrt((X**2) + (Y**2)) < 0.5
Z[interior] = np.ma.masked
# We are using automatic selection of contour levels;
# this is usually not such a good idea, because they don't
# occur on nice boundaries, but we do it here for purposes
# of illustration.
CS = plt.contourf(X, Y, Z, 10, # [-1, -0.1, 0, 0.1],
#alpha=0.5,
cmap=plt.cm.bone,
origin=origin)
# Note that in the following, we explicitly pass in a subset of
# the contour levels used for the filled contours. Alternatively,
# We could pass in additional levels to provide extra resolution,
# or leave out the levels kwarg to use all of the original levels.
CS2 = plt.contour(CS, levels=CS.levels[::2],
colors = 'r',
origin=origin,
hold='on')
plt.title('Nonsense (3 masked regions)')
plt.xlabel('word length anomaly')
plt.ylabel('sentence length anomaly')
# Make a colorbar for the ContourSet returned by the contourf call.
cbar = plt.colorbar(CS)
cbar.ax.set_ylabel('verbosity coefficient')
# Add the contour line levels to the colorbar
cbar.add_lines(CS2)
plt.figure()
# Now make a contour plot with the levels specified,
# and with the colormap generated automatically from a list
# of colors.
levels = [-1.5, -1, -0.5, 0, 0.5, 1]
CS3 = plt.contourf(X, Y, Z, levels,
colors = ('r', 'g', 'b'),
origin=origin,
extend='both')
# Our data range extends outside the range of levels; make
# data below the lowest contour level yellow, and above the
# highest level cyan:
CS3.cmap.set_under('yellow')
CS3.cmap.set_over('cyan')
CS4 = plt.contour(X, Y, Z, levels,
colors = ('k',),
linewidths = (3,),
origin = origin)
plt.title('Listed colors (3 masked regions)')
plt.clabel(CS4, fmt = '%2.1f', colors = 'w', fontsize=14)
# Notice that the colorbar command gets all the information it
# needs from the ContourSet object, CS3.
plt.colorbar(CS3)
# Illustrate all 4 possible "extend" settings:
extends = ["neither", "both", "min", "max"]
cmap = plt.cm.get_cmap("winter")
cmap.set_under("magenta")
cmap.set_over("yellow")
# Note: contouring simply excludes masked or nan regions, so
# instead of using the "bad" colormap value for them, it draws
# nothing at all in them. Therefore the following would have
# no effect:
#cmap.set_bad("red")
fig, axs = plt.subplots(2,2)
for ax, extend in zip(axs.ravel(), extends):
cs = ax.contourf(X, Y, Z, levels, cmap=cmap, extend=extend, origin=origin)
fig.colorbar(cs, ax=ax, shrink=0.9)
ax.set_title("extend = %s" % extend)
ax.locator_params(nbins=4)
plt.show()
| radiasoft/radtrack | radtrack/plot/contourf_demo.py | Python | apache-2.0 | 3,308 | 0.009069 |
# Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from six.moves.urllib.parse import quote
import unittest
import os
from tempfile import mkdtemp
import shutil
from swift.common.storage_policy import StoragePolicy
from swift.common.swob import Request
from swift.common.utils import mkdirs, split_path
from swift.common.wsgi import monkey_patch_mimetools, WSGIContext
from swift.obj import server as object_server
from swift.proxy import server as proxy
import swift.proxy.controllers
from test.unit import FakeMemcache, debug_logger, FakeRing, \
fake_http_connect, patch_policies
class FakeServerConnection(WSGIContext):
'''Fakes an HTTPConnection to a server instance.'''
def __init__(self, app):
super(FakeServerConnection, self).__init__(app)
self.data = ''
def getheaders(self):
return self._response_headers
def read(self, amt=None):
try:
result = next(self.resp_iter)
return result
except StopIteration:
return ''
def getheader(self, name, default=None):
result = self._response_header_value(name)
return result if result else default
def getresponse(self):
environ = {'REQUEST_METHOD': self.method}
req = Request.blank(self.path, environ, headers=self.req_headers,
body=self.data)
self.data = ''
self.resp = self._app_call(req.environ)
self.resp_iter = iter(self.resp)
if self._response_headers is None:
self._response_headers = []
status_parts = self._response_status.split(' ', 1)
self.status = int(status_parts[0])
self.reason = status_parts[1] if len(status_parts) == 2 else ''
return self
def getexpect(self):
class ContinueResponse(object):
status = 100
return ContinueResponse()
def send(self, data):
self.data += data
def close(self):
pass
def __call__(self, ipaddr, port, device, partition, method, path,
headers=None, query_string=None):
self.path = quote('/' + device + '/' + str(partition) + path)
self.method = method
self.req_headers = headers
return self
def get_http_connect(account_func, container_func, object_func):
'''Returns a http_connect function that delegates to
entity-specific http_connect methods based on request path.
'''
def http_connect(ipaddr, port, device, partition, method, path,
headers=None, query_string=None):
a, c, o = split_path(path, 1, 3, True)
if o:
func = object_func
elif c:
func = container_func
else:
func = account_func
resp = func(ipaddr, port, device, partition, method, path,
headers=headers, query_string=query_string)
return resp
return http_connect
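# Dispatch sketch (hypothetical paths): split_path(path, 1, 3, True) yields
# ('a', None, None) for '/a', ('a', 'c', None) for '/a/c' and ('a', 'c', 'o')
# for '/a/c/o', so requests route to account_func, container_func and
# object_func respectively.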
@patch_policies([StoragePolicy(0, 'zero', True,
object_ring=FakeRing(replicas=1))])
class TestObjectSysmeta(unittest.TestCase):
'''Tests object sysmeta is correctly handled by combination
of proxy server and object server.
'''
def _assertStatus(self, resp, expected):
self.assertEqual(resp.status_int, expected,
'Expected %d, got %s'
% (expected, resp.status))
def _assertInHeaders(self, resp, expected):
for key, val in expected.items():
self.assertTrue(key in resp.headers,
'Header %s missing from %s' % (key, resp.headers))
self.assertEqual(val, resp.headers[key],
'Expected header %s:%s, got %s:%s'
% (key, val, key, resp.headers[key]))
def _assertNotInHeaders(self, resp, unexpected):
for key, val in unexpected.items():
self.assertFalse(key in resp.headers,
'Header %s not expected in %s'
% (key, resp.headers))
def setUp(self):
self.app = proxy.Application(None, FakeMemcache(),
logger=debug_logger('proxy-ut'),
account_ring=FakeRing(replicas=1),
container_ring=FakeRing(replicas=1))
monkey_patch_mimetools()
self.tmpdir = mkdtemp()
self.testdir = os.path.join(self.tmpdir,
'tmp_test_object_server_ObjectController')
mkdirs(os.path.join(self.testdir, 'sda', 'tmp'))
conf = {'devices': self.testdir, 'mount_check': 'false'}
self.obj_ctlr = object_server.ObjectController(
conf, logger=debug_logger('obj-ut'))
http_connect = get_http_connect(fake_http_connect(200),
fake_http_connect(200),
FakeServerConnection(self.obj_ctlr))
self.orig_base_http_connect = swift.proxy.controllers.base.http_connect
self.orig_obj_http_connect = swift.proxy.controllers.obj.http_connect
swift.proxy.controllers.base.http_connect = http_connect
swift.proxy.controllers.obj.http_connect = http_connect
def tearDown(self):
shutil.rmtree(self.tmpdir)
swift.proxy.controllers.base.http_connect = self.orig_base_http_connect
swift.proxy.controllers.obj.http_connect = self.orig_obj_http_connect
original_sysmeta_headers_1 = {'x-object-sysmeta-test0': 'val0',
'x-object-sysmeta-test1': 'val1'}
original_sysmeta_headers_2 = {'x-object-sysmeta-test2': 'val2'}
changed_sysmeta_headers = {'x-object-sysmeta-test0': '',
'x-object-sysmeta-test1': 'val1 changed'}
new_sysmeta_headers = {'x-object-sysmeta-test3': 'val3'}
original_meta_headers_1 = {'x-object-meta-test0': 'meta0',
'x-object-meta-test1': 'meta1'}
original_meta_headers_2 = {'x-object-meta-test2': 'meta2'}
changed_meta_headers = {'x-object-meta-test0': '',
'x-object-meta-test1': 'meta1 changed'}
new_meta_headers = {'x-object-meta-test3': 'meta3'}
bad_headers = {'x-account-sysmeta-test1': 'bad1'}
def test_PUT_sysmeta_then_GET(self):
path = '/v1/a/c/o'
env = {'REQUEST_METHOD': 'PUT'}
hdrs = dict(self.original_sysmeta_headers_1)
hdrs.update(self.original_meta_headers_1)
hdrs.update(self.bad_headers)
req = Request.blank(path, environ=env, headers=hdrs, body='x')
resp = req.get_response(self.app)
self._assertStatus(resp, 201)
req = Request.blank(path, environ={})
resp = req.get_response(self.app)
self._assertStatus(resp, 200)
self._assertInHeaders(resp, self.original_sysmeta_headers_1)
self._assertInHeaders(resp, self.original_meta_headers_1)
self._assertNotInHeaders(resp, self.bad_headers)
def test_PUT_sysmeta_then_HEAD(self):
path = '/v1/a/c/o'
env = {'REQUEST_METHOD': 'PUT'}
hdrs = dict(self.original_sysmeta_headers_1)
hdrs.update(self.original_meta_headers_1)
hdrs.update(self.bad_headers)
req = Request.blank(path, environ=env, headers=hdrs, body='x')
resp = req.get_response(self.app)
self._assertStatus(resp, 201)
env = {'REQUEST_METHOD': 'HEAD'}
req = Request.blank(path, environ=env)
resp = req.get_response(self.app)
self._assertStatus(resp, 200)
self._assertInHeaders(resp, self.original_sysmeta_headers_1)
self._assertInHeaders(resp, self.original_meta_headers_1)
self._assertNotInHeaders(resp, self.bad_headers)
def test_sysmeta_replaced_by_PUT(self):
path = '/v1/a/c/o'
env = {'REQUEST_METHOD': 'PUT'}
hdrs = dict(self.original_sysmeta_headers_1)
hdrs.update(self.original_sysmeta_headers_2)
hdrs.update(self.original_meta_headers_1)
hdrs.update(self.original_meta_headers_2)
req = Request.blank(path, environ=env, headers=hdrs, body='x')
resp = req.get_response(self.app)
self._assertStatus(resp, 201)
env = {'REQUEST_METHOD': 'PUT'}
hdrs = dict(self.changed_sysmeta_headers)
hdrs.update(self.new_sysmeta_headers)
hdrs.update(self.changed_meta_headers)
hdrs.update(self.new_meta_headers)
hdrs.update(self.bad_headers)
req = Request.blank(path, environ=env, headers=hdrs, body='x')
resp = req.get_response(self.app)
self._assertStatus(resp, 201)
req = Request.blank(path, environ={})
resp = req.get_response(self.app)
self._assertStatus(resp, 200)
self._assertInHeaders(resp, self.changed_sysmeta_headers)
self._assertInHeaders(resp, self.new_sysmeta_headers)
self._assertNotInHeaders(resp, self.original_sysmeta_headers_2)
self._assertInHeaders(resp, self.changed_meta_headers)
self._assertInHeaders(resp, self.new_meta_headers)
self._assertNotInHeaders(resp, self.original_meta_headers_2)
def _test_sysmeta_not_updated_by_POST(self):
# check sysmeta is not changed by a POST but user meta is replaced
path = '/v1/a/c/o'
env = {'REQUEST_METHOD': 'PUT'}
hdrs = dict(self.original_sysmeta_headers_1)
hdrs.update(self.original_meta_headers_1)
req = Request.blank(path, environ=env, headers=hdrs, body='x')
resp = req.get_response(self.app)
self._assertStatus(resp, 201)
env = {'REQUEST_METHOD': 'POST'}
hdrs = dict(self.changed_sysmeta_headers)
hdrs.update(self.new_sysmeta_headers)
hdrs.update(self.changed_meta_headers)
hdrs.update(self.new_meta_headers)
hdrs.update(self.bad_headers)
req = Request.blank(path, environ=env, headers=hdrs)
resp = req.get_response(self.app)
self._assertStatus(resp, 202)
req = Request.blank(path, environ={})
resp = req.get_response(self.app)
self._assertStatus(resp, 200)
self._assertInHeaders(resp, self.original_sysmeta_headers_1)
self._assertNotInHeaders(resp, self.new_sysmeta_headers)
self._assertInHeaders(resp, self.changed_meta_headers)
self._assertInHeaders(resp, self.new_meta_headers)
self._assertNotInHeaders(resp, self.bad_headers)
env = {'REQUEST_METHOD': 'PUT'}
hdrs = dict(self.changed_sysmeta_headers)
hdrs.update(self.new_sysmeta_headers)
hdrs.update(self.bad_headers)
req = Request.blank(path, environ=env, headers=hdrs, body='x')
resp = req.get_response(self.app)
self._assertStatus(resp, 201)
req = Request.blank(path, environ={})
resp = req.get_response(self.app)
self._assertStatus(resp, 200)
self._assertInHeaders(resp, self.changed_sysmeta_headers)
self._assertInHeaders(resp, self.new_sysmeta_headers)
self._assertNotInHeaders(resp, self.original_sysmeta_headers_2)
def test_sysmeta_not_updated_by_POST(self):
self.app.object_post_as_copy = False
self._test_sysmeta_not_updated_by_POST()
def test_sysmeta_not_updated_by_POST_as_copy(self):
self.app.object_post_as_copy = True
self._test_sysmeta_not_updated_by_POST()
def test_sysmeta_updated_by_COPY(self):
# check sysmeta is updated by a COPY in same way as user meta
path = '/v1/a/c/o'
dest = '/c/o2'
env = {'REQUEST_METHOD': 'PUT'}
hdrs = dict(self.original_sysmeta_headers_1)
hdrs.update(self.original_sysmeta_headers_2)
hdrs.update(self.original_meta_headers_1)
hdrs.update(self.original_meta_headers_2)
req = Request.blank(path, environ=env, headers=hdrs, body='x')
resp = req.get_response(self.app)
self._assertStatus(resp, 201)
env = {'REQUEST_METHOD': 'COPY'}
hdrs = dict(self.changed_sysmeta_headers)
hdrs.update(self.new_sysmeta_headers)
hdrs.update(self.changed_meta_headers)
hdrs.update(self.new_meta_headers)
hdrs.update(self.bad_headers)
hdrs.update({'Destination': dest})
req = Request.blank(path, environ=env, headers=hdrs)
resp = req.get_response(self.app)
self._assertStatus(resp, 201)
self._assertInHeaders(resp, self.changed_sysmeta_headers)
self._assertInHeaders(resp, self.new_sysmeta_headers)
self._assertInHeaders(resp, self.original_sysmeta_headers_2)
self._assertInHeaders(resp, self.changed_meta_headers)
self._assertInHeaders(resp, self.new_meta_headers)
self._assertInHeaders(resp, self.original_meta_headers_2)
self._assertNotInHeaders(resp, self.bad_headers)
req = Request.blank('/v1/a/c/o2', environ={})
resp = req.get_response(self.app)
self._assertStatus(resp, 200)
self._assertInHeaders(resp, self.changed_sysmeta_headers)
self._assertInHeaders(resp, self.new_sysmeta_headers)
self._assertInHeaders(resp, self.original_sysmeta_headers_2)
self._assertInHeaders(resp, self.changed_meta_headers)
self._assertInHeaders(resp, self.new_meta_headers)
self._assertInHeaders(resp, self.original_meta_headers_2)
self._assertNotInHeaders(resp, self.bad_headers)
def test_sysmeta_updated_by_COPY_from(self):
# check sysmeta is updated by a COPY in same way as user meta
path = '/v1/a/c/o'
env = {'REQUEST_METHOD': 'PUT'}
hdrs = dict(self.original_sysmeta_headers_1)
hdrs.update(self.original_sysmeta_headers_2)
hdrs.update(self.original_meta_headers_1)
hdrs.update(self.original_meta_headers_2)
req = Request.blank(path, environ=env, headers=hdrs, body='x')
resp = req.get_response(self.app)
self._assertStatus(resp, 201)
env = {'REQUEST_METHOD': 'PUT'}
hdrs = dict(self.changed_sysmeta_headers)
hdrs.update(self.new_sysmeta_headers)
hdrs.update(self.changed_meta_headers)
hdrs.update(self.new_meta_headers)
hdrs.update(self.bad_headers)
hdrs.update({'X-Copy-From': '/c/o'})
req = Request.blank('/v1/a/c/o2', environ=env, headers=hdrs, body='')
resp = req.get_response(self.app)
self._assertStatus(resp, 201)
self._assertInHeaders(resp, self.changed_sysmeta_headers)
self._assertInHeaders(resp, self.new_sysmeta_headers)
self._assertInHeaders(resp, self.original_sysmeta_headers_2)
self._assertInHeaders(resp, self.changed_meta_headers)
self._assertInHeaders(resp, self.new_meta_headers)
self._assertInHeaders(resp, self.original_meta_headers_2)
self._assertNotInHeaders(resp, self.bad_headers)
req = Request.blank('/v1/a/c/o2', environ={})
resp = req.get_response(self.app)
self._assertStatus(resp, 200)
self._assertInHeaders(resp, self.changed_sysmeta_headers)
self._assertInHeaders(resp, self.new_sysmeta_headers)
self._assertInHeaders(resp, self.original_sysmeta_headers_2)
self._assertInHeaders(resp, self.changed_meta_headers)
self._assertInHeaders(resp, self.new_meta_headers)
self._assertInHeaders(resp, self.original_meta_headers_2)
self._assertNotInHeaders(resp, self.bad_headers)
| bkolli/swift | test/unit/proxy/test_sysmeta.py | Python | apache-2.0 | 16,039 | 0 |
from __future__ import print_function
import datetime
import sys
import re
import os
import json
import urlparse
import fnmatch
import functools
import mock
import lxml.html
import requests
from requests.adapters import HTTPAdapter
from configman import Namespace
from configman.converters import class_converter, str_to_list
from crontabber.base import BaseCronApp
from crontabber.mixins import (
as_backfill_cron_app,
with_postgres_transactions
)
from socorro.cron import buildutil
from socorro.app.socorro_app import App, main
from socorro.lib.datetimeutil import string_to_datetime
def memoize_download(fun):
cache = {}
@functools.wraps(fun)
def inner(self, url):
if url not in cache:
cache[url] = fun(self, url)
return cache[url]
return inner
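# Behaviour sketch (hypothetical scraper; note the cache dict lives in the
# decorator's closure, so it is shared by every instance of the class):
#
#     class Scraper(object):
#         @memoize_download
#         def download(self, url):
#             print('fetching %s' % url)
#             return '<html/>'
#
#     s = Scraper()
#     s.download('http://example.com/a')   # fetches and caches
#     s.download('http://example.com/a')   # served from cache, no refetch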
class ScrapersMixin(object):
"""
Mixin that requires to be able to call `self.download(some_url)`
and `self.skip_json_file(json_url)`.
"""
def get_links(self, url, starts_with=None, ends_with=None):
results = []
content = self.download(url)
if not content:
return []
if not (starts_with or ends_with):
raise NotImplementedError(
                'get_links requires either `starts_with` or `ends_with`'
)
html = lxml.html.document_fromstring(content)
path = urlparse.urlparse(url).path
def url_match(link):
# The link might be something like "/pub/mobile/nightly/"
# but we're looking for a path that starts with "nightly".
# So first we need to remove what's part of the base URL
# to make a fair comparison.
if starts_with is not None:
# If the current URL is http://example.com/some/dir/
# and the link is /some/dir/mypage/ and the thing
# we're looking for is "myp" then this should be true
if link.startswith(path):
link = link.replace(path, '')
return link.startswith(starts_with)
elif ends_with:
return link.endswith(ends_with)
return False
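        # Worked example (hypothetical values): with
        #   url  = 'http://example.com/pub/mobile/'  -> path = '/pub/mobile/'
        #   link = '/pub/mobile/nightly/'
        #   starts_with = 'nightly'
        # the link is first rewritten to 'nightly/', so it matches even
        # though the raw href does not start with 'nightly'.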
for _, _, link, _ in html.iterlinks():
if url_match(link):
results.append(urlparse.urljoin(url, link))
return results
def parse_build_json_file(self, url, nightly=False):
content = self.download(url)
if content:
try:
kvpairs = json.loads(content)
kvpairs['repository'] = kvpairs.get('moz_source_repo')
if kvpairs['repository']:
kvpairs['repository'] = kvpairs['repository'].split(
'/', -1
)[-1]
kvpairs['build_type'] = kvpairs.get('moz_update_channel')
kvpairs['buildID'] = kvpairs.get('buildid')
# bug 1065071 - ignore JSON files that have keys with
# missing values.
if None in kvpairs.values():
self.config.logger.warning(
'warning, unsupported JSON file: %s', url
)
return kvpairs
# bug 963431 - it is valid to have an empty file
# due to a quirk in our build system
except ValueError:
self.config.logger.warning(
'Unable to JSON parse content %r',
content,
exc_info=True
)
def parse_info_file(self, url, nightly=False):
self.config.logger.debug('Opening %s', url)
content = self.download(url)
results = {}
bad_lines = []
if not content:
return results, bad_lines
contents = content.splitlines()
if nightly:
results = {'buildID': contents[0], 'rev': contents[1]}
if len(contents) > 2:
results['altrev'] = contents[2]
elif contents:
results = {}
for line in contents:
if line == '':
continue
try:
key, value = line.split('=')
results[key] = value
except ValueError:
bad_lines.append(line)
return results, bad_lines
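    # Input sketch (illustrative): a nightly info file is positional, e.g.
    #
    #     20170501030204
    #     e1b2c3d4e5f6
    #
    # while a release *_info.txt carries key=value lines, e.g.
    #
    #     buildID=20170501030204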
def get_json_release(self, candidate_url, dirname):
version = dirname.split('-candidates')[0]
builds = self.get_links(candidate_url, starts_with='build')
if not builds:
return
latest_build = builds.pop()
version_build = os.path.basename(os.path.normpath(latest_build))
possible_platforms = (
'linux', 'mac', 'win', 'debug', # for Firefox
'android-api-16', 'android-api-15', 'android-x86', # for mobile
)
for platform in possible_platforms:
platform_urls = self.get_links(
latest_build,
starts_with=platform
)
for platform_url in platform_urls:
                # We're only interested in going into deeper directories.
                # Inside a directory like 'firefox/candidates/45.3.0esr-candidates/build1/'
                # there are likely to be regular files that match the
                # 'possible_platforms' above. Skip those that aren't directories.
# This means we're much less likely to open URLs like
# '...45.3.0esr-candidates/build1/en-US/' which'll 404
if not platform_url.endswith('/'):
continue
platform_local_url = urlparse.urljoin(platform_url, 'en-US/')
json_files = self.get_links(
platform_local_url,
ends_with='.json'
)
for json_url in json_files:
if self.skip_json_file(json_url):
continue
kvpairs = self.parse_build_json_file(json_url)
if not kvpairs:
continue
kvpairs['version_build'] = version_build
yield (platform, version, kvpairs)
def get_json_nightly(self, nightly_url, dirname):
json_files = self.get_links(nightly_url, ends_with='.json')
for url in json_files:
if self.skip_json_file(url):
continue
basename = os.path.basename(url)
if '.en-US.' in url:
                pv, platform = re.sub(r'\.json$', '', basename).split('.en-US.')
            elif '.multi.' in url:
                pv, platform = re.sub(r'\.json$', '', basename).split('.multi.')
else:
continue
version = pv.split('-')[-1]
repository = []
for field in dirname.split('-'):
# Skip until something is not a digit and once we've
# appended at least one, keep adding.
if not field.isdigit() or repository:
repository.append(field)
repository = '-'.join(repository).strip('/')
kvpairs = self.parse_build_json_file(url, nightly=True)
yield (platform, repository, version, kvpairs)
def get_release(self, candidate_url):
builds = self.get_links(candidate_url, starts_with='build')
if not builds:
self.config.logger.info('No build dirs in %s', candidate_url)
return
latest_build = builds.pop()
version_build = os.path.basename(os.path.normpath(latest_build))
info_files = self.get_links(latest_build, ends_with='_info.txt')
for info_url in info_files:
kvpairs, bad_lines = self.parse_info_file(info_url)
# os.path.basename works on URL looking things too
# and not just file path
platform = os.path.basename(info_url).split('_info.txt')[0]
# suppose the `info_url` is something like
# "https://archive.moz.../40.0.3-candidates/..11_info.txt"
# then look for the "40.0.3-candidates" part and remove
# "-candidates" part.
version, = [
x.split('-candidates')[0]
for x in urlparse.urlparse(info_url).path.split('/')
if x.endswith('-candidates')
]
kvpairs['version_build'] = version_build
yield (platform, version, kvpairs, bad_lines)
@with_postgres_transactions()
@as_backfill_cron_app
class FTPScraperCronApp(BaseCronApp, ScrapersMixin):
app_name = 'ftpscraper'
app_description = 'FTP Scraper'
app_version = '0.1'
required_config = Namespace()
required_config.add_option(
'products',
default='firefox,mobile,thunderbird,seamonkey,devedition',
from_string_converter=lambda line: tuple(
[x.strip() for x in line.split(',') if x.strip()]
),
doc='a comma-delimited list of URIs for each product')
required_config.add_option(
'base_url',
default='https://archive.mozilla.org/pub/',
doc='The base url to use for fetching builds')
required_config.add_option(
'dry_run',
default=False,
doc='Print instead of storing builds')
required_config.add_option(
'retries',
default=5,
doc='Number of times the requests sessions should retry')
required_config.add_option(
'read_timeout',
default=10, # seconds
doc='Number of seconds wait for a full read')
required_config.add_option(
'connect_timeout',
default=3.5, # seconds, ideally something slightly larger than 3
doc='Number of seconds wait for a connection')
required_config.add_option(
'json_files_to_ignore',
default='*.mozinfo.json, *test_packages.json',
from_string_converter=str_to_list
)
required_config.add_option(
'cachedir',
default='',
doc=(
'Directory to cache .json files in. Empty string if you want to '
'disable caching'
)
)
def __init__(self, *args, **kwargs):
super(FTPScraperCronApp, self).__init__(*args, **kwargs)
self.session = requests.Session()
if urlparse.urlparse(self.config.base_url).scheme == 'https':
mount = 'https://'
else:
mount = 'http://'
self.session.mount(
mount,
HTTPAdapter(max_retries=self.config.retries)
)
self.cache_hits = 0
self.cache_misses = 0
def url_to_filename(self, url):
fn = re.sub('\W', '_', url)
fn = re.sub('__', '_', fn)
return fn
@memoize_download
def download(self, url):
is_caching = False
fn = None
if url.endswith('.json') and self.config.cachedir:
is_caching = True
fn = os.path.join(self.config.cachedir, self.url_to_filename(url))
if not os.path.isdir(os.path.dirname(fn)):
os.makedirs(os.path.dirname(fn))
if os.path.exists(fn):
self.cache_hits += 1
with open(fn, 'r') as fp:
return fp.read()
self.cache_misses += 1
response = self.session.get(
url,
timeout=(self.config.connect_timeout, self.config.read_timeout)
)
if response.status_code == 404:
self.config.logger.warning(
'404 when downloading %s', url
)
# Legacy. Return None on any 404 error.
return
assert response.status_code == 200, response.status_code
data = response.content
if is_caching:
with open(fn, 'w') as fp:
fp.write(data)
return data
def skip_json_file(self, json_url):
basename = os.path.basename(json_url)
for file_pattern in self.config.json_files_to_ignore:
if fnmatch.fnmatch(basename, file_pattern):
return True
return False
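    # Matching sketch using the configured defaults
    # ('*.mozinfo.json', '*test_packages.json'):
    # 'firefox-57.0.en-US.win64.mozinfo.json' is skipped, while
    # 'firefox-57.0.en-US.win64.json' is downloaded.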
def run(self, date):
# record_associations
for product_name in self.config.products:
self.config.logger.debug(
'scraping %s releases for date %s',
product_name,
date
)
self.database_transaction_executor(
self._scrape_json_releases_and_nightlies,
product_name,
date
)
        if self.config.cachedir:
            total = float(self.cache_hits + self.cache_misses)
            # Guard against ZeroDivisionError when no .json file was requested
            if total:
                self.config.logger.debug(
                    'Cache: hits: %d (%2.2f%%) misses: %d (%2.2f%%)' % (
                        self.cache_hits,
                        self.cache_hits / total * 100,
                        self.cache_misses,
                        self.cache_misses / total * 100,
                    ))
def _scrape_json_releases_and_nightlies(
self,
connection,
product_name,
date
):
self.scrape_json_releases(connection, product_name)
self.scrape_json_nightlies(connection, product_name, date)
def _insert_build(self, cursor, *args, **kwargs):
self.config.logger.debug('adding %s', args)
if self.config.dry_run:
print('INSERT BUILD')
print(args)
print(kwargs)
else:
buildutil.insert_build(cursor, *args, **kwargs)
def _is_final_beta(self, version):
# If this is a XX.0 version in the release channel,
# return True otherwise, False
# Make a special exception for the out-of-cycle 38.0.5
return version.endswith('.0') or version == '38.0.5'
def scrape_json_releases(self, connection, product_name):
prod_url = urlparse.urljoin(self.config.base_url, product_name + '/')
logger = self.config.logger
cursor = connection.cursor()
for directory in ('nightly', 'candidates'):
try:
url, = self.get_links(prod_url, starts_with=directory)
except (IndexError, ValueError):
logger.debug('Dir %s not found for %s',
directory, product_name)
continue
releases = self.get_links(url, ends_with='-candidates/')
for release in releases:
dirname = release.replace(url, '')
if dirname.endswith('/'):
dirname = dirname[:-1]
for info in self.get_json_release(release, dirname):
platform, version, kvpairs = info
build_type = 'release'
beta_number = None
repository = kvpairs['repository']
if 'b' in version:
build_type = 'beta'
version, beta_number = version.split('b')
if kvpairs.get('buildID'):
build_id = kvpairs['buildID']
version_build = kvpairs['version_build']
self._insert_build(
cursor,
product_name,
version,
platform,
build_id,
build_type,
beta_number,
repository,
version_build,
ignore_duplicates=True
)
if (
self._is_final_beta(version) and
build_type == 'release' and
version > '26.0' and
kvpairs.get('buildID')
):
logger.debug('adding final beta version %s', version)
repository = 'mozilla-beta'
build_id = kvpairs['buildID']
build_type = 'beta'
version_build = kvpairs['version_build']
# just force this to 99 until
# we deal with version_build properly
beta_number = 99
self._insert_build(
cursor,
product_name,
version,
platform,
build_id,
build_type,
beta_number,
repository,
version_build,
ignore_duplicates=True
)
def scrape_json_nightlies(self, connection, product_name, date):
directories = (
product_name,
'nightly',
date.strftime('%Y'),
date.strftime('%m'),
)
nightly_url = self.config.base_url
for part in directories:
nightly_url = urlparse.urljoin(
nightly_url, part + '/'
)
cursor = connection.cursor()
dir_prefix = date.strftime('%Y-%m-%d')
nightlies = self.get_links(nightly_url, starts_with=dir_prefix)
for nightly in nightlies:
dirname = nightly.replace(nightly_url, '')
if dirname.endswith('/'):
dirname = dirname[:-1]
for info in self.get_json_nightly(nightly, dirname):
platform, repository, version, kvpairs = info
build_type = 'nightly'
if version.endswith('a2'):
build_type = 'aurora'
if kvpairs.get('buildID'):
build_id = kvpairs['buildID']
self._insert_build(
cursor,
product_name,
version,
platform,
build_id,
build_type,
kvpairs.get('beta_number', None),
repository,
ignore_duplicates=True
)
class FTPScraperCronAppDryRunner(App): # pragma: no cover
"""This is a utility class that makes it easy to run the scraping
and ALWAYS do so in a "dry run" fashion such that stuff is never
stored in the database but instead found releases are just printed
out stdout.
To run it, simply execute this file:
$ python socorro/cron/jobs/ftpscraper.py
If you want to override what date to run it for (by default it's
"now") you simply use this format:
$ python socorro/cron/jobs/ftpscraper.py --date=2015-10-23
By default it runs for every, default configured, product
(see the configuration set up in the FTPScraperCronApp above). You
can override that like this:
$ python socorro/cron/jobs/ftpscraper.py --product=mobile,thunderbird
"""
required_config = Namespace()
required_config.add_option(
'date',
default=datetime.datetime.utcnow().date(),
doc='Date to run for',
from_string_converter=string_to_datetime
)
required_config.add_option(
'crontabber_job_class',
default='socorro.cron.jobs.ftpscraper.FTPScraperCronApp',
doc='bla',
from_string_converter=class_converter,
)
@staticmethod
def get_application_defaults():
return {
'database.database_class': mock.MagicMock()
}
def __init__(self, config):
self.config = config
self.config.dry_run = True
self.ftpscraper = config.crontabber_job_class(config, {})
def main(self):
assert self.config.dry_run
self.ftpscraper.run(self.config.date)
if __name__ == '__main__': # pragma: no cover
sys.exit(main(FTPScraperCronAppDryRunner))
| Tayamarn/socorro | socorro/cron/jobs/ftpscraper.py | Python | mpl-2.0 | 19,943 | 0.000301 |
import json
from mock import patch
from django.core.urlresolvers import reverse
from django.core.files.uploadedfile import SimpleUploadedFile
from student.tests.factories import UserFactory
from biz.djangoapps.ga_invitation.tests.test_views import BizContractTestBase
from biz.djangoapps.ga_manager.tests.factories import ManagerFactory
from biz.djangoapps.gx_org_group.models import Group, Right, Parent, Child
from biz.djangoapps.gx_org_group.tests.factories import GroupUtil, GroupFactory
from biz.djangoapps.gx_member.tests.factories import MemberFactory
from biz.djangoapps.gx_member.models import Member
class OrgGroupListViewTest(BizContractTestBase):
"""
Test Class for gx_org_group
"""
def setUp(self):
super(BizContractTestBase, self).setUp()
self.user_gacco_staff = UserFactory(username='gacco_staff', is_staff=True, is_superuser=True)
self.user_tac_aggregator = UserFactory(username='tac_aggregator')
self.user_a_director = UserFactory(username='a_director')
self.user_manager1 = UserFactory(username='manager1')
self.user_manager2 = UserFactory(username='manager2')
self.org_a = self._create_organization(org_name='org_a', org_code='a', creator_org=self.gacco_organization)
self.manager_platformer = ManagerFactory.create(org=self.gacco_organization, user=self.user_gacco_staff,
permissions=[self.platformer_permission])
self.manager_manager1 = ManagerFactory.create(org=self.gacco_organization, user=self.user_manager1,
permissions=[self.manager_permission])
self.manager_manager2 = ManagerFactory.create(org=self.gacco_organization, user=self.user_manager2,
permissions=[self.manager_permission])
def _index_view(self):
"""
Returns URL of group list as index
:return:
"""
return reverse('biz:group:group_list')
def _delete_group(self):
"""
Returns URL of delete group
:return:
"""
return reverse('biz:group:delete_group')
def _upload_csv(self):
"""
Returns URL of file upload API
:return:
"""
return reverse('biz:group:upload_csv')
def _download_csv(self):
"""
Returns URL of group list download API
:return:
"""
return reverse('biz:group:download_csv')
def _download_headers_csv(self):
"""
        Returns URL of group list headers download API
:return:
"""
return reverse('biz:group:download_headers_csv')
def _detail_view(self, selected_group_id):
"""
        Returns URL of group detail with its access right settings
:param selected_group_id:
:return:
"""
return reverse('biz:group:detail', kwargs={'selected_group_id': selected_group_id})
def _accessible_user_list(self):
"""
Returns URL of accessible user list API
:return:
"""
return reverse('biz:group:accessible_user_list')
def _accessible_parent_list(self):
"""
Returns URL of parent group accessible user list API
:return:
"""
return reverse('biz:group:accessible_parent_list')
def _grant_right(self):
"""
Returns URL of access right grant API
:return:
"""
return reverse('biz:group:grant_right')
@property
def _csv_header(self):
return ",".join([
'Organization Group Code',
'Organization Group Name',
'Parent Organization Code',
'Parent Organization Name',
'notes'
]) + '\r\n'
@property
def _csv_data_first(self):
csv_data = "G01,G1,,,\r\n" \
"G01-01,G1-1,G01,G1,\r\n" \
"G01-01-01,G1-1-1,G01-01,G1-1,\r\n" \
"G01-01-02,G1-1-2,G01-01,G1-1,\r\n" \
"G01-02,G1-2,G01,G1,\r\n" \
"G02,G2,,,\r\n" \
"G02-01,G2-1,G02,G2,\r\n" \
"G02-01-01,G2-1-1,G02-01,G2-1,\r\n" \
"G02-01-02,G2-1-2,G02-01,G2-1,\r\n" \
"G02-02,G2-2,G02,G2,\r\n"
return csv_data
@property
def _csv_data_cir_err_master(self):
csv_data = "1000,group1,,,\r\n" \
"1000aaa,group3,1000,group1,\r\n" \
"1001,group4,,,\r\n" \
"1002,group3,1000,group1,\r\n" \
"1003,group3,1000,group1,\r\n" \
"1005,group5,,,\r\n" \
"1006,group6,,,\r\n" \
"1007,group7,1009,group9,\r\n" \
"1008,group8,,,\r\n" \
"1009,group9,,,\r\n" \
"aaaaaaaaabbbbbbbbbcc,group3,1000,group1,\r\n"
return csv_data
@property
def _csv_data_cir_err_tran(self):
csv_data = "1000,group6,1000,,\r\n"
return csv_data
def _test_upload_cir_err_master(self):
csv_header = self._csv_header
csv_data = self._csv_data_cir_err_master
csv_content = (csv_header + csv_data).encode('cp932')
upload_file = SimpleUploadedFile("org_group.csv", csv_content)
with self.skip_check_course_selection(current_organization=self.gacco_organization):
response = self.client.post(self._upload_csv(), {'organization': '', 'org_group_csv': upload_file})
self.assertEqual(200, response.status_code)
def _test_upload_first(self):
csv_header = self._csv_header
csv_data = self._csv_data_first
csv_content = (csv_header + csv_data).encode('cp932')
upload_file = SimpleUploadedFile("org_group.csv", csv_content)
with self.skip_check_course_selection(current_organization=self.gacco_organization):
response = self.client.post(self._upload_csv(), {'organization': '', 'org_group_csv': upload_file})
self.assertEqual(200, response.status_code)
        self._test_group('G01', 'G1', '', '', '', 0, [], ['G01-01', 'G01-02', 'G01-01-01', 'G01-01-02'])
        self._test_group('G01-01', 'G1-1', 'G01', 'G1', '', 1, ['G01'], ['G01-01-01', 'G01-01-02'])
        self._test_group('G01-01-01', 'G1-1-1', 'G01-01', 'G1-1', '', 2, ['G01', 'G01-01'], [])
        self._test_group('G01-01-02', 'G1-1-2', 'G01-01', 'G1-1', '', 2, ['G01', 'G01-01'], [])
        self._test_group('G01-02', 'G1-2', 'G01', 'G1', '', 1, ['G01'], [])
        self._test_group('G02', 'G2', '', '', '', 0, [], ['G02-01', 'G02-02', 'G02-01-01', 'G02-01-02'])
        self._test_group('G02-01', 'G2-1', 'G02', 'G2', '', 1, ['G02'], ['G02-01-01', 'G02-01-02'])
        self._test_group('G02-01-01', 'G2-1-1', 'G02-01', 'G2-1', '', 2, ['G02', 'G02-01'], [])
        self._test_group('G02-01-02', 'G2-1-2', 'G02-01', 'G2-1', '', 2, ['G02', 'G02-01'], [])
        self._test_group('G02-02', 'G2-2', 'G02', 'G2', '', 1, ['G02'], [])
def _test_upload_second(self):
csv_header = self._csv_header
csv_data = "G02,G02underG1,G01,G1,moved to under G1\r\n"
csv_content = (csv_header + csv_data).encode('cp932')
upload_file = SimpleUploadedFile("org_group.csv", csv_content)
with self.skip_check_course_selection(current_organization=self.gacco_organization):
response = self.client.post(self._upload_csv(), {'organization': '', 'org_group_csv': upload_file})
self.assertEqual(200, response.status_code)
self._test_group('G02', 'G02underG1', 'G01', 'G1', 'moved to under G1', 1, ['G01'], ['G02-01', 'G02-02', 'G02-01-01', 'G02-01-02'])
def _test_upload_third(self):
csv_header = self._csv_header
csv_data = "G03,G3,G01,G1,connect to under G1\r\n" \
"G03-01,G3-1,G03,G3,\r\n" \
"G03-01-01,G3-1-1,G03-01,G3-1,\r\n" \
"G03-01-02,G3-1-2,G03-01,G3-1,\r\n" \
"G03-02,G3-2,G03,G3,\r\n"
csv_content = (csv_header + csv_data).encode('cp932')
upload_file = SimpleUploadedFile("org_group.csv", csv_content)
with self.skip_check_course_selection(current_organization=self.gacco_organization):
response = self.client.post(self._upload_csv(), {'organization': '', 'org_group_csv': upload_file})
self.assertEqual(200, response.status_code)
self._test_group('G03', 'G3', 'G01', 'G1', 'connect to under G1', 1, ['G01'], ['G03-01', 'G03-02', 'G03-01-01', 'G03-01-02'])
self._test_group('G03-01', 'G3-1', 'G03', 'G3', '', 2, ['G01', 'G03'], ['G03-01-01', 'G03-01-02'])
self._test_group('G03-01-01', 'G3-1-1', 'G03-01', 'G3-1', '', 3, ['G01', 'G03', 'G03-01'], [])
self._test_group('G03-01-02', 'G3-1-2', 'G03-01', 'G3-1', '', 3, ['G01', 'G03', 'G03-01'], [])
self._test_group('G03-02', 'G3-2', 'G03', 'G3', '', 2, ['G01', 'G03'], [])
def _test_group(self, group_code, group_name, parent_code, parent_name, notes, level_no, parents_codes, children_codes):
group = Group.objects.get(group_code=group_code)
self.assertEqual(group_name, group.group_name) # Checking Group Name
self.assertEqual(notes, group.notes) # Checking Note
self.assertEqual(level_no, group.level_no) # Checking Level
if parent_code == '':
self.assertEqual(0, group.parent_id) # Checking Parent Id is not set
else:
parent = Group.objects.get(id=group.parent_id)
self.assertEqual(parent_code, parent.group_code) # Checking Parent Code
self.assertEqual(parent_name, parent.group_name) # Checking Parent Name
self._test_parents_data(group, parents_codes) # Checking Parent Table
self._test_children_data(group, children_codes) # Checking Children Table
def _test_parents_data(self, group, parents_codes):
parents_data = Parent.objects.get(group_id=group.id)
if len(parents_codes) > 0:
groups = [Group.objects.get(id=int(group_id)) for group_id in self._split(parents_data.path)]
groups_codes = set([group.group_code for group in groups])
self.assertEqual(groups_codes, set(parents_codes)) # Checking Parent Codes
else:
self.assertEqual('', parents_data.path) # Checking Path is not set
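    # Illustrative Parent.path layout (inferred from the assertions above):
    # a comma-separated list of ancestor group ids, e.g. for G01-01-01 the
    # path holds the ids of G01 and G01-01; top-level groups store ''.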
def _test_children_data(self, group, children_codes):
children_data = Child.objects.get(group_id=group.id)
if len(children_codes) > 0:
groups = [Group.objects.get(id=int(group_id)) for group_id in self._split(children_data.list)]
groups_codes = set([group.group_code for group in groups])
self.assertEqual(groups_codes, set(children_codes)) # Checking Codes is not set
else:
self.assertEqual('', children_data.list) # Checking List is not set
def _split(self, string):
return [int(group_id) for group_id in string.split(',')]
def _test_grant_right(self, group_id, username):
"""
Tests grant access right API
:return:
"""
# add a grant user
user_str = username
with self.skip_check_course_selection(current_organization=self.gacco_organization):
response = self.client.post(self._grant_right(),
{'group_id': group_id, 'action': 'allow', 'grant_user_str': user_str})
self.assertEqual(200, response.status_code)
def test_index(self):
"""
Tests group list page
:return:
"""
self.setup_user()
with self.skip_check_course_selection(current_organization=self.gacco_organization):
response = self.client.get(self._index_view())
self.assertEqual(200, response.status_code)
def test_index_exist_data(self):
"""
Tests group list tree data
:return:
"""
self.setup_user()
self._test_upload_first()
parent_group_id = Group.objects.get(group_code="G01").id
child_group_id = Group.objects.get(group_code="G01-01").id
# test index
with self.skip_check_course_selection(current_organization=self.gacco_organization):
path = self._index_view()
response = self.client.get(path)
self.assertEqual(200, response.status_code)
self.assertEqual(parent_group_id, 9)
self.assertEqual(child_group_id, 3)
self._test_grant_right(parent_group_id, self.manager_manager1.user.username)
parent_group = Group.objects.get(group_code="G01")
right = Right.objects.filter(group_id=parent_group_id)
active_user = UserFactory.create()
self._create_member(
org=self.org_a, group=parent_group, user=active_user,
code="code_1", is_active=True, is_delete=False
)
member = Member.objects.filter(group_id=parent_group_id)
group = Group.objects.all()
with self.skip_check_course_selection(current_organization=self.gacco_organization):
path = self._index_view()
response = self.client.get(path)
self.assertEqual(200, response.status_code)
self.assertEqual(group.count(), 10)
self.assertEqual(right.count(), 1)
self.assertEqual(member.count(), 1)
def _create_member(
self, org, group, user, code, is_active, is_delete,
org1='', org2='', org3='', org4='', org5='', org6='', org7='', org8='', org9='', org10='',
item1='', item2='', item3='', item4='', item5='', item6='', item7='', item8='', item9='', item10=''):
return MemberFactory.create(
org=org,
group=group,
user=user,
code=code,
created_by=user,
creator_org=org,
updated_by=user,
updated_org=org,
is_active=is_active,
is_delete=is_delete,
org1=org1, org2=org2, org3=org3, org4=org4, org5=org5, org6=org6, org7=org7, org8=org8, org9=org9,
org10=org10,
item1=item1, item2=item2, item3=item3, item4=item4, item5=item5, item6=item6, item7=item7, item8=item8,
item9=item9, item10=item10
)
def test_group_delete(self):
self.setup_user()
self._test_upload_first()
        # manager exists
group = Group.objects.all()
group_id = Group.objects.filter(group_code="G01-01-01").first().id
self._test_grant_right(group_id, self.manager_manager1.user.username)
right = Right.objects.filter(group_id=group_id)
self.assertEqual(group.count(), 10)
self.assertEqual(right.count(), 1)
with self.skip_check_course_selection(current_organization=self.gacco_organization):
self._index_view()
response = self.client.post(self._delete_group(),
{'num': group_id, 'belong': 1, 'grouping': [str(group_id)], 'group_name': 'G1-1-1'})
group = Group.objects.all()
right = Right.objects.filter(group_id=group_id)
self.assertEqual(200, response.status_code)
self.assertEqual(group.count(), 9)
self.assertEqual(right.count(), 0)
        # member exists
group_id_child1 = Group.objects.filter(group_code="G02-01-01").first().id
group_id_child2 = Group.objects.filter(group_code="G02-01-02").first().id
group_id = Group.objects.filter(group_code="G02-01").first().id
current_group = Group.objects.get(group_code="G02-01-01")
active_user = UserFactory.create()
self._create_member(
org=self.org_a, group=current_group, user=active_user,
code="code_1", is_active=True, is_delete=False
)
member = Member.objects.all()
self.assertEqual(member.count(), 1)
with self.skip_check_course_selection(current_organization=self.gacco_organization):
self._index_view()
response = self.client.post(self._delete_group(),
{'num': group_id, 'belong': 1, 'grouping': [str(group_id) + ',' + str(group_id_child1) + ',' + str(group_id_child2)], 'group_name': 'G2-1'})
member = Member.objects.filter(group_id=group_id)
group = Group.objects.all()
self.assertEqual(200, response.status_code)
self.assertEqual(member.count(), 0)
self.assertEqual(group.count(), 6)
        # member and manager exist
group_id = Group.objects.filter(group_code="G02").first().id
self._test_grant_right(group_id, self.manager_manager1.user.username)
right = Right.objects.filter(group_id=group_id)
active_user = UserFactory.create()
current_group = Group.objects.get(group_code="G02")
self._create_member(
org=self.org_a, group=current_group, user=active_user,
code="code_2", is_active=True, is_delete=False
)
member = Member.objects.filter(group_id=group_id)
self.assertEqual(right.count(), 1)
self.assertEqual(member.count(), 1)
with self.skip_check_course_selection(current_organization=self.gacco_organization):
self._index_view()
response = self.client.post(self._delete_group(),
{'num': group_id, 'belong': 1, 'grouping': [str(group_id)], 'group_name': 'G2'})
group = Group.objects.all()
member = Member.objects.filter(group_id=group_id)
right = Right.objects.filter(group_id=group_id)
self.assertEqual(200, response.status_code)
self.assertEqual(group.count(), 5)
self.assertEqual(member.count(), 0)
self.assertEqual(right.count(), 0)
        # neither member nor manager exists
group_id = Group.objects.filter(group_code="G01-01-02").first().id
with self.skip_check_course_selection(current_organization=self.gacco_organization):
self._index_view()
response = self.client.post(self._delete_group(),
{'num': group_id, 'belong': 0, 'grouping': [str(group_id)], 'group_name': 'G1-1-2'})
self.assertEqual(200, response.status_code)
self.assertEqual(group.count(), 4)
def test_fail_group_delete(self):
        # invalid group id
self.setup_user()
self._test_upload_first()
group = Group.objects.all()
self.assertEqual(group.count(), 10)
with self.skip_check_course_selection(current_organization=self.gacco_organization):
self._index_view()
response = self.client.post(self._delete_group(),
{'num': 10, 'belong': 1, 'grouping': ['abc'], 'group_name': 'G1'})
self.assertEqual(400, response.status_code)
group = Group.objects.all()
self.assertEqual(group.count(), 10)
def test_upload_fail(self):
"""
Tests upload group list API fail
:return:
"""
# init
csv_header = self._csv_header
self.setup_user()
# test auth error
csv_content = ""
upload_file = SimpleUploadedFile("org_group.csv", csv_content)
with self.skip_check_course_selection(current_organization=self.gacco_organization):
response = self.client.post(self._upload_csv(), {'org_group_csv': upload_file})
self.assertEqual(400, response.status_code)
# test unicode error
csv_content = self._csv_header
upload_file = SimpleUploadedFile("org_group.csv", csv_content)
with self.skip_check_course_selection(current_organization=self.gacco_organization), patch(
'biz.djangoapps.gx_org_group.views.get_sjis_csv',
side_effect=UnicodeDecodeError('utf-8', 'arg', 1, 1, 'arg')):
response = self.client.post(self._upload_csv(), {'organization': '', 'org_group_csv': upload_file})
self.assertEqual(400, response.status_code)
data = json.loads(response.content)
self.assertEqual(data['errors'], [u'invalid header or file type'])
# invalid header format (_exception_001)
csv_content = "group_code,group_name,xxx_parent_code,parent_name,notes\r\n".encode('cp932')
upload_file = SimpleUploadedFile("org_group.csv", csv_content)
with self.skip_check_course_selection(current_organization=self.gacco_organization):
response = self.client.post(self._upload_csv(), {'organization': '', 'org_group_csv': upload_file})
self.assertEqual(400, response.status_code)
# wrong number of columns (_exception_002)
csv_data = "G01-01,G1-1,G01,,,,,G1,\r\n"
csv_content = (csv_header + csv_data).encode('cp932')
upload_file = SimpleUploadedFile("org_group.csv", csv_content)
with self.skip_check_course_selection(current_organization=self.gacco_organization):
response = self.client.post(self._upload_csv(), {'organization': '', 'org_group_csv': upload_file})
self.assertEqual(400, response.status_code)
# invalid parent code (_exception_003)
csv_data = "G01-01,G1-1,XXX,G1,\r\n"
csv_content = (csv_header + csv_data).encode('cp932')
upload_file = SimpleUploadedFile("org_group.csv", csv_content)
with self.skip_check_course_selection(current_organization=self.gacco_organization):
response = self.client.post(self._upload_csv(), {'organization': '', 'org_group_csv': upload_file})
self.assertEqual(400, response.status_code)
# duplicated group code (_exception_004)
csv_data = "G01-01,G1-1,,,\r\n" + "G01-01,G1-1,,,\r\n"
csv_content = (csv_header + csv_data).encode('cp932')
upload_file = SimpleUploadedFile("org_group.csv", csv_content)
with self.skip_check_course_selection(current_organization=self.gacco_organization):
response = self.client.post(self._upload_csv(), {'organization': '', 'org_group_csv': upload_file})
self.assertEqual(400, response.status_code)
# circular ref (_exception_011) (1) (Parent Code in import file)
csv_data = "G02-02,G2-2,G02-02,G2-2,circular ref1\r\n"
csv_content = (csv_header + csv_data).encode('UTF-16')
upload_file = SimpleUploadedFile("org_group.csv", csv_content)
with self.skip_check_course_selection(current_organization=self.gacco_organization):
response = self.client.post(self._upload_csv(), {'organization': '', 'org_group_csv': upload_file})
self.assertEqual(400, response.status_code)
# circular ref (_exception_011) (2) (Parent Code in Group model)
self._test_upload_cir_err_master() # load master data
csv_data = self._csv_data_cir_err_tran
csv_content = (csv_header + csv_data).encode('cp932')
upload_file = SimpleUploadedFile("org_group.csv", csv_content)
with self.skip_check_course_selection(current_organization=self.gacco_organization):
response = self.client.post(self._upload_csv(), {'organization': '', 'org_group_csv': upload_file})
self.assertEqual(400, response.status_code)
# max length over (_exception_021)
csv_data = "ABCDEFGHIJ12345678901,MAX_CODE,group code error\r\n"
csv_content = (csv_header + csv_data).encode('cp932')
upload_file = SimpleUploadedFile("org_group.csv", csv_content)
with self.skip_check_course_selection(current_organization=self.gacco_organization):
response = self.client.post(self._upload_csv(), {'organization': '', 'org_group_csv': upload_file})
self.assertEqual(400, response.status_code)
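
    # A minimal sketch (inferred, not in the original) of the CSV layout these
    # tests assume; the "_exception_001" case above swaps parent_code for
    # xxx_parent_code in this header:
    #
    #   group_code,group_name,parent_code,parent_name,notes
    #   G01,G1,,,
    #   G01-01,G1-1,G01,G1,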
def test_upload_success(self):
"""
Test upload group list API success
:return:
"""
# prepare
csv_header = self._csv_header
self.setup_user()
# import empty
csv_content = csv_header.encode('cp932')
upload_file = SimpleUploadedFile("org_group.csv", csv_content)
with self.skip_check_course_selection(current_organization=self.gacco_organization):
response = self.client.post(self._upload_csv(), {'organization': '', 'org_group_csv': upload_file})
self.assertEqual(200, response.status_code)
# test upload data
self._test_upload_first()
self._test_upload_second()
self._test_upload_third()
    def test_download_cp932_csv(self):
        """
        Test download group list API (cp932 encoding)
:return:
"""
self.setup_user()
with self.skip_check_course_selection(current_organization=self.gacco_organization):
response = self.client.post(self._download_csv())
self.assertEqual(200, response.status_code)
def test_download_utf16_csv(self):
"""
        Test download group list API (UTF-16 encoding)
:return:
"""
self.setup_user()
with self.skip_check_course_selection(current_organization=self.gacco_organization):
response = self.client.post(self._download_csv(), {'encode': 'true'})
self.assertEqual(200, response.status_code)
def test_download_headers_csv(self):
"""
        Test download group list API (headers only)
:return:
"""
self.setup_user()
with self.skip_check_course_selection(current_organization=self.gacco_organization):
response = self.client.post(self._download_headers_csv())
self.assertEqual(200, response.status_code)
def test_detail(self):
# init
self.setup_user()
self._test_upload_first()
parent_group_id = Group.objects.filter(group_code="G01").first().id
child_group_id = Group.objects.filter(group_code="G01-01").first().id
self._test_grant_right(parent_group_id, self.manager_manager1.user.username)
# test detail
with self.skip_check_course_selection(current_organization=self.gacco_organization):
path = self._detail_view(child_group_id)
response = self.client.get(path)
self.assertEqual(200, response.status_code)
def test_accessible_user_list(self):
"""
Test accessible user list page
:return:
"""
# init
self.setup_user()
self._test_upload_first()
group_id = Group.objects.filter(group_code="G01").first().id
        # add an access right for the manager
self._test_grant_right(group_id, self.manager_manager1.user.username)
# test accessible user list
with self.skip_check_course_selection(current_organization=self.gacco_organization):
response = self.client.post(self._accessible_user_list(), {'group_id': group_id})
self.assertEqual(200, response.status_code)
def test_accessible_parent_list(self):
"""
Tests accessible parent group user list page
:return:
"""
# init
self.setup_user()
self._test_upload_first()
parent_group_id = Group.objects.filter(group_code="G01").first().id
child_group_id = Group.objects.filter(group_code="G01-01").first().id
self._test_grant_right(parent_group_id, self.manager_manager1.user.username)
with self.skip_check_course_selection(current_organization=self.gacco_organization):
response = self.client.post(self._accessible_parent_list(), {'group_id': child_group_id})
self.assertEqual(200, response.status_code)
def test_grant_right_fail(self):
"""
Tests grant access right API
:return:
"""
# init
self.setup_user()
self._test_upload_first()
parent_group_id = Group.objects.filter(group_code="G01").first().id
child_group_id = Group.objects.filter(group_code="G01-01").first().id
grandson_group_id = Group.objects.filter(group_code="G01-01-01").first().id
# unknown username (_exception_002)
user_str = 'unknown username'
with self.skip_check_course_selection(current_organization=self.gacco_organization):
response = self.client.post(self._grant_right(),
{'group_id': parent_group_id, 'action': 'allow', 'grant_user_str': user_str})
self.assertEqual(200, response.status_code)
# unknown email (_exception_001)
user_str = 'unknown_username@example.com'
with self.skip_check_course_selection(current_organization=self.gacco_organization):
response = self.client.post(self._grant_right(),
{'group_id': parent_group_id, 'action': 'allow', 'grant_user_str': user_str})
self.assertEqual(200, response.status_code)
        # user does not belong to a manager (_exception_003)
user_str = 'test'
with self.skip_check_course_selection(current_organization=self.gacco_organization):
response = self.client.post(self._grant_right(),
{'group_id': parent_group_id, 'action': 'allow', 'grant_user_str': user_str})
self.assertEqual(200, response.status_code)
        # a manager, but one without an access right (_exception_004)
user_str = self.manager_platformer.user.username
with self.skip_check_course_selection(current_organization=self.gacco_organization):
response = self.client.post(self._grant_right(),
{'group_id': parent_group_id, 'action': 'allow', 'grant_user_str': user_str})
self.assertEqual(200, response.status_code)
        # *PREPARE* add an access right for the manager
self._test_grant_right(parent_group_id, self.manager_manager1.user.username)
# *TEST* duplicated manager (_exception_007)
user_str = self.manager_manager1.user.username
with self.skip_check_course_selection(current_organization=self.gacco_organization):
response = self.client.post(self._grant_right(),
{'group_id': parent_group_id, 'action': 'allow', 'grant_user_str': user_str})
self.assertEqual(200, response.status_code)
# *TEST* exists in parent group (_exception_005)
with self.skip_check_course_selection(current_organization=self.gacco_organization):
response = self.client.post(self._grant_right(),
{'group_id': child_group_id, 'action': 'allow', 'grant_user_str': user_str})
self.assertEqual(200, response.status_code)
        # *PREPARE* add an access right to the grandson group
user_str = self.manager_manager2.user.username
self._test_grant_right(grandson_group_id, user_str)
# *TEST* exists in child group (_exception_006)
with self.skip_check_course_selection(current_organization=self.gacco_organization):
response = self.client.post(self._grant_right(),
{'group_id': child_group_id, 'action': 'allow', 'grant_user_str': user_str})
self.assertEqual(200, response.status_code)
def test_grant_right_success(self):
"""
Tests grant access right API
:return:
"""
# init
self.setup_user()
self._test_upload_first()
parent_group_id = Group.objects.filter(group_code="G01").first().id
        # add an access right for the manager
self._test_grant_right(parent_group_id, self.manager_manager1.user.username)
def test_revoke_right(self):
"""
Tests revoke access right API
:return:
"""
# init
self.setup_user()
self._test_upload_first()
parent_group_id = Group.objects.filter(group_code="G01").first().id
group_id = parent_group_id
        # add an access right for the manager
grant_username = self.manager_manager1.user.username
self._test_grant_right(group_id, grant_username)
# unknown username
user_str = 'unknown'
with self.skip_check_course_selection(current_organization=self.gacco_organization):
response = self.client.post(self._grant_right(),
{'group_id': group_id, 'action': 'revoke', 'grant_user_str': user_str})
self.assertEqual(200, response.status_code)
# known username
user_str = 'test'
with self.skip_check_course_selection(current_organization=self.gacco_organization):
response = self.client.post(self._grant_right(),
{'group_id': group_id, 'action': 'revoke', 'grant_user_str': user_str})
self.assertEqual(200, response.status_code)
# revoke success
user_str = grant_username
with self.skip_check_course_selection(current_organization=self.gacco_organization):
response = self.client.post(self._grant_right(),
{'group_id': group_id, 'action': 'revoke', 'grant_user_str': user_str})
self.assertEqual(200, response.status_code)
def test_models(self):
"""
        Test model unicode strings for Django Admin
:return:
"""
# init
self.setup_user()
gu = GroupUtil(self.org_a, self.user_a_director)
gu.import_data()
# test unicode name
group = Group.objects.get(group_code="G01")
self.assertEqual(u"G1", unicode(group))
# test unicode name
parent = Parent.objects.get(group=group)
self.assertEqual(u"G1", unicode(parent))
# test unicode name
child = Child.objects.get(group=group)
self.assertEqual(u"G1", unicode(child))
# test unicode name
self._test_grant_right(group.id, self.manager_manager1.user.username)
r = Right.objects.get(group=group)
self.assertEqual(unicode(self.manager_manager1.user.email), unicode(r))
# test grant right
gu.grant_right(group, self.manager_manager1.user)
| nttks/edx-platform | biz/djangoapps/gx_org_group/tests/test_views.py | Python | agpl-3.0 | 34,785 | 0.003823 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import serial
import time
import serial.tools.list_ports
#import json
import matplotlib.pyplot as plt
import matplotlib.animation as animation
from matplotlib import style
from matplotlib.gridspec import GridSpec
#from mpl_toolkits.mplot3d import Axes3D
#import threading
import numpy as np
import datetime
import os
import sys
from scipy.interpolate import splrep, splev
from termcolor import colored, cprint
TIEMPO_PRUEBA = 5  ## TEST DURATION IN SECONDS
DELAY = 1  ## TEST DELAY IN SAMPLES, TO REMOVE START-UP ARTIFACTS
SUAVIZADO = 10  ## SMOOTHING OF THE CURVES
MARKER_SIZE = 5  ## MARKER SIZE
BAR_SIZE = 0.4  ## BAR WIDTH
## RAW
vx0 = []    # time
yaw0 = []   # yaw
pitch0 = [] # pitch
roll0 = []  # roll
## SMOOTHED
s_yaw0 = []
s_pitch0 = []
s_roll0 = []
## MAXIMUM POINTS
yaw0_max = []
pitch0_max = []
roll0_max = []
vx_yaw0 = []
vx_pitch0 = []
vx_roll0 = []
##HERTZ
hz_pitch0 = []
hz_roll0 = []
hz_yaw0 = []
## CONDITIONS
cond1 = False
cond2 = False
cond3 = False
cond4 = False
state_test = 1
state_loop = 1
###############
## FILES
name_folder = "none"
##MAIN
def main():
profilePatient()
ok = True
while (ok == True):
loop()
##LOOP
def loop():
global state_loop
if (state_loop == 1):
ardu()
reset()
selectTest()
#smooth(SUAVIZADO)
#buscar()
#hertz_state()
graph()
borrarDatos()
if (state_loop == 2):
logo()
estado = input("Nuevo usuario? y/n: ")
if (estado == 'y'):
limpiar()
reset()
state_loop = 1
main()
else:
limpiar()
print(Salir)
exit()
if(state_loop == 3):
ardu()
reset()
selectTest()
graph()
borrarDatos()
## PATIENT DATA ENTRY
def profilePatient():
    # the `def` line was missing and the body sat at module level; it is
    # restored here so the calls from main() and the retry paths work
    global name_folder, TIEMPO_PRUEBA
    logo()
    number_input = ''
    try:
        # plain input()/int(); the original eval(input(...)) (a 2to3 artifact)
        # crashed on any non-numeric text
        TIEMPO_PRUEBA = int(input("Evaluation time in seconds [20]: ") or 20)
    except ValueError:
        print("You can only enter numbers, please try again")
        time.sleep(1)
        profilePatient()
    while number_input == '':
        number_input = input("Id: ")
        if number_input == '':
            print("You must assign an id")
            time.sleep(1)
            logo()
    name_input = input("First name: ")
    lastname_input = input("Last name: ")
    age_input = input("Age: ")
    height_input = float(input("Height cm: "))
    weight_input = float(input("Weight kg: "))
    name_folder = number_input + "_" + name_input + "_" + lastname_input
    logo()
    # multi-argument print instead of the print((...)) tuples left by 2to3,
    # which rendered the ANSI colour escapes as raw text
    print("ID =", colored(number_input, 'blue', attrs=['bold']))
    print("MAXIMUM TIME FOR THE TUG =", colored(TIEMPO_PRUEBA, 'blue', attrs=['bold']))
    print("NAME =", colored(name_input, 'blue', attrs=['bold']), colored(lastname_input, 'blue', attrs=['bold']))
    print("AGE =", colored(age_input, 'blue', attrs=['bold']))
    print("HEIGHT =", colored(height_input, 'blue', attrs=['bold']))
    print("WEIGHT =", colored(weight_input, 'blue', attrs=['bold']))
    IMC = round((weight_input) / ((height_input / 100) ** 2), 1)
    # BMI bands; the original chain left gaps (e.g. 24.1-24.9) in which
    # colorIMC stayed undefined, so the boundaries are made continuous here
    if IMC < 16:
        colorIMC = 'red'
        resIMC = 'Severe malnutrition'
    elif IMC < 18.5:
        colorIMC = 'magenta'
        resIMC = 'Moderate malnutrition'
    elif IMC < 22:
        colorIMC = 'yellow'
        resIMC = 'Underweight'
    elif IMC < 25:
        colorIMC = 'green'
        resIMC = 'Normal weight'
    elif IMC < 30:
        colorIMC = 'yellow'
        resIMC = 'Overweight'
    elif IMC < 35:
        colorIMC = 'magenta'
        resIMC = 'Obesity class I'
    elif IMC < 40:
        colorIMC = 'red'
        resIMC = 'Obesity class II'
    else:
        colorIMC = 'red'
        resIMC = 'Obesity class III'  # the original repeated "class II" for IMC > 40
    print("BMI =", colored(IMC, colorIMC, attrs=['bold']), '-', colored(resIMC, colorIMC, attrs=['bold']))
    createPatient = input("Is the data correct? y/n: ")
    if createPatient.lower() == "y":
        limpiar()
        createFolder()
        createLog(number_input, name_input, lastname_input, age_input, str(height_input), str(weight_input))
    else:
        main()
## CREATE THE FOLDER
def createFolder():
    try:
        global name_folder
        os.makedirs(name_folder)
        logo()
        creado = colored(centerify('created', 80), 'green', attrs=['reverse'])
        print(creado)
    except OSError:
        print("Data already exists, please use another Id")
        main()
def selectTest():
global state_test
global vx0, yaw0, pitch0, roll0
global yaw0_max, pitch0_max, roll0_max, vx_yaw0, vx_pitch0, vx_roll0
global hz_pitch0, hz_roll0, hz_yaw0
state_test= input("Presione <enter> para comenzar:")
if (cond1 == True):
vx0 = []
yaw0 = []
pitch0 = []
roll0 = []
s_yaw0 = []
s_pitch0 = []
s_roll0 = []
yaw0_max = []
pitch0_max = []
roll0_max = []
vx_yaw0 = []
vx_pitch0 = []
vx_roll0 = []
hz_pitch0 = []
hz_roll0 = []
hz_yaw0 = []
collect()
## CREATE A LOG WITH PATIENT DATA
def createLog(number_input, name_input, lastname_input, age_input, height_input, weight_input):
name_Log_profile = number_input+"\n"+name_input+"\n"+lastname_input+"\n"+age_input+"\n"+height_input+"\n"+weight_input
Log_profile = name_folder+'.profile'
log = open(Log_profile, 'w')
log.write(name_Log_profile)
log.close()
## ARDUINO CONNECTION
def ardu():
    #try:
    global arduino
    port = list(serial.tools.list_ports.comports())
    device = port[0]
    arduino = serial.Serial(device.device, 9600, timeout=1.)
    #time.sleep(2)
    #arduino.write(b'9')
    print("Receiver connected")
    # except IndexError:
    #     input("Connect the device and press <enter> to retry")
    #     ardu()
## ARDUINO RESET
def reset():
global arduino
arduino.setDTR(False)
time.sleep(1)
arduino.flushInput()
arduino.setDTR(True)
def borrarDatos():
global state_loop
ok = input("tomar otra muestra? y/n: ")
if ok.lower() == "y":
state_loop = 3
else:
state_loop = 2
## COLLECT THE DATA
def collect(i=None):
    # `i` is optional so collect() can serve both as the FuncAnimation
    # callback in graph() (which passes a frame index) and as a direct call
    # with no arguments (see selectTest() and the retry paths below)
    global vx0, yaw0, pitch0, roll0, cond1
    cond1 = True
    date = datetime.datetime.now()
    i = 0
t = 0
conteo = TIEMPO_PRUEBA
try:
while i <= TIEMPO_PRUEBA:
            # open the log once, on the first sample; the original tested
            # i == 0.2, an exact float match the timestamp may never hit,
            # which left log_test unbound at the write below
            if (i == 0):
log_test = open(name_folder+'/'+"TUG"+str(date.day)+'-'+str(date.month)+'-'+str(date.year)+'_'+str(date.hour)+'.'+str(date.minute)+str(date.second)+'.tug', 'a')
data = []
            data.append(arduino.readline().decode('ascii', errors='ignore'))  # pyserial returns bytes on Python 3
data = [x.replace("\r\n","") for x in data]
for line in data:
Type = line.split(",")
a = Type[0]
b = Type[1]
c = Type[2]
d = Type[3]
e = Type[4]
f = Type[5]
g = Type[6]
Line = (a + "," + b + "," + c + "," + d + "," + e + "," + f + "," + g +"\r\n")
log_test.write(Line)
#log_test.close()
a = float(a)
b = float(b)
c = float(c)
d = float(d)
e = float(e)
f = float(f)
g = float(g)
if(len(vx0)==0):
t = t + d
d = d - t
if(len(vx0)>=1):
d = d -t
d = d/1000
limpiar()
print(Medir)
print(log_test.name)
print(d)
i = d
vx0.append(d)
yaw0.append(c)
pitch0.append(a)
roll0.append(b)
    except ValueError:
        # malformed serial line; retry from the top
        collect()
    except IndexError:
        # incomplete serial line; retry from the top
        collect()
def centerify(text, width=-1):
lines = text.split('\n')
width = max(list(map(len, lines))) if width == -1 else width
return '\n'.join(line.center(width) for line in lines)
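
# Illustrative example (not in the original): centerify centres every line on
# the longest one, so
#   centerify('hola\nmundo')  ->  'hola \nmundo'
# and a fixed width (e.g. 80) centres within a terminal row, as the banner
# strings below do.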
def limpiar():
os.system('cls' if os.name == 'nt' else 'clear')
def logo():
limpiar()
print(banner)
print("\n\n")
def graph():
    # `fig` was never created anywhere in this file; a bare pyplot figure is
    # the minimal assumption that lets FuncAnimation run
    fig = plt.figure()
    ani = animation.FuncAnimation(fig, collect, interval=1)
    plt.show()
##### Notice texts
banner = colored(centerify('Full Axis V0.5 - Tesis TUG', 80), 'white', attrs=['reverse'])
Medir = colored(centerify('Measuring...', 80), 'cyan', attrs=['reverse'])  # restored: collect() prints Medir but no definition survived; text and colour are assumptions
Salir = colored(centerify('Hasta la vista', 80), 'green', attrs=['reverse'])
if __name__ == "__main__":
main()
| Debaq/Triada | CP_Marcha/TUG.py | Python | gpl-3.0 | 8,883 | 0.014643 |
# Copyright The Cloud Custodian Authors.
# SPDX-License-Identifier: Apache-2.0
from ..azure_common import BaseTest, arm_template
class IoTHubTest(BaseTest):
def setUp(self):
super(IoTHubTest, self).setUp()
def test_iot_hub_schema_validate(self):
with self.sign_out_patch():
p = self.load_policy({
'name': 'test-iot-hub-compliance',
'resource': 'azure.iothub'
}, validate=True)
self.assertTrue(p)
@arm_template('iothub.json')
def test_find_by_name(self):
p = self.load_policy({
'name': 'test-azure-iothub',
'resource': 'azure.iothub',
'filters': [
{'type': 'value',
'key': 'name',
'op': 'glob',
'value': 'cctest-iothub*'}],
})
resources = p.run()
self.assertEqual(len(resources), 1)
| thisisshi/cloud-custodian | tools/c7n_azure/tests_azure/tests_resources/test_iot_hub.py | Python | apache-2.0 | 921 | 0 |
# from ..ocl import ApiResource
# class ConceptClass(ApiResource):
# def __init__(self):
# super(ConceptClass, self).__init__()
# self.names = []
# self.descriptions = []
# self.sources = []
| kavasoglu/ocl_web | ocl_web/libs/ocl/concept_class.py | Python | mpl-2.0 | 229 | 0 |
"""engine.SCons.Platform.hpux
Platform-specific initialization for HP-UX systems.
There normally shouldn't be any need to import this module directly. It
will usually be imported through the generic SCons.Platform.Platform()
selection method.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Platform/hpux.py 4369 2009/09/19 15:58:29 scons"
import posix
def generate(env):
posix.generate(env)
    # Based on HP-UX 11i: ARG_MAX=2048000 - 3000 for environment expansion
env['MAXLINELENGTH'] = 2045000
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| bleepbloop/Pivy | scons/scons-local-1.2.0.d20090919/SCons/Platform/hpux.py | Python | isc | 1,763 | 0.002836 |
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
import os
import sys
from telemetry.core import browser_finder
from telemetry.core import browser_options
from telemetry.page import page_test
from telemetry.page import page_runner
from telemetry.page import page_set
from telemetry.test import discover
def Main(test_dir, page_set_filenames):
"""Turns a PageTest into a command-line program.
Args:
test_dir: Path to directory containing PageTests.
"""
tests = discover.DiscoverClasses(test_dir,
os.path.join(test_dir, '..'),
page_test.PageTest)
# Naively find the test. If we use the browser options parser, we run
# the risk of failing to parse if we use a test-specific parameter.
test_name = None
for arg in sys.argv:
if arg in tests:
test_name = arg
options = browser_options.BrowserOptions()
parser = options.CreateParser('%prog [options] <test> <page_set>')
page_runner.PageRunner.AddCommandLineOptions(parser)
test = None
if test_name is not None:
if test_name not in tests:
sys.stderr.write('No test name %s found' % test_name)
sys.exit(1)
test = tests[test_name]()
test.AddCommandLineOptions(parser)
_, args = parser.parse_args()
if test is None or len(args) != 2:
parser.print_usage()
print >> sys.stderr, 'Available tests:\n%s\n' % ',\n'.join(
sorted(tests.keys()))
print >> sys.stderr, 'Available page_sets:\n%s\n' % ',\n'.join(
sorted([os.path.relpath(f)
for f in page_set_filenames]))
sys.exit(1)
ps = page_set.PageSet.FromFile(args[1])
results = page_test.PageTestResults()
return RunTestOnPageSet(options, ps, test, results)
def RunTestOnPageSet(options, ps, test, results):
test.CustomizeBrowserOptions(options)
possible_browser = browser_finder.FindBrowser(options)
if not possible_browser:
print >> sys.stderr, """No browser found.\n
Use --browser=list to figure out which are available.\n"""
sys.exit(1)
with page_runner.PageRunner(ps) as runner:
runner.Run(options, possible_browser, test, results)
  print '%i pages succeeded\n' % len(results.page_successes)
if len(results.page_failures):
logging.warning('Failed pages: %s', '\n'.join(
[failure['page'].url for failure in results.page_failures]))
if len(results.skipped_pages):
logging.warning('Skipped pages: %s', '\n'.join(
[skipped['page'].url for skipped in results.skipped_pages]))
return min(255, len(results.page_failures))
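
# Example invocation (illustrative; the wrapper script, test name and page-set
# path are hypothetical -- the parser usage is '%prog [options] <test> <page_set>'):
#   python run_page_tests smoothness page_sets/top_25.json --browser=system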
| codenote/chromium-test | tools/telemetry/telemetry/page/page_test_runner.py | Python | bsd-3-clause | 2,691 | 0.010777 |
# Copyright (c) 2019 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
class ModuleDocFragment(object):
# Windows shell documentation fragment
    # FIXME: set_module_language doesn't belong here, but it must be set so
    # plugins don't fail when someone calls get_option('set_module_language')
    # on this plugin
DOCUMENTATION = """
options:
async_dir:
description:
- Directory in which ansible will keep async job information.
- Before Ansible 2.8, this was set to C(remote_tmp + "\\.ansible_async").
default: '%USERPROFILE%\\.ansible_async'
ini:
- section: powershell
key: async_dir
vars:
- name: ansible_async_dir
version_added: '2.8'
remote_tmp:
description:
- Temporary directory to use on targets when copying files to the host.
default: '%TEMP%'
ini:
- section: powershell
key: remote_tmp
vars:
- name: ansible_remote_tmp
set_module_language:
description:
- Controls if we set the locale for modules when executing on the
target.
- Windows only supports C(no) as an option.
type: bool
default: 'no'
choices: ['no', False]
environment:
description:
- List of dictionaries of environment variables and their values to use when
executing commands.
type: list
default: [{}]
"""
| privateip/ansible | lib/ansible/plugins/doc_fragments/shell_windows.py | Python | gpl-3.0 | 1,460 | 0.002055 |
# Copyright 2019 DeepMind Technologies Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""Meta-strategy solvers for PSRO."""
import functools

import numpy as np
from open_spiel.python.algorithms import lp_solver
from open_spiel.python.algorithms import projected_replicator_dynamics
import pyspiel
EPSILON_MIN_POSITIVE_PROBA = 1e-8
def uniform_strategy(solver, return_joint=False):
"""Returns a Random Uniform distribution on policies.
Args:
solver: GenPSROSolver instance.
    return_joint: If True, also returns the joint probabilities alongside
      the marginals. Otherwise, returns only the marginals.
Returns:
uniform distribution on strategies.
"""
policies = solver.get_policies()
policy_lengths = [len(pol) for pol in policies]
result = [np.ones(pol_len) / pol_len for pol_len in policy_lengths]
if not return_joint:
return result
else:
joint_strategies = get_joint_strategy_from_marginals(result)
return result, joint_strategies
def softmax_on_range(number_policies):
x = np.array(list(range(number_policies)))
x = np.exp(x-x.max())
x /= np.sum(x)
return x
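
# Illustrative sketch (not in the original): softmax_on_range(3) returns
# roughly [0.090, 0.245, 0.665], i.e. later (more recently added) policies
# get exponentially more weight.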
def uniform_biased_strategy(solver, return_joint=False):
"""Returns a Biased Random Uniform distribution on policies.
The uniform distribution is biased to prioritize playing against more recent
policies (Policies that were appended to the policy list later in training)
instead of older ones.
Args:
solver: GenPSROSolver instance.
    return_joint: If True, also returns the joint probabilities alongside
      the marginals. Otherwise, returns only the marginals.
Returns:
uniform distribution on strategies.
"""
policies = solver.get_policies()
if not isinstance(policies[0], list):
policies = [policies]
policy_lengths = [len(pol) for pol in policies]
result = [softmax_on_range(pol_len) for pol_len in policy_lengths]
if not return_joint:
return result
else:
joint_strategies = get_joint_strategy_from_marginals(result)
return result, joint_strategies
def renormalize(probabilities):
"""Replaces all negative entries with zeroes and normalizes the result.
Args:
probabilities: probability vector to renormalize. Has to be one-dimensional.
Returns:
Renormalized probabilities.
"""
probabilities[probabilities < 0] = 0
probabilities = probabilities / np.sum(probabilities)
return probabilities
def get_joint_strategy_from_marginals(probabilities):
"""Returns a joint strategy matrix from a list of marginals.
Args:
probabilities: list of probabilities.
Returns:
A joint strategy from a list of marginals.
"""
probas = []
for i in range(len(probabilities)):
probas_shapes = [1] * len(probabilities)
probas_shapes[i] = -1
probas.append(probabilities[i].reshape(*probas_shapes))
  # reduce with np.multiply broadcasts the marginals pairwise; np.product over
  # a list of differently-shaped arrays relies on ragged object arrays and
  # breaks on newer NumPy versions
  result = functools.reduce(np.multiply, probas)
  return result.reshape(-1)
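
# Illustrative sketch (not in the original): with marginals
#   probabilities = [np.array([0.5, 0.5]), np.array([0.2, 0.8])]
# the joint strategy is the flattened outer product:
#   get_joint_strategy_from_marginals(probabilities)
#   # -> array([0.1, 0.4, 0.1, 0.4])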
def nash_strategy(solver, return_joint=False):
"""Returns nash distribution on meta game matrix.
This method only works for two player zero-sum games.
Args:
solver: GenPSROSolver instance.
    return_joint: If True, also returns the joint probabilities alongside
      the marginals. Otherwise, returns only the marginals.
Returns:
Nash distribution on strategies.
"""
meta_games = solver.get_meta_game()
if not isinstance(meta_games, list):
meta_games = [meta_games, -meta_games]
meta_games = [x.tolist() for x in meta_games]
if len(meta_games) != 2:
raise NotImplementedError(
"nash_strategy solver works only for 2p zero-sum"
"games, but was invoked for a {} player game".format(len(meta_games)))
nash_prob_1, nash_prob_2, _, _ = (
lp_solver.solve_zero_sum_matrix_game(
pyspiel.create_matrix_game(*meta_games)))
result = [
renormalize(np.array(nash_prob_1).reshape(-1)),
renormalize(np.array(nash_prob_2).reshape(-1))
]
if not return_joint:
return result
else:
joint_strategies = get_joint_strategy_from_marginals(result)
return result, joint_strategies
def prd_strategy(solver, return_joint=False):
"""Computes Projected Replicator Dynamics strategies.
Args:
solver: GenPSROSolver instance.
    return_joint: If True, also returns the joint probabilities alongside
      the marginals. Otherwise, returns only the marginals.
Returns:
PRD-computed strategies.
"""
meta_games = solver.get_meta_game()
if not isinstance(meta_games, list):
meta_games = [meta_games, -meta_games]
kwargs = solver.get_kwargs()
result = projected_replicator_dynamics.projected_replicator_dynamics(
meta_games, **kwargs)
if not return_joint:
return result
else:
joint_strategies = get_joint_strategy_from_marginals(result)
return result, joint_strategies
META_STRATEGY_METHODS = {
"uniform_biased": uniform_biased_strategy,
"uniform": uniform_strategy,
"nash": nash_strategy,
"prd": prd_strategy,
}
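
# Minimal usage sketch (illustrative; `solver` stands for any GenPSROSolver
# instance as described in the docstrings above):
#
#   compute_meta_strategy = META_STRATEGY_METHODS["nash"]
#   marginals, joint = compute_meta_strategy(solver, return_joint=True)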
| deepmind/open_spiel | open_spiel/python/algorithms/psro_v2/meta_strategies.py | Python | apache-2.0 | 5,344 | 0.008046 |
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyNistats(PythonPackage):
"""Modeling and Statistical analysis of fMRI data in Python."""
homepage = "https://github.com/nilearn/nistats"
pypi = "nistats/nistats-0.0.1rc0.tar.gz"
version('0.0.1rc0', sha256='dcc4c4e410f542fd72e02e12b3b6531851bae2680d08ad29658b272587ef2f98')
version('0.0.1b2', sha256='a853149087bafbf1bed12664ed8889a63ff15dde1fb7a9d51e8a094afc8d695d')
depends_on('python@2.7:', type=('build', 'run'))
depends_on('py-setuptools', type='build')
depends_on('py-numpy@1.11:', type=('build', 'run'))
depends_on('py-scipy@0.17:', type=('build', 'run'))
depends_on('py-scikit-learn@0.18:', type=('build', 'run'))
depends_on('py-nibabel@2.0.2:', type=('build', 'run'))
# needs +plotting to avoid ModuleNotFoundError:
# 'nilearn.plotting.js_plotting_utils' when importing nistats.reporting
# Functionality has been incorporated into py-nilearn@0.7:
depends_on('py-nilearn+plotting@0.4:0.6', type=('build', 'run'))
depends_on('py-pandas@0.18:', type=('build', 'run'))
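
# Typical Spack usage for this recipe (illustrative):
#   spack install py-nistats@0.0.1rc0
#   spack load py-nistats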
| LLNL/spack | var/spack/repos/builtin/packages/py-nistats/package.py | Python | lgpl-2.1 | 1,272 | 0.002358 |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
"""Student CNN encoder for XE training."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow as tf
from models.encoders.core.cnn_util import conv_layer, max_pool, batch_normalization
############################################################
# Architecture: (feature map, kernel(f*t), stride(f,t))
# CNN1: (128, 9*9, (1,1)) * 1 layers
# Batch normalization
# ReLU
# Max pool (3,1)
# CNN2: (256, 3*4, (1,1)) * 1 layers
# Batch normalization
# ReLU
# Max pool (1,1)
# fc: 2048 (ReLU) * 4 layers
############################################################
class StudentCNNXEEncoder(object):
"""Student CNN encoder for XE training.
Args:
input_size (int): the dimensions of input vectors.
            This is expected to be num_channels * splice * num_stack * 3
            (static + Δ + ΔΔ), matching the assertion in __call__
splice (int): frames to splice
num_stack (int): the number of frames to stack
parameter_init (float, optional): the range of uniform distribution to
initialize weight parameters (>= 0)
name (string, optional): the name of encoder
"""
def __init__(self,
input_size,
splice,
num_stack,
parameter_init,
name='cnn_student_xe_encoder'):
assert input_size % 3 == 0
self.num_channels = (input_size // 3) // num_stack // splice
self.splice = splice
self.num_stack = num_stack
self.parameter_init = parameter_init
self.name = name
def __call__(self, inputs, keep_prob, is_training):
"""Construct model graph.
Args:
inputs (placeholder): A tensor of size
`[B, input_size (num_channels * splice * num_stack * 3)]`
keep_prob (placeholder, float): A probability to keep nodes
in the hidden-hidden connection
is_training (bool):
Returns:
outputs: Encoder states.
if time_major is True, a tensor of size `[T, B, output_dim]`
otherwise, `[B, output_dim]`
"""
# inputs: 2D tensor `[B, input_dim]`
batch_size = tf.shape(inputs)[0]
input_dim = inputs.shape.as_list()[-1]
# NOTE: input_dim: num_channels * splice * num_stack * 3
# for debug
# print(input_dim) # 1200
# print(self.num_channels) # 40
# print(self.splice) # 5
# print(self.num_stack) # 2
assert input_dim == self.num_channels * self.splice * self.num_stack * 3
# Reshape to 4D tensor `[B, num_channels, splice * num_stack, 3]`
inputs = tf.reshape(
inputs,
shape=[batch_size, self.num_channels, self.splice * self.num_stack, 3])
# NOTE: filter_size: `[H, W, C_in, C_out]`
with tf.variable_scope('CNN1'):
inputs = conv_layer(inputs,
filter_size=[9, 9, 3, 128],
stride=[1, 1],
parameter_init=self.parameter_init,
activation='relu')
inputs = batch_normalization(inputs, is_training=is_training)
inputs = max_pool(inputs,
pooling_size=[3, 1],
stride=[3, 1],
name='max_pool')
with tf.variable_scope('CNN2'):
inputs = conv_layer(inputs,
filter_size=[3, 4, 128, 256],
stride=[1, 1],
parameter_init=self.parameter_init,
activation='relu')
inputs = batch_normalization(inputs, is_training=is_training)
inputs = max_pool(inputs,
pooling_size=[1, 1],
stride=[1, 1],
name='max_pool')
# Reshape to 2D tensor `[B, new_h * new_w * C_out]`
outputs = tf.reshape(
inputs, shape=[batch_size, np.prod(inputs.shape.as_list()[-3:])])
for i in range(1, 5, 1):
with tf.variable_scope('fc%d' % (i)) as scope:
outputs = tf.contrib.layers.fully_connected(
inputs=outputs,
num_outputs=2048,
activation_fn=tf.nn.relu,
weights_initializer=tf.truncated_normal_initializer(
stddev=self.parameter_init),
biases_initializer=tf.zeros_initializer(),
scope=scope)
return outputs
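
# Rough shape sketch (illustrative; exact spatial sizes depend on the padding
# conventions inside conv_util): with num_channels=40, splice=5, num_stack=2,
# a batch flows as
#   [B, 1200] -> reshape -> [B, 40, 10, 3]
#   -> CNN1 (128 maps) + (3,1) max-pool -> CNN2 (256 maps) + (1,1) max-pool
#   -> flatten -> 4 x fc(2048, ReLU) -> [B, 2048]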
| hirofumi0810/tensorflow_end2end_speech_recognition | models/encoders/core/student_cnn_xe.py | Python | mit | 4,732 | 0.000634 |
import fbchat
from fbchat import PageData
def test_page_from_graphql(session):
data = {
"id": "123456",
"name": "Some school",
"profile_picture": {"uri": "https://scontent-arn2-1.xx.fbcdn.net/v/..."},
"url": "https://www.facebook.com/some-school/",
"category_type": "SCHOOL",
"city": None,
}
assert PageData(
session=session,
id="123456",
photo=fbchat.Image(url="https://scontent-arn2-1.xx.fbcdn.net/v/..."),
name="Some school",
url="https://www.facebook.com/some-school/",
city=None,
category="SCHOOL",
) == PageData._from_graphql(session, data)
| carpedm20/fbchat | tests/threads/test_page.py | Python | bsd-3-clause | 669 | 0.001495 |
"""
WSGI config for board project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "board.settings")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
| Atom1c/home | board/board/wsgi.py | Python | unlicense | 385 | 0.002597 |
# *********************************************************************************************
# Copyright (C) 2017 Joel Becker, Jillian Anderson, Steve McColl and Dr. John McLevey
#
# This file is part of the tidyextractors package developed for Dr John McLevey's Networks Lab
# at the University of Waterloo. For more information, see
# http://tidyextractors.readthedocs.io/en/latest/
#
# tidyextractors is free software: you can redistribute it and/or modify it under the terms of
# the GNU General Public License as published by the Free Software Foundation, either version 3
# of the License, or (at your option) any later version.
#
# tidyextractors is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
# without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with tidyextractors.
# If not, see <http://www.gnu.org/licenses/>.
# *********************************************************************************************
import os
import re
import tqdm
import mailbox
import warnings
import pandas as pd
import email.utils as email
import email.header as header
# Adapted from Phil Deutsch's "mbox-analysis" https://github.com/phildeutsch/mbox-analysis
def clean_addresses(addresses):
"""
    Cleans a string of one or more email addresses.
    :param addresses: String of comma- or semicolon-separated addresses (or None)
:return: List of strings (cleaned email addresses)
"""
if addresses is None:
return []
addresses = addresses.replace("\'", "")
address_list = re.split('[,;]', addresses)
clean_list = []
for address in address_list:
temp_clean_address = clean_address(address)
clean_list.append(temp_clean_address)
return clean_list
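
# Illustrative example (not in the original):
#   clean_addresses('John Doe <John@Example.com>; jane@example.org')
#   # -> ['john@example.com', 'jane@example.org']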
def clean_address(address):
"""
Cleans a single email address.
:param address: String (email address)
:return: String (clean email address)
"""
if isinstance(address, header.Header):
return clean_address(address.encode('ascii'))
elif isinstance(address, str):
address = address.replace("<", "")
address = address.replace(">", "")
address = address.replace("\"", "")
address = address.replace("\n", " ")
address = address.replace("MAILER-DAEMON", "")
address = address.lower().strip()
        # compile the pattern once and remember the last word that matched;
        # the original rebuilt the regex per word, shadowed the imported
        # `email` module alias, left the dot unescaped, and inspected only
        # the match result of the *last* word
        email_regex = re.compile(
            r"^[a-zA-Z0-9._%-]+@[a-zA-Z0-9._%-]+\.[a-zA-Z]{2,6}$"
        )
        match = None
        for word in address.split(' '):
            candidate = email_regex.match(word)
            if candidate is not None:
                match = candidate
        if match is not None:
            clean_email = match.group(0)
        elif address.split(' ')[-1].find('@') > -1:
            clean_email = address.split(' ')[-1].strip()
        elif address.split(' ')[-1].find('?') > -1:
            clean_email = 'n/a'
        else:
            clean_email = address
        return clean_email
elif address is None:
return None
else:
raise ValueError('An unexpected type was given to clean_address. Address was {}'.format(address))
def get_body(message):
"""
Extracts body text from an mbox message.
:param message: Mbox message
:return: String
"""
try:
sm = str(message)
body_start = sm.find('iamunique', sm.find('iamunique')+1)
body_start = sm.find('Content-Transfer-Encoding', body_start+1)
body_start = sm.find('\n', body_start+1)+1
body_end = sm.find('From: ', body_start + 1)
if body_end == -1:
body_end = sm.find('iamunique', body_start + 1)
body_end = sm.find('\n', body_end - 25)
body = sm[body_start:body_end]
body = body.replace("=20\n", "")
body = body.replace("=FC", "ü")
body = body.replace("=F6", "ö")
body = body.replace("=84", "\"")
body = body.replace("=94", "\"")
body = body.replace("=96", "-")
body = body.replace("=92", "\'")
body = body.replace("=93", "\"")
body = body.replace("=E4", "ä")
body = body.replace("=DF", "ss")
body = body.replace("=", "")
body = body.replace("\"", "")
body = body.replace("\'", "")
except:
body = None
return body
def write_table(mboxfile, mailTable):
"""
Takes a list and extends it with lists of data, which is
extracted from mbox messages.
:param mboxfile: Mbox file name/path
:param mailTable: A list (of lists)
:return: An extended list of lists
"""
mail_box_contents = mailbox.mbox(mboxfile)
m_pbar = tqdm.tqdm(range(0,len(mail_box_contents)))
m_pbar.set_description('Extracting mbox messages...')
count = 0
update_interval = min(50,len(mail_box_contents))
for message in mail_box_contents:
count += 1
if count % update_interval == 0:
m_pbar.update(update_interval)
clean_from = clean_address(message['From'])
clean_to = clean_addresses(message['To'])
clean_cc = clean_addresses(message['Cc'])
try:
clean_date = email.parsedate_to_datetime(message['Date'])
except:
clean_date = None
mailTable.append([
clean_from,
clean_to,
clean_cc,
clean_date,
message['Subject'],
get_body(message)
])
def mbox_to_pandas(mbox_path):
"""
Extracts all mbox messages from mbox files in mbox_path.
:param mbox_path: Path to an mbox file OR a directory containing mbox files.
:return: A Pandas DataFrame with messages as rows/observations.
"""
if os.path.isfile(mbox_path):
mbox_files = [mbox_path]
else:
mbox_files = [os.path.join(dirpath, f) for dirpath, dirnames, files in os.walk(mbox_path) for f in files if f.endswith('mbox')]
mail_table = []
f_pbar = tqdm.tqdm(range(0,len(mbox_files)))
f_pbar.set_description('Extracting mbox files...')
for mbox_file in mbox_files:
write_table(mbox_file, mail_table)
f_pbar.update(1)
df_out = pd.DataFrame(mail_table)
df_out.columns = ['From', 'To', 'Cc', 'Date', 'Subject', 'Body']
df_out['NumTo'] = df_out['To'].map(lambda i: len(i))
df_out['NumCC'] = df_out['Cc'].map(lambda i: len(i))
    return df_out
| networks-lab/tidyextractors | tidyextractors/tidymbox/mbox_to_pandas.py | Python | gpl-3.0 | 6,462 | 0.002787 |
from mpf.tests.MpfFakeGameTestCase import MpfFakeGameTestCase
from unittest.mock import MagicMock, patch
from mpf.tests.MpfTestCase import MpfTestCase
class TestDropTargets(MpfTestCase):
def get_config_file(self):
return 'test_drop_targets.yaml'
def get_machine_path(self):
return 'tests/machine_files/drop_targets/'
def get_platform(self):
return 'smart_virtual'
def test_reset_and_playfield_active(self):
self.mock_event("playfield_active")
self.hit_switch_and_run("switch1", 1)
self.hit_switch_and_run("switch2", 1)
        # playfield should be active when drop targets are shot down
self.assertEventCalled("playfield_active")
self.mock_event("playfield_active")
self.assertTrue(self.machine.drop_targets["left1"].complete)
self.assertTrue(self.machine.drop_targets["left2"].complete)
self.assertFalse(self.machine.drop_targets["left3"].complete)
# reset the bank. this should not trigger playfield_active
self.machine.drop_target_banks["left_bank"].reset()
self.advance_time_and_run(1)
self.assertEventNotCalled("playfield_active")
self.assertFalse(self.machine.drop_targets["left1"].complete)
self.assertFalse(self.machine.drop_targets["left2"].complete)
self.assertFalse(self.machine.drop_targets["left3"].complete)
def test_drop_target_bank(self):
self.assertIn('left1', self.machine.drop_targets)
self.assertIn('left2', self.machine.drop_targets)
self.assertIn('left3', self.machine.drop_targets)
self.assertIn('left_bank', self.machine.drop_target_banks)
self.machine.coils["coil1"].pulse = MagicMock(return_value=200)
self.assertFalse(self.machine.drop_targets["left1"].complete)
self.assertFalse(self.machine.drop_targets["left2"].complete)
self.assertFalse(self.machine.drop_targets["left3"].complete)
self.assertFalse(self.machine.drop_target_banks["left_bank"].complete)
self.hit_switch_and_run("switch1", 1)
self.hit_switch_and_run("switch2", 1)
self.assertTrue(self.machine.drop_targets["left1"].complete)
self.assertTrue(self.machine.drop_targets["left2"].complete)
self.assertFalse(self.machine.drop_targets["left3"].complete)
self.assertFalse(self.machine.drop_target_banks["left_bank"].complete)
assert not self.machine.coils["coil1"].pulse.called
self.hit_switch_and_run("switch3", .5)
self.assertTrue(self.machine.drop_targets["left1"].complete)
self.assertTrue(self.machine.drop_targets["left2"].complete)
self.assertTrue(self.machine.drop_targets["left3"].complete)
self.assertTrue(self.machine.drop_target_banks["left_bank"].complete)
assert not self.machine.coils["coil1"].pulse.called
# it should reset after 1s
self.advance_time_and_run(.5)
self.machine.coils["coil1"].pulse.assert_called_once_with(max_wait_ms=100)
# after another 100ms the switches releases
self.release_switch_and_run("switch1", 0)
self.release_switch_and_run("switch2", 0)
self.release_switch_and_run("switch3", 1)
self.assertFalse(self.machine.drop_targets["left1"].complete)
self.assertFalse(self.machine.drop_targets["left2"].complete)
self.assertFalse(self.machine.drop_targets["left3"].complete)
self.assertFalse(self.machine.drop_target_banks["left_bank"].complete)
# check that the bank does not reset if already down
self.machine.coils["coil1"].pulse = MagicMock(return_value=100)
self.machine.drop_target_banks['left_bank'].reset()
assert not self.machine.coils["coil1"].pulse.called
# reset should work with one target down
self.hit_switch_and_run("switch1", 1)
self.machine.drop_target_banks['left_bank'].reset()
self.machine.coils["coil1"].pulse.assert_called_once_with(max_wait_ms=100)
def test_knockdown_and_reset(self):
self.mock_event("unexpected_ball_on_playfield")
self.machine.coils["coil2"].pulse = MagicMock(wraps=self.machine.coils["coil2"].pulse)
self.machine.coils["coil3"].pulse = MagicMock(wraps=self.machine.coils["coil3"].pulse)
self.assertFalse(self.machine.drop_targets["left6"].complete)
# knock it down
self.post_event("knock_knock")
self.advance_time_and_run(.3)
assert not self.machine.coils["coil2"].pulse.called
self.machine.coils["coil3"].pulse.assert_called_once_with(max_wait_ms=100)
# ignore ms means the state is not updated yet
self.assertFalse(self.machine.drop_targets["left6"].complete)
self.advance_time_and_run(.3)
# and now it is
self.assertTrue(self.machine.drop_targets["left6"].complete)
# reset it
self.machine.coils["coil3"].pulse.reset_mock()
self.post_event("reset_target")
self.advance_time_and_run(.3)
assert not self.machine.coils["coil3"].pulse.called
self.machine.coils["coil2"].pulse.assert_called_once_with(max_wait_ms=100)
# ignore ms means the state is not updated yet
self.assertTrue(self.machine.drop_targets["left6"].complete)
self.advance_time_and_run(6)
# and now it is
self.assertFalse(self.machine.drop_targets["left6"].complete)
self.assertEventNotCalled("unexpected_ball_on_playfield")
def test_drop_targets_in_mode(self):
self.machine.modes['mode1'].start()
self.advance_time_and_run()
self.machine.coils["coil2"].pulse = MagicMock(return_value=30)
self.assertFalse(self.machine.drop_targets["left4"].complete)
self.assertFalse(self.machine.drop_targets["left5"].complete)
self.assertFalse(self.machine.drop_targets["left6"].complete)
self.assertFalse(self.machine.drop_target_banks["left_bank_2"].complete)
self.hit_switch_and_run("switch4", 1)
self.hit_switch_and_run("switch5", 1)
self.assertTrue(self.machine.drop_targets["left4"].complete)
self.assertTrue(self.machine.drop_targets["left5"].complete)
self.assertFalse(self.machine.drop_targets["left6"].complete)
self.assertFalse(self.machine.drop_target_banks["left_bank_2"].complete)
self.machine.modes['mode1'].stop()
self.advance_time_and_run()
self.assertTrue(self.machine.drop_targets["left4"].complete)
self.assertTrue(self.machine.drop_targets["left5"].complete)
self.assertFalse(self.machine.drop_targets["left6"].complete)
self.assertFalse(self.machine.drop_target_banks["left_bank_2"].complete)
# should not complete the bank
self.hit_switch_and_run("switch6", .1)
self.assertTrue(self.machine.drop_targets["left4"].complete)
self.assertTrue(self.machine.drop_targets["left5"].complete)
self.assertTrue(self.machine.drop_targets["left6"].complete)
self.assertFalse(self.machine.drop_target_banks["left_bank_2"].complete)
self.post_event("reset_target")
self.machine.modes['mode1'].start()
self.advance_time_and_run()
# mode is running again. should complete
self.hit_switch_and_run("switch4", .1)
self.hit_switch_and_run("switch5", .1)
self.hit_switch_and_run("switch6", .1)
self.assertTrue(self.machine.drop_targets["left4"].complete)
self.assertTrue(self.machine.drop_targets["left5"].complete)
self.assertTrue(self.machine.drop_targets["left6"].complete)
self.assertTrue(self.machine.drop_target_banks["left_bank_2"].complete)
def test_drop_target_reset(self):
target = self.machine.drop_targets["left6"]
self.machine.coils["coil2"].pulse = MagicMock()
self.machine.coils["coil3"].pulse = MagicMock()
self.assertSwitchState("switch6", 0)
# target up. it should not reset
target.reset()
self.advance_time_and_run()
assert not self.machine.coils["coil2"].pulse.called
assert not self.machine.coils["coil3"].pulse.called
# hit target down
self.hit_switch_and_run("switch6", 1)
self.assertTrue(target.complete)
# it should reset
target.reset()
self.advance_time_and_run()
self.machine.coils["coil2"].pulse.assert_called_once_with(max_wait_ms=100)
self.machine.coils["coil2"].pulse.reset_mock()
assert not self.machine.coils["coil3"].pulse.called
self.release_switch_and_run("switch6", 1)
# knock down should work
target.knockdown()
self.advance_time_and_run()
self.machine.coils["coil3"].pulse.assert_called_once_with(max_wait_ms=100)
self.machine.coils["coil3"].pulse.reset_mock()
assert not self.machine.coils["coil2"].pulse.called
self.hit_switch_and_run("switch6", 1)
# but not when its down already
target.knockdown()
self.advance_time_and_run()
assert not self.machine.coils["coil2"].pulse.called
assert not self.machine.coils["coil3"].pulse.called
def test_drop_target_reset_retry_success(self):
target = self.machine.drop_targets["left7"]
coil = self.machine.coils["coil4"]
coil.pulse = MagicMock()
self.assertSwitchState("switch7", 0)
# target up. it should not reset
target.reset()
self.advance_time_and_run()
assert not coil.pulse.called
# hit target down
self.hit_switch_and_run("switch7", 1)
self.assertTrue(target.complete)
assert not coil.pulse.called
# it should attempt to reset
target.reset()
self.assertEqual(coil.pulse.call_count, 1)
# after 90ms, should not have pulsed
self.advance_time_and_run(0.09)
self.assertEqual(coil.pulse.call_count, 1)
# after 100ms, should be called
self.advance_time_and_run(0.02)
self.assertEqual(coil.pulse.call_count, 2)
# after switch is up, should not be called again
self.release_switch_and_run("switch7", 1)
self.assertFalse(target.complete)
self.assertEqual(coil.pulse.call_count, 2)
def test_drop_target_reset_retry_max_attempts(self):
target = self.machine.drop_targets["left7"]
coil = self.machine.coils["coil4"]
coil.pulse = MagicMock()
self.assertSwitchState("switch7", 0)
# target up. it should not reset
target.reset()
self.advance_time_and_run()
assert not coil.pulse.called
# hit target down
self.hit_switch_and_run("switch7", 1)
self.assertTrue(target.complete)
assert not coil.pulse.called
# it should attempt to reset
target.reset()
self.assertEqual(coil.pulse.call_count, 1)
# after 90ms, should not have pulsed
self.advance_time_and_run(0.09)
self.assertEqual(coil.pulse.call_count, 1)
# after 100ms, should be called
self.advance_time_and_run(0.02)
self.assertEqual(coil.pulse.call_count, 2)
# after 100ms, should be called
self.advance_time_and_run(0.1)
self.assertEqual(coil.pulse.call_count, 3)
# after 100ms, max retries achieved
self.advance_time_and_run(0.1)
self.assertEqual(coil.pulse.call_count, 3)
def test_drop_target_ignore_ms(self):
self.mock_event('drop_target_center1_down')
self.mock_event('drop_target_center1_up')
self.hit_switch_and_run('switch10', 1)
        self.assertSwitchState('switch10', True)
self.assertEventNotCalled('drop_target_center1_up')
self.assertEventCalled('drop_target_center1_down')
self.post_event('reset_center1', .05)
self.release_switch_and_run('switch10', .1)
self.hit_switch_and_run('switch10', .1)
self.release_switch_and_run('switch10', .1)
self.assertSwitchState('switch10', False)
self.advance_time_and_run(.5)
# reset happened in the ignore window so this event should not be
# called
self.assertEventNotCalled('drop_target_center1_up')
self.advance_time_and_run(1)
# now do the same test for knockdown
self.mock_event('drop_target_center1_down')
self.post_event('knockdown_center1', .2)
self.hit_switch_and_run('switch10', .1)
self.assertEventNotCalled('drop_target_center1_down')
self.advance_time_and_run(1)
self.assertEventCalled('drop_target_center1_down')
def test_drop_target_ignore_ms_ball_search(self):
self.machine.playfields["playfield"].config['enable_ball_search'] = True
self.machine.playfields["playfield"].balls += 1
self.mock_event('drop_target_center1_down')
self.mock_event('drop_target_center1_up')
#self.hit_switch_and_run('switch10', 1)
        self.assertSwitchState('switch10', False)
# wait until ball search phase 1
event_future = self.machine.events.wait_for_event("ball_search_phase_1")
self.machine.clock.loop.run_until_complete(event_future)
self.advance_time_and_run(.25)
self.hit_switch_and_run('switch10', .1)
self.release_switch_and_run('switch10', .1)
self.assertSwitchState('switch10', False)
self.advance_time_and_run(.5)
# reset happened in the ignore window so this event should not be
# called
self.assertEventNotCalled('drop_target_center1_down')
self.assertEventNotCalled('drop_target_center1_up')
# wait until ball search phase 2
event_future = self.machine.events.wait_for_event("ball_search_phase_2")
self.machine.clock.loop.run_until_complete(event_future)
self.advance_time_and_run(.25)
self.hit_switch_and_run('switch10', .1)
self.release_switch_and_run('switch10', .1)
self.assertSwitchState('switch10', False)
self.advance_time_and_run(.5)
# reset happened in the ignore window so this event should not be
# called
self.assertEventNotCalled('drop_target_center1_down')
self.assertEventNotCalled('drop_target_center1_up')
# wait until ball search phase 3
event_future = self.machine.events.wait_for_event("ball_search_phase_3")
self.machine.clock.loop.run_until_complete(event_future)
self.advance_time_and_run(.25)
self.hit_switch_and_run('switch10', .1)
self.release_switch_and_run('switch10', .1)
self.assertSwitchState('switch10', False)
self.advance_time_and_run(.5)
# reset happened in the ignore window so this event should not be
# called
self.assertEventNotCalled('drop_target_center1_down')
self.assertEventNotCalled('drop_target_center1_up')
def test_drop_target_bank_ignore_ms(self):
self.mock_event('drop_target_bank_right_bank_down')
self.mock_event('drop_target_bank_right_bank_mixed')
self.hit_switch_and_run('switch8', 1)
self.hit_switch_and_run('switch9', 1)
self.assertEventCalled('drop_target_bank_right_bank_mixed', 1)
self.assertEventCalled('drop_target_bank_right_bank_down', 1)
self.mock_event('drop_target_bank_right_bank_down')
self.mock_event('drop_target_bank_right_bank_mixed')
self.mock_event('drop_target_bank_right_bank_up')
self.post_event('reset_right_bank', .5)
# these events should not be called since we're in the ignore window
self.assertEventNotCalled('drop_target_bank_right_bank_mixed')
self.assertEventNotCalled('drop_target_bank_right_bank_up')
self.advance_time_and_run(1)
# after 1s, the ignore is cleared and the bank updates its state.
# mixed should not have been called since it happened during the
# ignore window
self.assertEventNotCalled('drop_target_bank_right_bank_mixed')
# up should have been called by now
self.assertEventCalled('drop_target_bank_right_bank_up')
def test_drop_target_bank_restore_delay_ms(self):
# Set a specific ms that the pulse will wait before firing
self.machine.coils['coil5'].pulse = MagicMock(return_value=27)
with patch('mpf.core.delays.DelayManager.add') as add:
self.hit_switch_and_run('switch8', 1)
self.hit_switch_and_run('switch9', 1)
self.post_event('reset_right_bank', 1.5)
bank = self.machine.drop_target_banks['right_bank']
# Verify that the ignore_ms is the config value (1000) plus the wait
add.assert_called_with(ms=1027, name='ignore_hits',
callback=bank._restore_switch_hits,
reset_attempt=None)
class TestDropTargetsInGame(MpfFakeGameTestCase):
def get_config_file(self):
return 'test_multiple_drop_resets_on_startup.yaml'
def get_machine_path(self):
return 'tests/machine_files/drop_targets/'
def test_multiple_reset_events(self):
"""Check that the drop target tries to reset three times on startup."""
# we check that the bank resets 3 times but only reports down once
self.mock_event("drop_target_bank_multiple_resets_on_game_start_down")
self.machine.coils["coil1"].hw_driver.pulse = MagicMock()
# drop is down
self.hit_switch_and_run("switch1", 1)
self.assertEventCalled("drop_target_bank_multiple_resets_on_game_start_down", times=1)
self.assertEqual(0, self.machine.coils["coil1"].hw_driver.pulse.call_count)
# start game to trigger the reset
self.start_game()
# drop should have tried a reset
self.assertEventCalled("drop_target_bank_multiple_resets_on_game_start_down", times=1)
self.assertEqual(1, self.machine.coils["coil1"].hw_driver.pulse.call_count)
# it should reset again
self.advance_time_and_run(3)
self.assertEventCalled("drop_target_bank_multiple_resets_on_game_start_down", times=1)
self.assertEqual(2, self.machine.coils["coil1"].hw_driver.pulse.call_count)
# it should reset again
self.advance_time_and_run(3)
self.assertEventCalled("drop_target_bank_multiple_resets_on_game_start_down", times=1)
self.assertEqual(3, self.machine.coils["coil1"].hw_driver.pulse.call_count)
| missionpinball/mpf | mpf/tests/test_DropTargets.py | Python | mit | 18,577 | 0.001453 |
"""
WSGI config for mysite6 project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "mysite6.settings")
application = get_wsgi_application()
| wasit7/PythonDay | django/mysite6/mysite6/wsgi.py | Python | bsd-3-clause | 391 | 0 |
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
#
# Copyright (c) 2015 ERP|OPEN (www.erpopen.nl).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import models
| rosenvladimirov/addons | product_barcodes_bg/__init__.py | Python | agpl-3.0 | 992 | 0.001008 |
#!/usr/bin/env python3
from configparser import ConfigParser
from colorama import Fore, Back, Style
import time
import argparse
import ast
import sys
import pymysql
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("-c", "--config", help="JSON Config File with our Storage Info", required=True)
parser.add_argument("-V", "--verbose", action="store_true", help="Enable Verbose Mode")
parser._optionals.title = "DESCRIPTION "
# Parser Args
args = parser.parse_args()
# Grab Variables
CONFIG=args.config
VERBOSE=args.verbose
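# A sketch of the INI layout this script expects, based on the keys read in
# archive_collections() below; all values are placeholders:
#
#   [db]
#   dbhostname = localhost
#   dbport = 3306
#   dbuser = archiver
#   dbpassword = secret
#   dbname = persist_transaction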
def archive_collections(CONFIG, VERBOSE) :
# Process Config
try:
# Read Our INI with our data collection rules
config = ConfigParser()
config.read(CONFIG)
except Exception as e: # pylint: disable=broad-except, invalid-name
sys.exit('Bad configuration file {}'.format(e))
    # Collect the config sections and their items into a nested dictionary
config_items=dict()
# Collection Items
for section in config :
config_items[section]=dict()
for item in config[section]:
config_items[section][item] = config[section][item]
if VERBOSE:
print("Config Items: ", config_items)
do_archive = True
try :
# Note that Autocommit is off
db_conn = pymysql.connect(host=config_items["db"]["dbhostname"], port=int(config_items["db"]["dbport"]), \
user=config_items["db"]["dbuser"], passwd=config_items["db"]["dbpassword"], \
db=config_items["db"]["dbname"], autocommit=True )
except Exception as e :
# Error
print("Error Connecting to Datbase with error: ", str(e) )
do_archive = False
if do_archive == True :
# Set Archive Time
ARCHIVE_TIME = int(time.time())
if VERBOSE:
print("Archive Time: " , str(ARCHIVE_TIME))
# Create Query Strings
grab_delete_ids = "select id from trked_trans where active = False and lastChecked < FROM_UNIXTIME(" + str(ARCHIVE_TIME) +" ) - interval 7 DAY ;"
remove_trked_trans_sql = "DELETE FROM trked_trans where id = %s ; "
remove_attempt_sql = "DELETE FROM attempts where fk_trked_trans_id = %s ; "
cur = db_conn.cursor()
if VERBOSE:
            print(grab_delete_ids)
            print(remove_trked_trans_sql)
            print(remove_attempt_sql)
success = True
try:
cur.execute(grab_delete_ids)
to_delete_ids=cur.fetchall()
except Exception as e :
if VERBOSE:
print(Fore.RED, "Trouble with id grabbing query ", str(grab_delete_ids) , " error : ", str(e), Style.RESET_ALL)
success = False
else :
# Worked So Do the
            try :
                # fetchall() returned a sequence of 1-tuples (one id each),
                # which is the parameter sequence that executemany() expects
                cur.executemany(remove_trked_trans_sql, to_delete_ids)
                trans_removed = cur.rowcount
                cur.executemany(remove_attempt_sql, to_delete_ids)
                attempts_removed = cur.rowcount
except Exception as e :
if VERBOSE:
print(Fore.RED, "Trouble with removal queries error : ", str(e), Style.RESET_ALL)
success = False
if success == True :
print(Fore.GREEN, "Long Transaction Archived", str(trans_removed), " | Attempt records removed ", str(attempts_removed), Style.RESET_ALL)
else :
print(Fore.RED, "Archiving has failed" , Style.RESET_ALL)
if __name__ == "__main__":
archive_collections(CONFIG, VERBOSE)
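# Example invocation (the config path is a placeholder):
#   python3 archive.py -c /etc/archive_config.ini -V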
| chalbersma/persist_transaction | archive.py | Python | gpl-3.0 | 3,154 | 0.045656 |
# griddata.py - 2010-07-11 ccampo
import numpy as np
def griddata(x, y, z, binsize=0.01, retbin=True, retloc=True):
"""
Place unevenly spaced 2D data on a grid by 2D binning (nearest
neighbor interpolation).
Parameters
----------
    x : ndarray (1D)
        The independent data x-axis of the grid.
    y : ndarray (1D)
        The independent data y-axis of the grid.
    z : ndarray (1D)
        The dependent data in the form z = f(x,y).
    binsize : scalar, optional
        The full width and height of each bin on the grid. If each
        bin is a square, then this is the x and y dimension. This is
        the step in both directions, x and y. Defaults to 0.01.
retbin : boolean, optional
Function returns `bins` variable (see below for description)
if set to True. Defaults to True.
retloc : boolean, optional
Function returns `wherebins` variable (see below for description)
if set to True. Defaults to True.
Returns
-------
grid : ndarray (2D)
The evenly gridded data. The value of each cell is the median
value of the contents of the bin.
bins : ndarray (2D)
A grid the same shape as `grid`, except the value of each cell
is the number of points in that bin. Returns only if
`retbin` is set to True.
wherebin : list (2D)
A 2D list the same shape as `grid` and `bins` where each cell
        contains the indices of `z` which contain the values stored
in the particular bin.
Revisions
---------
2010-07-11 ccampo Initial version
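    Examples
    --------
    A minimal usage sketch; the data values here are illustrative only.
    >>> x = np.random.uniform(0, 1, 100)
    >>> y = np.random.uniform(0, 1, 100)
    >>> z = x * y
    >>> grid, bins, wherebin = griddata(x, y, z, binsize=0.1)
    >>> grid.shape == bins.shape
    True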
"""
# get extrema values.
xmin, xmax = x.min(), x.max()
ymin, ymax = y.min(), y.max()
# make coordinate arrays.
xi = np.arange(xmin, xmax+binsize, binsize)
yi = np.arange(ymin, ymax+binsize, binsize)
xi, yi = np.meshgrid(xi,yi)
# make the grid.
grid = np.zeros(xi.shape, dtype=x.dtype)
nrow, ncol = grid.shape
if retbin: bins = np.copy(grid)
# create list in same shape as grid to store indices
if retloc:
wherebin = np.copy(grid)
wherebin = wherebin.tolist()
# fill in the grid.
for row in range(nrow):
for col in range(ncol):
xc = xi[row, col] # x coordinate.
yc = yi[row, col] # y coordinate.
# find the position that xc and yc correspond to.
posx = np.abs(x - xc)
posy = np.abs(y - yc)
ibin = np.logical_and(posx < binsize/2., posy < binsize/2.)
            ind = np.where(ibin)[0]
# fill the bin.
bin = z[ibin]
if retloc: wherebin[row][col] = ind
if retbin: bins[row, col] = bin.size
if bin.size != 0:
binval = np.median(bin)
grid[row, col] = binval
else:
grid[row, col] = np.nan # fill empty bins with nans.
# return the grid
if retbin:
if retloc:
return grid, bins, wherebin
else:
return grid, bins
else:
if retloc:
return grid, wherebin
else:
            return grid
| shaunwbell/FOCI_Analysis | temp/griddata.py | Python | mit | 3,221 | 0.005899 |
# Copyright (C) 2006-2011, University of Maryland
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/ or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# Author: James Krycka
"""
This module implements the SimulationPage class that provides the simulation
feature of the application. It creates simulated reflectometry data files from
the user's model description and user specified parameter settings which are
then used to perform a direct inversion to generate a scattering length density
profile of the sample.
"""
#==============================================================================
from __future__ import print_function
import os
import sys
import time
import numpy as np
import wx
from wx.lib import delayedresult
import matplotlib
from matplotlib.backends.backend_wxagg import FigureCanvasWxAgg as FigureCanvas
from matplotlib.backends.backend_wxagg import NavigationToolbar2WxAgg as Toolbar
# The Figure object is used to create backend-independent plot representations.
from matplotlib.figure import Figure
from matplotlib.font_manager import FontProperties
# For use in the matplotlib toolbar.
from matplotlib.widgets import Slider, Button, RadioButtons
# Wx-Pylab magic for displaying plots within an application's window.
from matplotlib import _pylab_helpers
from matplotlib.backend_bases import FigureManagerBase
#from matplotlib import pyplot as plt
import pylab
from ..api.resolution import bins, binwidths
from ..api.simulate import Simulation
from .utilities import example_data
from .input_list import InputListPanel
from .instrument_params import InstrumentParameters
from .wx_utils import (popup_error_message, popup_warning_message,
StatusBarInfo, ExecuteInThread, WorkInProgress)
# Text strings for use in file selection dialog boxes.
DATA_FILES = "Data files (*.dat)|*.dat"
TEXT_FILES = "Text files (*.txt)|*.txt"
ALL_FILES = "All files (*.*)|*.*"
# Resource files.
DEMO_MODEL1_DESC = "demo_model_1.dat"
DEMO_MODEL2_DESC = "demo_model_2.dat"
DEMO_MODEL3_DESC = "demo_model_3.dat"
# Custom colors.
WINDOW_BKGD_COLOUR = "#ECE9D8"
PALE_YELLOW = "#FFFFB0"
# Other constants
NEWLINE = "\n"
NEWLINES_2 = "\n\n"
DATA_ENTRY_ERRMSG = """\
Please correct any highlighted field in error,
then retry the operation.\n
Yellow indicates an input value is required.
Red means the input value has incorrect syntax."""
INSTR_PARAM_ERRMSG = """\
Please edit the instrument data to supply missing
required parameters needed to compute resolution for
the simulated datasets."""
INSTR_CALC_RESO_ERRMSG = """\
Please specify an instrument to be used for calculating
resolution for the simulated datasets, or disable this
calculation by answering 'No' to the 'With Resolution'
question at the bottom of the page."""
SIM_HELP1 = """\
Edit parameters then click Compute to generate a density profile \
from your model."""
#==============================================================================
class SimulationPage(wx.Panel):
"""
This class implements phase reconstruction and direct inversion analysis
of two simulated surround variation data sets (generated from a model)
to produce a scattering length density profile of the sample.
"""
def __init__(self, parent, id=wx.ID_ANY, colour="", fignum=0, **kwargs):
wx.Panel.__init__(self, parent, id=id, **kwargs)
self.fignum = fignum
self.SetBackgroundColour(colour)
self.sbi = StatusBarInfo()
self.sbi.write(1, SIM_HELP1)
# Split the panel into parameter and plot subpanels.
sp = wx.SplitterWindow(self, style=wx.SP_3D|wx.SP_LIVE_UPDATE)
if wx.Platform == "__WXMAC__": # workaround to set sash position on
sp.SetMinimumPaneSize(300) # frame.Show() to desired initial value
else:
sp.SetMinimumPaneSize(100)
# Create display panels as children of the splitter.
self.pan1 = wx.Panel(sp, wx.ID_ANY, style=wx.SUNKEN_BORDER)
self.pan1.SetBackgroundColour(colour)
self.pan2 = wx.Panel(sp, wx.ID_ANY, style=wx.SUNKEN_BORDER)
self.pan2.SetBackgroundColour("WHITE")
# Initialize the left and right panels.
self.init_param_panel()
self.init_plot_panel()
# Attach the child panels to the splitter.
sp.SplitVertically(self.pan1, self.pan2)
sp.SetSashPosition(300) # on Mac needs to be set after frame.Show()
sp.SetSashGravity(0.2) # on resize grow mostly on right side
# Put the splitter in a sizer attached to the main panel of the page.
sizer = wx.BoxSizer(wx.VERTICAL)
sizer.Add(sp, 1, wx.EXPAND)
self.SetSizer(sizer)
sizer.Fit(self)
def init_param_panel(self):
"""Initializes the parameter input panel of the SimulationPage."""
# Determine the border size for widgets placed inside a StaticBox.
# On the Mac, a generous minimum border is provided that is sufficient.
if wx.Platform == "__WXMAC__":
SBB = 0
else:
SBB = 5
#----------------------------
# Section 1: Model Parameters
#----------------------------
# Note that a static box must be created before creating the widgets
# that appear inside it (box and widgets must have the same parent).
sbox1 = wx.StaticBox(self.pan1, wx.ID_ANY, "Model Parameters")
# Create instructions for using the model description input box.
line1 = wx.StaticText(self.pan1, wx.ID_ANY,
label="Define the Surface, Sample, and Substrate")
line2 = wx.StaticText(self.pan1, wx.ID_ANY,
label="layers of your model (one layer per line):")
demo_model_params = \
"# SLDensity Thickness Roughness" + \
NEWLINES_2 + NEWLINES_2 + NEWLINES_2 + NEWLINE
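        # For illustration only (hypothetical values), a filled-in model
        # might contain one "SLDensity Thickness Roughness" triple per line:
        #   0.0     0    0      <- Surface
        #   4.5  1000   25      <- Sample
        #   2.07    0    3      <- Substrate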
# Create an input box to enter and edit the model description and
# populate it with a header but no layer information.
# Note that the number of lines determines the height of the box.
self.model = wx.TextCtrl(self.pan1, wx.ID_ANY, value=demo_model_params,
style=wx.TE_MULTILINE|wx.TE_WORDWRAP|wx.RAISED_BORDER)
self.model.SetBackgroundColour(WINDOW_BKGD_COLOUR)
# Group model parameter widgets into a labeled section and
# manage them with a static box sizer.
sbox1_sizer = wx.StaticBoxSizer(sbox1, wx.VERTICAL)
sbox1_sizer.Add(line1, 0, wx.EXPAND|wx.TOP|wx.LEFT|wx.RIGHT, border=SBB)
sbox1_sizer.Add(line2, 0, wx.EXPAND|wx.LEFT|wx.RIGHT, border=SBB)
sbox1_sizer.Add((-1, 4), 0, wx.EXPAND|wx.LEFT|wx.RIGHT, border=SBB)
sbox1_sizer.Add(self.model, 1, wx.EXPAND|wx.BOTTOM|wx.LEFT|wx.RIGHT,
border=SBB)
#---------------------------------
# Section 2: Instrument Parameters
#---------------------------------
sbox2 = wx.StaticBox(self.pan1, wx.ID_ANY, "Resolution Parameters")
# Instantiate object that manages and stores instrument metadata.
self.instr_param = InstrumentParameters()
# Create a panel for gathering instrument metadata.
self.pan12 = wx.Panel(self.pan1, wx.ID_ANY, style=wx.RAISED_BORDER)
self.pan12.SetBackgroundColour(WINDOW_BKGD_COLOUR)
# Present a combobox with instrument choices.
cb_label = wx.StaticText(self.pan12, wx.ID_ANY, "Choose Instrument:")
instr_names = self.instr_param.get_instr_names()
cb = wx.ComboBox(self.pan12, wx.ID_ANY,
#value=instr_names[self.instr_param.get_instr_idx()],
value="",
choices=instr_names,
style=wx.CB_DROPDOWN|wx.CB_READONLY)
cb.SetBackgroundColour(PALE_YELLOW)
self.Bind(wx.EVT_COMBOBOX, self.OnComboBoxSelect, cb)
self.instr_cb = cb
# Create a horizontal box sizer for the combo box and its label.
hbox1_sizer = wx.BoxSizer(wx.HORIZONTAL)
hbox1_sizer.Add(cb_label, 0, border=5,
flag=wx.ALIGN_RIGHT|wx.ALIGN_CENTER_VERTICAL|wx.RIGHT)
hbox1_sizer.Add(cb, 1, wx.EXPAND)
# Create button controls.
btn_edit = wx.Button(self.pan12, wx.ID_ANY, "Edit")
self.Bind(wx.EVT_BUTTON, self.OnEdit, btn_edit)
btn_reset = wx.Button(self.pan12, wx.ID_ANY, "Reset")
self.Bind(wx.EVT_BUTTON, self.OnReset, btn_reset)
# Create a horizontal box sizer for the buttons.
hbox2_sizer = wx.BoxSizer(wx.HORIZONTAL)
hbox2_sizer.Add((10, -1), 1) # stretchable whitespace
hbox2_sizer.Add(btn_edit, 0)
hbox2_sizer.Add((10, -1), 0) # non-stretchable whitespace
hbox2_sizer.Add(btn_reset, 0)
# Create a vertical box sizer for the input file selectors.
vbox2_sizer = wx.BoxSizer(wx.VERTICAL)
vbox2_sizer.Add(hbox1_sizer, 0, wx.EXPAND|wx.TOP|wx.LEFT|wx.RIGHT,
border=10)
vbox2_sizer.Add(hbox2_sizer, 0, wx.EXPAND|wx.ALL, border=10)
# Associate the sizer with its container.
self.pan12.SetSizer(vbox2_sizer)
vbox2_sizer.Fit(self.pan12)
# Group instrument metadata widgets into a labeled section and
# manage them with a static box sizer.
sbox2_sizer = wx.StaticBoxSizer(sbox2, wx.VERTICAL)
sbox2_sizer.Add(self.pan12, 0, wx.EXPAND|wx.ALL, border=SBB)
#---------------------------------------------------
# Section 3: Inversion and Reconstruction Parameters
#---------------------------------------------------
sbox3 = wx.StaticBox(self.pan1, wx.ID_ANY, "Inversion Parameters")
# Instantiate object that manages and stores inversion parameters.
fields = [
["SLD of Surface for Exp 1:", None, "float", 'RE', None],
["SLD of Surface for Exp 2:", None, "float", 'RE', None],
###["SLD of Substrate:", 2.07, "float", 'RE', None],
###["Sample Thickness:", 1000, "float", 'RE', None],
["Qmin:", 0.0, "float", 'RE', None],
["Qmax:", 0.4, "float", 'RE', None],
["# Profile Steps:", 128, "int", 'RE', None],
["Over Sampling Factor:", 4, "int", 'REL', None],
["# Inversion Iterations:", 6, "int", 'RE', None],
["# Monte Carlo Trials:", 10, "int", 'RE', None],
["Simulated Noise (as %):", 5.0, "float", 'RE', None],
["Bound State Energy:", 0.0, "float", 'RE', None],
["Perfect Reconstruction:", "False", "str", 'CRE',
("True", "False")],
###["Cosine Transform Smoothing:", 0.0, "float", 'RE', None],
###["Back Reflectivity:", "True", "str", 'CRE', ("True", "False")],
###["Inversion Noise Factor:", 1, "int", 'RE', None],
###["Show Iterations:", "False", "str", 'CRE', ("True", "False")]
###["Monitor:", "", "str", 'RE', None]
]
self.inver_param = InputListPanel(parent=self.pan1, itemlist=fields,
align=True)
# Group inversion parameter widgets into a labeled section and
# manage them with a static box sizer.
sbox3_sizer = wx.StaticBoxSizer(sbox3, wx.VERTICAL)
sbox3_sizer.Add(self.inver_param, 1, wx.EXPAND|wx.ALL, border=SBB)
#---------------------------
# Section 4: Control Buttons
#---------------------------
sbox4 = wx.StaticBox(self.pan1, wx.ID_ANY, "")
# Create radio buttons to enable/disable resolution calculation.
calc_reso = wx.StaticText(self.pan1, wx.ID_ANY,
label="Resolution: ")
calc_reso.SetBackgroundColour(WINDOW_BKGD_COLOUR)
self.radio1 = wx.RadioButton(self.pan1, wx.ID_ANY, "Yes ",
style=wx.RB_GROUP)
self.radio2 = wx.RadioButton(self.pan1, wx.ID_ANY, "No")
self.radio1.SetBackgroundColour(WINDOW_BKGD_COLOUR)
self.radio2.SetBackgroundColour(WINDOW_BKGD_COLOUR)
self.Bind(wx.EVT_RADIOBUTTON, self.OnCalcResoSelect, self.radio1)
self.Bind(wx.EVT_RADIOBUTTON, self.OnCalcResoSelect, self.radio2)
grid1 = wx.FlexGridSizer(rows=1, cols=2, vgap=0, hgap=0)
grid1.Add(self.radio1, 0, wx.ALIGN_CENTER)
grid1.Add(self.radio2, 0, wx.ALIGN_CENTER)
sbox4_sizer = wx.StaticBoxSizer(sbox4, wx.HORIZONTAL)
sbox4_sizer.Add(calc_reso, 0, wx.ALIGN_RIGHT|wx.ALIGN_CENTER_VERTICAL)
sbox4_sizer.Add(grid1, 0, wx.ALIGN_CENTER_VERTICAL)
# Create the Compute button.
self.btn_compute = wx.Button(self.pan1, wx.ID_ANY, "Compute")
self.Bind(wx.EVT_BUTTON, self.OnCompute, self.btn_compute)
# Create a horizontal box sizer for the buttons.
hbox3_sizer = wx.BoxSizer(wx.HORIZONTAL)
hbox3_sizer.Add(sbox4_sizer, 0, wx.ALIGN_CENTER_VERTICAL)
hbox3_sizer.Add((10, -1), 1) # stretchable whitespace
hbox3_sizer.Add(self.btn_compute, 0, wx.TOP, border=4)
#----------------------------------------
# Manage all of the widgets in the panel.
#----------------------------------------
# Put all section sizers in a vertical box sizer
sizer = wx.BoxSizer(wx.VERTICAL)
sizer.Add(sbox1_sizer, 2, wx.EXPAND|wx.TOP|wx.LEFT|wx.RIGHT, border=10)
sizer.Add(sbox2_sizer, 0, wx.EXPAND|wx.TOP|wx.LEFT|wx.RIGHT, border=10)
sizer.Add(sbox3_sizer, 3, wx.EXPAND|wx.TOP|wx.LEFT|wx.RIGHT, border=10)
sizer.Add(hbox3_sizer, 0, wx.EXPAND|wx.BOTTOM|wx.ALL, border=10)
# Associate the sizer with its container.
self.pan1.SetSizer(sizer)
sizer.Fit(self.pan1)
# Set flag to indicate that resolution will be calculated for a
# simulation operation.
self.calc_resolution = True
# The splitter sash position should be greater than best width size.
#print("Best size for Simulation panel is", self.pan1.GetBestSizeTuple())
def init_plot_panel(self):
"""Initializes the plotting panel of the SimulationPage."""
INTRO_TEXT = "Phase Reconstruction and Inversion Using Simulated Data:"
# Instantiate a figure object that will contain our plots.
figure = Figure()
# Initialize the figure canvas, mapping the figure object to the plot
# engine backend.
canvas = FigureCanvas(self.pan2, wx.ID_ANY, figure)
# Wx-Pylab magic ...
# Make our canvas the active figure manager for pylab so that when
# pylab plotting statements are executed they will operate on our
# canvas and not create a new frame and canvas for display purposes.
# This technique allows this application to execute code that uses
        # pylab statements to generate plots and embed these plots in our
# application window(s).
self.fm = FigureManagerBase(canvas, self.fignum)
_pylab_helpers.Gcf.set_active(self.fm)
# Instantiate the matplotlib navigation toolbar and explicitly show it.
mpl_toolbar = Toolbar(canvas)
mpl_toolbar.Realize()
# Display a title above the plots.
self.pan2_intro_text = INTRO_TEXT
self.pan2_intro = wx.StaticText(self.pan2, wx.ID_ANY, label=INTRO_TEXT)
font = self.pan2_intro.GetFont()
font.SetPointSize(font.GetPointSize() + 1)
font.SetWeight(wx.BOLD)
self.pan2_intro.SetFont(font)
# Create a progress bar to be displayed during a lengthy computation.
self.pan2_gauge = WorkInProgress(self.pan2)
self.pan2_gauge.Show(False)
# Create a horizontal box sizer to hold the title and progress bar.
hbox1_sizer = wx.BoxSizer(wx.HORIZONTAL)
hbox1_sizer.Add(self.pan2_intro, 0, wx.ALIGN_CENTER_VERTICAL)
hbox1_sizer.Add((10, 25), 1) # stretchable whitespace
hbox1_sizer.Add(self.pan2_gauge, 0)
# Create a vertical box sizer to manage the widgets in the main panel.
sizer = wx.BoxSizer(wx.VERTICAL)
sizer.Add(hbox1_sizer, 0, wx.EXPAND|wx.ALL, border=10)
sizer.Add(canvas, 1, wx.EXPAND|wx.LEFT|wx.RIGHT, border=10)
sizer.Add(mpl_toolbar, 0, wx.EXPAND|wx.ALL, border=10)
# Associate the sizer with its container.
self.pan2.SetSizer(sizer)
sizer.Fit(self.pan2)
def OnActivePage(self):
"""This method is called when user selects (makes current) the page."""
self.sbi.restore()
def OnCalcResoSelect(self, event):
"""Process the With Resolution radio button select event."""
radio_selected = event.GetEventObject()
if self.radio1 is radio_selected:
self.pan12.Enable(True)
self.radio1.SetValue(True)
self.radio2.SetValue(False)
self.calc_resolution = True
else:
self.pan12.Enable(False)
self.radio1.SetValue(False)
self.radio2.SetValue(True)
self.calc_resolution = False
def OnComboBoxSelect(self, event):
"""Processes the user's choice of instrument."""
sel = event.GetEventObject().GetSelection()
self.instr_param.set_instr_idx(sel)
event.GetEventObject().SetBackgroundColour("WHITE")
# Show the instrument data to the user and allow edits.
self.instr_param.edit_metadata()
def OnCompute(self, event):
"""
Generates a simulated dataset then performs phase reconstruction and
phase inversion on the data in a separate thread. OnComputeEnd is
called when the computation is finished to plot the results.
"""
#---------------------------------
# Step 1: Process Model Parameters
#---------------------------------
# Validate and convert the model description into a list of layers.
lines = self.model.GetValue().splitlines()
layers = []
for line in lines:
lin = line.strip()
if lin.startswith('#'):
continue # skip over comment line
if len(lin) == 0:
continue # discard blank line
keep = lin.split('#')
lin = keep[0] # discard trailing comment
            ln = lin.split(None, 4) # split on whitespace; only the first 3 fields are used
if len(ln) == 1:
ln.append('100') # default thickness to 100
if len(ln) == 2:
ln.append('0') # default roughness to 0.0
try:
temp = [float(ln[0]), float(ln[1]), float(ln[2])]
except Exception:
popup_error_message(
"Syntax Error",
"Please correct syntax error in model description.")
return
layers.append(temp)
if len(layers) < 3:
popup_error_message(
"Less Than 3 Layers Defined",
("You must specify at least one Surface, Sample, and " +
"Substrate layer for your model."))
return
#-------------------------------------
# Step 2: Process Inversion Parameters
#-------------------------------------
# Explicitly validate all inversion parameters before proceeding. The
# panel's Validate method will invoke all validators associated with
# its top-level input objects and transfer data from them. Although
# char-by-char validation would have warned the user about any invalid
# entries, the user could have pressed the Compute button without
# making the corrections, so a full validation pass must be done now.
if not self.inver_param.Validate():
popup_error_message("Data Entry Error", DATA_ENTRY_ERRMSG)
return
# Get the validated inversion parameters.
params = self.inver_param.GetResults()
if len(sys.argv) > 1 and '--tracep' in sys.argv[1:]:
print("*** Simulation parameters:"); print(params)
sample = layers[1:-1]
params.append(layers[-1][0]) # add SLD of substrate to list
params.append(layers[-1][2]) # add roughness of substrate to list
if len(sys.argv) > 1 and '--tracep' in sys.argv[1:]:
print("*** Model parameters (all layers):"); print(layers)
print("*** Sample layers excluding Surround:"); print(sample)
#---------------------------------------------------------------
# Step 3: Process Instrument Parameters and Calculate Resolution
#---------------------------------------------------------------
# Get the instrument parameter class and obtain the class that defines
# the selected instrument.
ip = self.instr_param
classes = ip.get_instr_classes()
classname = classes[ip.get_instr_idx()]
# If the user has chosen to disable resolution calculation, then we are
# done with this step.
if not self.calc_resolution:
Q = None
dQ = None
# Check to see if an instrument has been specified.
elif ip.get_instr_idx() < 0:
popup_error_message("Choose an Instrument", INSTR_CALC_RESO_ERRMSG)
return
# For a monochromatic instrument, get its parameters and calculate
# resolution.
elif ip.get_instr_idx() <= 3:
wavelength = ip.get_wavelength()
dLoL = ip.get_dLoL()
d_s1 = ip.get_d_s1()
d_s2 = ip.get_d_s2()
Tlo = ip.get_Tlo()
Thi = ip.get_Thi()
slit1_at_Tlo = ip.get_slit1_at_Tlo()
slit2_at_Tlo = ip.get_slit2_at_Tlo()
slit1_below = ip.get_slit1_below()
slit2_below = ip.get_slit2_below()
slit1_above = ip.get_slit1_above()
slit2_above = ip.get_slit2_above()
sample_width = ip.get_sample_width()
sample_broadening = ip.get_sample_broadening()
# Calculate the resolution of the instrument. Specifically compute
# the resolution vector dQ for given values of a Q vector based on
# L, dL, T, and dT. We do not have all of the input data directly
# (for instance we know L (wavelength) but not dT), however, the
# required parameters can be determined by the resolution method
# from the instrument geometry. At a minimum, we need to supply
# L, dLoL, d_s1, d_s2, Tlo, and slits_at_Tlo.
# First, transform some of the data into the format required by
# the resolution method and in all cases avoid passing a datatype
# of None directly or indirectly as part of a tuple.
slits_at_Tlo = (slit1_at_Tlo, slit2_at_Tlo)
if slit2_at_Tlo is None:
slits_at_Tlo = slit1_at_Tlo
slits_below = (slit1_below, slit2_below)
if slit2_below is None:
slits_below = slit1_below
slits_above = (slit1_above, slit2_above)
if slit2_above is None:
slits_above = slit1_above
if sample_width is None:
sample_width = 1e10 # set to a large value
if sample_broadening is None:
sample_broadening = 0.0
if (wavelength is None or
dLoL is None or
d_s1 is None or
d_s2 is None or
Tlo is None or
slits_at_Tlo is None):
popup_error_message("Need Instrument Parameters",
INSTR_PARAM_ERRMSG)
return
# Define the reflectometer.
instrument = classname(wavelength=wavelength,
dLoL=dLoL,
d_s1=d_s1,
d_s2=d_s2,
Tlo=Tlo,
Thi=Thi,
slits_at_Tlo=slits_at_Tlo,
slits_below=slits_below,
slits_above=slits_above,
sample_width=sample_width,
sample_broadening=sample_broadening)
# Compute the resolution.
Q = np.linspace(params[2], params[3], params[4])
res = instrument.resolution(Q=Q)
Q = res.Q
dQ = res.dQ
        # For a polychromatic (time-of-flight) instrument, get its parameters
        # and calculate resolution.
elif ip.get_instr_idx() > 3:
wavelength_lo = ip.get_wavelength_lo()
wavelength_hi = ip.get_wavelength_hi()
dLoL = ip.get_dLoL()
slit1_size = ip.get_slit1_size()
slit2_size = ip.get_slit2_size()
d_s1 = ip.get_d_s1()
d_s2 = ip.get_d_s2()
T = ip.get_T()
sample_width = ip.get_sample_width()
sample_broadening = ip.get_sample_broadening()
# Calculate the resolution of the instrument. Specifically compute
# the resolution vector dQ for given values of a Q vector.
# First, transform some of the data into the format required by
# the resolution method and in all cases avoid passing a datatype
# of None directly or indirectly as part of a tuple.
wavelength = (wavelength_lo, wavelength_hi)
slits = (slit1_size, slit2_size)
if slit2_size is None:
slits = slit1_size
if sample_width is None:
sample_width = 1e10 # set to a large value
if sample_broadening is None:
sample_broadening = 0.0
if (wavelength is None or
dLoL is None or
d_s1 is None or
d_s2 is None or
T is None or
slits is None):
popup_error_message("Need Instrument Parameters",
INSTR_PARAM_ERRMSG)
return
# Define the reflectometer.
instrument = classname(wavelength=wavelength,
dLoL=dLoL,
d_s1=d_s1,
d_s2=d_s2,
T=T,
slits=slits,
sample_width=sample_width,
sample_broadening=sample_broadening)
# Compute the resolution.
L = bins(wavelength[0], wavelength[1], dLoL)
dL = binwidths(L)
'''
Q = np.linspace(params[2], params[3], params[4])
res = instrument.resolution(Q=Q, L=L, dL=dL)
print("*** len of Q, res.Q, res.dQ, L:",
len(Q), len(res.Q), len(res.dQ), len(L))
'''
res = instrument.resolution(L=L, dL=dL)
Q = res.Q
dQ = res.dQ
# FIXME: perform_simulation fails if either Q or dQ is not None
Q = None
dQ = None
#--------------------------------------------------------------
# Step 4: Perform the Simulation, Reconstruction, and Inversion
#--------------------------------------------------------------
# Hide widgets that can change the active plotting canvas or initiate
# another compute operation before we're finished with the current one.
self.btn_compute.Enable(False)
frame = wx.FindWindowByName("AppFrame")
frame.load_demo_dataset_1_item.Enable(False)
frame.load_demo_dataset_2_item.Enable(False)
# Display the progress gauge.
self.pan2_gauge.Start()
self.pan2_gauge.Show(True)
self.pan2.Layout()
# Keep track of the time it takes to do the computation and plotting.
self.t0 = time.time()
# Set the plotting figure manager for this class as the active one and
# erase the current figure.
_pylab_helpers.Gcf.set_active(self.fm)
pylab.clf()
pylab.draw()
# Inform the user that we're starting the computation.
self.sbi.write(2, "Generating new plots ...")
# Apply phase reconstruction and direct inversion techniques on the
# experimental reflectivity datasets.
try:
ExecuteInThread(self.OnComputeEnd, perform_simulation,
sample, params, Q=Q, dQ=dQ)
except Exception as e:
popup_error_message("Operation Failed", str(e))
self.sbi.write(2, "")
return
else:
self.pan2_intro.SetLabel(self.pan2_intro_text)
self.pan2_intro.Refresh()
def OnComputeEnd(self, delayedResult):
"""
Callback function that plots the results of a phase reconstruction and
phase inversion operation.
"""
# The delayedResult object is not used to get the results because
# currently no results are passed back; instead plots are generated.
# Stop and hide the progress gauge.
self.pan2_gauge.Stop()
self.pan2_gauge.Show(False)
self.pan2.Layout()
# Make the plots visible.
pylab.draw()
# Write the total execution and plotting time to the status bar.
secs = time.time() - self.t0
self.sbi.write(2, " %g secs" %(secs))
# Show widgets previously hidden at the start of this computation.
self.btn_compute.Enable(True)
frame = wx.FindWindowByName("AppFrame")
frame.load_demo_dataset_1_item.Enable(True)
frame.load_demo_dataset_2_item.Enable(True)
def OnEdit(self, event):
"""Shows the instrument metadata to the user and allows editing."""
if self.instr_param.get_instr_idx() < 0:
popup_warning_message(
"Select an Instrument",
"Please select an instrument to edit from the drop down list.")
return
self.instr_param.edit_metadata()
def OnReset(self, event):
"""
Restores default parameters for the currently selected instrument.
"""
self.instr_param.init_metadata()
def OnLoadDemoModel1(self, event):
"""Loads Demo Model 1 from a file."""
filespec = example_data(DEMO_MODEL1_DESC)
# Read the entire input file into a buffer.
try:
fd = open(filespec, 'rU')
demo_model_params = fd.read()
fd.close()
except Exception:
popup_warning_message(
"Load Model Error",
"Error loading demo model from file "+DEMO_MODEL1_DESC)
return
# Replace the contents of the model parameter text control box with
# the data from the file.
self.model.Clear()
self.model.SetValue(demo_model_params)
# Specify the instrument (NG-1) and set missing required parameters
# that do not have default values.
self.instr_param.set_instr_idx(1)
self.instr_param.set_Tlo(0.5)
self.instr_param.set_slit1_at_Tlo(0.2)
self.instr_param.set_slit1_below(0.1)
# Put the instrument name in the combo box.
# Note: set background colour before setting the value to update both.
self.instr_cb.SetBackgroundColour("WHITE")
self.instr_cb.SetValue(self.instr_param.get_instr_names()[1])
# Set surface SLD values for simulations 1 and 2 in the inversion and
        # reconstruction parameters panel.
# Note that datatype of None means do not change.
plist = (0.0, 4.5,
None, None, None, None, None, None, None, None, None)
self.inver_param.update_items_in_panel(plist)
def OnLoadDemoModel2(self, event):
"""Loads Demo Model 2 from a file."""
filespec = example_data(DEMO_MODEL2_DESC)
# Read the entire input file into a buffer.
try:
fd = open(filespec, 'rU')
demo_model_params = fd.read()
fd.close()
except Exception:
popup_warning_message(
"Load Model Error",
"Error loading demo model from file "+DEMO_MODEL2_DESC)
return
# Replace the contents of the model parameter text control box with
# the data from the file.
self.model.Clear()
self.model.SetValue(demo_model_params)
# Specify the instrument (NG-1) and set missing required parameters
# that do not have default values.
self.instr_param.set_instr_idx(1)
self.instr_param.set_Tlo(0.5)
self.instr_param.set_slit1_at_Tlo(0.2)
self.instr_param.set_slit1_below(0.1)
# Put the instrument name in the combo box.
# Note: set background colour before setting the value to update both.
self.instr_cb.SetBackgroundColour("WHITE")
self.instr_cb.SetValue(self.instr_param.get_instr_names()[1])
# Set surface SLD values for simulations 1 and 2 in the inversion and
        # reconstruction parameters panel.
# Note that datatype of None means do not change.
plist = (0.0, 6.33,
None, None, None, None, None, None, None, None, None)
self.inver_param.update_items_in_panel(plist)
def OnLoadDemoModel3(self, event):
"""Loads Demo Model 3 from a file."""
filespec = example_data(DEMO_MODEL3_DESC)
# Read the entire input file into a buffer.
try:
fd = open(filespec, 'rU')
demo_model_params = fd.read()
fd.close()
except Exception:
popup_warning_message(
"Load Model Error",
"Error loading demo model from file "+DEMO_MODEL3_DESC)
return
# Replace the contents of the model parameter text control box with
# the data from the file.
self.model.Clear()
self.model.SetValue(demo_model_params)
# Specify the instrument (Liquids) and set missing required parameters
# that do not have default values.
self.instr_param.set_instr_idx(4)
self.instr_param.set_T(4.0)
self.instr_param.set_slit1_size(0.8)
self.instr_param.set_slit2_size(0.8)
# Put the instrument name in the combo box.
# Note: set background colour before setting the value to update both.
self.instr_cb.SetBackgroundColour("WHITE")
self.instr_cb.SetValue(self.instr_param.get_instr_names()[4])
# Set surface SLD values for simulations 1 and 2 in the inversion and
        # reconstruction parameters panel.
# Note that datatype of None means do not change.
plist = (0.0, 6.33,
None, None, None, None, None, None, None, None, None)
self.inver_param.update_items_in_panel(plist)
def OnLoadModel(self, event):
"""Loads the Model from a file."""
dlg = wx.FileDialog(self,
message="Load Model from File ...",
defaultDir=os.getcwd(),
defaultFile="",
wildcard=DATA_FILES+"|"+TEXT_FILES+"|"+ALL_FILES,
style=wx.FD_OPEN)
# Wait for user to close the dialog.
sts = dlg.ShowModal()
if sts == wx.ID_OK:
pathname = dlg.GetDirectory()
filename = dlg.GetFilename()
filespec = os.path.join(pathname, filename)
dlg.Destroy()
if sts == wx.ID_CANCEL:
return # Do nothing
# Read the entire input file into a buffer.
try:
fd = open(filespec, 'rU')
model_params = fd.read()
fd.close()
except Exception:
popup_error_message(
"Load Model Error",
"Error loading model from file "+filename)
return
# Replace the contents of the model parameter text control box with
# the data from the file.
self.model.Clear()
self.model.SetValue(model_params)
def OnSaveModel(self, event):
"""Saves the Model to a file."""
dlg = wx.FileDialog(self,
message="Save Model to File ...",
defaultDir=os.getcwd(),
defaultFile="",
wildcard=DATA_FILES+"|"+TEXT_FILES+"|"+ALL_FILES,
                            style=wx.FD_SAVE|wx.FD_OVERWRITE_PROMPT)
# Wait for user to close the dialog.
sts = dlg.ShowModal()
if sts == wx.ID_OK:
pathname = dlg.GetDirectory()
filename = dlg.GetFilename()
filespec = os.path.join(pathname, filename)
dlg.Destroy()
if sts == wx.ID_CANCEL:
return # Do nothing
# Put the contents of the model parameter text control box into a
# buffer.
model_params = self.model.GetValue()
# Write the entire buffer to the output file.
try:
fd = open(filespec, 'w')
fd.write(model_params)
fd.close()
except Exception:
popup_error_message("Save Model Error",
"Error saving model to file "+filename)
return
#==============================================================================
def perform_simulation(sample, params, Q=None, dQ=None):
"""
Simulates reflectometry data sets from model information then performs
phase reconstruction and direct inversion on the data to generate a
scattering length density profile.
"""
if len(sys.argv) > 1 and '--debug' in sys.argv[1:]:
print("*** Inputs to perform_simulation()")
print("*** sample =", sample)
print("*** params =", params)
if Q is not None:
print("*** Q len =", len(Q), " Q lo:hi =", Q[0], Q[-1])
if dQ is not None:
print("*** dQ len =", len(dQ), " dQ lo:hi =", dQ[0], dQ[-1])
# Construct a dictionary of keyword arguments for the invert_args parameter
# used by the phase inversion algorithm.
#
# Note that the inversion noise factor here is different than the
# simulation noise parameter (determined by user input)!
inversion_args = dict(rhopoints=params[4],
calcpoints=params[5],
iters=params[6],
stages=params[7],
bse=params[9],
noise=1,
showiters=False,
monitor=None)
# Construct a dictionary of keyword arguments for the phase_args parameter
# used by the phase reconstruction algorithm.
reconstruction_args = dict(stages=100)
    # Convert the noise (uncertainty) parameter from a percentage value to a
# hundredths value (e.g., if the user enters 5%, change it to 0.05). Also
# make the noise a non-zero value as Simulation cannot tolerate a zero.
noise = params[8]
if noise < 0.01:
noise = 0.01
    noise /= 100.0 # convert percent value to hundredths value
# Convert flag from a string to a Boolean value.
perfect_reconstruction = True if params[10] == "True" else False
# For monochromatic instruments, Q will be None.
if Q is None:
Q = np.linspace(params[2], params[3], params[4])
# Create simulated datasets and perform phase reconstruction and phase
# inversion using the simulated datasets.
#
# Note that Simulation internally calls both SurroundVariation and
# Inversion as is done when simulation is not used to create the datasets.
sim = Simulation(q=Q,
dq=dQ,
sample=sample,
u=params[11],
urough=params[12],
v1=params[0],
v2=params[1],
noise=noise,
seed=None,
invert_args=inversion_args,
phase_args=reconstruction_args,
perfect_reconstruction=perfect_reconstruction)
# Generate the plots.
sim.plot()
pylab.subplots_adjust(wspace=0.25, hspace=0.33,
left=0.09, right=0.96,
top=0.95, bottom=0.08)
| reflectometry/direfl | direfl/gui/simulation_page.py | Python | mit | 41,666 | 0.002328 |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.template.defaultfilters import filesizeformat
from django.utils.translation import ugettext_lazy as _
from horizon import exceptions
from openstack_dashboard.api import glance
def get_available_images(request, project_id=None, images_cache=None):
"""Returns a list of images that are public or owned by the given
project_id. If project_id is not specified, only public images
are returned.
:param images_cache: An optional dict-like object in which to
cache public and per-project id image metadata.
"""
if images_cache is None:
images_cache = {}
public_images = images_cache.get('public_images', [])
images_by_project = images_cache.get('images_by_project', {})
if 'public_images' not in images_cache:
public = {"is_public": True,
"status": "active"}
try:
images, _more, _prev = glance.image_list_detailed(
request, filters=public)
            public_images.extend(images)
images_cache['public_images'] = public_images
except Exception:
exceptions.handle(request,
_("Unable to retrieve public images."))
# Preempt if we don't have a project_id yet.
if project_id is None:
images_by_project[project_id] = []
if project_id not in images_by_project:
owner = {"property-owner_id": project_id,
"status": "active"}
try:
owned_images, _more, _prev = glance.image_list_detailed(
request, filters=owner)
images_by_project[project_id] = owned_images
except Exception:
owned_images = []
exceptions.handle(request,
_("Unable to retrieve images for "
"the current project."))
else:
owned_images = images_by_project[project_id]
if 'images_by_project' not in images_cache:
images_cache['images_by_project'] = images_by_project
images = owned_images + public_images
# Remove duplicate images
image_ids = []
final_images = []
for image in images:
if image.id not in image_ids:
image_ids.append(image.id)
final_images.append(image)
return [image for image in final_images
if image.container_format not in ('aki', 'ari')]
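# Illustrative use of the optional metadata cache; the repeated-call pattern
# shown here is an assumption, not code from this module:
#     cache = {}
#     images = get_available_images(request, project_id, images_cache=cache)
#     # later calls that pass the same dict reuse the fetched image metadata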
def image_field_data(request, include_empty_option=False):
"""Returns a list of tuples of all images.
Generates a sorted list of images available. And returns a list of
(id, name) tuples.
:param request: django http request object
:param include_empty_option: flag to include a empty tuple in the front of
the list
:return: list of (id, name) tuples
"""
try:
images = get_available_images(request, request.user.project_id)
except Exception:
exceptions.handle(request, _('Unable to retrieve images'))
images.sort(key=lambda c: c.name)
images_list = [('', _('Select Image'))]
for image in images:
image_label = u"{} ({})".format(image.name, filesizeformat(image.size))
images_list.append((image.id, image_label))
if not images:
return [("", _("No images available")), ]
return images_list
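# Illustrative return value (ids, names, and sizes are hypothetical):
#   [('', u'Select Image'),
#    ('6d4b...', u'cirros (12.6 MB)'),
#    ('a91c...', u'ubuntu-20.04 (512.0 MB)')]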
| coreycb/horizon | openstack_dashboard/dashboards/project/images/utils.py | Python | apache-2.0 | 3,845 | 0 |
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Eval checkpoint driver.
This is an example evaluation script for users to understand the EfficientNet
model checkpoints on CPU. To serve EfficientNet, please consider to export a
`SavedModel` from checkpoints and use tf-serving to serve.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl import app
from absl import flags
from absl import logging
import tensorflow.compat.v1 as tf
import model_builder_factory
import preprocessing
import utils
flags.DEFINE_string('model_name', 'efficientnet-b0', 'Model name to eval.')
flags.DEFINE_string('runmode', 'examples', 'Running mode: examples or imagenet')
flags.DEFINE_string(
'imagenet_eval_glob', None, 'Imagenet eval image glob, '
'such as /imagenet/ILSVRC2012*.JPEG')
flags.DEFINE_string(
'imagenet_eval_label', None, 'Imagenet eval label file path, '
'such as /imagenet/ILSVRC2012_validation_ground_truth.txt')
flags.DEFINE_string('ckpt_dir', '/tmp/ckpt/', 'Checkpoint folders')
flags.DEFINE_boolean('enable_ema', True, 'Enable exponential moving average.')
flags.DEFINE_string('export_ckpt', None, 'Exported ckpt for eval graph.')
flags.DEFINE_string('example_img', '/tmp/panda.jpg',
'Filepath for a single example image.')
flags.DEFINE_string('labels_map_file', '/tmp/labels_map.txt',
'Labels map from label id to its meaning.')
flags.DEFINE_bool('include_background_label', False,
'Whether to include background as label #0')
flags.DEFINE_bool('advprop_preprocessing', False,
'Whether to use AdvProp preprocessing.')
flags.DEFINE_integer('num_images', 5000,
'Number of images to eval. Use -1 to eval all images.')
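# Example invocations (paths and files are placeholders):
#   python eval_ckpt_main.py --runmode=examples --model_name=efficientnet-b0 \
#     --ckpt_dir=/tmp/ckpt --example_img=/tmp/panda.jpg \
#     --labels_map_file=/tmp/labels_map.txt
#   python eval_ckpt_main.py --runmode=imagenet --model_name=efficientnet-b0 \
#     --ckpt_dir=/tmp/ckpt --imagenet_eval_glob='/imagenet/ILSVRC2012*.JPEG' \
#     --imagenet_eval_label=/imagenet/ILSVRC2012_validation_ground_truth.txt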
class EvalCkptDriver(utils.EvalCkptDriver):
"""A driver for running eval inference."""
def build_model(self, features, is_training):
"""Build model with input features."""
tf.logging.info(self.model_name)
model_builder = model_builder_factory.get_model_builder(self.model_name)
if self.advprop_preprocessing:
# AdvProp uses Inception preprocessing.
features = features * 2.0 / 255 - 1.0
else:
features -= tf.constant(
model_builder.MEAN_RGB, shape=[1, 1, 3], dtype=features.dtype)
features /= tf.constant(
model_builder.STDDEV_RGB, shape=[1, 1, 3], dtype=features.dtype)
logits, _ = model_builder.build_model(
features, self.model_name, is_training)
probs = tf.nn.softmax(logits)
probs = tf.squeeze(probs)
return probs
def get_preprocess_fn(self):
"""Build input dataset."""
return preprocessing.preprocess_image
def get_eval_driver(model_name,
include_background_label=False,
advprop_preprocessing=False):
"""Get a eval driver."""
image_size = model_builder_factory.get_model_input_size(model_name)
return EvalCkptDriver(
model_name=model_name,
batch_size=1,
image_size=image_size,
include_background_label=include_background_label,
advprop_preprocessing=advprop_preprocessing)
# FLAGS should not be used before main.
FLAGS = flags.FLAGS
def main(unused_argv):
logging.set_verbosity(logging.ERROR)
driver = get_eval_driver(FLAGS.model_name, FLAGS.include_background_label,
FLAGS.advprop_preprocessing)
if FLAGS.runmode == 'examples':
# Run inference for an example image.
driver.eval_example_images(FLAGS.ckpt_dir, [FLAGS.example_img],
FLAGS.labels_map_file, FLAGS.enable_ema,
FLAGS.export_ckpt)
elif FLAGS.runmode == 'imagenet':
# Run inference for imagenet.
driver.eval_imagenet(FLAGS.ckpt_dir, FLAGS.imagenet_eval_glob,
FLAGS.imagenet_eval_label, FLAGS.num_images,
FLAGS.enable_ema, FLAGS.export_ckpt)
else:
print('must specify runmode: examples or imagenet')
if __name__ == '__main__':
app.run(main)
| tensorflow/tpu | models/official/efficientnet/eval_ckpt_main.py | Python | apache-2.0 | 4,717 | 0.003604 |
import utils, TLV_utils
from iso_7816_4_card import *
import building_blocks
class CardOS_Card(ISO_7816_4_Card,building_blocks.Card_with_ls):
DRIVER_NAME = ["CardOS"]
ATRS = [
("3bf2180002c10a31fe58c80874", None),
]
APDU_LIFECYCLE = C_APDU("\x00\xCA\x01\x83\x00")
APDU_PHASE_CONTROL = C_APDU("\x80\x10\x00\x00\x00")
APDU_LIST_X = C_APDU("\x80\x16\x01\x00\x00")
LIST_X_DF = 0
LIST_X_EF = 1
LS_L_SIZE_TAG = 0x80
CARDOS_LIFE_CYCLE_STATUS_BYTE_DESCRIPTIONS = [
(0x10, "operational"),
(0x20, "Administration"),
(0x23, "Personalization"),
(0x26, "Initialisation"),
(0x34, "Manufacturing"),
(0x3F, "Death"),
(0x29, "Erase in Progress"),
]
STATUS_WORDS = ( {
"6283": "File is deactivated",
"6300": "Authentication failed",
"6581": "EEPROM error, command aborted",
"6700": "LC invalid",
"6881": "Logical channel not supported",
"6981": "Command can not be used for file structure",
"6982": "Required access right not granted",
"6983": "BS object blocked",
"6984": "BS object has invalid format",
"6985": "No random number available",
"6986": "No current EF selected",
"6987": "Key object for SM not found",
"6988": "Key object used for SM has invalid format",
"6A80": "Invalid parameters in data field",
"6A81": "Function/mode not supported",
"6A82": "File not found",
"6A83": "Record/object not found",
"6A84": "Not enough memory in file / in file system available",
"6A85": "LC does not fit the TLV structure of the data field",
"6A86": "P1/P2 invalid",
"6A87": "LC does not fit P1/P2",
"6A88": "Object not found (GET DATA)",
"6C00": "LC does not fit the data to be sent (e.g. SM)",
"6D00": "INS invalid",
"6E00": "CLA invalid (Hi nibble)",
"6F00": "Technical error:\n + It was tried to create more than 254 records in a file\n + Package uses SDK version which is not compatible to API version\n + Package contains invalid statements (LOAD EXECUTABLE)",
"6F81": "File is invalidated because of checksum error (prop.)",
"6F82": "Not enough memory available in XRAM",
"6F83": "Transaction error (i.e. command must not be used in transaction)",
"6F84": "General protection fault (prop.)",
"6F85": "Internal failure of PK-API (e.g. wrong CCMS format)",
"6F86": "Key Object not found",
"6F87": "Chaining error",
"6FFF": "Internal assertion (invalid internal error)\n + This error is no runtime error, but an internal error which can occur because of a programming error only.",
"9000": "Command executed correctly",
"9001": "Command exectued correctly; EEPROM weakness detected (EEPROM written with second trial; the EEPROM area overwritten has a limited lifetime only)",
"9850": "Overflow using INCREASE / underflow using DECREASE"
} )
def list_x(self, x):
"Get a list of x objects, where x is one of 0 (DFs) or 1 (EFs) or 2 (DFs and EFs)"
## FIXME I just guessed this information
result = self.send_apdu(C_APDU(self.APDU_LIST_X, p1=x))
files = []
unpacked = TLV_utils.unpack(result.data)
for tag, length, value in unpacked:
if isinstance(value, list):
for tag, length, value in value:
if tag == 0x86:
files.append(value)
else:
if tag == 0x86:
files.append(value)
return files
def cmd_listdirs(self):
"List DFs in current DF"
result = self.list_x(0)
print "DFs: " + ", ".join([utils.hexdump(a, short=True) for a in result])
def cmd_listfiles(self):
"List EFs in current DF"
result = self.list_x(1)
print "EFs: " + ", ".join([utils.hexdump(a, short=True) for a in result])
def cmd_lifecycle(self):
"Check the current lifecycle"
result = self.send_apdu(C_APDU(self.APDU_LIFECYCLE))
#status = binascii.b2a_hex(result.data)
for hex, mes in self.CARDOS_LIFE_CYCLE_STATUS_BYTE_DESCRIPTIONS:
if (int(binascii.b2a_hex(result.data), 16) == hex):
print "Satus: " + mes
break
def cmd_phase_control(self):
"change lifecycle between Administration and Operational"
result = self.send_apdu(C_APDU(self.APDU_PHASE_CONTROL))
COMMANDS = {
"list_dirs": cmd_listdirs,
"list_files": cmd_listfiles,
"ls": building_blocks.Card_with_ls.cmd_list,
"check_lifecycle": cmd_lifecycle,
"phase_control": cmd_phase_control,
}
| 12019/cyberflex-shell | cards/cardos_card.py | Python | gpl-2.0 | 4,862 | 0.005965 |