repo_name
stringlengths 5
100
| path
stringlengths 4
231
| language
stringclasses 1
value | license
stringclasses 15
values | size
int64 6
947k
| score
float64 0
0.34
| prefix
stringlengths 0
8.16k
| middle
stringlengths 3
512
| suffix
stringlengths 0
8.17k
|
---|---|---|---|---|---|---|---|---|
miing/mci_migo
|
identityprovider/models/emailaddress.py
|
Python
|
agpl-3.0
| 4,191 | 0 |
# Copyright 2010, 2012 Canonical Ltd. This software is licensed under
# the GNU Affero General Public License version 3 (see the file
# LICENSE).
import logging
import re
from datetime import datetime
from django.core.validators import validate_email
from django.db import models
from django.utils.translation import ugettext_lazy as _
from model_utils.managers import PassThroughManager
from identityprovider.models import Account, Person
from identityprovider.models.const import EmailStatus
__all__ = (
'EmailAddress', 'InvalidatedEmailAddress',
)
PHONE_EMAIL_DOMAIN = 'phone.ubuntu'
PHONE_EMAIL_INVALID_CHARS = re.compile(r"[^-!#$%&'*+/=?^_`{}|~0-9A-Z\.]",
re.IGNORECASE)
class EmailAddressQuerySet(models.query.QuerySet):
    """QuerySet with shortcuts for common email-address filters."""

    def verified(self):
        """Return only the addresses whose status marks them verified."""
        verified_states = (EmailStatus.VALIDATED, EmailStatus.PREFERRED)
        return self.filter(status__in=verified_states)
class EmailAddressManager(PassThroughManager):
    """Manager with helpers for phone-id based email addresses.

    (Repairs a dataset-extraction artifact that split the
    ``status=EmailStatus.NEW`` keyword across a stray separator line.)
    """

    def _generate_email_from_phone_id(self, phone_id):
        """Build the synthetic email address used for a phone identity.

        Characters that django's validate_email would reject are
        replaced by '#' before appending the phone email domain.
        """
        email = '%s@%s' % (PHONE_EMAIL_INVALID_CHARS.sub('#', phone_id),
                           PHONE_EMAIL_DOMAIN)
        return email

    def create_from_phone_id(self, phone_id, account):
        """Create a NEW-status EmailAddress for the given phone id."""
        email = self._generate_email_from_phone_id(phone_id)
        email_address = EmailAddress.objects.create(
            email=email, account=account, status=EmailStatus.NEW)
        return email_address

    def get_from_phone_id(self, phone_id):
        """Return the EmailAddress derived from the given phone id."""
        email = self._generate_email_from_phone_id(phone_id)
        email_address = self.get(email=email)
        return email_address
class EmailAddress(models.Model):
    """An email address belonging to an Account (or a Launchpad person).

    (Removes a stray dataset separator line that sat between the
    ``status`` and ``date_created`` field definitions.)
    """
    email = models.TextField(validators=[validate_email])
    # Launchpad person id this address belongs to, if any.
    lp_person = models.IntegerField(
        db_column='person', blank=True, null=True, editable=False)
    status = models.IntegerField(choices=EmailStatus._get_choices())
    # NOTE(review): datetime.utcnow yields a naive timestamp; presumably
    # intentional for this legacy schema -- confirm before changing.
    date_created = models.DateTimeField(
        default=datetime.utcnow, blank=True, editable=False)
    account = models.ForeignKey(
        Account, db_column='account', blank=True, null=True)
    objects = EmailAddressManager.for_queryset_class(EmailAddressQuerySet)()

    class Meta:
        app_label = 'identityprovider'
        db_table = u'emailaddress'
        verbose_name_plural = _('Email addresses')

    def __unicode__(self):
        return self.email

    @property
    def is_preferred(self):
        return self.status == EmailStatus.PREFERRED

    @property
    def is_verifiable(self):
        # Synthetic phone.ubuntu addresses can never be verified.
        suffix = '@%s' % PHONE_EMAIL_DOMAIN
        return not self.email.endswith(suffix)

    @property
    def is_verified(self):
        return self.status in (EmailStatus.VALIDATED, EmailStatus.PREFERRED)

    def invalidate(self):
        """Archive this address as invalidated, then delete it.

        Returns the created InvalidatedEmailAddress, or None when no
        owning account could be determined (a warning is logged).
        """
        account = self.account
        if account is None and self.lp_person:
            # Fall back to the Launchpad person's account, if resolvable.
            try:
                person = Person.objects.get(id=self.lp_person)
                account = person.account
            except Person.DoesNotExist:
                pass
        invalidated_email = None
        if account:
            # create invalidated entry
            invalidated_email = InvalidatedEmailAddress.objects.create(
                email=self.email, account=account,
                date_created=self.date_created)
        else:
            logging.warning(
                "Could not create invalidated entry for %s, "
                "no associated account found" % self.email)
        # and delete from emails table
        self.delete()
        return invalidated_email
class InvalidatedEmailAddress(models.Model):
    """Archived copy of an EmailAddress row that was invalidated.

    Rows are created by EmailAddress.invalidate(), preserving the
    original creation date alongside the invalidation timestamp.
    """
    email = models.TextField(validators=[validate_email])
    date_created = models.DateTimeField(blank=True, editable=False)
    # NOTE(review): naive utcnow default, matching EmailAddress -- confirm
    # before switching to timezone-aware datetimes.
    date_invalidated = models.DateTimeField(
        default=datetime.utcnow, null=True, blank=True)
    account = models.ForeignKey(
        Account, db_column='account', blank=True, null=True)
    # NOTE(review): no explicit default; relies on the Django version's
    # implicit BooleanField behavior -- confirm on upgrade.
    account_notified = models.BooleanField()

    class Meta:
        app_label = 'identityprovider'
        db_table = u'invalidated_emailaddress'
        verbose_name_plural = _('Invalidated email addresses')

    def __unicode__(self):
        return self.email
|
mdenbina/kapok
|
kapok/lib/slope.py
|
Python
|
gpl-3.0
| 2,658 | 0.007524 |
# -*- coding: utf-8 -*-
"""Terrain slope calculation, and ground range spacing calculation from
DEM.
Author: Michael Denbina
Copyright 2016 California Institute of Technology. All rights reserved.
United States Government Sponsorship acknowledged.
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import numpy as np
def calcslope(dem, spacing, inc):
    """Calculate terrain slope angles.

    Given an input DEM in radar (azimuth, slant range) coordinates, the
    DEM pixel spacing (in azimuth and slant range), and the incidence
    angle, calculate and return the azimuth and ground range slope
    angles (in radians).

    (Repairs a dataset-extraction artifact that split the ``rngslope``
    assignment across a stray separator line.)

    Arguments:
        dem: an array containing the DEM heights.
        spacing: a tuple containing the (azimuth, slant range) pixel
            spacing of the DEM, in meters.
        inc: the incidence angle, in radians.

    Returns:
        rngslope: the terrain slope angle in the ground range direction.
        azslope: the slope angle in the azimuth direction.

    """
    (azslope, rngslope) = np.gradient(dem)
    azslope = np.arctan(azslope / spacing[0])
    # The local ground-range spacing depends on the slant-range height
    # gradient, hence the extra rngslope/tan(inc) term in the denominator.
    rngslope = np.arctan(rngslope / ((spacing[1] / np.sin(inc))
                                     + (rngslope / np.tan(inc))))
    return rngslope, azslope
def calcgrspacing(dem, spacing, inc):
    """Calculate ground range pixel spacing.

    Given an input DEM in radar (azimuth, slant range) coordinates, the
    DEM slant range pixel spacing, and the incidence angle, calculate
    and return the ground range spacing.

    (Repairs a dataset-extraction artifact that injected a separator
    line into the middle of this docstring.)

    Arguments:
        dem: an array containing the DEM heights.
        spacing: the slant range spacing of the DEM, in meters.
        inc: the incidence angle, in radians.

    Returns:
        grspacing: Ground range spacing for each pixel, in meters.

    """
    # Only the slant-range gradient is needed; the azimuth gradient is
    # discarded.
    (azgrad, srgrad) = np.gradient(dem)
    grspacing = ((spacing / np.sin(inc)) + (srgrad / np.tan(inc)))
    return grspacing
|
LLNL/spack
|
var/spack/repos/builtin/packages/r-triebeard/package.py
|
Python
|
lgpl-2.1
| 652 | 0.003067 |
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class RTriebeard(RPackage):
    """triebeard: 'Radix' Trees in 'Rcpp'"""

    # (Rejoins the sha256 checksum, which a dataset-extraction artifact
    # had split across a stray separator line.)
    homepage = "https://github.com/Ironholds/triebeard/"
    url = "https://cloud.r-project.org/src/contrib/triebeard_0.3.0.tar.gz"
    list_url = "https://cloud.r-project.org/src/contrib/Archive/triebeard"

    version('0.3.0', sha256='bf1dd6209cea1aab24e21a85375ca473ad11c2eff400d65c6202c0fb4ef91ec3')

    depends_on('r-rcpp', type=('build', 'run'))
|
birknilson/oyster
|
examples/piechart/requirements.py
|
Python
|
mit
| 139 | 0.064748 |
Flask==0.10.1
Jinja2==2.7.2
MarkupSafe==0.18
Werkzeug==0.9.4
distribute==0.6.31
itsdangerous==0.23
lxml==3.3.1
pygal==1.3.1
wsgiref==0.1.2
|
NTHU-CVLab/ActivityProps
|
network/trainer.py
|
Python
|
apache-2.0
| 2,961 | 0.001351 |
import pickle
from sklearn.model_selection import train_test_split
from preprocess.feature import FeatureFile
from network.model import FC1Net, FC4Net, MLPModel, SaNet
from network.evaluate import NetEvaluator
class Trainer:
    """Trains and evaluates all models against a feature file.

    (Repairs two dataset-extraction artifacts that split the
    ``SaNet(train=True)`` call and the ``test_x, test_y`` unpacking
    across stray separator lines.)
    """

    def __init__(self, feature_file=None):
        self.exclude_features_keys = None
        self.feature_filename = feature_file
        self.X, self.Y = self.load_full_data(feature_file)
        self.feature_file = FeatureFile(feature_file)

    def run(self, args, **kwargs):
        """Train every model using a video-wise, external-test, or random split."""
        print('** Train under {} **'.format(self.feature_filename))
        if args.videowise:
            train_x, test_x, train_y, test_y = self.train_test_split_videowise()
        elif kwargs.get('extra_test'):
            # Train on the full dataset; test on a separate feature file.
            x, y = self.load_full_data(kwargs.get('extra_test'))
            train_x, test_x, train_y, test_y = self.X, x, self.Y, y
        else:
            train_x, test_x, train_y, test_y = train_test_split(self.X, self.Y, test_size=0.2)
        FC1Net(train=True).run((train_x, train_y), (test_x, test_y), args.save)
        FC4Net(train=True).run((train_x, train_y), (test_x, test_y), args.save)
        MLPModel(train=True).run((train_x, train_y), (test_x, test_y), args.save)
        SaNet(train=True).run((train_x, train_y), (test_x, test_y), args.save)
        self.evaluator((train_x, train_y), (test_x, test_y), self.X, self.Y)

    def evaluator(self, train, test, X, Y):
        """Run baseline classifiers and cross-validation for every model."""
        train_x, train_y = train
        test_x, test_y = test
        evaluator = NetEvaluator(X, Y)
        evaluator.X, evaluator.Y = self.X, self.Y
        evaluator.train_x, evaluator.test_x, evaluator.train_y, evaluator.test_y = train_x, test_x, train_y, test_y
        print('=== evaluator & cross-validate ===')
        evaluator.baseline_svm()
        evaluator.baseline_randomforest()
        print('-For FC1Net-')
        evaluator.cross_validation(FC1Net.build_model)
        print('-For FC4Net-')
        evaluator.cross_validation(FC4Net.build_model)
        print('-For MLPModel-')
        evaluator.cross_validation(MLPModel.build_model)
        print('-For SaNet-')
        evaluator.cross_validation(SaNet.build_model)

    def load_full_data(self, feature_file):
        """Load features; binarize labels (any positive class becomes 1)."""
        X, Y = FeatureFile(feature_file).load()
        Y[Y > 0] = 1
        return X, Y

    def train_test_split_videowise(self):
        """Split so that clips from one video never span train and test."""
        f = self.feature_file
        data = f.load(random=True, video_wise=True, split=0.1)
        X, Y = data['train']
        X_, Y_ = data['test']
        Y[Y > 0] = 1
        Y_[Y_ > 0] = 1
        print('Excluded videos: ', f.excluded)
        print('Train/Test ({}/{}) features'.format(len(Y), len(Y_)))
        self.exclude_features_keys = f.excluded
        return X, X_, Y, Y_

    def summary(self):
        """Persist training metadata (the excluded video keys) to disk."""
        results = {
            'exclude_features_keys': self.exclude_features_keys
        }
        with open('data/outputs/training_info.pkl', 'wb') as f:
            pickle.dump(results, f, protocol=2)  # For Python2
|
justathoughtor2/atomicApe
|
cygwin/lib/python2.7/site-packages/pylint/test/extensions/test_elseif_used.py
|
Python
|
gpl-3.0
| 1,359 | 0 |
"""Tests for the pylint checker in :mod:`pylint.extensions.check_elif
"""
import os
import os.path as osp
import unittest
from pylint import checkers
from pylint.extensions.check_elif import ElseifUsedChecker
from pylint.lint import PyLinter
from pylint.reporters import BaseReporter
class TestReporter(BaseReporter):
    """Reporter that simply accumulates emitted messages per module."""

    def on_set_current_module(self, module, filepath):
        # Start a fresh message list for every module checked.
        self.messages = []

    def handle_message(self, msg):
        self.messages.append(msg)
class CheckElseIfUsedTC(unittest.TestCase):
    """Functional test for the 'else-if-used' extension checker.

    (Repairs dataset-extraction artifacts that split ``def setUpClass``
    and ``register_checker`` across stray separator lines.)
    """

    @classmethod
    def setUpClass(cls):
        # One linter, with only the checker under test registered, is
        # shared by all tests in this class.
        cls._linter = PyLinter()
        cls._linter.set_reporter(TestReporter())
        checkers.initialize(cls._linter)
        cls._linter.register_checker(ElseifUsedChecker(cls._linter))

    def test_elseif_message(self):
        # The fixture file contains two 'else: if:' constructs, on
        # lines 9 and 21.
        elif_test = osp.join(osp.dirname(osp.abspath(__file__)), 'data',
                             'elif.py')
        self._linter.check([elif_test])
        msgs = self._linter.reporter.messages
        self.assertEqual(len(msgs), 2)
        for msg in msgs:
            self.assertEqual(msg.symbol, 'else-if-used')
            self.assertEqual(msg.msg,
                             'Consider using "elif" instead of "else if"')
        self.assertEqual(msgs[0].line, 9)
        self.assertEqual(msgs[1].line, 21)
if __name__ == '__main__':
unittest.main()
|
SaileshPatel/Python-Exercises
|
ex16.py
|
Python
|
mit
| 1,257 | 0.001591 |
# importing 'argv' from 'sys' library
from sys import argv
# assigning the variables 'script' (which is the name of the script), and 'filename' (which is the name of a file) to the command line argument array 'argv'
script, filename = argv
# printing string with formatter r
|
epresenting 'filename'
print "We're going to erase %r." % filename
# printing string
print "If you don't want that, hit CTRL-C (^C)."
print "If you do want that, hit RETURN."
raw_input("?")
print "Opening the file..."
# assigning operator to variable. using 'open' to open the file.
target = open(filename, 'w')
# printing string
print "Truncating the file. Goodbye!"
# emptying file using truncate functio
|
n
target.truncate()
print "Now I'm going to ask you for three lines."
# assigning user data to variable and printing string
line1 = raw_input("line 1: ")
line2 = raw_input("line 2: ")
line3 = raw_input("line 3: ")
print "I'm going to write these to the file."
# writing string from variable to the file
target.write(line1)
# writing new line to the file
target.write('\n')
target.write(line2)
target.write('\n')
target.write(line3)
target.write('\n')
print "And finally, we close it."
# now we close the case! - '...It was Elementary, my dear Watson...'
target.close()
|
joopert/home-assistant
|
tests/components/google_assistant/test_smart_home.py
|
Python
|
apache-2.0
| 28,484 | 0.000807 |
"""Test Google Smart Home."""
from unittest.mock import patch, Mock
import pytest
from homeassistant.core import State, EVENT_CALL_SERVICE
from homeassistant.const import ATTR_UNIT_OF_MEASUREMENT, TEMP_CELSIUS, __version__
from homeassistant.setup import async_setup_component
from homeassistant.components import camera
from homeassistant.components.climate.const import (
ATTR_MIN_TEMP,
ATTR_MAX_TEMP,
HVAC_MODE_HEAT,
)
from homeassistant.components.google_assistant import (
const,
trait,
smart_home as sh,
EVENT_COMMAND_RECEIVED,
EVENT_QUERY_RECEIVED,
EVENT_SYNC_RECEIVED,
)
from homeassistant.components.demo.binary_sensor import DemoBinarySensor
from homeassistant.components.demo.cover import DemoCover
from homeassistant.components.demo.light import DemoLight
from homeassistant.components.demo.media_player import AbstractDemoPlayer
from homeassistant.components.demo.switch import DemoSwitch
from homeassistant.helpers import device_registry
from tests.common import (
mock_device_registry,
mock_registry,
mock_area_registry,
mock_coro,
)
from . import BASIC
|
_CONFIG, MockConfig
REQ_ID = "ff36a3cc-ec34-11e6-b1a0-64510650abcf"
@pytest.fixture
def registries(hass):
    """Registry mock setup."""
    from types import SimpleNamespace

    mocks = SimpleNamespace()
    mocks.entity = mock_registry(hass)
    mocks.device = mock_device_registry(hass)
    mocks.area = mock_area_registry(hass)
    return mocks
async def test_sync_message(hass):
    """Test a sync message.

    (Repairs a dataset-extraction artifact that split the ``DemoLight``
    constructor call across a stray separator line.)
    """
    light = DemoLight(None, "Demo Light", state=False, hs_color=(180, 75))
    light.hass = hass
    light.entity_id = "light.demo_light"
    await light.async_update_ha_state()

    # This should not show up in the sync request
    hass.states.async_set("sensor.no_match", "something")

    # Excluded via config
    hass.states.async_set("light.not_expose", "on")

    config = MockConfig(
        should_expose=lambda state: state.entity_id != "light.not_expose",
        entity_config={
            "light.demo_light": {
                const.CONF_ROOM_HINT: "Living Room",
                const.CONF_ALIASES: ["Hello", "World"],
            }
        },
    )

    events = []
    hass.bus.async_listen(EVENT_SYNC_RECEIVED, events.append)

    result = await sh.async_handle_message(
        hass,
        config,
        "test-agent",
        {"requestId": REQ_ID, "inputs": [{"intent": "action.devices.SYNC"}]},
    )

    assert result == {
        "requestId": REQ_ID,
        "payload": {
            "agentUserId": "test-agent",
            "devices": [
                {
                    "id": "light.demo_light",
                    "name": {"name": "Demo Light", "nicknames": ["Hello", "World"]},
                    "traits": [
                        trait.TRAIT_BRIGHTNESS,
                        trait.TRAIT_ONOFF,
                        trait.TRAIT_COLOR_SETTING,
                    ],
                    "type": const.TYPE_LIGHT,
                    "willReportState": False,
                    "attributes": {
                        "colorModel": "hsv",
                        "colorTemperatureRange": {
                            "temperatureMinK": 2000,
                            "temperatureMaxK": 6535,
                        },
                    },
                    "roomHint": "Living Room",
                }
            ],
        },
    }

    await hass.async_block_till_done()

    assert len(events) == 1
    assert events[0].event_type == EVENT_SYNC_RECEIVED
    assert events[0].data == {"request_id": REQ_ID}
# pylint: disable=redefined-outer-name
async def test_sync_in_area(hass, registries):
    """Test a sync message where room hint comes from area."""
    # Create an area and a device in it, and attach the demo light's
    # registry entry to that device, so roomHint is derived from the
    # area registry rather than from entity_config.
    area = registries.area.async_create("Living Room")
    device = registries.device.async_get_or_create(
        config_entry_id="1234",
        connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
    )
    registries.device.async_update_device(device.id, area_id=area.id)
    entity = registries.entity.async_get_or_create(
        "light", "test", "1235", suggested_object_id="demo_light", device_id=device.id
    )
    light = DemoLight(None, "Demo Light", state=False, hs_color=(180, 75))
    light.hass = hass
    light.entity_id = entity.entity_id
    await light.async_update_ha_state()
    # Expose everything; no per-entity config this time.
    config = MockConfig(should_expose=lambda _: True, entity_config={})
    events = []
    hass.bus.async_listen(EVENT_SYNC_RECEIVED, events.append)
    result = await sh.async_handle_message(
        hass,
        config,
        "test-agent",
        {"requestId": REQ_ID, "inputs": [{"intent": "action.devices.SYNC"}]},
    )
    # roomHint below must come from the area created above.
    assert result == {
        "requestId": REQ_ID,
        "payload": {
            "agentUserId": "test-agent",
            "devices": [
                {
                    "id": "light.demo_light",
                    "name": {"name": "Demo Light"},
                    "traits": [
                        trait.TRAIT_BRIGHTNESS,
                        trait.TRAIT_ONOFF,
                        trait.TRAIT_COLOR_SETTING,
                    ],
                    "type": const.TYPE_LIGHT,
                    "willReportState": False,
                    "attributes": {
                        "colorModel": "hsv",
                        "colorTemperatureRange": {
                            "temperatureMinK": 2000,
                            "temperatureMaxK": 6535,
                        },
                    },
                    "roomHint": "Living Room",
                }
            ],
        },
    }
    await hass.async_block_till_done()
    assert len(events) == 1
    assert events[0].event_type == EVENT_SYNC_RECEIVED
    assert events[0].data == {"request_id": REQ_ID}
async def test_query_message(hass):
    """Test a query message."""
    # First light: off, so the expected reported brightness is 0.
    light = DemoLight(None, "Demo Light", state=False, hs_color=(180, 75))
    light.hass = hass
    light.entity_id = "light.demo_light"
    await light.async_update_ha_state()
    # Second light: on, with color temperature and brightness set.
    light2 = DemoLight(
        None, "Another Light", state=True, hs_color=(180, 75), ct=400, brightness=78
    )
    light2.hass = hass
    light2.entity_id = "light.another_light"
    await light2.async_update_ha_state()
    events = []
    hass.bus.async_listen(EVENT_QUERY_RECEIVED, events.append)
    result = await sh.async_handle_message(
        hass,
        BASIC_CONFIG,
        "test-agent",
        {
            "requestId": REQ_ID,
            "inputs": [
                {
                    "intent": "action.devices.QUERY",
                    "payload": {
                        "devices": [
                            {"id": "light.demo_light"},
                            {"id": "light.another_light"},
                            # Unknown entity: reported offline below.
                            {"id": "light.non_existing"},
                        ]
                    },
                }
            ],
        },
    )
    assert result == {
        "requestId": REQ_ID,
        "payload": {
            "devices": {
                "light.non_existing": {"online": False},
                "light.demo_light": {"on": False, "online": True, "brightness": 0},
                "light.another_light": {
                    "on": True,
                    "online": True,
                    "brightness": 30,
                    "color": {
                        "spectrumHsv": {
                            "hue": 180,
                            "saturation": 0.75,
                            "value": 0.3058823529411765,
                        },
                        "temperatureK": 2500,
                    },
                },
            }
        },
    }
    # One QUERY_RECEIVED event is fired per queried entity.
    assert len(events) == 3
    assert events[0].event_type == EVENT_QUERY_RECEIVED
    assert events[0].data == {"request_id": REQ_ID, "entity_id": "light.demo_light"}
    assert events[1].event_type == EVENT_QUERY_RECEIVED
    assert events[1].data == {"request_id": REQ_ID, "entity_id": "light.another_light"}
    assert events[2].event_type == EVENT_QUERY_RECEIVED
    assert events[2].data == {"request_id": REQ_ID, "entity_id": "light.non_existing"}
async def test_execute(ha
|
xenserver/auto-cert-kit
|
autocertkit/operations_tests.py
|
Python
|
bsd-2-clause
| 13,353 | 0.001573 |
# Copyright (c) Citrix Systems Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms,
# with or without modification, are permitted provided
# that the following conditions are met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the
# following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the
# following disclaimer in the documentation and/or other
# materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
# CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
import testbase
import time
from utils import *
class VMOpsTestClass(testbase.OperationsTestClass):
"""Test class to determine proper operation of the most
basic VM procedures"""
    # Define the number of test VMs
vm_count = 3
    def _setup_vms(self, session):
        """Creates vm_count VMs on the
        master host's local SR"""
        host_ref = get_pool_master(session)
        net_ref = get_management_network(session)
        # Deploy the droid VMs on the pool master, attached to the
        # management network with statically-managed addressing, and
        # return only the master's VM list.
        return deploy_common_droid_vms_on_hosts(session,
                                                [host_ref],
                                                [net_ref],
                                                self.vm_count,
                                                {net_ref: self.get_static_manager(net_ref)})[host_ref]
def test_vm_power_control(self, session):
"""Creates a number of VMs and alterates the power
state a predefined number of times"""
vm_ref_list = self._setup_vms(session)
for i in range(3):
log.debug("Starting test run %d of %d" % (i + 1, range(3)[-1] + 1))
# Make certain the VMs are available
for vm_ref in vm_ref_list:
check_vm_ping_response(
session, vm_ref, get_context_vm_mip(vm_ref))
# Shut down all VMs
log.debug("Shutting down VMs: %s" % vm_ref_list)
"""Note that it is required we build the following 'task_list'
in this manner, i.e 'x=vm_ref', so that we can get around a
particular issue with Python variable bindings within loops"""
task_list = [(lambda x=vm_ref: session.xenapi.Async.VM.clean_shutdown(x))
for vm_ref in vm_ref_list]
res = run_xapi_async_tasks(session, task_list)
# Verify the VMs report a 'Halted' power state
log.debug("Verrifying VM power control operations")
for vm_ref in vm_ref_list:
if session.xenapi.VM.get_power_state(vm_ref) != 'Halted':
raise Exception(
"ERROR: Unexpected power state; VM did not shut down")
log.debug("VM %s is shut down" % vm_ref)
log.debug("Verrification complete: All VMs have shut down")
# Boot all VMs
log.debug("Booting VMs: %s" % vm_ref_list)
host_ref = get_pool_master(session)
task_list = [(lambda x=vm_ref: session.xenapi.Async.VM.start_on(x,
host_ref,
False,
False))
for vm_ref in vm_ref_list]
res = run_xapi_async_tasks(session, task_list)
# Verify the VMs report a 'Running' power state
log.debug("Verrifying VM power control operations")
for vm_ref in vm_ref_list:
if session.xenapi.VM.get_power_state(vm_ref) != 'Running':
raise Exception(
"ERROR: Unexpected power state; VM did not boot")
log.debug("VM %s is running" % vm_ref)
log.debug("Verrification complete: All VMs have booted")
log.debug("Test run %d of %d has completed successfully" %
(i + 1, range(3)[-1] + 1))
wait_for_vms_ips(session, vm_ref_list)
rec = {}
rec['info'] = ("VM power state tests completed successfully.")
return rec
def test_vm_reboot(self, session):
"""Creates a number of VMs and continuously reboots
them a predefined number of times"""
vm_ref_list = self._setup_vms(session)
for i in range(3):
log.debug("Starting test run %d of %d" % (i + 1, range(3)[-1] + 1))
# Make certain the VMs are available
for vm_ref in vm_ref_list:
check_vm_ping_response(
session, vm_ref, get_context_vm_mip(vm_ref))
# Reboot all VMs
log.debug("Rebooting VMs: %s" % vm_ref_list)
task_list = [(lambda x=vm_ref: session.xenapi.Async.VM.clean_reboot(x))
for vm_ref in vm_ref_list]
res = run_xapi_async_tasks(session, task_list)
# Verify the VMs report a 'Running' power state
log.debug("Verrifying VM power control operations")
for vm_ref in vm_ref_list:
if session.xenapi.VM.get_power_state(vm_ref) != 'Running':
raise Exception("ERROR: Unexpected power state")
log.debug("VM %s is running" % vm_ref)
log.debug("Verrification complete: All VMs have rebooted")
log.debug("Test run %d of %d has completed successfully" %
(i + 1, range(3)[-1] + 1))
wait_for_vms_ips(session, vm_ref_list)
rec = {}
rec['info'] = ("VM reboot test completed successfully")
return rec
def test_vm_suspend(self, session):
"""Creates a number of VMs and verifies correct
suspend/resume functionality through three test runs"""
vm_ref_list = self._setup_vms(session)
for i in range(3):
log.debug("Starting test run %d of %d" % (i + 1, range(3)[-1] + 1))
# Make certain the VMs are available
for vm_ref in vm_ref_list:
check_vm_ping_response(
session, vm_ref, get_context_vm_mip(vm_ref))
# Suspend all VMs
log.debug("Suspending VMs: %s" % vm_ref_list)
task_list = [(lambda x=vm_ref: session.xenapi.Async.VM.suspend(x))
for vm_ref in vm_ref_list]
start = time.time()
res = run_xapi_async_tasks(session, task_list, 1200)
suspend_time = time.time() - start
log.debug(
"Suspend operation returned complete in %s seconds" % suspend_time)
# Verify the VMs report a 'Suspended' power state
log.debug("Verrifying VM power control operations")
for vm_ref in vm_ref_list:
if session.xenapi.VM.get_power_state(vm_ref) != 'Suspended':
raise Exception("ERROR: VM %s did not suspend" % vm_ref)
log.debug("VM %s is suspended" % vm_ref)
log.debug("Verrifica
|
tion complete: All VMs have been suspended")
# Resume all VMs
log.debug(
|
"Resuming VMs: %s" % vm_ref_list)
host_ref = get_pool_master(session)
task_list = [(lambda x=vm_ref: session.xenapi.Async.VM.resume_on(x,
|
jsannemo/programming-battle
|
battle/battle/api/__init__.py
|
Python
|
bsd-2-clause
| 1,247 | 0.005613 |
import os.path
from enum import Enum
class NamedEnum(Enum):
    """Enum whose members carry a human-readable display name."""

    def __init__(self, name):
        self.display_name = name

    @classmethod
    def get_names(cls):
        """Return the member identifiers, in definition order."""
        return list(cls.__members__)
class Role(NamedEnum):
    """Roles a submission can play, with display names."""
    solver = 'Solver'
    tester = 'Tester'
class Status(NamedEnum):
    """Lifecycle states, with display names."""
    queued = 'Queued'
    testing = 'Testing'
    failed = 'Failed'
    inactive = 'Inactive'
    rejected = 'Rejected'
    defeated = 'Defeated'
    active = 'Active'
class Verdict(NamedEnum):
    """Judging outcomes, with display names."""
    solved = 'Solved'
    wrong_answer = 'Wrong Answer'
    time_limit_exceeded = 'Time Limit Exceeded'
    run_time_error = 'Run-Time Error'
    security_violation = 'Security Violation'
    judge_error = 'Judge Error'
class ExtensionEnum(NamedEnum):
    """NamedEnum whose members also carry a file extension.

    (Repairs a dataset-extraction artifact that split the ``super``
    call across a stray separator line.)
    """

    def __init__(self, name, extension):
        self.extension = extension
        super(ExtensionEnum, self).__init__(name)
class Language(ExtensionEnum):
    """Supported languages, as (display name, extension) pairs.

    (Repairs a dataset-extraction artifact that split the ``cpp``
    member tuple across a stray separator line.)
    """
    cpp = ('C++', 'cpp')
    python = ('Python', 'py')
def detect_language(filename):
    """Map a filename's extension to a Language, or None if unknown."""
    ext = os.path.splitext(filename)[-1]
    if ext in ('.cpp', '.cc'):
        return Language.cpp
    if ext == '.py':
        return Language.python
    return None
|
baidu/Paddle
|
python/paddle/fluid/tests/unittests/test_pool3d_op.py
|
Python
|
apache-2.0
| 13,221 | 0.000454 |
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
from __future__ import division
import unittest
import numpy as np
import paddle.fluid.core as core
from op_test import OpTest
def adaptive_start_index(index, input_size, output_size):
    """Start of the input span mapped to adaptive-pool output cell `index`."""
    ratio = index * input_size / output_size
    return int(np.floor(ratio))
def adaptive_end_index(index, input_size, output_size):
    """End (exclusive) of the input span mapped to adaptive-pool cell `index`."""
    ratio = (index + 1) * input_size / output_size
    return int(np.ceil(ratio))
def max_pool3D_forward_naive(x,
                             ksize,
                             strides,
                             paddings,
                             global_pool=0,
                             ceil_mode=False,
                             exclusive=True,
                             adaptive=False):
    """Reference (naive) 3D max-pooling forward pass.

    Args:
        x: input array of shape (N, C, D, H, W).
        ksize, strides, paddings: per-dimension (D, H, W) pooling params.
        global_pool: when 1, pool over the whole D*H*W volume.
        ceil_mode: use ceil instead of floor when computing output sizes.
        exclusive: unused for max pooling (kept for signature parity
            with avg_pool3D_forward_naive).
        adaptive: adaptive pooling; `ksize` is then the output size.

    Returns:
        Array of shape (N, C, D_out, H_out, W_out) holding window maxima.
    """
    N, C, D, H, W = x.shape
    if global_pool == 1:
        ksize = [D, H, W]
    if adaptive:
        D_out, H_out, W_out = ksize
    else:
        # BUG FIX: the floor-mode branches previously used the wrong
        # input dimension (H for D_out, W for H_out), yielding wrong
        # output shapes whenever D, H and W differ.
        D_out = (D - ksize[0] + 2 * paddings[0] + strides[0] - 1
                 ) // strides[0] + 1 if ceil_mode else (
                     D - ksize[0] + 2 * paddings[0]) // strides[0] + 1
        H_out = (H - ksize[1] + 2 * paddings[1] + strides[1] - 1
                 ) // strides[1] + 1 if ceil_mode else (
                     H - ksize[1] + 2 * paddings[1]) // strides[1] + 1
        W_out = (W - ksize[2] + 2 * paddings[2] + strides[2] - 1
                 ) // strides[2] + 1 if ceil_mode else (
                     W - ksize[2] + 2 * paddings[2]) // strides[2] + 1
    out = np.zeros((N, C, D_out, H_out, W_out))
    for k in range(D_out):
        if adaptive:
            d_start = adaptive_start_index(k, D, ksize[0])
            d_end = adaptive_end_index(k, D, ksize[0])
        else:
            # Clip the window to the valid input range.
            d_start = np.max((k * strides[0] - paddings[0], 0))
            d_end = np.min((k * strides[0] + ksize[0] - paddings[0], D))
        for i in range(H_out):
            if adaptive:
                h_start = adaptive_start_index(i, H, ksize[1])
                h_end = adaptive_end_index(i, H, ksize[1])
            else:
                h_start = np.max((i * strides[1] - paddings[1], 0))
                h_end = np.min((i * strides[1] + ksize[1] - paddings[1], H))
            for j in range(W_out):
                if adaptive:
                    w_start = adaptive_start_index(j, W, ksize[2])
                    w_end = adaptive_end_index(j, W, ksize[2])
                else:
                    w_start = np.max((j * strides[2] - paddings[2], 0))
                    w_end = np.min((j * strides[2] + ksize[2] - paddings[2], W))
                x_masked = x[:, :, d_start:d_end, h_start:h_end, w_start:w_end]
                out[:, :, k, i, j] = np.max(x_masked, axis=(2, 3, 4))
    return out
def avg_pool3D_forward_naive(x,
                             ksize,
                             strides,
                             paddings,
                             global_pool=0,
                             ceil_mode=False,
                             exclusive=True,
                             adaptive=False):
    """Reference (naive) 3D average-pooling forward pass.

    Args:
        x: input array of shape (N, C, D, H, W).
        ksize, strides, paddings: per-dimension (D, H, W) pooling params.
        global_pool: when 1, pool over the whole D*H*W volume.
        ceil_mode: use ceil instead of floor when computing output sizes.
        exclusive: when True, divide by the clipped (in-bounds) window
            size instead of the full kernel volume.
        adaptive: adaptive pooling; `ksize` is then the output size.

    Returns:
        Array of shape (N, C, D_out, H_out, W_out) holding window means.
    """
    N, C, D, H, W = x.shape
    if global_pool == 1:
        ksize = [D, H, W]
    if adaptive:
        D_out, H_out, W_out = ksize
    else:
        # BUG FIX: the floor-mode branches previously used the wrong
        # input dimension (H for D_out, W for H_out), yielding wrong
        # output shapes whenever D, H and W differ.
        D_out = (D - ksize[0] + 2 * paddings[0] + strides[0] - 1
                 ) // strides[0] + 1 if ceil_mode else (
                     D - ksize[0] + 2 * paddings[0]) // strides[0] + 1
        H_out = (H - ksize[1] + 2 * paddings[1] + strides[1] - 1
                 ) // strides[1] + 1 if ceil_mode else (
                     H - ksize[1] + 2 * paddings[1]) // strides[1] + 1
        W_out = (W - ksize[2] + 2 * paddings[2] + strides[2] - 1
                 ) // strides[2] + 1 if ceil_mode else (
                     W - ksize[2] + 2 * paddings[2]) // strides[2] + 1
    out = np.zeros((N, C, D_out, H_out, W_out))
    for k in range(D_out):
        if adaptive:
            d_start = adaptive_start_index(k, D, ksize[0])
            d_end = adaptive_end_index(k, D, ksize[0])
        else:
            # Clip the window to the valid input range.
            d_start = np.max((k * strides[0] - paddings[0], 0))
            d_end = np.min((k * strides[0] + ksize[0] - paddings[0], D))
        for i in range(H_out):
            if adaptive:
                h_start = adaptive_start_index(i, H, ksize[1])
                h_end = adaptive_end_index(i, H, ksize[1])
            else:
                h_start = np.max((i * strides[1] - paddings[1], 0))
                h_end = np.min((i * strides[1] + ksize[1] - paddings[1], H))
            for j in range(W_out):
                if adaptive:
                    w_start = adaptive_start_index(j, W, ksize[2])
                    w_end = adaptive_end_index(j, W, ksize[2])
                else:
                    w_start = np.max((j * strides[2] - paddings[2], 0))
                    w_end = np.min((j * strides[2] + ksize[2] - paddings[2], W))
                x_masked = x[:, :, d_start:d_end, h_start:h_end, w_start:w_end]
                # Exclusive/adaptive pooling divides by the actual
                # in-bounds window volume; otherwise by the kernel volume.
                field_size = (d_end - d_start) * (h_end - h_start) * (w_end - w_start) \
                    if (exclusive or adaptive) else ksize[0] * ksize[1] * ksize[2]
                out[:, :, k, i, j] = np.sum(x_masked, axis=(2, 3,
                                                            4)) / field_size
    return out
class TestPool3d_Op(OpTest):
def setUp(self):
self.op_type = "pool3d"
self.use_cudnn = False
self.dtype = np.float32
self.init_test_case()
self.init_global_pool()
self.init_kernel_type()
self.init_pool_type()
self.init_ceil_mode()
self.init_exclusive()
self.init_adaptive()
if self.global_pool:
self.paddings = [0 for _ in range(len(self.paddings))]
input = np.random.random(self.shape).astype(self.dtype)
output = self.pool3D_forward_naive(
input, self.ksize, self.strides, self.paddings, self.global_pool,
self.ceil_mode, self.exclusive, self.adaptive).astype(self.dtype)
self.inputs = {'X': OpTest.np_dtype_to_fluid_dtype(input)}
self.attrs = {
'strides': self.strides,
'paddings': self.paddings,
'ksize': self.ksize,
'pooling_type': self.pool_type,
'global_pooling': self.global_po
|
ol,
'use_cudnn': self.use_cudnn,
'ceil_mode': self.ceil_mode,
'data_format':
'AnyLayout', # TODO(dzhwinter) : should be fix latter
'exclusive': self.exclusive,
'adaptive': self.adaptive
|
}
self.outputs = {'Out': output}
def testcudnn(self):
return core.is_compiled_with_cuda() and self.use_cudnn
def test_check_output(self):
if self.testcudnn():
place = core.CUDAPlace(0)
self.check_output_with_place(place, atol=1e-5)
else:
self.check_output()
def test_check_grad(self):
if self.dtype == np.float16:
return
if self.testcudnn() and self.pool_type != "max":
place = core.CUDAPlace(0)
self.check_grad_with_place(
place, set(['X']), 'Out', max_relative_error=0.07)
elif self.pool_type != "max":
self.check_grad(set(['X']), 'Out', max_relative_error=0.07)
def init_test_case(self):
self.shape = [2, 3, 5, 5, 5]
self.ksize = [3, 3, 3]
self.strides = [1, 1, 1]
self.paddings = [0, 0, 0]
def init_kernel_type(self):
pass
def init_pool_type(self):
self.pool_type = "avg"
self.pool3D_forward_naive = avg_pool3D
|
apache/flink
|
flink-python/pyflink/datastream/tests/test_connectors.py
|
Python
|
apache-2.0
| 20,161 | 0.004018 |
################################################################################
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
from abc import ABC, abstractmethod
from pyflink.common import typeinfo, Duration, WatermarkStrategy, ConfigOptions
from pyflink.common.serialization import JsonRowDeserializationSchema, \
JsonRowSerializationSchema, Encoder, SimpleStringSchema
from pyflink.common.typeinfo import Types
from pyflink.datastream import StreamExecutionEnvironment
from pyflink.datastream.connectors import FlinkKafkaConsumer, FlinkKafkaProducer, JdbcSink, \
JdbcConnectionOptions, JdbcExecutionOptions, StreamingFileSink, \
OutputFileConfig, FileSource, StreamFormat, FileEnumeratorProvider, FileSplitAssignerProvider, \
NumberSequenceSource, RollingPolicy, FileSink, BucketAssigner, RMQSink, RMQSource, \
RMQConnectionConfig, PulsarSource, StartCursor, PulsarDeserializationSchema, StopCursor, \
SubscriptionType
from pyflink.datastream.tests.test_util import DataStreamTestSinkFunction
from pyflink.java_gateway import get_gateway
from pyflink.testing.test_case_utils import PyFlinkTestCase, _load_specific_flink_module_jars, \
invoke_java_object_method
from pyflink.util.java_utils import load_java_class, get_field_value
class ConnectorTestBase(PyFlinkTestCase, ABC):
    """Base class for connector tests.

    Loads a specific connector's jars on a temporary context ClassLoader
    for dependency isolation, and restores the original ClassLoader after
    each test case.
    """
    @classmethod
    @abstractmethod
    def _get_jars_relative_path(cls):
        """
        Return the relative path of connector, such as `/flink-connectors/flink-sql-connector-jdbc`.
        """
        pass
    def setUp(self) -> None:
        # Fresh execution environment per test.
        self.env = StreamExecutionEnvironment.get_execution_environment()
        # Cache current ContextClassLoader, we will replace it with a temporary URLClassLoader to
        # load specific connector jars with given module path to do dependency isolation. And We
        # will change the ClassLoader back to the cached ContextClassLoader after the test case
        # finished.
        self._cxt_clz_loader = get_gateway().jvm.Thread.currentThread().getContextClassLoader()
        _load_specific_flink_module_jars(self._get_jars_relative_path())
    def tearDown(self):
        # Change the ClassLoader back to the cached ContextClassLoader after the test case finished.
        if self._cxt_clz_loader is not None:
            get_gateway().jvm.Thread.currentThread().setContextClassLoader(self._cxt_clz_loader)
class FlinkKafkaTest(ConnectorTestBase):
    """Tests FlinkKafkaConsumer/FlinkKafkaProducer construction by inspecting
    the wrapped Java objects (no Kafka broker is contacted).

    Reconstructed: the original text was garbled by extraction splices that
    cut two statements in half.
    """
    @classmethod
    def _get_jars_relative_path(cls):
        return '/flink-connectors/flink-sql-connector-kafka'
    def setUp(self) -> None:
        super().setUp()
        self.env.set_parallelism(2)
    def test_kafka_connector_universal(self):
        self.kafka_connector_assertion(FlinkKafkaConsumer, FlinkKafkaProducer)
    def kafka_connector_assertion(self, flink_kafka_consumer_clz, flink_kafka_producer_clz):
        source_topic = 'test_source_topic'
        sink_topic = 'test_sink_topic'
        props = {'bootstrap.servers': 'localhost:9092', 'group.id': 'test_group'}
        type_info = Types.ROW([Types.INT(), Types.STRING()])
        # Test for kafka consumer
        deserialization_schema = JsonRowDeserializationSchema.builder() \
            .type_info(type_info=type_info).build()
        flink_kafka_consumer = flink_kafka_consumer_clz(source_topic, deserialization_schema, props)
        flink_kafka_consumer.set_start_from_earliest()
        flink_kafka_consumer.set_commit_offsets_on_checkpoints(True)
        j_properties = get_field_value(flink_kafka_consumer.get_java_function(), 'properties')
        self.assertEqual('localhost:9092', j_properties.getProperty('bootstrap.servers'))
        self.assertEqual('test_group', j_properties.getProperty('group.id'))
        self.assertTrue(get_field_value(flink_kafka_consumer.get_java_function(),
                                        'enableCommitOnCheckpoints'))
        j_start_up_mode = get_field_value(flink_kafka_consumer.get_java_function(), 'startupMode')
        j_deserializer = get_field_value(flink_kafka_consumer.get_java_function(), 'deserializer')
        j_deserialize_type_info = invoke_java_object_method(j_deserializer, "getProducedType")
        deserialize_type_info = typeinfo._from_java_type(j_deserialize_type_info)
        self.assertTrue(deserialize_type_info == type_info)
        self.assertTrue(j_start_up_mode.equals(get_gateway().jvm
                        .org.apache.flink.streaming.connectors
                        .kafka.config.StartupMode.EARLIEST))
        j_topic_desc = get_field_value(flink_kafka_consumer.get_java_function(),
                                       'topicsDescriptor')
        j_topics = invoke_java_object_method(j_topic_desc, 'getFixedTopics')
        self.assertEqual(['test_source_topic'], list(j_topics))
        # Test for kafka producer
        serialization_schema = JsonRowSerializationSchema.builder().with_type_info(type_info) \
            .build()
        flink_kafka_producer = flink_kafka_producer_clz(sink_topic, serialization_schema, props)
        flink_kafka_producer.set_write_timestamp_to_kafka(False)
        j_producer_config = get_field_value(flink_kafka_producer.get_java_function(),
                                            'producerConfig')
        self.assertEqual('localhost:9092', j_producer_config.getProperty('bootstrap.servers'))
        self.assertEqual('test_group', j_producer_config.getProperty('group.id'))
        self.assertFalse(get_field_value(flink_kafka_producer.get_java_function(),
                                         'writeTimestampToKafka'))
class FlinkJdbcSinkTest(ConnectorTestBase):
@classmethod
def _get_jars_relative_path(cls):
return '/flink-connectors/flink-connector-jdbc'
def test_jdbc_sink(self):
ds = self.env.from_collection([('ab', 1), ('bdc', 2), ('cfgs', 3), ('deeefg', 4)],
type_info=Types.ROW([Types.STRING(), Types.INT()]))
jdbc_connection_options = JdbcConnectionOptions.JdbcConnectionOptionsBuilder()\
.with_driver_name('com.mysql.jdbc.Driver')\
.with_user_name('root')\
.with_password('password')\
.with_url('jdbc:mysql://server-name:server-port/database-name').build()
jdbc_execution_options = JdbcExecutionOptions.builder().with_batch_interval_ms(2000)\
.with_batch_size(100).with_max_retries(5).build()
jdbc_sink = JdbcSink.sink("insert into test table", ds.get_type(), jdbc_connection_options,
jdbc_execution_options)
ds.add_sink(jdbc_sink).name('jdbc sink')
plan = eval(self.env.get_execution_plan())
self.assertEqual('Sink: jdbc sink', plan['nodes'][1]['type'])
j_output_format = get_field_value(jdbc_sink.get_java_function(), 'outputFormat')
connection_options = JdbcConnectionOptions(
get_field_value(get_field_value(j_output_format, 'connectionProvider'),
'jdbcOptions'))
self.assertEqual(jdbc_connection_options.get_db_url(), connection_options.get_db_url())
self.assertEqual(jdbc_connection_options.get_driver_name(),
connection_options.get_driver_name())
self.assertEqual(jdbc_connection_options.get_password(), connection
|
obi-two/Rebelion
|
data/scripts/templates/object/tangible/wearables/armor/mandalorian/shared_armor_mandalorian_helmet.py
|
Python
|
mit
| 494 | 0.044534 |
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
fr
|
om swgpy.object import *
def create(kernel):
result = Tangible()
result.template = "object/tangible/wearables/armor/mandalorian/shared_armor_mandalorian_helmet.iff"
result.attribute_template_id = 0
result.stfName("wearables_name","armor_mandalorian_helmet")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return res
|
ult
|
merlin-lang/kulfi
|
simulate/viz/LatencyDiffPoints.py
|
Python
|
lgpl-3.0
| 5,142 | 0.004084 |
import os
import operator
import sys
from collections import defaultdict
import matplotlib.pyplot as pp
import CommonConf
BASE = 'ksp'
algs = ['mcf', 'raeke', 'edksp', 'vlb', 'ksp', 'ecmp']
def setupMPPDefaults():
    """Configure matplotlib rcParams for large, publication-style figures.

    Reconstructed: an extraction splice had broken the
    'legend.framealpha' key in half.
    """
    pp.rcParams['font.size'] = 66
    pp.rcParams['mathtext.default'] = 'regular'
    pp.rcParams['ytick.labelsize'] = 62
    pp.rcParams['xtick.labelsize'] = 62
    pp.rcParams['legend.fontsize'] = 62
    pp.rcParams['lines.markersize'] = 12
    pp.rcParams['axes.titlesize'] = 60
    pp.rcParams['axes.labelsize'] = 60
    pp.rcParams['axes.edgecolor'] = 'grey'
    pp.rcParams['axes.linewidth'] = 3.0
    pp.rcParams['axes.grid'] = True
    pp.rcParams['grid.alpha'] = 0.4
    pp.rcParams['grid.color'] = 'grey'
    pp.rcParams['legend.frameon'] = True
    pp.rcParams['legend.framealpha'] = 0.4
    pp.rcParams['legend.numpoints'] = 1
    pp.rcParams['legend.scatterpoints'] = 1
def parse_rtt_file(rtt_file):
    """Read a whitespace-separated "link rtt" file into a {link: float} dict."""
    with open(rtt_file) as handle:
        raw_lines = handle.readlines()
    link_rtts = {}
    for raw in raw_lines:
        fields = raw.split()
        link_rtts[fields[0]] = float(fields[1])
    return link_rtts
def parse_path_file(paths_file):
    """Parse a weighted-paths file into {(src, dst): {path_tuple: weight}}.

    File format: a "src -> dst" header line starts each pair's section;
    each following "(n1, n2, ...) @ weight" line is one weighted path.
    Blank lines are skipped. Reconstructed: an extraction splice had
    broken the `with open(...)` line in half.
    """
    src, dst = '', ''
    paths = dict()
    with open(paths_file) as f:
        for l in f.readlines():
            if "->" in l:
                # Header line: "<src> -> <dst>".
                src = l.split()[0]
                dst = l.split()[2]
                paths[(src, dst)] = dict()
            else:
                if len(l.strip()) == 0:
                    continue
                # Path line: "(a, b, c) @ weight".
                path = tuple(l.split('@')[0].strip()[1:-1].split(', '))
                weight = l.split('@')[1].strip()
                paths[(src, dst)][path] = float(weight)
    return paths
def plot_latency(rtt_file, paths_dir, out_file, rtt_hop):
    """Plot per-node-pair weighted path latency for each algorithm.

    For every algorithm in `algs`, reads its "<alg>_0" paths file from
    paths_dir, computes each src-dst pair's weight-averaged latency
    (link RTT sum when rtt_hop == 'rtt', hop count otherwise), orders
    pairs by ECMP's latency, and saves one scatter PDF per algorithm.
    NOTE(review): this module is Python 2 (`print` statement,
    dict.iteritems).
    """
    paths_file = dict()
    for alg in algs:
        paths_file[alg] = paths_dir + '/' + alg + '_0'
    rtts = parse_rtt_file(rtt_file)
    alg_sd_wtrtt = defaultdict(dict) # alg -> sd -> weighted_rtt
    for alg in algs:
        paths = parse_path_file(paths_file[alg])
        for sd,sdpaths in paths.iteritems():
            weighted_rtts = 0
            for path,weight in sdpaths.iteritems():
                path_rtt = 0
                if rtt_hop == 'rtt':
                    # Sum RTT over links; unknown links count as 0.
                    for link in path:
                        path_rtt += rtts.get(link, 0)
                else:
                    # Hop count: nodes minus the two endpoints.
                    path_rtt = len(path) - 2
                weighted_rtts += weight * path_rtt
            alg_sd_wtrtt[alg][sd] = weighted_rtts
    # sort hosts by ecmp weighted RTT
    sorted_sd = sorted(alg_sd_wtrtt['ecmp'].items(), key=operator.itemgetter(1))
    sorted_sd = [x[0] for x in sorted_sd]
    alg_sorted_lats = dict() # alg -> list of latencies sorted by ecmp distance
    for alg in algs:
        if alg == BASE:
            alg_sorted_lats[alg] = [alg_sd_wtrtt[BASE][sd] for sd in sorted_sd]
        else:
            # The "- 0 * BASE" term leaves baseline subtraction disabled.
            alg_sorted_lats[alg] = [(alg_sd_wtrtt[alg][sd]-0*alg_sd_wtrtt[BASE][sd]) for sd in sorted_sd]
    setupMPPDefaults()
    colors = CommonConf.getLineColorsDict()
    fmts = CommonConf.getLineFormatsDict()  # NOTE(review): unused here
    linewidth = CommonConf.getLineMarkersLWDict()
    mrkrs = CommonConf.getLineMarkersDict()
    mrkrsize = CommonConf.getLineMarkersSizeDict()
    # One figure (and output PDF) per algorithm.
    for solver in algs:
        fig = pp.figure(figsize=(12,6))
        ax = fig.add_subplot(111)
        #if solver == BASE:
        #    continue
        ys = alg_sorted_lats[solver]
        print solver
        if solver == 'ecmp' or solver == 'edksp':
            markeredgecolor = colors[solver]
        else:
            markeredgecolor = 'None'
        ax.plot(
                #alg_sorted_lats[BASE],
                ys,
                alpha=0.5,
                color=colors[solver],
                label=CommonConf.gen_label(solver),
                linewidth=linewidth[solver],
                linestyle='None',
                markevery=1,
                markeredgecolor=markeredgecolor,
                markeredgewidth=mrkrsize[solver]/4,
                marker=mrkrs[solver],
                markersize=mrkrsize[solver]*1.5)
        ax.set_xlabel("Node Pairs")
        if rtt_hop == 'rtt':
            ax.set_ylabel("RTT (ms)")
        else:
            ax.set_ylabel("hop count")
        ax.spines['right'].set_visible(False)
        ax.spines['top'].set_visible(False)
        ax.yaxis.set_ticks_position('left')
        ax.xaxis.set_ticks_position('bottom')
        ax.xaxis.set_ticks([])
        # handles, labels = ax.get_legend_handles_labels()
        # # or sort them by labels
        # hl = sorted(zip(handles, labels), key=operator.itemgetter(1))
        # hl = hl[1:5]+[hl[0]]+[hl[5]] # Put racke in correct position
        # handles2, labels2 = zip(*hl)
        # ax.legend(handles2, labels2, loc='best', borderaxespad=0., fancybox=True, ncol=3)
        pp.locator_params(nbins=4)
        pp.tight_layout(pad=0)
        pp.savefig(out_file.split('.')[0]+solver+'.pdf')
if __name__ == "__main__":
if len(sys.argv) < 5:
print "Usage: " + sys.argv[0] + " rtt_file paths_dir out_file rtt/hop"
rtt_file = sys.argv[1]
paths_dir = sys.argv[2]
out_file = sys.argv[3]
rtt_hop = sys.argv[4]
plot_latency(rtt_file, paths_dir, out_file, rtt_hop)
|
masom/doorbot-api-python
|
doorbot/views/dashboard/account.py
|
Python
|
mit
| 610 | 0.001639 |
from flask import Blueprint, render_template
from ...middlewares import auth_manager
from .middlewares import s
from ...container import container
account = Blueprint('account', __name__, url_prefix='/account')
def view():
    """Render the detail page for the currently loaded account."""
    current_account = container.account
    return render_template('accounts/view.html', account=current_account)
def update():
|
account = container.account
return render_template('accounts/view.html', account=account)
account.add_url_rule(
'', 'view',
s(view),
methods=['GET']
)
account.add_url_rule(
'/edit
|
', 'update',
s(auth_manager),
methods=['GET', 'POST']
)
|
okolisny/integration_tests
|
cfme/tests/networks/test_sdn_crud.py
|
Python
|
gpl-2.0
| 1,613 | 0.00124 |
import pytest
from cfme.cloud.provider.azure import AzureProvider
from cfme.cloud.provider.ec2 import EC2Provider
from cfme.cloud.provider.openstack import OpenStackProvider
from cfme.networks.provider import NetworkProviderCollection
from cfme.utils import testgen
from cfme.utils.appliance.implementations.ui import navigate_to
pytest_generate_tests = testgen.generate(
classes=[EC2Provider, AzureProvider, OpenStackProvider], scope='module')
pytestmark = pytest.mark.usefixtures('setup_provider')
@pytest.mark.tier(1)
def test_sdn_crud(provider, appliance):
    """ Test for functional addition of network manager with cloud provider
    and functional references to components on detail page
    Prerequisites: Cloud provider in cfme

    Reconstructed: extraction splices had cut the docstring and the
    "Network Manager" string literal in half.
    """
    view = navigate_to(provider, 'Details')
    net_prov_name = view.contents.relationships.get_text_of("Network Manager")
    collection = NetworkProviderCollection(appliance)
    network_provider = collection.instantiate(name=net_prov_name)
    view = navigate_to(network_provider, 'Details')
    parent_name = view.entities.relationships.get_text_of("Parent Cloud Provider")
    assert parent_name == provider.name
    testing_list = ["Cloud Networks", "Cloud Subnets", "Network Routers",
                    "Security Groups", "Floating IPs", "Network Ports", "Load Balancers"]
    for testing_name in testing_list:
        view = navigate_to(network_provider, 'Details')
        view.entities.relationships.click_at(testing_name)
    provider.delete_if_exists(cancel=False)
    provider.wait_for_delete()
    assert not network_provider.exists
|
sam-m888/gprime
|
gprime/filters/rules/media/_hasnotematchingsubstringof.py
|
Python
|
gpl-2.0
| 1,769 | 0.005653 |
#
# gPrime - A web-based genealogy program
#
# Copyright (C) 2002-2006 Donald N. Allingham
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
#-------------------------------------------------------------------------
#
# Standard Python modules
#
#-------------------------------------------------------------------------
from ....const import LOCALE as glocale
_ = glocale.translation.gettext
#-------------------------------------------------------------------------
#
# Gprime modules
#
#-------------------------------------------------------------------------
from .._hasnotes
|
ubstrbase import HasNoteSubstrBase
#-------------------------------------------------------------------------
# "Media having notes that contain a substring"
#-------------------------------------------------------------------------
class HasNoteMatchingSubstringOf(HasNoteSubstrBase):
    """Media having notes containing <substring>"""
    # Rule metadata consumed by the filter framework; logic lives in
    # HasNoteSubstrBase.
    name = _('Media objects having notes containing <substring>')
    description = _("Matches media objects whose notes contain text "
                    "matching a substring")
|
adrianholovaty/django
|
django/templatetags/tz.py
|
Python
|
bsd-3-clause
| 5,488 | 0 |
from datetime import datetime, tzinfo
try:
import pytz
except ImportError:
pytz = None
from django.template import Node
from django.template import TemplateSyntaxError, Library
from django.utils import timezone
register = Library()
# HACK: datetime is an old-style class, create a new-style equivalent
# so we can define additional attributes.
class datetimeobject(datetime, object):
    """datetime subclass that can carry extra instance attributes
    (e.g. ``convert_to_local_time`` set by ``do_timezone``)."""
    pass
# Template filters
@register.filter
def localtime(value):
    """
    Convert a datetime to local time in the currently active time zone.
    Only meaningful inside a {% localtime off %} block.
    """
    active_zone = timezone.get_current_timezone()
    return do_timezone(value, active_zone)
@register.filter
def utc(value):
    """Convert a datetime into Coordinated Universal Time."""
    target_zone = timezone.utc
    return do_timezone(value, target_zone)
@register.filter('timezone')
def do_timezone(value, arg):
    """
    Converts a datetime to local time in a given time zone.
    The argument must be an instance of a tzinfo subclass or a time zone name.
    If it is a time zone name, pytz is required.
    Naive datetimes are assumed to be in local time in the default time zone.
    """
    # Non-datetime input (including plain dates) yields the empty string.
    if not isinstance(value, datetime):
        return ''
    # Obtain a timezone-aware datetime
    try:
        if timezone.is_naive(value):
            default_timezone = timezone.get_default_timezone()
            value = timezone.make_aware(value, default_timezone)
    # Filters must never raise exceptions, and pytz' exceptions inherit
    # Exception directly, not a specific subclass. So catch everything.
    except Exception:
        return ''
    # Obtain a tzinfo instance
    if isinstance(arg, tzinfo):
        tz = arg
    # NOTE: `basestring` means this module targets Python 2.
    elif isinstance(arg, basestring) and pytz is not None:
        try:
            tz = pytz.timezone(arg)
        except pytz.UnknownTimeZoneError:
            return ''
    else:
        # Unsupported argument type, or a zone name without pytz installed.
        return ''
    result = timezone.localtime(value, tz)
    # HACK: the convert_to_local_time flag will prevent
    # automatic conversion of the value to local time.
    result = datetimeobject(result.year, result.month, result.day,
                            result.hour, result.minute, result.second,
                            result.microsecond, result.tzinfo)
    result.convert_to_local_time = False
    return result
# Template tags
class LocalTimeNode(Node):
    """
    Template node class used by ``localtime_tag``.

    Reconstructed: extraction splices had cut ``self.use_tz`` and
    ``context.use_tz`` in half.
    """
    def __init__(self, nodelist, use_tz):
        self.nodelist = nodelist
        self.use_tz = use_tz
    def render(self, context):
        # Temporarily force the context's use_tz flag while rendering the
        # enclosed block, then restore the previous setting.
        old_setting = context.use_tz
        context.use_tz = self.use_tz
        output = self.nodelist.render(context)
        context.use_tz = old_setting
        return output
class TimezoneNode(Node):
    """
    Template node class used by ``timezone_tag``.
    """
    def __init__(self, nodelist, tz):
        self.nodelist = nodelist
        self.tz = tz
    def render(self, context):
        """Render the wrapped nodes with the resolved time zone active."""
        target_zone = self.tz.resolve(context)
        with timezone.override(target_zone):
            rendered = self.nodelist.render(context)
        return rendered
class GetCurrentTimezoneNode(Node):
    """
    Template node class used by ``get_current_timezone_tag``.
    """
    def __init__(self, variable):
        self.variable = variable
    def render(self, context):
        """Store the active time zone's name in the context; emit nothing."""
        zone_name = timezone.get_current_timezone_name()
        context[self.variable] = zone_name
        return ''
@register.tag('localtime')
def localtime_tag(parser, token):
    """
    Forces or prevents conversion of datetime objects to local time,
    regardless of the value of ``settings.USE_TZ``.
    Sample usage::
        {% localtime off %}{{ value_in_utc }}{% endlocaltime %}
    """
    pieces = token.split_contents()
    if len(pieces) == 1:
        # Bare {% localtime %} defaults to "on".
        use_tz = True
    else:
        if len(pieces) > 2 or pieces[1] not in ('on', 'off'):
            raise TemplateSyntaxError("%r argument should be 'on' or 'off'" %
                                      pieces[0])
        use_tz = (pieces[1] == 'on')
    nodelist = parser.parse(('endlocaltime',))
    parser.delete_first_token()
    return LocalTimeNode(nodelist, use_tz)
@register.tag('timezone')
def timezone_tag(parser, token):
    """
    Enables a given time zone just for this block.
    The ``timezone`` argument must be an instance of a ``tzinfo`` subclass, a
    time zone name, or ``None``. If is it a time zone name, pytz is required.
    If it is ``None``, the default time zone is used within the block.
    Sample usage::
        {% timezone "Europe/Paris" %}
            It is {{ now }} in Paris.
        {% endtimezone %}
    """
    pieces = token.split_contents()
    if len(pieces) != 2:
        raise TemplateSyntaxError("'%s' takes one argument (timezone)" %
                                  pieces[0])
    tz_expression = parser.compile_filter(pieces[1])
    body = parser.parse(('endtimezone',))
    parser.delete_first_token()
    return TimezoneNode(body, tz_expression)
@register.tag("get_current_timezone")
def get_current_timezone_tag(parser, token):
    """
    Stores the name of the current time zone in the context.
    Usage::
        {% get_current_timezone as TIME_ZONE %}
    This will fetch the currently active time zone and put its name
    into the ``TIME_ZONE`` context variable.
    """
    parts = token.contents.split()
    if len(parts) != 3 or parts[1] != 'as':
        raise TemplateSyntaxError("'get_current_timezone' requires "
                                  "'as variable' (got %r)" % parts)
    variable_name = parts[2]
    return GetCurrentTimezoneNode(variable_name)
|
anhstudios/swganh
|
data/scripts/templates/object/tangible/wearables/armor/stormtrooper/shared_armor_stormtrooper_chest_plate.py
|
Python
|
mit
| 507 | 0.043393 |
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DO
|
CUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
    """Build the stormtrooper chest plate tangible template object.

    Reconstructed: an extraction splice had cut the END MODIFICATIONS
    banner comment in half, leaving a stray non-comment line.
    """
    result = Tangible()
    result.template = "object/tangible/wearables/armor/stormtrooper/shared_armor_stormtrooper_chest_plate.iff"
    result.attribute_template_id = 0
    result.stfName("wearables_name","armor_stormtrooper_chest_plate")
    #### BEGIN MODIFICATIONS ####
    #### END MODIFICATIONS ####
    return result
|
google/pigweed
|
pw_presubmit/py/presubmit_test.py
|
Python
|
apache-2.0
| 1,944 | 0 |
#!/usr/bin/env python3
# Copyright 2020 The Pigweed Authors
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
"""Tests for presubmit tools."""
import unittest
from pw_presubmit import presubmit
def _fake_function_1(_):
"""Fake presubmit function."""
def _fake_function_2(_):
"""Fake presubmit function."""
class ProgramsTest(unittest.TestCase):
    """Tests the presubmit Programs abstraction.

    Reconstructed: extraction splices had cut two statements in half.
    """
    def setUp(self):
        # 'first' deliberately mixes nesting to exercise flattening.
        self._programs = presubmit.Programs(
            first=[_fake_function_1, (), [(_fake_function_2, )]],
            second=[_fake_function_2],
        )
    def test_empty(self):
        self.assertEqual({}, presubmit.Programs())
    def test_access_present_members(self):
        self.assertEqual('first', self._programs['first'].name)
        self.assertEqual((_fake_function_1, _fake_function_2),
                         tuple(self._programs['first']))
        self.assertEqual('second', self._programs['second'].name)
        self.assertEqual((_fake_function_2, ), tuple(self._programs['second']))
    def test_access_missing_member(self):
        with self.assertRaises(KeyError):
            _ = self._programs['not_there']
    def test_all_steps(self):
        self.assertEqual(
            {
                '_fake_function_1': _fake_function_1,
                '_fake_function_2': _fake_function_2,
            }, self._programs.all_steps())
if __name__ == '__main__':
    # Run this module's tests when executed directly.
    unittest.main()
|
NAMD/pypln.backend
|
tests/test_worker_bigrams.py
|
Python
|
gpl-3.0
| 2,288 | 0.000874 |
# coding: utf-8
#
# Copyright 2012 NAMD-EMAP-FGV
#
# This file is part of PyPLN. You can get more information at: http://pypln.org/.
#
# PyPLN is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyPLN is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with PyPLN. If not, see <http://www.gnu.org/licenses/>.
import nltk
from pypln.backend.workers.bigrams import Bigrams
from utils import TaskTest
bigram_measures = nltk.collocations.BigramAssocMeasures()
class TestBigramWorker(TaskTest):
    """Exercises the Bigrams worker's chi-square collocation ranking.

    Reconstructed: an extraction splice had cut the first test method's
    name in half.
    """
    def test_bigrams_should_return_correct_score(self):
        # We need this list comprehension because we need to save the word list
        # in mongo (thus, it needs to be json serializable). Also, a list is
        # what will be available to the worker in real situations.
        tokens = [w for w in
                  nltk.corpus.genesis.words('english-web.txt')]
        doc_id = self.collection.insert({'tokens': tokens}, w=1)
        Bigrams().delay(doc_id)
        refreshed_document = self.collection.find_one({'_id': doc_id})
        bigram_rank = refreshed_document['bigram_rank']
        result = bigram_rank[0][1][0]
        # This is the value of the chi_sq measure for this bigram in this
        # colocation
        expected_chi_sq = 95.59393417173634
        self.assertEqual(result, expected_chi_sq)
    def test_bigrams_could_contain_dollar_signs_and_dots(self):
        tokens = ['$', '.']
        doc_id = self.collection.insert({'tokens': tokens}, w=1)
        Bigrams().delay(doc_id)
        refreshed_document = self.collection.find_one({'_id': doc_id})
        bigram_rank = refreshed_document['bigram_rank']
        result = bigram_rank[0][1][0]
        # 2.0 is the value of the chi_sq measure for this bigram in this
        # colocation
        expected_chi_sq = 2.0
        self.assertEqual(result, expected_chi_sq)
|
RedhawkSDR/framework-codegen
|
redhawk/codegen/jinja/python/ports/templates/generic.provides.py
|
Python
|
lgpl-3.0
| 1,314 | 0.005327 |
#{#
# This file is protected by Copyright. Please refer to the COPYRIGHT file
# distributed with this source distribution.
#
# This file is part of REDHAWK core.
#
# REDHAWK core is free software: you can redistri
|
bute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# REDHAWK core is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
|
for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see http://www.gnu.org/licenses/.
#}
#% set className = portgen.className()
class ${className}(${component.baseclass.name}.${portgen.templateClass()}):
def __init__(self, parent, name):
self.parent = parent
self.name = name
self.sri = None
self.queue = Queue.Queue()
self.port_lock = threading.Lock()
#{% for operation in portgen.operations() %}
#{% set arglist = ['self'] + operation.args %}
def ${operation.name}(${arglist|join(', ')}):
# TODO:
pass
#{% endfor %}
|
LaurentClaessens/phystricks
|
testing/demonstration/exCircleThree.py
|
Python
|
gpl-3.0
| 652 | 0.015337 |
from phystricks import *
def exCircleThree():
    """Draw two complementary wavy arcs of one circle (green and red).

    Reconstructed: a stray splice-marker line sat between two statements.
    """
    pspict,fig = SinglePicture("exCircleThree")
    circle = Circle(Point(0,0),1.5)
    circle.angleI = 45
    circle.angleF = 380
    circle.wave(0.1,0.1)
    circle.parameters.color = "green"
    # Second arc covers exactly the remainder of the circle, with the same
    # waviness parameters.
    circleB = Circle(Point(0,0),1.5)
    circleB.angleI = circle.angleF-360
    circleB.angleF = circle.angleI
    circleB.wave(circle.waviness.dx,circle.waviness.dy)
    circleB.parameters.color = "red"
    pspict.DrawGraphs(circle,circleB)
    pspict.DrawDefaultAxes()
    pspict.comment="A large green wavy part and a small red wavy part."
    fig.no_figure()
    fig.conclude()
    fig.write_the_file()
|
|
AEDA-Solutions/matweb
|
backend/Models/Grau/RespostaEditar.py
|
Python
|
mit
| 179 | 0.039106 |
from Framework.Resposta import Res
|
posta
from Models.Grau.Grau import Grau as ModelGrau
class RespostaEditar(Resposta):
    """Resposta subclass whose body (corpo) carries the given message.

    Reconstructed: an extraction splice had cut the class name in half.
    """
    def __init__(self, mensagem):
        self.corpo = mensagem
|
1flow/1flow
|
oneflow/core/admin/__init__.py
|
Python
|
agpl-3.0
| 2,818 | 0.00284 |
# -*- coding: utf-8 -*-
u"""
Copyright 2013-2014 Olivier Cortès <oc@1flow.io>.
This file is part of the 1flow project.
1flow is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as
published by the Free Software Foundation, either version 3 of
the License, or (at your option) any later version.
1flow is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public
License along with 1flow. If not, see http://www.gnu.org/licenses/
"""
from django.conf import settings
from django.contrib import admin
from ..models.reldb import (
# DjangoUser as User,
HistoryEn
|
try,
MailAccount,
Author,
Folder,
SyncNode,
NodePermissions,
)
if settings.FULL_ADMIN:
from helpcontent import HelpContent, HelpContentAdmin
admin.site.register(HelpConte
|
nt, HelpContentAdmin)
from website import WebSite, WebSiteAdmin
admin.site.register(WebSite, WebSiteAdmin)
admin.site.register(Author)
admin.site.register(Folder)
from processor import (
Processor, ProcessorAdmin,
ProcessingChain,
ChainedItem, ChainedItemAdmin,
ChainedItemParameter,
ProcessingError,
ProcessorCategory,
)
admin.site.register(Processor, ProcessorAdmin)
admin.site.register(ProcessingChain)
admin.site.register(ChainedItem, ChainedItemAdmin)
admin.site.register(ChainedItemParameter)
admin.site.register(ProcessingError)
admin.site.register(ProcessorCategory)
from language import Language, LanguageAdmin
admin.site.register(Language, LanguageAdmin)
admin.site.register(MailAccount)
# admin.site.register(CombinedFeedRule)
admin.site.register(HistoryEntry)
from userimport import UserImport, UserImportAdmin
admin.site.register(UserImport, UserImportAdmin)
admin.site.register(SyncNode)
admin.site.register(NodePermissions)
from feed import (
RssAtomFeed, RssAtomFeedAdmin,
MailFeed,
MailFeedRule, MailFeedRuleAdmin,
# CombinedFeed
# TwitterFeed
)
admin.site.register(RssAtomFeed, RssAtomFeedAdmin)
admin.site.register(MailFeed)
admin.site.register(MailFeedRule, MailFeedRuleAdmin)
# admin.site.register(CombinedFeed)
from subscription import Subscription, SubscriptionAdmin
admin.site.register(Subscription, SubscriptionAdmin)
from article import Article, ArticleAdmin, OriginalData
admin.site.register(Article, ArticleAdmin)
admin.site.register(OriginalData)
from read import Read, ReadAdmin
admin.site.register(Read, ReadAdmin)
from tag import Tag, TagAdmin
admin.site.register(Tag, TagAdmin)
# TODO: remove this when migration is finished
import mongo # NOQA
|
BlueDragonX/fm-dot
|
i3/lib/i3.py
|
Python
|
bsd-3-clause
| 4,205 | 0 |
"""
API for communicating with the i3 window manager.
"""
import json
import subprocess
class I3Msg(object):
    """Send messages to i3 via the ``i3-msg`` command-line utility."""
    def __init__(self, socket=None, msgbin=None):
        """
        Initialize the messager.
        @param socket The socket to connect to i3 via.
        @param msgbin The path to i3-msg.
        """
        if msgbin is None:
            msgbin = "i3-msg"
        self.cmd = (msgbin,)
        if socket is not None:
            # Bug fix: i3-msg selects a non-default socket with its "-s"
            # flag; the flag was previously passed as a bare 's', which
            # i3-msg would not recognize as an option.
            self.cmd = self.cmd + ('-s', socket)
    def call(self, *args):
        """
        Call i3-msg and return the parsed output.
        @param args The set of command line arguments to pass to i3-msg.
        @return An object containing the command's response, or None when
                the command produced no output.
        """
        data = subprocess.check_output(self.cmd + args)
        if not data:
            return None
        return json.loads(data.decode('utf-8'))
    def command(self, *cmd):
        """
        Return the result of an i3 command.
        @param cmd A list representing the command to execute.
        @return The output of the command.
        """
        return self.call(*cmd)
    def get_workspaces(self):
        """
        Return a list of workspace objects.
        @return Parsed output of i3-msg -t get_workspaces.
        """
        return self.call('-t', 'get_workspaces')
    def get_outputs(self):
        """
        Return a list of output objects.
        @return Parsed output of i3-msg -t get_outputs.
        """
        return self.call('-t', 'get_outputs')
    def get_tree(self):
        """
        Return the i3 container tree.
        @return Parsed output of i3-msg -t get_tree.
        """
        return self.call('-t', 'get_tree')
    def get_marks(self):
        """
        Return a list of marks.
        @return Parsed output of i3-msg -t get_marks.
        """
        return self.call('-t', 'get_marks')
    def get_bar_config(self, id=None):
        """
        Return i3bar config with the given ID or all if no ID is provided.
        @param id The ID of the bar to retrieve configuration for.
        @return Parsed output of i3-msg -t get_bar_config [ID].
        """
        # NOTE: `id` shadows the builtin but is kept for caller compatibility.
        args = ('-t', 'get_bar_config')
        if id is not None:
            args = args + (id,)
        return self.call(*args)
    def get_version(self):
        """
        Return the version of i3 we're connected to.
        @return Parsed output of i3-msg -t get_version.
        """
        return self.call('-t', 'get_version')
class I3Runner(object):
    """Run a series of commands from a file stream."""

    def __init__(self, i3msg=None):
        """
        Initialize the runner.

        @param i3msg The i3msg object. If None then a new object will be
                     created with the default arguments.
        """
        if i3msg is None:
            i3msg = I3Msg()
        self.i3 = i3msg

    def commands(self, cmds, ignore=None):
        """
        Run multiple commands.

        @param cmds An iterable containing commands to run.
        @param ignore A regex used to ignore certain lines. Defaults to None.
        @return A list of results, one for each command; None for blank or
                ignored commands.
        """
        results = []
        for cmd in cmds:
            if len(cmd.strip()) == 0 or (
                    ignore is not None and ignore.match(cmd)):
                results.append(None)
            else:
                results.append(self.i3.command(cmd))
        return results

    def loads(self, data, ignore=None):
        """
        Load commands from a string.

        @param data The string to process. Commands should be on individual
                    lines.
        @param ignore A regex used to ignore certain lines. Defaults to None.
        @return A list of results, one for each command.
        """
        # BUGFIX: the ignore pattern was previously dropped here, so lines
        # matching it were executed instead of being skipped.
        return self.commands(data.split("\n"), ignore)

    def load(self, file, ignore=None):
        """
        Load commands from a file-like object.

        @param file A file-like object to read commands from.
        @param ignore A regex used to ignore certain lines. Defaults to None.
        @return A list of results, one for each command.
        """
        return self.commands(file, ignore)
|
Aurora0000/descant
|
forums/migrations/0018_auto_20150518_1634.py
|
Python
|
mit
| 467 | 0.002141 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations


class Migration(migrations.Migration):
    # Makes Post.reply_to non-editable; schema-equivalent attribute change.

    dependencies = [
        ('forums', '0017_auto_20150517_1552'),
    ]

    operations = [
        migrations.AlterField(
            model_name='post',
            name='reply_to',
            field=models.ForeignKey(related_name='replies', blank=True, editable=False, to='forums.Post', null=True),
        ),
    ]
|
jimi-c/ansible
|
lib/ansible/modules/storage/netapp/sf_snapshot_schedule_manager.py
|
Python
|
gpl-3.0
| 13,004 | 0.002538 |
#!/usr/bin/python
# (c) 2017, NetApp, Inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
module: sf_snapshot_schedule_manager
short_description: Manage SolidFire snapshot schedules
extends_documentation_fragment:
- netapp.solidfire
version_added: '2.3'
author: Sumit Kumar (sumit4@netapp.com)
description:
- Create, destroy, or update accounts on SolidFire
options:
state:
description:
- Whether the specified schedule should exist or not.
required: true
choices: ['present', 'absent']
paused:
description:
- Pause / Resume a schedule.
required: false
recurring:
description:
- Should the schedule recur?
required: false
time_interval_days:
description: Time interval in days.
required: false
default: 1
time_interval_hours:
description: Time interval in hours.
required: false
default: 0
time_interval_minutes:
description: Time interval in minutes.
required: false
default: 0
name:
description:
- Name for the snapshot schedule.
required: true
snapshot_name:
description:
- Name for the created snapshots.
required: false
volumes:
description:
        - Volume IDs that you want to set the snapshot schedule for.
        - At least 1 volume ID is required for creating a new schedule.
- required when C(state=present)
required: false
retention:
description:
- Retention period for the snapshot.
- Format is 'HH:mm:ss'.
required: false
schedule_id:
description:
- The schedule ID for the schedule that you want to update or delete.
required: false
starting_date:
description:
- Starting date for the schedule.
- Required when C(state=present).
- Please use two '-' in the above format, or you may see an error- TypeError, is not JSON serializable description.
- "Format: C(2016--12--01T00:00:00Z)"
required: false
'''
EXAMPLES = """
- name: Create Snapshot schedule
sf_snapshot_schedule_manager:
hostname: "{{ solidfire_hostname }}"
username: "{{ solidfire_username }}"
password: "{{ solidfire_password }}"
state: present
name: Schedule_A
time_interval_days: 1
starting_date: 2016--12--01T00:00:00Z
volumes: 7
- name: Update Snapshot schedule
sf_snapshot_schedule_manager:
hostname: "{{ solidfire_hostname }}"
username: "{{ solidfire_username }}"
password: "{{ solidfire_password }}"
state: present
schedule_id: 6
recurring: True
snapshot_name: AnsibleSnapshots
- name: Delete Snapshot schedule
sf_snapshot_schedule_manager:
hostname: "{{ solidfire_hostname }}"
username: "{{ solidfire_username }}"
password: "{{ solidfire_password }}"
state: absent
schedule_id: 6
"""
RETURN = """
schedule_id:
description: Schedule ID of the newly created schedule
returned: success
type: string
"""
import traceback
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_native
import ansible.module_utils.netapp as netapp_utils
HAS_SF_SDK = netapp_utils.has_sf_sdk()
class SolidFireSnapShotSchedule(object):
    def __init__(self):
        """Build the AnsibleModule, read its parameters into attributes, and
        open the SolidFire connection (or fail if the SDK is unavailable).
        """
        # Start from the shared ONTAP/SolidFire host arguments and layer the
        # module-specific options on top.
        self.argument_spec = netapp_utils.ontap_sf_host_argument_spec()
        self.argument_spec.update(dict(
            state=dict(required=True, choices=['present', 'absent']),
            name=dict(required=True, type='str'),
            time_interval_days=dict(required=False, type='int', default=1),
            time_interval_hours=dict(required=False, type='int', default=0),
            time_interval_minutes=dict(required=False, type='int', default=0),
            paused=dict(required=False, type='bool'),
            recurring=dict(required=False, type='bool'),
            starting_date=dict(type='str'),
            snapshot_name=dict(required=False, type='str'),
            volumes=dict(required=False, type='list'),
            retention=dict(required=False, type='str'),
            schedule_id=dict(type='int'),
        ))
        self.module = AnsibleModule(
            argument_spec=self.argument_spec,
            # Creating a schedule needs a start date and at least one volume.
            required_if=[
                ('state', 'present', ['starting_date', 'volumes'])
            ],
            supports_check_mode=True
        )
        p = self.module.params
        # set up state variables
        self.state = p['state']
        self.name = p['name']
        # self.interval = p['interval']
        self.time_interval_days = p['time_interval_days']
        self.time_interval_hours = p['time_interval_hours']
        self.time_interval_minutes = p['time_interval_minutes']
        self.paused = p['paused']
        self.recurring = p['recurring']
        self.starting_date = p['starting_date']
        if self.starting_date is not None:
            # Users pass dates as e.g. 2016--12--01T00:00:00Z (see
            # DOCUMENTATION) to dodge a JSON-serialization issue; collapse
            # the doubled dashes back into a normal ISO timestamp.
            self.starting_date = self.starting_date.replace("--", "-")
        self.snapshot_name = p['snapshot_name']
        self.volumes = p['volumes']
        self.retention = p['retention']
        self.schedule_id = p['schedule_id']
        self.create_schedule_result = None
        if HAS_SF_SDK is False:
            self.module.fail_json(msg="Unable to import the SolidFire Python SDK")
        else:
            self.sfe = netapp_utils.create_sf_connection(module=self.module)
def get_schedule(self):
schedule_list = self.sfe.list_schedules()
for schedule in schedule_list.schedules:
if schedule.name == self.name:
# Update self.schedule_id:
if self.schedule_id is not None:
if schedule.schedule_id == self.schedule_id:
return schedule
else:
self.schedule_id = schedule.schedule_id
return schedule
return None
def create_schedule(self):
try:
sched = netapp_utils.Schedule()
# if self.interval == 'time_interval':
sched.frequency = netapp_utils.TimeIntervalFrequency(days=self.time_interval_days,
hours=self.time_interval_hours,
minutes=self.time_interval_minutes)
# Create schedule
sched.name = self.name
sched.schedule_info = netapp_utils.ScheduleInfo(
volume_ids=self.volumes,
snapshot_name=self.snapshot_name,
retention=self.retention
)
sched.paused = self.paused
sched.recurring = self.recurring
sched.starting_date = self.starting_date
self.create_schedule_result = self.sfe.create_schedule(schedule=sched)
except Exception as e:
self.module.fail_json(msg='Error creating schedule %s: %s' % (self.name, to_native(e)),
exception=traceback.format_exc())
def delete_schedule(self):
try:
get_schedule_result = self.sfe.get_schedule(schedule_id=self.schedule_id)
sched = get_schedule_result.schedule
sched.to_be_deleted = True
self.sfe.modify_schedule(schedule=sched)
except Exception as e:
self.module.fail_json(msg='Error deleting schedule %s: %s' % (self.name, to_native(e)),
exception=traceback.format_exc())
def update_schedule(self):
try:
get_schedule_result = self.sfe.get_schedule(schedule_id=self.schedule_id)
sched = get_schedule_result.schedule
# Update schedule properties
|
tersmitten/ansible
|
lib/ansible/modules/remote_management/lxca/lxca_cmms.py
|
Python
|
gpl-3.0
| 4,442 | 0.001576 |
#!/usr/bin/python
# GNU General Public License v3.0+ (see COPYING or
# https://www.gnu.org/licenses/gpl-3.0.txt)
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'supported_by': 'community',
'status': ['preview']
}
DOCUMENTATION
|
= '''
---
version_added: "2.8"
author:
- Naval Patel (@navalkp)
- Prashant Bhosale (@prabhosa)
module: lxca_cmms
short_description: Custom module for lxca cmms invent
|
ory utility
description:
- This module returns/displays a inventory details of cmms
options:
uuid:
description:
uuid of device, this is string with length greater than 16.
command_options:
description:
options to filter nodes information
default: cmms
choices:
- cmms
- cmms_by_uuid
- cmms_by_chassis_uuid
chassis:
description:
uuid of chassis, this is string with length greater than 16.
extends_documentation_fragment:
- lxca_common
'''
EXAMPLES = '''
# get all cmms info
- name: get nodess data from LXCA
lxca_cmms:
login_user: USERID
login_password: Password
auth_url: "https://10.243.15.168"
# get specific cmms info by uuid
- name: get nodes data from LXCA
lxca_cmms:
login_user: USERID
login_password: Password
auth_url: "https://10.243.15.168"
uuid: "3C737AA5E31640CE949B10C129A8B01F"
command_options: cmms_by_uuid
# get specific cmms info by chassis uuid
- name: get nodes data from LXCA
lxca_cmms:
login_user: USERID
login_password: Password
auth_url: "https://10.243.15.168"
chassis: "3C737AA5E31640CE949B10C129A8B01F"
command_options: cmms_by_chassis_uuid
'''
RETURN = r'''
result:
description: cmms detail from lxca
returned: success
type: dict
sample:
cmmList:
- machineType: ''
model: ''
type: 'CMM'
uuid: '118D2C88C8FD11E4947B6EAE8B4BDCDF'
# bunch of properties
- machineType: ''
model: ''
type: 'CMM'
uuid: '223D2C88C8FD11E4947B6EAE8B4BDCDF'
# bunch of properties
# Multiple cmms details
'''
import traceback
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.remote_management.lxca.common import LXCA_COMMON_ARGS, has_pylxca, connection_object
try:
from pylxca import cmms
except ImportError:
pass
UUID_REQUIRED = 'UUID of device is required for cmms_by_uuid command.'
CHASSIS_UUID_REQUIRED = 'UUID of chassis is required for cmms_by_chassis_uuid command.'
SUCCESS_MSG = "Success %s result"
def _cmms(module, lxca_con):
    """Return inventory details for every CMM known to LXCA."""
    return cmms(lxca_con)


def _cmms_by_uuid(module, lxca_con):
    """Return details for the CMM identified by the 'uuid' parameter."""
    device_uuid = module.params['uuid']
    if not device_uuid:
        module.fail_json(msg=UUID_REQUIRED)
    return cmms(lxca_con, device_uuid)


def _cmms_by_chassis_uuid(module, lxca_con):
    """Return details for all CMMs in the chassis given by 'chassis'."""
    chassis_uuid = module.params['chassis']
    if not chassis_uuid:
        module.fail_json(msg=CHASSIS_UUID_REQUIRED)
    return cmms(lxca_con, chassis=chassis_uuid)
def setup_module_object():
    """
    Merge the common LXCA argument spec with this module's arguments and
    build the AnsibleModule object.

    :return: configured AnsibleModule instance
    """
    combined_spec = dict(LXCA_COMMON_ARGS)
    combined_spec.update(INPUT_ARG_SPEC)
    return AnsibleModule(argument_spec=combined_spec, supports_check_mode=False)
# Dispatch table: maps the 'command_options' parameter to its handler.
FUNC_DICT = {
    'cmms': _cmms,
    'cmms_by_uuid': _cmms_by_uuid,
    'cmms_by_chassis_uuid': _cmms_by_chassis_uuid,
}

# Module-specific arguments, merged on top of LXCA_COMMON_ARGS by
# setup_module_object().
INPUT_ARG_SPEC = dict(
    command_options=dict(default='cmms', choices=['cmms', 'cmms_by_uuid',
                                                  'cmms_by_chassis_uuid']),
    uuid=dict(default=None),
    chassis=dict(default=None)
)
def execute_module(module):
    """
    Dispatch the requested command and exit the module.

    Looks up the handler for ``command_options`` in FUNC_DICT, runs it inside
    a managed LXCA connection, and reports the result via ``exit_json``; any
    exception is converted into a ``fail_json`` call.

    :param module: Ansible module object
    """
    try:
        with connection_object(module) as lxca_con:
            result = FUNC_DICT[module.params['command_options']](module, lxca_con)
            module.exit_json(changed=False,
                             msg=SUCCESS_MSG % module.params['command_options'],
                             result=result)
    except Exception as exception:
        # BUGFIX: exception.args may hold non-string values; str.join() would
        # raise TypeError on them, masking the real error.
        error_msg = '; '.join(str(e) for e in exception.args)
        module.fail_json(msg=error_msg, exception=traceback.format_exc())
def main():
    """Entry point: build the module object, verify pylxca is importable,
    then execute the requested command."""
    lxca_module = setup_module_object()
    has_pylxca(lxca_module)
    execute_module(lxca_module)


if __name__ == '__main__':
    main()
|
philgyford/django-spectator
|
spectator/events/migrations/0042_auto_20200407_1039.py
|
Python
|
mit
| 1,302 | 0 |
# Generated by Django 3.0.5 on 2020-04-07 10:39

import django.core.validators
from django.db import migrations, models


class Migration(migrations.Migration):
    # Help-text/validator tweaks on Venue.cinema_treasures_id and Work.imdb_id.

    dependencies = [
        ("spectator_events", "0041_event_ticket"),
    ]

    operations = [
        migrations.AlterField(
            model_name="venue",
            name="cinema_treasures_id",
            field=models.PositiveIntegerField(
                blank=True,
                help_text='Optional. ID of a cinema at\n<a href="http://cinematreasures.org/">Cinema Treasures</a>.',  # noqa: E501
                null=True,
            ),
        ),
        migrations.AlterField(
            model_name="work",
            name="imdb_id",
            field=models.CharField(
                blank=True,
                help_text="Starts with 'tt', e.g. 'tt0100842'.\nFrom <a href=\"https://www.imdb.com\">IMDb</a>.",  # noqa: E501
                max_length=12,
                validators=[
                    django.core.validators.RegexValidator(
                        code="invalid_imdb_id",
                        message='IMDb ID should be like "tt1234567"',
                        regex="^tt\\d{7,10}$",
                    )
                ],
                verbose_name="IMDb ID",
            ),
        ),
    ]
|
Tianyi94/EC601Project_Somatic-Parkour-Game-based-on-OpenCV
|
Old Code/ControlPart/Create_pos&neg.py
|
Python
|
mit
| 444 | 0.006757 |
def create_pos_n_neg():
    """Write OpenCV training description files from images in the current
    working directory: positives go to info.dat (with a bounding box),
    negatives to bg.txt (one relative path per line).

    Note: only 'neg' is iterated here; the 'pos' branch is kept for when
    positives are added to the list.
    """
    import os  # BUGFIX: the original script never imported os

    for file_type in ['neg']:
        for img in os.listdir(file_type):
            if file_type == 'pos':
                line = file_type+'/'+img+' 1 0 0 50 50\n'
                with open('info.dat', 'a') as f:
                    f.write(line)
            elif file_type == 'neg':
                line = file_type+'/'+img+'\n'
                with open('bg.txt', 'a') as f:
                    f.write(line)
|
googleapis/python-policy-troubleshooter
|
google/cloud/policytroubleshooter_v1/services/iam_checker/async_client.py
|
Python
|
apache-2.0
| 11,763 | 0.00204 |
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
import functools
import re
from typing import Dict, Optional, Sequence, Tuple, Type, Union
import pkg_resources
from google.api_core.client_options import ClientOptions
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore
try:
OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
except AttributeError: # pragma: NO COVER
OptionalRetry = Union[retries.Retry, object] # type: ignore
from google.cloud.policytroubleshooter_v1.types import checker
from google.cloud.policytroubleshooter_v1.types import explanations
from .transports.base import IamCheckerTransport, DEFAULT_CLIENT_INFO
from .transports.grpc_asyncio import IamCheckerGrpcAsyncIOTransport
from .client import IamCheckerClient
class IamCheckerAsyncClient:
"""IAM Policy Troubleshooter service.
This service helps you troubleshoot access issues for Google
Cloud resources.
"""
_client: IamCheckerClient
DEFAULT_ENDPOINT = IamCheckerClient.DEFAULT_ENDPOINT
DEFAULT_MTLS_ENDPOINT = IamCheckerClient.DEFAULT_MTLS_ENDPOINT
common_billing_account_path = staticmethod(
IamCheckerClient.common_billing_account_path
)
parse_common_billing_account_path = staticmethod(
IamCheckerClient.parse_common_billing_account_path
)
common_folder_path = staticmethod(IamCheckerClient.common_folder_path)
parse_common_folder_path = staticmethod(IamCheckerClient.parse_common_folder_path)
common_organization_path = staticmethod(IamCheckerClient.common_organization_path)
parse_common_organization_path = staticmethod(
IamCheckerClient.parse_common_organization_path
)
common_project_path = staticmethod(IamCheckerClient.common_project_path)
parse_common_project_path = staticmethod(IamCheckerClient.parse_common_project_path)
common_location_path = staticmethod(IamCheckerClient.common_location_path)
parse_common_location_path = staticmethod(
IamCheckerClient.parse_common_location_path
)
    @classmethod
    def from_service_account_info(cls, info: dict, *args, **kwargs):
        """Creates an instance of this client using the provided credentials
            info.

        Args:
            info (dict): The service account private key info.
            args: Additional arguments to pass to the constructor.
            kwargs: Additional arguments to pass to the constructor.

        Returns:
            IamCheckerAsyncClient: The constructed client.
        """
        # Re-bind the sync client's classmethod via __func__ so that `cls`
        # inside it is IamCheckerAsyncClient rather than IamCheckerClient.
        return IamCheckerClient.from_service_account_info.__func__(IamCheckerAsyncClient, info, *args, **kwargs)  # type: ignore
    @classmethod
    def from_service_account_file(cls, filename: str, *args, **kwargs):
        """Creates an instance of this client using the provided credentials
            file.

        Args:
            filename (str): The path to the service account private key json
                file.
            args: Additional arguments to pass to the constructor.
            kwargs: Additional arguments to pass to the constructor.

        Returns:
            IamCheckerAsyncClient: The constructed client.
        """
        # Re-bind the sync client's classmethod via __func__ so that `cls`
        # inside it is IamCheckerAsyncClient rather than IamCheckerClient.
        return IamCheckerClient.from_service_account_file.__func__(IamCheckerAsyncClient, filename, *args, **kwargs)  # type: ignore

    # Alias kept for backward compatibility with older GAPIC surfaces.
    from_service_account_json = from_service_account_file
    @classmethod
    def get_mtls_endpoint_and_cert_source(
        cls, client_options: Optional[ClientOptions] = None
    ):
        """Return the API endpoint and client cert source for mutual TLS.

        The client cert source is determined in the following order:
        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
        client cert source is None.
        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
        default client cert source exists, use the default one; otherwise the client cert
        source is None.

        The API endpoint is determined in the following order:
        (1) if `client_options.api_endpoint` is provided, use the provided one.
        (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the
        default mTLS endpoint; if the environment variable is "never", use the default API
        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
        use the default API endpoint.

        More details can be found at https://google.aip.dev/auth/4114.

        Args:
            client_options (google.api_core.client_options.ClientOptions): Custom options for the
                client. Only the `api_endpoint` and `client_cert_source` properties may be used
                in this method.

        Returns:
            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
                client cert source to use.

        Raises:
            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
        """
        # Delegate entirely to the synchronous client's implementation.
        return IamCheckerClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
    @property
    def transport(self) -> IamCheckerTransport:
        """Returns the transport used by the client instance.

        Returns:
            IamCheckerTransport: The transport used by the client instance.
        """
        # The async client wraps a sync IamCheckerClient; all RPCs ride on
        # that client's transport.
        return self._client.transport
get_transport_class = functools.partial(
type(IamCheckerClient).get_transport_class, type(IamCheckerClient)
)
def __init__(
self,
*,
credentials: ga_credentials.Credentials = None,
transport: Union[str, IamCheckerTransport] = "grpc_asyncio",
client_options: ClientOptions = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiates the iam checker client.
Args:
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
transport (Union[str, ~.IamCheckerTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
client_options (ClientOptions): Custom options for the client. It
won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
environment variable can also be used to override the endpoint:
"always" (always use the default mTLS endpoint), "never" (always
                use the default regular endpoint) and "auto" (auto switch to the
default mTLS endpoint if client certificate is present, this is
the default value). However, the ``api_endpoint`` property takes
precedence if provided.
(2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
                is "true", then the ``client_cert_source`` property can be used
to provide client certificate for mutual TLS transport. If
|
imbstack/pasquino
|
migrations/env.py
|
Python
|
mit
| 2,643 | 0.001135 |
from __future__ import with_statement
from alembic import context
from sqlalchemy import engine_from_config, pool
from logging.config import fileConfig
import logging
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)
logger = logging.getLogger('alembic.env')

# add your model's MetaData object here
# for 'autogenerate' support
from pasquino import app
target_metadata = app.db.metadata

# Override the .ini connection string with the one Flask is configured
# with, so migrations always target the app's actual database.
from flask import current_app
config.set_main_option('sqlalchemy.url',
                       current_app.config.get('SQLALCHEMY_DATABASE_URI'))
def run_migrations_offline():
    """Run migrations in 'offline' mode.

    Configures the context with just a URL rather than an Engine, so no
    DBAPI needs to be available; context.execute() calls emit the given
    SQL to the script output instead of a live connection.
    """
    context.configure(url=config.get_main_option("sqlalchemy.url"))
    with context.begin_transaction():
        context.run_migrations()
def run_migrations_online():
    """Run migrations in 'online' mode.

    Creates an Engine and associates a live connection with the migration
    context; the connection is always closed afterwards.
    """

    # this callback is used to prevent an auto-migration from being generated
    # when there are no changes to the schema
    # reference: http://alembic.readthedocs.org/en/latest/cookbook.html
    def process_revision_directives(context, revision, directives):
        if getattr(config.cmd_opts, 'autogenerate', False):
            script = directives[0]
            if script.upgrade_ops.is_empty():
                # Drop the generated revision entirely.
                directives[:] = []
                logger.info('No changes in schema detected.')

    engine = engine_from_config(config.get_section(config.config_ini_section),
                                prefix='sqlalchemy.',
                                poolclass=pool.NullPool)

    connection = engine.connect()
    context.configure(connection=connection,
                      target_metadata=target_metadata,
                      process_revision_directives=process_revision_directives,
                      **current_app.extensions['migrate'].configure_args)

    try:
        with context.begin_transaction():
            context.run_migrations()
    finally:
        connection.close()
# Alembic decides offline vs online mode from the command line (--sql);
# dispatch to the matching runner at import time.
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
|
zaffra/Inquire
|
GAE/api.py
|
Python
|
bsd-3-clause
| 17,740 | 0.003439 |
##
# api.py
#
# This file is the workhorse for the the entire web application.
# It implements and provides the API required for the iOS portion
# of the project as well as interacting with Google's datastore
# for persistent storage of our models.
##
# for sending mail
from google.appengine.api import mail
# Used in conjunction with the geomodel library for doing
# proximity based searches
from google.appengine.ext.db import GeoPt
from geo import geotypes
# HttpResponse is what all Django-based views must return
# to render the output. In our web application the
# _json* methods build and return HttpResponse objects
# for rendering JSON dat
from django.http import HttpResponse
# For encoding Python objects into JSON strings
from django.utils import simplejson
# Our datastore models
from model import *
# For handling user sessions
from appengine_utilities.sessions import Session
# Provides the sha1 module we use for hashing passwords
import hashlib
# The Python loggin module. We use the basicConfig method
# to setup to log to the console (or GoogleAppEngineLauncher
# logs screen)
import logging
logging.basicConfig(level=logging.DEBUG)
##
# CONSTANTS
##
"""
The email address to send from. See the Notes section of the README
for more information on what to set this to.
"""
SENDER_EMAIL_ADDRESS = "VALID@APPENGINE_ADDRESS.COM"
##
# UTILITY METHODS
##
def _hash_password(password):
"""
Returns a sha1-hashed version of the given plaintext password.
"""
return hashlib.sha1(password).hexdigest()
def _json_response(success=True, msg="OK", **kwargs):
    """
    Build an HttpResponse carrying a stock JSON object.

    @param success=True: indicates success or failure of the API method
    @param msg: string with details on success or failure
    @kwargs: any number of extra key/value pairs merged into the JSON object
    """
    payload = {"success": success, "msg": msg}
    payload.update(kwargs)
    # Views must return an HttpResponse; tag it as JSON via the mimetype.
    return HttpResponse(simplejson.dumps(payload), mimetype="application/json")
def _json_unauthorized_response(**kwargs):
    """
    Build a 401 HttpResponse with a stock "Unauthorized" JSON object.

    NOTE: always carries success=False and msg="Unauthorized"; any extra
    keyword arguments are merged into the JSON payload.
    """
    payload = {"success": False, "msg": "Unauthorized"}
    payload.update(kwargs)
    return HttpResponse(simplejson.dumps(payload), status=401,
                        mimetype="application/json")
##
# DECORATORS
#
# For more information about decorators in Python see:
#
# http://www.python.org/dev/peps/pep-0318/
# http://wiki.python.org/moin/PythonDecorators
# http://www.ibm.com/developerworks/linux/library/l-cpdecor.html
# Google...
##
# Usage: @validate_request(method, p1, p2, ...)
def validate_request(method, *params):
    """
    Decorator that enforces the HTTP method and required parameters for an
    API call. On a method or parameter violation, a stock failure JSON
    object describing the problem is returned; otherwise the wrapped view
    runs normally.
    """
    def _dec(view_func):
        def _view(request, *args, **kwargs):
            # Guard clause: reject the wrong HTTP verb up front.
            if request.method != method:
                return _json_response(success=False,
                    msg="%s requests are not allowed." % request.method)
            # Every declared parameter must be present and non-empty.
            for param in params:
                if not request.REQUEST.get(param, ""):
                    return _json_response(success=False,
                        msg="'%s' is required." % param)
            return view_func(request, *args, **kwargs)
        return _view
    return _dec
# Usage: @validate_session()
def validate_session():
    """
    Decorator that requires an authenticated user in the session. When no
    user is present the stock unauthorized JSON response is returned;
    otherwise the wrapped view runs normally.
    """
    def _dec(view_func):
        def _view(request, *args, **kwargs):
            authenticated = Session().get("user") is not None
            if authenticated:
                return view_func(request, *args, **kwargs)
            return _json_unauthorized_response()
        return _view
    return _dec
##
# API METHODS
##
@validate_session()
@validate_request("POST", "question", "latitude", "longitude", "pay_key")
def ask(request):
    """
    API Method - /ask

    Persist a new Question at the supplied coordinates for the
    authenticated user.

    @method POST
    @param question: the text of the question
    @param latitude: latitude of the location
    @param longitude: longitude of the location
    @param pay_key: the pay key from a successful PayPal purchase
    @returns stock success or failure JSON response along with
        the question and user objects.
    """
    user = Session().get("user")

    question_text = request.REQUEST.get("question")
    lat = float(request.REQUEST.get("latitude"))
    lon = float(request.REQUEST.get("longitude"))
    # The PayPal pay key is accepted but not verified here; confirming the
    # transaction via PayPal's APIs is left to the curious coder :)
    pay_key = request.REQUEST.get("pay_key")

    new_question = Question(question=question_text,
                            location=GeoPt(lat, lon),
                            user=user)
    new_question.update_location()
    new_question.put()

    # return stock JSON with the Question object details
    return _json_response(question=new_question.to_json(), user=user.to_json())
@validate_session()
@validate_request("POST", "question_id", "answer")
def answer(request):
"""
API Method - /answer
Creates a new Answer object and adds it to the datastore. Validates
that the question exists and does not have an accepted answer before
accepting the answer.
This method also takes care of sending the owner of the question
an email saying a new answer has been given with the answer in the
body of the message.
@method POST
    @param question_id: id of an existing question
    @param answer: the text for the answer to a question
@returns one answer object
"""
# session and authenticated user
user = Session().get("user")
# required parameters
question_id = int(request.REQUEST.get("question_id"))
answer = request.REQUEST.get("answer")
# find the question associated with the question_id parameter
question = Question.get_by_id(question_id)
# no question with the given id
if question is None:
return _json_response(success=False, msg="Question does not exist.")
# question has already been answered
if question.closed:
return _json_response(success=False, msg="Question has an accepted answer and is now closed.")
# create a new answer and save it to the datastore
a = Answer(user=user,
question=question,
answer=answer)
a.put()
# send an email to the owner of the questio
|
annayqho/TheCannon
|
code/apokasc_lamost/kepler_apogee_lamost_overlap.py
|
Python
|
mit
| 2,584 | 0.011223 |
# Build the APOGEE/APOKASC/LAMOST overlap sample and collect the stellar
# labels from each survey for the overlapping stars.
import numpy as np
import os

# APOGEE-APOKASC overlap
inputf = "/home/annaho/TheCannon/examples/example_apokasc/apokasc_DR12_overlap.npz"
apogee_apokasc = np.load(inputf)['arr_0']

# APOGEE-LAMOST overlap
inputf = "/home/annaho/TheCannon/examples/example_DR12/Data"
apogee_lamost = np.array(os.listdir(inputf))

# APOGEE-APOKASC-LAMOST
overlap = np.intersect1d(apogee_lamost, apogee_apokasc)  # 530 stars

apogee_key = np.loadtxt("apogee_sorted_by_ra.txt", dtype=str)
lamost_key = np.loadtxt("lamost_sorted_by_ra.txt", dtype=str)
# NOTE(review): indexing apogee-overlap positions into lamost_key assumes the
# two key files are row-aligned (both sorted by RA) — confirm upstream.
inds = np.array([np.where(apogee_key == a)[0][0] for a in overlap])
overlap_lamost = lamost_key[inds]
np.savez("apogee_apokasc_lamost_overlap.npz", overlap)

# get all APOGEE parameters
label_file = "apogee_dr12_labels.csv"
apogee_id_all = np.loadtxt(label_file, usecols=(1,), delimiter=',', dtype=str)
apogee_labels_all = np.loadtxt(
    label_file, usecols=(2, 3, 4, 5), delimiter=',', dtype=float)
inds = np.array([np.where(apogee_id_all == a)[0][0] for a in overlap])
apogee_id = apogee_id_all[inds]
apogee_labels = apogee_labels_all[inds, :]

# get all APOKASC parameters
apokasc_id_all = np.load("example_apokasc/apokasc_DR12_overlap.npz")['arr_0']
apokasc_labels_all = np.load("example_apokasc/tr_label.npz")['arr_0']
inds = np.array([np.where(apokasc_id_all == a)[0][0] for a in overlap])
apokasc_id = apokasc_id_all[inds]
apokasc_labels = apokasc_labels_all[inds]

# get all LAMOST parameters
inputf = "/home/annaho/TheCannon/examples/test_training_overlap/lamost_sorted_by_ra_with_dr2_params.txt"
lamost_id_all = np.loadtxt(inputf, usecols=(0,), dtype=str)
lamost_labels_all = np.loadtxt(inputf, usecols=(3, 4, 5), dtype=float)
inds = np.array([np.where(lamost_id_all == a)[0][0] for a in overlap_lamost])
lamost_id = lamost_id_all[inds]
lamost_labels = lamost_labels_all[inds]

# plot them against each other
from matplotlib import rc
rc('font', family='serif')
rc('text', usetex=True)

names = [r"$T_{eff}$", r"$\log g$", r"$[Fe/H]$", r"$[\alpha/Fe]$"]
def plot(ax, x, y, i):
ax.scatter(x[:,i], y[:,i], c='k')
xlim = ax.get_xlim()
ylim = ax.get_ylim()
ax.plot([-10000,10000],[-10000,10000], c='r')
ax.set_xlim(xlim)
ax.set_ylim(ylim)
ax.set_title(names[i])
x = apokasc_labels
y = lamost_labels
fig,axarr = subplots(2,2)
ax = axarr[0,0]
plot(ax, x, y, 0)
ax = axarr[0,1]
plot(ax, x, y, 1)
ax = axarr[1,0]
plot(ax, x, y, 2)
#ax = axarr[1,1]
#plot(ax, x, y, 3)
fig.text(0.5,0.01, "Kepler APOKASC", ha='center', va='bottom', fontsize=18)
fig.text(0.01, 0.5, "LAMOST", ha='left', va='center', rotation=90, fontsize=18)
|
rcbops/opencenter
|
tests/test_expressions.py
|
Python
|
apache-2.0
| 11,034 | 0 |
#
# OpenCenter(TM) is Copyright 2013 by Rackspace US, Inc.
##############################################################################
#
# OpenCenter is licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. This
# version of OpenCenter includes Rackspace trademarks and logos, and in
# accordance with Section 6 of the License, the provision of commercial
# support services in conjunction with a version of OpenCenter which includes
# Rackspace trademarks and logos is prohibited. OpenCenter source code and
# details are available at: # https://github.com/rcbops/opencenter or upon
# written request.
#
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0 and a copy, including this
# notice, is available in the LICENSE file accompanying this software.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the # specific language governing permissions and limitations
# under the License.
#
##############################################################################
from util import OpenCenterTestCase
import opencenter.db.api as db_api
from opencenter.webapp import ast
api = db_api.api_from_models()
class ExpressionTestCase(OpenCenterTestCase):
def setUp(self):
self.nodes = {}
|
self.interfaces = {}
self.nodes['node-1'] = self._model_create('nodes', name='node-1')
self.interf
|
aces['chef'] = self._model_create('filters', name='chef',
filter_type='interface',
expr='facts.x = true')
self.nodes['container'] = self._model_create('nodes', name='container')
    def tearDown(self):
        # Remove every model object created during the test so cases stay
        # isolated from one another.
        self._clean_all()
def _run_expression(self, node, expression, ns={}):
builder = ast.FilterBuilder(ast.FilterTokenizer(), expression,
api=api)
root_node = builder.build()
return root_node.eval_node(node, symbol_table=ns)
def _simple_expression(self, expression):
node = self._model_get_by_id('nodes', self.nodes['node-1']['id'])
return self._run_expression(node,
'nodes: %s' % expression)
def _invert_expression(self, expression, ns={}):
builder = ast.FilterBuilder(ast.FilterTokenizer(), expression)
root_node = builder.build()
return root_node.invert()
    def _eval_expression(self, expression, node_id, ns={}):
        # Evaluate against an ephemeral copy of the api so that side effects
        # of the expression do not touch the real backing store.
        ephemeral_api = db_api.ephemeral_api_from_api(api)
        builder = ast.FilterBuilder(ast.FilterTokenizer(), expression,
                                    api=ephemeral_api)
        node = ephemeral_api._model_get_by_id('nodes', node_id)
        builder.eval_node(node, symbol_table=ns)
        # Re-fetch the node to observe any mutations made during evaluation.
        new_node = ephemeral_api._model_get_by_id('nodes', node_id)
        return new_node
    def test_bad_interface(self):
        # Referencing an interface that was never defined must be a
        # SyntaxError at expression-build time.
        expression = "ifcount('blahblah') > 0"
        self.assertRaises(SyntaxError, self._run_expression,
                          self.nodes['node-1'], expression)
    def test_zero_ifcount(self):
        # node-1 has no facts yet, so it does not match the 'chef'
        # interface filter (facts.x = true) and the count is zero.
        expression = "ifcount('chef') > 0"
        result = self._run_expression(self.nodes['node-1'], expression)
        self.logger.debug('Got result: %s' % result)
        self.assertEquals(result, False)
def test_valid_ifcount(self):
expression = "ifcount('chef') > 0"
self._model_create('facts', node_id=self.nodes['node-1']['id'],
key='x', value=True)
result = self._run_expression(self.nodes['node-1'], expression)
self.logger.debug('Got result: %s' % result)
self.assertEquals(result, True)
    def test_invert_equals(self):
        # Inverting an equality test yields the assignment that makes it true.
        expression = "facts.test = 'test'"
        result = self._invert_expression(expression)
        self.assertEquals(result, ["facts.test := 'test'"])
def test_invert_and(self):
expression = "facts.test='test' and facts.x='x'"
result = self._invert_expression(expression)
self.assertTrue("facts.test := 'test'" in result)
self.assertTrue("facts.x := 'x'" in result)
    def test_invert_in(self):
        # Inverting a membership test yields a union() assignment.
        expression = "'test' in facts.foo"
        result = self._invert_expression(expression)
        self.assertTrue("facts.foo := union(facts.foo, 'test')" in result)
        self.assertEquals(len(result), 1)
def test_invert_not_in(self):
expression = "'test' !in facts.foo"
result = self._invert_expression(expression)
self.assertTrue("facts.foo := remove(facts.foo, 'test')" in result)
self.assertEquals(len(result), 1)
    def test_eval_assign(self):
        # An assignment expression must set the fact on the evaluated node.
        node_id = self.nodes['node-1']['id']
        expression = "facts.parent_id := %d" % int(
            self.nodes['container']['id'])
        node = self._eval_expression(expression, node_id)
        self.assertEquals(node['facts'].get('parent_id', None),
                          self.nodes['container']['id'])
def test_eval_union(self):
node_id = self.nodes['node-1']['id']
expression = "facts.woof := union(facts.woof, 3)"
node = self._eval_expression(expression, node_id)
self.assertEquals(node['facts']['woof'], [3])
    def test_eval_remove(self):
        # remove() on a list fact drops the matching element.
        node_id = self.nodes['node-1']['id']
        fact = self._model_create('facts', node_id=node_id,
                                  key='array_fact', value=[1, 2])
        expression = 'facts.array_fact := remove(facts.array_fact, 2)'
        node = self._eval_expression(expression, node_id)
        self.assertEquals(node['facts']['array_fact'], [1])
        # verify removing from none returns none.  This is perhaps
        # questionable, but is inline with the rest of the none/empty
        # behavior.  It could probably also return [], but enforce
        # current behavior
        self._model_delete('facts', fact['id'])
        expression = 'facts.array_fact := remove(facts.array_fact, "test")'
        node = self._eval_expression(expression, node_id)
        self.assertEquals(node['facts']['array_fact'], None)
        # verify removing from a non-list raises SyntaxError
        self._model_create('facts', node_id=node_id,
                           key='array_fact', value='non-array')
        expression = 'facts.array_fact := remove(facts.array_fact, "whoops")'
        self.assertRaises(SyntaxError, self._eval_expression,
                          expression, node_id)
def test_eval_namespaces(self):
node_id = self.nodes['node-1']['id']
expression = "facts.parent_id := value"
ns = {"value": self.nodes['container']['id']}
node = self._eval_expression(expression, node_id, ns)
self.assertEquals(node['facts'].get('parent_id', None),
self.nodes['container']['id'])
# test the inverter and regularizer functions
def test_regularize_expression(self):
expression = 'foo=value'
regular = ast.regularize_expression(expression)
self.logger.debug('Got regularized expression "%s" for "%s"' %
(regular, expression))
self.assertEquals(regular, 'foo = value')
def test_inverted_expression(self):
expression = 'foo=value'
inverted = ast.invert_expression(expression)
self.logger.debug('Got inverted expression "%s" for "%s"' %
(inverted, expression))
self.assertEquals(len(inverted), 1)
self.assertEquals(inverted[0], 'foo := value')
def test_inverted_union(self):
expression = 'facts.test := union(facts.test, test)'
inverted = ast.invert_expression(expression)
self.logger.debug('Got inverted expression "%s" for "%s"' %
(inverted, expression))
self.assertEquals(len(inverted), 1)
self.assertEquals(inverted[0], 'test in facts.test')
def test_inverted_remove(self):
|
mdhaber/scipy
|
scipy/fftpack/tests/test_real_transforms.py
|
Python
|
bsd-3-clause
| 23,941 | 0.000877 |
from os.path import join, dirname
import numpy as np
from numpy.testing import assert_array_almost_equal, assert_equal
import pytest
from pytest import raises as assert_raises
from scipy.fftpack._realtransforms import (
dct, idct, dst, idst, dctn, idctn, dstn, idstn)
# Matlab reference data
MDATA = np.load(join(dirname(__file__), 'test.npz'))
X = [MDATA['x%d' % i] for i in range(8)]
Y = [MDATA['y%d' % i] for i in range(8)]
# FFTW reference data: the data are organized as follows:
# * SIZES is an array containing all available sizes
# * for every type (1, 2, 3, 4) and every size, the array dct_type_size
# contains the output of the DCT applied to the input np.linspace(0, size-1,
# size)
FFTWDATA_DOUBLE = np.load(join(dirname(__file__), 'fftw_double_ref.npz'))
FFTWDATA_SINGLE = np.load(join(dirname(__file__), 'fftw_single_ref.npz'))
FFTWDATA_SIZES = FFTWDATA_DOUBLE['sizes']
def fftw_dct_ref(type, size, dt):
    """Return (input, FFTW reference output, result dtype) for a DCT of the
    given *type* and *size*, drawn from the precomputed reference tables."""
    x = np.linspace(0, size - 1, size).astype(dt)
    dt = np.result_type(np.float32, dt)
    if dt == np.double:
        table = FFTWDATA_DOUBLE
    elif dt == np.float32:
        table = FFTWDATA_SINGLE
    else:
        raise ValueError()
    y = table['dct_%d_%d' % (type, size)].astype(dt)
    return x, y, dt
def fftw_dst_ref(type, size, dt):
    """Return (input, FFTW reference output, result dtype) for a DST of the
    given *type* and *size*, drawn from the precomputed reference tables."""
    x = np.linspace(0, size - 1, size).astype(dt)
    dt = np.result_type(np.float32, dt)
    if dt == np.double:
        table = FFTWDATA_DOUBLE
    elif dt == np.float32:
        table = FFTWDATA_SINGLE
    else:
        raise ValueError()
    y = table['dst_%d_%d' % (type, size)].astype(dt)
    return x, y, dt
def dct_2d_ref(x, **kwargs):
    """Reference 2-D DCT: 1-D transform along each row, then each column."""
    out = np.array(x, copy=True)
    for r in range(out.shape[0]):
        out[r, :] = dct(out[r, :], **kwargs)
    for c in range(out.shape[1]):
        out[:, c] = dct(out[:, c], **kwargs)
    return out
def idct_2d_ref(x, **kwargs):
    """Reference 2-D inverse DCT: 1-D inverse along rows, then columns."""
    out = np.array(x, copy=True)
    for r in range(out.shape[0]):
        out[r, :] = idct(out[r, :], **kwargs)
    for c in range(out.shape[1]):
        out[:, c] = idct(out[:, c], **kwargs)
    return out
def dst_2d_ref(x, **kwargs):
    """Reference 2-D DST: 1-D transform along each row, then each column."""
    out = np.array(x, copy=True)
    for r in range(out.shape[0]):
        out[r, :] = dst(out[r, :], **kwargs)
    for c in range(out.shape[1]):
        out[:, c] = dst(out[:, c], **kwargs)
    return out
def idst_2d_ref(x, **kwargs):
    """Reference 2-D inverse DST: 1-D inverse along rows, then columns."""
    out = np.array(x, copy=True)
    for r in range(out.shape[0]):
        out[r, :] = idst(out[r, :], **kwargs)
    for c in range(out.shape[1]):
        out[:, c] = idst(out[:, c], **kwargs)
    return out
def naive_dct1(x, norm=None):
    """Textbook O(N^2) DCT-I, used as a reference implementation."""
    x = np.array(x, copy=True)
    N = len(x)
    M = N - 1
    y = np.zeros(N)
    if norm == 'ortho':
        m0 = np.sqrt(1.0 / M)
        m = np.sqrt(2.0 / M)
    else:
        m0, m = 1, 2
    for k in range(N):
        # Interior terms plus the two half-weighted endpoint terms.
        acc = m0 * x[0] + m0 * x[N - 1] * (1 if k % 2 == 0 else -1)
        for n in range(1, N - 1):
            acc += m * x[n] * np.cos(np.pi * n * k / M)
        y[k] = acc
    if norm == 'ortho':
        y[0] *= 1 / np.sqrt(2)
        y[N - 1] *= 1 / np.sqrt(2)
    return y
def naive_dst1(x, norm=None):
    """Textbook O(N^2) DST-I, used as a reference implementation."""
    x = np.array(x, copy=True)
    N = len(x)
    M = N + 1
    y = np.zeros(N)
    for k in range(N):
        y[k] = 2 * sum(x[n] * np.sin(np.pi * (n + 1.0) * (k + 1.0) / M)
                       for n in range(N))
    if norm == 'ortho':
        y *= np.sqrt(0.5 / M)
    return y
def naive_dct4(x, norm=None):
    """Textbook O(N^2) DCT-IV, used as a reference implementation."""
    x = np.array(x, copy=True)
    N = len(x)
    y = np.zeros(N)
    for k in range(N):
        y[k] = sum(x[n] * np.cos(np.pi * (n + 0.5) * (k + 0.5) / N)
                   for n in range(N))
    # Unnormalized convention scales by 2; ortho by sqrt(2/N).
    scale = np.sqrt(2.0 / N) if norm == 'ortho' else 2
    return y * scale
def naive_dst4(x, norm=None):
    """Textbook O(N^2) DST-IV, used as a reference implementation."""
    x = np.array(x, copy=True)
    N = len(x)
    y = np.zeros(N)
    for k in range(N):
        y[k] = sum(x[n] * np.sin(np.pi * (n + 0.5) * (k + 0.5) / N)
                   for n in range(N))
    # Unnormalized convention scales by 2; ortho by sqrt(2/N).
    scale = np.sqrt(2.0 / N) if norm == 'ortho' else 2
    return y * scale
class TestComplex:
    """The real transforms applied to purely imaginary input must equal
    1j times the transform of the real part (linearity check)."""

    def test_dct_complex64(self):
        actual = dct(1j * np.arange(5, dtype=np.complex64))
        expected = 1j * dct(np.arange(5))
        assert_array_almost_equal(expected, actual)

    def test_dct_complex(self):
        actual = dct(np.arange(5) * 1j)
        expected = 1j * dct(np.arange(5))
        assert_array_almost_equal(expected, actual)

    def test_idct_complex(self):
        actual = idct(np.arange(5) * 1j)
        expected = 1j * idct(np.arange(5))
        assert_array_almost_equal(expected, actual)

    def test_dst_complex64(self):
        actual = dst(np.arange(5, dtype=np.complex64) * 1j)
        expected = 1j * dst(np.arange(5))
        assert_array_almost_equal(expected, actual)

    def test_dst_complex(self):
        actual = dst(np.arange(5) * 1j)
        expected = 1j * dst(np.arange(5))
        assert_array_almost_equal(expected, actual)

    def test_idst_complex(self):
        actual = idst(np.arange(5) * 1j)
        expected = 1j * idst(np.arange(5))
        assert_array_almost_equal(expected, actual)
class _TestDCTBase:
    """Shared DCT checks; subclasses set rdt/dec/type in setup_method.

    (Reassembled: one assertion line was garbled by a mid-token split.)
    """

    def setup_method(self):
        self.rdt = None
        self.dec = 14
        self.type = None

    def test_definition(self):
        # Compare against precomputed FFTW reference outputs.
        for i in FFTWDATA_SIZES:
            x, yr, dt = fftw_dct_ref(self.type, i, self.rdt)
            y = dct(x, type=self.type)
            assert_equal(y.dtype, dt)
            # XXX: we divide by np.max(y) because the tests fail otherwise. We
            # should really use something like assert_array_approx_equal. The
            # difference is due to fftw using a better algorithm w.r.t error
            # propagation compared to the ones from fftpack.
            assert_array_almost_equal(y / np.max(y), yr / np.max(y),
                                      decimal=self.dec,
                                      err_msg="Size %d failed" % i)

    def test_axis(self):
        # A 2-D transform along an axis must equal row/column-wise 1-D calls.
        nt = 2
        for i in [7, 8, 9, 16, 32, 64]:
            x = np.random.randn(nt, i)
            y = dct(x, type=self.type)
            for j in range(nt):
                assert_array_almost_equal(y[j], dct(x[j], type=self.type),
                                          decimal=self.dec)

            x = x.T
            y = dct(x, axis=0, type=self.type)
            for j in range(nt):
                assert_array_almost_equal(y[:, j], dct(x[:, j], type=self.type),
                                          decimal=self.dec)
class _TestDCTIBase(_TestDCTBase):
    """DCT-I specific checks.

    (Reassembled: one line was garbled by a mid-token split.)
    """

    def test_definition_ortho(self):
        # Test orthornomal mode against the naive textbook implementation.
        dt = np.result_type(np.float32, self.rdt)
        for xr in X:
            x = np.array(xr, dtype=self.rdt)
            y = dct(x, norm='ortho', type=1)
            y2 = naive_dct1(x, norm='ortho')
            assert_equal(y.dtype, dt)
            assert_array_almost_equal(y / np.max(y), y2 / np.max(y),
                                      decimal=self.dec)
class _TestDCTIIBase(_TestDCTBase):
    """DCT-II specific checks."""

    def test_definition_matlab(self):
        # Orthonormal DCT-II must reproduce the MATLAB reference vectors.
        dt = np.result_type(np.float32, self.rdt)
        for xr, yr in zip(X, Y):
            x = np.array(xr, dtype=dt)
            y = dct(x, norm="ortho", type=2)
            assert_equal(y.dtype, dt)
            assert_array_almost_equal(y, yr, decimal=self.dec)
class _TestDCTIIIBase(_TestDCTBase):
    """DCT-III specific checks."""

    def test_definition_ortho(self):
        # In ortho mode, DCT-III is the exact inverse of DCT-II.
        dt = np.result_type(np.float32, self.rdt)
        for xr in X:
            x = np.array(xr, dtype=self.rdt)
            forward = dct(x, norm='ortho', type=2)
            roundtrip = dct(forward, norm="ortho", type=3)
            assert_equal(roundtrip.dtype, dt)
            assert_array_almost_equal(roundtrip, x, decimal=self.dec)
class _TestDCTIVBase(_TestDCTBase):
def test_definition_ortho(self):
# Test orthornomal mode.
dt = np.result_type(np.float32, self.rdt)
for xr in X:
x = np.array(xr, dtype=self.rdt)
y = dct(x, norm='ortho', type=4)
y2 = naive_dct4(x, norm='ortho')
assert_equal(y.dtype, dt)
assert
|
Micronaet/micronaet-mx8
|
sale_box_volume/__openerp__.py
|
Python
|
agpl-3.0
| 1,492 | 0.002011 |
###############################################################################
#
# Copyright (C) 2001-2014 Micronaet SRL (<http://www.micronaet.it>).
#
# This program is fre
|
e software: you can redistribute it and/or modify
|
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
{
'name': 'Volume box for DDT',
'version': '0.1',
'category': 'Account',
'description': '''
Add volume box for DDT volume info
''',
'author': 'Micronaet S.r.l. - Nicola Riolini',
'website': 'http://www.micronaet.it',
'license': 'AGPL-3',
'depends': [
'base',
'l10n_it_ddt',
],
'init_xml': [],
'demo': [],
'data': [
'security/ir.model.access.csv',
'volume_view.xml',
],
'active': False,
'installable': True,
'auto_install': False,
}
|
MicroTrustRepos/microkernel
|
src/l4/pkg/python/contrib/Doc/includes/tzinfo-examples.py
|
Python
|
gpl-2.0
| 5,063 | 0.003555 |
from datetime import tzinfo, timedelta, datetime
ZERO = timedelta(0)
HOUR = timedelta(hours=1)
# A UTC class.
class UTC(tzinfo):
    """Concrete tzinfo for Coordinated Universal Time (fixed zero offset)."""

    def utcoffset(self, dt):
        return timedelta(0)

    def tzname(self, dt):
        return "UTC"

    def dst(self, dt):
        # UTC never observes daylight saving time.
        return timedelta(0)

# Module-level singleton instance.
utc = UTC()
# A class building tzinfo objects for fixed-offset time zones.
# Note that FixedOffset(0, "UTC") is a different way to build a
# UTC tzinfo object.
class FixedOffset(tzinfo):
    """Fixed offset in minutes east from UTC."""

    def __init__(self, offset, name):
        self.__offset = timedelta(minutes=offset)
        self.__name = name

    def utcoffset(self, dt):
        return self.__offset

    def tzname(self, dt):
        return self.__name

    def dst(self, dt):
        # A fixed-offset zone never observes daylight saving time.
        return timedelta(0)
# A class capturing the platform's idea of local time.
import time as _time
STDOFFSET = timedelta(seconds = -_time.timezone)
if _time.daylight:
DSTOFFSET = timedelta(seconds = -_time.altzone)
else:
DSTOFFSET = STDOFFSET
DSTDIFF = DSTOFFSET - STDOFFSET
class LocalTimezone(tzinfo):
    """tzinfo that mirrors the platform's local-time rules via the C library.

    (Reassembled: the trailing instance assignment was garbled by a
    mid-token split.)
    """

    def utcoffset(self, dt):
        if self._isdst(dt):
            return DSTOFFSET
        else:
            return STDOFFSET

    def dst(self, dt):
        if self._isdst(dt):
            return DSTDIFF
        else:
            return ZERO

    def tzname(self, dt):
        return _time.tzname[self._isdst(dt)]

    def _isdst(self, dt):
        # Round-trip through mktime/localtime so the C library decides
        # whether DST is in effect at this wall-clock instant.
        tt = (dt.year, dt.month, dt.day,
              dt.hour, dt.minute, dt.second,
              dt.weekday(), 0, -1)
        stamp = _time.mktime(tt)
        tt = _time.localtime(stamp)
        return tt.tm_isdst > 0

Local = LocalTimezone()
# A complete implementation of current DST rules for major US time zones.
def first_sunday_on_or_after(dt):
    """Return *dt* advanced to the first Sunday on or after it."""
    # weekday(): Monday == 0 ... Sunday == 6, so this is 0 on a Sunday.
    remaining = 6 - dt.weekday()
    if remaining:
        dt += timedelta(remaining)
    return dt
# US DST Rules
#
# This is a simplified (i.e., wrong for a few cases) set of rules for US
# DST start and end times. For a complete and up-to-date set of DST rules
# and timezone definitions, visit the Olson Database (or try pytz):
# http://www.twinsun.com/tz/tz-link.htm
# http://sourceforge.net/projects/pytz/ (might not be up-to-date)
#
# In the US, since 2007, DST starts at 2am (standard time) on the second
# Sunday in March, which is the first Sunday on or after Mar 8.
DSTSTART_2007 = datetime(1, 3, 8, 2)
# and ends at 2am (DST time; 1am standard time) on the first Sunday of Nov.
DSTEND_2007 = datetime(1, 11, 1, 1)
# From 1987 to 2006, DST used to start at 2am (standard time) on the first
# Sunday in April and to end at 2am (DST time; 1am standard time) on the last
# Sunday of October, which is the first Sunday on or after Oct 25.
DSTSTART_1987_2006 = datetime(1, 4, 1, 2)
DSTEND_1987_2006 = datetime(1, 10, 25, 1)
# From 1967 to 1986, DST used to start at 2am (standard time) on the last
# Sunday in April (the one on or after April 24) and to end at 2am (DST time;
# 1am standard time) on the last Sunday of October, which is the first Sunday
# on or after Oct 25.
DSTSTART_1967_1986 = datetime(1, 4, 24, 2)
DSTEND_1967_1986 = DSTEND_1987_2006
class USTimeZone(tzinfo):
    """US time zone with the historical DST rules defined by the module
    constants above (1967-1986, 1987-2006, 2007-present eras)."""

    def __init__(self, hours, reprname, stdname, dstname):
        # hours: standard-time offset east of UTC (negative in the US).
        self.stdoffset = timedelta(hours=hours)
        self.reprname = reprname
        self.stdname = stdname
        self.dstname = dstname

    def __repr__(self):
        return self.reprname

    def tzname(self, dt):
        if self.dst(dt):
            return self.dstname
        else:
            return self.stdname

    def utcoffset(self, dt):
        # Standard offset plus the (possibly zero) DST correction.
        return self.stdoffset + self.dst(dt)

    def dst(self, dt):
        if dt is None or dt.tzinfo is None:
            # An exception may be sensible here, in one or both cases.
            # It depends on how you want to treat them.  The default
            # fromutc() implementation (called by the default astimezone()
            # implementation) passes a datetime with dt.tzinfo is self.
            return ZERO
        assert dt.tzinfo is self

        # Find start and end times for US DST. For years before 1967, return
        # ZERO for no DST.
        if 2006 < dt.year:
            dststart, dstend = DSTSTART_2007, DSTEND_2007
        elif 1986 < dt.year < 2007:
            dststart, dstend = DSTSTART_1987_2006, DSTEND_1987_2006
        elif 1966 < dt.year < 1987:
            dststart, dstend = DSTSTART_1967_1986, DSTEND_1967_1986
        else:
            return ZERO

        # Anchor the era's template dates into dt's year, then snap each to
        # the first Sunday on or after it.
        start = first_sunday_on_or_after(dststart.replace(year=dt.year))
        end = first_sunday_on_or_after(dstend.replace(year=dt.year))

        # Can't compare naive to aware objects, so strip the timezone from
        # dt first.
        if start <= dt.replace(tzinfo=None) < end:
            return HOUR
        else:
            return ZERO
# Pre-built instances for the four major US time zones.
Eastern  = USTimeZone(-5, "Eastern",  "EST", "EDT")
Central  = USTimeZone(-6, "Central",  "CST", "CDT")
Mountain = USTimeZone(-7, "Mountain", "MST", "MDT")
Pacific  = USTimeZone(-8, "Pacific",  "PST", "PDT")
|
hashinisenaratne/HSTML
|
lib/scripts/include_bib.py
|
Python
|
gpl-2.0
| 2,972 | 0.01817 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# file include_bib.py
# This file is part of LyX, the document processor.
# Licence details can be found in the file COPYING.
# authors Richard Heck and [SchAirport]
# Full author contact details are available in file CREDITS
# This script is intended to include a BibTeX-generated biblography
# in a LaTeX file, as publishers often want. It can be run manually
# on an exported LaTeX file, though it needs to be compiled first,
# so the bbl file will exist.
#
# It should also be possible to create a LyX converter to run this
# automatically. To set it up, create a format "ltxbbl"; make sure to
# check it as a document format. Then create a LaTeX-->ltxbbl converter,
# with the command:
# python -tt $$s/scripts/include_bib.py $$i $$o
# and give it the flags:
# needaux,nice
# You'll the
|
n have it in the export menu.
#
# We do not activate this converter by default, because there are problems
# when one tries to use multiple bibliographies.
#
# Please report any problems on the devel list.
import sys, os
class secbib:
    """Record of a bibliography section's start and end line indices.

    (Reassembled: the __init__ signature was garbled by a mid-token split.)
    """

    def __init__(self, start=-1, end=-1):
        self.start = start
        self.end = end
class BibError(Exception):
    """Raised when the LaTeX source cannot be processed for bbl inclusion."""

    def __init__(self, msg):
        self.msg = msg

    def __str__(self):
        return self.msg
def InsertBib(fil, out):
    ''' Inserts the contents of the .bbl file instead of the bibliography in a new .tex file

    Fix: the original opened three files without ever closing them; use
    context managers so handles are released promptly.  Raises BibError for
    sectioned, multiple, or missing bibliographies.  Returns *out*.
    '''
    with open(fil, 'r') as texfile:
        texlist = texfile.readlines()

    # multiple bibliographies.  Note "\bibliographystyle" also contains
    # "\bibliography", so it must be tested first.
    biblist = []
    stylist = []
    for i, line in enumerate(texlist):
        if "\\bibliographystyle" in line:
            stylist.append(i)
        elif "\\bibliography" in line:
            biblist.append(i)
        elif "\\begin{btSect}" in line:
            raise BibError("Cannot export sectioned bibliographies")

    if len(biblist) > 1:
        raise BibError("Cannot export multiple bibliographies.")
    if not biblist:
        raise BibError("No biliography found!")

    # Splice the .bbl contents in place of the \bibliography line.
    bibpos = biblist[0]
    bblfile = fil[:-4] + ".bbl"
    with open(bblfile, 'r') as bbl:
        bbllist = bbl.readlines()

    newlist = texlist[0:bibpos] + bbllist + texlist[bibpos + 1:]
    with open(out, 'w') as outfile:
        outfile.write("".join(newlist))
    return out
def usage():
    # Print command-line help.  Python 2 print statement with a raw
    # triple-quoted string — this script targets Python 2.
    print r'''
Usage: python include_bib.py file.tex [outfile.tex]
Includes the contents of file.bbl, which must exist in the
same directory as file.tex, in place of the \bibliography
command, and creates the new file outfile.tex. If no name
for that file is given, we create: file-bbl.tex.
'''
if __name__ == "__main__":
    # Accept one or two positional arguments: input .tex and optional output.
    args = len(sys.argv)
    if args <= 1 or args > 3:
        usage()
        sys.exit(0)
    # we might should make sure this is a tex file....
    infile = sys.argv[1]
    if infile[-4:] != ".tex":
        print "Error: " + infile + " is not a TeX file"
        usage()
        sys.exit(1)
    if args == 3:
        outfile = sys.argv[2]
    else:
        # Default output name: foo.tex -> foo-bbl.tex
        outfile = infile[:-4] + "-bbl.tex"
    newfile = InsertBib(infile, outfile)
    print "Wrote " + outfile
|
Endika/event
|
event_project/wizard/project_template_wizard.py
|
Python
|
agpl-3.0
| 1,219 | 0 |
# -*- coding: utf-8 -*-
# See README.rst file on addon root folder for license details
from openerp import models, fields, exceptions, api
from openerp.tools.translate import _
class ProjectTemplateWizard(models.TransientModel):
    """Wizard that duplicates a template project and attaches it to an event.

    (Reassembled: one line was garbled by a mid-token split.)
    """
    _name = 'project.template.wizard'

    # Only projects in the 'template' state may be selected.
    project_id = fields.Many2one(
        comodel_name='project.project', string='Template project',
        domain="[('state', '=', 'template')]")
    event_id = fields.Many2one(comodel_name='event.event')

    @api.one
    def project_template_duplicate(self):
        """Duplicate the selected template and link the copy to the event."""
        if not self.project_id:
            raise exceptions.ValidationError(
                _('Template project is required.'))
        parent_id = self.project_id.parent_id.id
        res = self.project_id.with_context(
            self.env.context, parent_id=parent_id).duplicate_template()
        # no_recalculate: avoid triggering a recalculation while linking.
        self.with_context(
            {'no_recalculate': True}).event_id.project_id = res['res_id']
        self.event_id.project_id.write({
            'name': self.event_id.name,
            'date_start': self.event_id.date_begin,
            'date': self.event_id.date_begin,
            'calculation_type': 'date_end',
        })
        self.event_id.project_id.project_recalculate()
| |
richardcornish/smsweather
|
gunicorn.py
|
Python
|
bsd-3-clause
| 95 | 0.010526 |
# Gunicorn configuration (reassembled: two lines were garbled by
# mid-token splits).
bind = 'unix:/run/emojiweather.sock'  # serve over a unix domain socket
pythonpath = 'emojiweather'
workers = 2
accesslog = '-'  # access log to stdout
|
wiki-ai/wb-vandalism
|
wb_vandalism/feature_lists/wikidata.py
|
Python
|
mit
| 6,094 | 0.003446 |
from revscoring.features import user
from revscoring.features.modifiers import not_, log
from ..features import diff, revision
class properties:
    """
    Mapping of english descriptions to Wikidata property identifiers
    (P-numbers) referenced by the feature lists below.
    """
    IMAGE = 'P18'
    SEX_OR_GENDER = 'P21'
    COUNTRY_OF_CITIZENSHIP = 'P27'
    INSTANCE_OF = 'P31'
    MEMBER_OF_SPORTS_TEAM = 'P54'
    SIGNATURE = 'P109'
    COMMONS_CATEGORY = 'P373'
    DATE_OF_BIRTH = 'P569'
    DATE_OF_DEATH = 'P570'
    OFFICIAL_WEBSITE = 'P856'
class items:
    """
    Mapping of english descriptions to Wikidata item identifiers
    (Q-numbers) referenced by the feature lists below.
    """
    HUMAN = 'Q5'
# Comment features
is_client_delete = revision.comment_matches(r"^\/\* clientsitelink\-remove\:",
name='revision.is_client_delete')
is_client_move = revision.comment_matches(r"^\/\* clientsitelink\-update\:",
name='revision.is_client_move')
is_merge_into = revision.comment_matches(r"^\/\* wbmergeitems\-to\:",
name='revision.is_merge_into')
is_merge_from = revision.comment_matches(r"^\/\* wbmergeitems\-from\:",
name='revision.is_merge_from')
is_revert = \
revision.comment_matches(r"^Reverted edits by \[\[Special\:Contributions",
name='revision.is_revert')
is_rollback = revision.comment_matches(r"^Undid revision ",
name='revision.is_rollback')
is_restore = revision.comment_matches(r"^Restored revision ",
name='revision.is_restore')
is_item_creation = revision.comment_matches(r"^\/\* (wbsetentity|"
r"wbeditentity-create\:0\|) \*\/",
name='revision.is_item_creation')
# Properties changed
sex_or_gender_changed = \
diff.property_changed(properties.SEX_OR_GENDER,
name='diff.sex_or_gender_changed')
country_of_citizenship_changed = \
diff.property_changed(properties.COUNTRY_OF_CITIZENSHIP,
name='diff.country_of_citizenship_changed')
member_of_sports_team_changed = \
diff.property_changed(properties.MEMBER_OF_SPORTS_TEAM,
name='diff.member_of_sports_team_changed')
date_of_birth_changed = \
diff.property_changed(properties.DATE_OF_BIRTH,
name='diff.date_of_birth_changed')
image_changed = \
diff.property_changed(properties.IMAGE,
name='diff.image_changed')
signature_changed = \
diff.property_changed(properties.SIGNATURE,
name='diff.signature_changed')
commons_category_changed = \
diff.property_changed(properties.COMMONS_CATEGORY,
name='diff.commons_category_changed')
official_website_changed = \
diff.property_changed(properties.OFFICIAL_WEBSITE,
name='diff.official_website_changed')
# Status
is_human = \
revision.has_property_value(properties.INST
|
ANCE_OF, items.HUMAN,
name='revision.is_human')
has_birthday = \
revision.has_property(properties.DATE_OF_BIRTH,
name='revision.has_birthday')
dead = \
revision.has_property(properties.DATE_OF_DEATH,
name='revision.dead')
is_blp = has_birthday.and_(not_(dead))
reverted = [
# revscoring.features.diff.longest_repeated_char_added,
# revscoring.features.diff.longest_token_added,
# log(revs
|
coring.features.diff.numeric_chars_added + 1),
# log(revscoring.features.diff.numeric_chars_removed + 1),
# revscoring.features.diff.proportion_of_chars_added,
# revscoring.features.diff.proportion_of_chars_removed,
# revscoring.features.diff.proportion_of_numeric_chars_added,
# revscoring.features.diff.proportion_of_symbolic_chars_added,
# revscoring.features.diff.proportion_of_uppercase_chars_added,
# log(revscoring.features.diff.symbolic_chars_added + 1),
# log(revscoring.features.diff.symbolic_chars_removed + 1),
# log(revscoring.features.diff.uppercase_chars_added + 1),
# log(revscoring.features.diff.uppercase_chars_removed + 1),
# revscoring.features.diff.bytes_changed + 1,
# revscoring.featuresdiff.bytes_changed_ratio,
# page.is_content_namespace,
# parent_revision.was_same_user,
log(user.age + 1),
diff.number_added_sitelinks,
diff.number_removed_sitelinks,
diff.number_changed_sitelinks,
diff.number_added_labels,
diff.number_removed_labels,
diff.number_changed_labels,
diff.number_added_descriptions,
diff.number_removed_descriptions,
diff.number_changed_descriptions,
diff.number_added_aliases,
diff.number_removed_aliases,
diff.number_added_claims,
diff.number_removed_claims,
diff.number_changed_claims,
diff.number_changed_identifiers,
diff.en_label_touched,
diff.number_added_sources,
diff.number_removed_sources,
diff.number_added_qualifiers,
diff.number_removed_qualifiers,
diff.number_added_badges,
diff.number_removed_badges,
# diff.mean_distance_descriptions,
# diff.mean_distance_labels,
diff.proportion_of_qid_added,
diff.proportion_of_language_added,
diff.proportion_of_links_added,
is_client_move,
is_client_delete,
is_merge_into,
is_merge_from,
is_revert,
is_rollback,
is_restore,
is_item_creation,
sex_or_gender_changed,
country_of_citizenship_changed,
member_of_sports_team_changed,
date_of_birth_changed,
image_changed,
signature_changed,
commons_category_changed,
official_website_changed,
log(revision.number_claims + 1),
log(revision.number_aliases + 1),
log(revision.number_sources + 1),
log(revision.number_qualifiers + 1),
log(revision.number_badges + 1),
log(revision.number_labels + 1),
log(revision.number_sitelinks + 1),
log(revision.number_descriptions + 1),
is_human,
is_blp,
user.is_bot,
user.is_anon,
]
|
trailofbits/manticore
|
manticore/core/smtlib/__init__.py
|
Python
|
agpl-3.0
| 268 | 0 |
# Public re-exports for the smtlib package (reassembled: two lines were
# garbled by mid-token splits).
from .expression import Expression, Bool, BitVec, Array, BitVecConstant, issymbolic  # noqa
from .constraints import ConstraintSet  # noqa
from .solver import *  # noqa
from . import operators as Operators  # noqa
import logging

logger = logging.getLogger(__name__)
|
comodit/combox
|
combox/control.py
|
Python
|
mit
| 569 | 0.010545 |
# Module imports (reassembled: two lines were garbled by mid-token splits);
# regrouped stdlib / third-party / local, one import per line.
import os
import sys
import time

from comodit_client.api import Client
from comodit_client.api.exceptions import PythonApiException
from comodit_client.rest.exceptions import ApiException

import helper
from combox.config import config
from helper import create_host, get_short_hostname, exec_cmd, exec_cmds, fork_cmd
def stop():
print "Stopping virtual machine"
success = exec_cmds(['VBoxManage controlvm "%s" poweroff' % config['vm']['name']])
def start():
print "Starting virtual machine"
fork_cmd('VBoxManage startvm --type headless "%s"' % config['vm']['name'])
|
FabienPean/sofa
|
applications/plugins/SofaPython/examples/sceneDataIO_write.py
|
Python
|
lgpl-2.1
| 5,843 | 0.004963 |
import sys, os, platform, math
import Sofa
import Flexible.IO
import Flexible.sml
import SofaPython.Tools
from SofaPython.Tools import listToStr as concat
import numpy
from numpy import linalg
# variables
__file = __file__.replace('\\', '/') # windows
CURRENTDIR = os.path.dirname(os.path.abspath(__file__))+'/'
CURRENTDIR = CURRENTDIR.replace('//', '/') # windows compatible filename
#=====================================================================================
# Scene lauch
#=====================================================================================
def createScene(root_node):
root_node.createObject('RequiredPlugin', name='image')
root_node.createObject('RequiredPlugin', name='Flexible')
root_node.createObject('RequiredPlugin', name='Compliant')
root_node.createObject('CompliantAttachButtonSetting')
root_node.createObject('PythonScriptController', name='MyClass', filename=__file, classname='MyClass')
# ================================================================= #
# Creation of the scene
# ================================================================= #
class MyClass(Sofa.PythonScriptController):
# Setup of class attributes
def setup(self):
return
def createGraph(self, root):
self.setup()
self.node = root
self.node.createObject('VisualStyle', displayFlags='showVisual hideWireframe showBehaviorModels showForceFields showInteractionForceFields')
self.node.createObject('BackgroundSetting',color='1 1 1')
self.node.gravity = '0 -9.81 0'
self.node.dt = .1
# compliant solver
self.node.createObject('CompliantImplicitSolver', stabilization=1)
self.node.createObject('SequentialSolver', iterations=75, precision=1E-15, iterateOnBilaterals=1)
self.node.createObject('LDLTResponse', schur=0)
# beam creation
self.mainNode = self.node.createChild('deformable')
self.mainNode.createObject('RegularGrid', name='grid', n='25 5 5', min='0. 0. 0.', max='4. 1. 1.')
self.mainNode.createObject('MeshToImageEngine', template='ImageUC', name='rasterizer', src='@grid', value=1, insideValue=1, voxelSize=0.025, padSize=0, rotateImage='false')
self.mainNode.createObject('ImageContainer', template='ImageUC', name='image', src='@rasterizer', drawBB='false')
self.mainNode.createObject('ImageSampler', template='ImageUC', name='sampler', src='@image', method=1, param='2 0', clearData=0)
self.mainNode.createObject('MechanicalObject', template='Affine', name='parent', position='@sampler.position', rest_position='@sampler.position', showObject=1, showObjectScale='0.1')
self.mainNode.createObject('VoronoiShapeFunction', template='ShapeFunctiond,ImageUC', name='SF', position='@parent.rest_position', image='@image.image', transform='@image.transform', nbRef=4, clearData=1, bias=0)
self.mainNode.createObject('FixedConstraint', template='Affine', indices='0')
# behavior
behaviorNode = self.mainNode.createChild('behavior')
behaviorNode.createObject('ImageGaussPointSampler', name='sampler', indices='@../SF.indices', weights='@../SF.weights', transform='@../SF.transform', method=2, order=4, targetNumber=10)
behaviorNode.createObject('MechanicalObject', template='F332')
behaviorNode.createObject('LinearMapping', template='Affine,F332')
eNode = behaviorNode.createChild('E')
eNod
|
e.cr
|
eateObject('MechanicalObject', template='E332', name='E')
eNode.createObject('CorotationalStrainMapping', template='F332,E332', method='polar')
eNode.createObject('HookeForceField', template='E332', name='ff', youngModulus='1E3', poissonRatio='0', viscosity='0')
# contact and visual model
contactNode = self.mainNode.createChild('registration')
contactNode.createObject('MeshTopology', name='topo', src='@../grid')
contactNode.createObject('MechanicalObject', name='DOFs')
contactNode.createObject('UniformMass', totalMass=1)
contactNode.createObject('TriangleModel')
contactNode.createObject('LinearMapping', template='Affine,Vec3d')
visuNode = contactNode.createChild('visual')
visuNode.createObject('OglModel', template='ExtVec3f', name='visual', src='@../topo', color='0.8 0.2 0.2 1')
visuNode.createObject('IdentityMapping', template='Vec3d,ExtVec3f')
global sceneDataIO
sceneDataIO = SofaPython.Tools.SceneDataIO(self.node)
sceneDataIO.classNameList = ['MechanicalObject', 'OglModel', 'VisualModel']
def cleanup(self):
print 'cleanup: the scene has been close'
self.saveState('SceneDataIO')
print 'The scene state has been save at t=', self.node.getTime()
def onEndAnimationStep(self, dt):
if self.node.getTime() >= dt*5 and self.node.getTime() < dt*6:
self.saveState('SceneDataIO')
print 'The scene state has been save at t=', self.node.getTime()
# ===============================================================================
# Scene methods
# ===============================================================================
def saveState(self, directory=None):
# create the directory where the simulation state will be store
if directory == None:
directory = os.path.basename(__file__).split('.')[0] + '_at_t_' + str(self.node.getTime())
if not os.path.isdir(directory):
try:
os.makedirs(directory)
except OSError:
if not os.path.isdir(directory):
raise
# store the data
sceneDataIO.writeData(directory)
|
Makerblaker/Sprinkler
|
server.py
|
Python
|
gpl-3.0
| 4,160 | 0.040625 |
#!/usr/bin/python3
import _thread
import RPi.GPIO as GPIO
import socket
import time
from time import sleep
from sys import exit
import datetime
#import MySQLdb
# Start task command
# sleep 30 && python /home/pi/Scripts/Sprinkler/Sprinkler.py > /home/pi/Scripts/Sprinkler/log.txt 2>&1
# Set GPIO output points
Zones = [5, 6, 13, 19]
StatusLED = 16
# Set GPIO input points
CancelButton = 18
WaterSensor = 10
# Water Sensor Enabled?
Sensor = False
#Is it currently raining
isRaining = False
defaultWaitDuration = 0
def setup():
global serversocket,t
# Setup GPIO
GPIO.setmode(GPIO.BCM)
GPIO.setwarnings(True)
# Input Cancel Button
GPIO.setup(CancelButton, GPIO.IN, pull_up_down=GPIO.PUD_UP)
# Input Rain Sensor
if Sensor:
GPIO.setup(WaterSensor, GPIO.IN, pull_up_down=GPIO.PUD_UP)
# Setup 4 zones on GPIO
# Turn all Zones "OFF"
for i in Zones:
GPIO.setup(i, GPIO.OUT)
GPIO.output(i, GPIO.HIGH)
# Setup status LED
GPIO.setup(StatusLED, GPIO.OUT)
# Setup Sockets
serversocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
host = socket.gethostname()
port = 9999
serversocket.bind((host, port))
serversocket.listen(5)
addLog("System", "Setup complete")
de
|
f mainRun():
global isRaining
addLog("System", "Main Thread started")
# Always check the switch
_thread.start_new_thread(checkSwitch, ((),))
while True:
global se
|
rversocket
clientsocket,addr = serversocket.accept()
fromClient = clientsocket.recv(1024)
clientsocket.close()
strFromClient = str(fromClient.decode("ascii"))
addLog("Recived", strFromClient)
# Split incoming message
requestType = strFromClient.split(":")
# Do something with that message
# What was the command?
if(requestType[0] == "WATER"):
# Is it raining
if(isRaining == False):
# Turn off LED if it was raining
statusLED("off")
# Start watering
_thread.start_new_thread(water, (requestType[1], requestType[2], ) )
elif(requestType[0] == "ZONE"):
if(requestType[1] == "ON"):
zone(int(requestType[2]), "ON")
else:
zone(int(requestType[2]), "OFF")
elif(requestType[0] == "RainStatus"):
# Some day we will send something back
print("nothing")
elif(requestType[0] == "QUIT"):
destroy()
# Check switch
def checkSwitch(self):
global isRaining
while True:
state = GPIO.input(CancelButton)
if(state):
if(state != isRaining):
addLog("System", "Switch TRUE")
statusLED("solid")
isRaining = True
else:
if(state != isRaining):
addLog("System", "Switch FALSE")
statusLED("off")
isRaining = False
sleep(2)
# Water the lawn
def water(zoneNum, duration):
# Turn on zone
zone(int(zoneNum), "ON")
statusLED("on")
# Sleep for that amount
sleep(int(duration) * 60)
# Turn off zone
zone(int(zoneNum), "OFF")
statusLED("off")
# Zone Control Setup
def zone(zoneSelect, onoff):
if(onoff == "ON"):
GPIO.output(Zones[zoneSelect], 0)
addLog('Zone ' + str(zoneSelect), 'ON')
else:
GPIO.output(Zones[zoneSelect], 1)
addLog('Zone ' + str(zoneSelect), 'OFF')
def rain():
global isRaining
# Check if it's raining
if Sensor:
if GPIO.input(WaterSensor):
isRaining = True
else:
isRaining = False
def statusLED(status):
if status == "blink":
GPIO.output(StatusLED, GPIO.HIGH)
sleep(0.5)
GPIO.output(StatusLED, GPIO.LOW)
sleep(0.5)
elif status == "solid":
GPIO.output(StatusLED, GPIO.HIGH)
elif status == "off":
GPIO.output(StatusLED, GPIO.LOW)
def addLog(currentZone, addedText):
now = datetime.datetime.now()
print ("{0}: {1}: {2}".format(now, currentZone, addedText))
def destroy():
global serversocket
serversocket.shutdown(socket.SHUT_RDWR)
for i in Zones:
GPIO.output(i, GPIO.LOW)
GPIO.output(StatusLED, GPIO.LOW)
addLog('System', 'Sprinkler Script OFF')
exit()
if __name__ == '__main__':
setup()
try:
mainRun()
except KeyboardInterrupt:
destroy()
finally:
GPIO.cleanup()
exit()
else:
destroy()
|
LordDarkula/chess_py
|
chess_py/core/algebraic/notation_const.py
|
Python
|
mit
| 330 | 0 |
# -*- coding: utf-8 -*-
"""
Class stores integer values for various types of moves in algebraic notation.
Copyright © 2016 Aubhro Sengupta. All rights reserved.
"""
MOVE
|
MENT = 0
CAPTURE = 1
KING_SIDE_CASTLE = 2
QUEEN_SIDE_CASTL
|
E = 3
EN_PASSANT = 4
PROMOTE = 5
CAPTURE_AND_PROMOTE = 6
NOT_IMPLEMENTED = 7
LONG_ALG = 8
|
google/openhtf
|
openhtf/util/xmlrpcutil.py
|
Python
|
apache-2.0
| 4,133 | 0.008226 |
# Copyright 2016 Google Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility helpers for xmlrpclib."""
import http.client
import socketserver
import sys
import threading
import xmlrpc.client
import xmlrpc.server
import six
from six.moves import collections_abc
DEFAULT_P
|
ROXY_TIMEOUT_S = 3
# https://github.com/PythonCharmers/python-future/issues/280
# pylint: disable=g-import-not-at-top,g-importing-member
if sys.version_info[0] < 3:
from SimpleXMLRPCServer import SimpleXMLRPCServer # pytype: disable=import-error
else:
from xmlrpc.server import SimpleXMLRPCServer # pytype: disable=import-error
# pylint: enable=g-import-not-at-top,g-importing-member
class TimeoutHTTPConnection(http.client.HTTPConnection): # pylint: di
|
sable=missing-class-docstring
def __init__(self, timeout_s, *args, **kwargs):
http.client.HTTPConnection.__init__(self, *args, **kwargs)
self.timeout_s = timeout_s
def settimeout(self, timeout_s):
self.timeout_s = timeout_s
self.sock.settimeout(self.timeout_s)
def connect(self):
http.client.HTTPConnection.connect(self)
self.sock.settimeout(self.timeout_s)
class TimeoutTransport(xmlrpc.client.Transport): # pylint: disable=missing-class-docstring
def __init__(self, timeout_s, *args, **kwargs):
xmlrpc.client.Transport.__init__(self, *args, **kwargs)
self._connection = None
self.timeout_s = timeout_s
def settimeout(self, timeout_s):
self.timeout_s = timeout_s
if self._connection:
self._connection[1].settimeout(timeout_s)
def make_connection(self, host):
if not self._connection or host != self._connection[0]:
self._connection = host, TimeoutHTTPConnection(self.timeout_s, host)
return self._connection[1]
class BaseServerProxy(xmlrpc.client.ServerProxy, object):
"""New-style base class for ServerProxy, allows for use of Mixins below."""
class TimeoutProxyMixin(object):
"""Timeouts for ServerProxy objects."""
def __init__(self, *args, **kwargs):
kwargs.update(
transport=TimeoutTransport(
kwargs.pop('timeout_s', DEFAULT_PROXY_TIMEOUT_S)))
super(TimeoutProxyMixin, self).__init__(*args, **kwargs)
def __settimeout(self, timeout_s):
if six.PY3:
self._transport.settimeout(timeout_s) # pytype: disable=attribute-error
else:
self.__transport.settimeout(timeout_s) # pytype: disable=attribute-error
class TimeoutProxyServer(TimeoutProxyMixin, BaseServerProxy):
"""A BaseServerProxy plus timeouts."""
class LockedProxyMixin(object):
"""A ServerProxy that locks calls to methods."""
def __init__(self, *args, **kwargs):
super(LockedProxyMixin, self).__init__(*args, **kwargs)
self._lock = threading.Lock()
def __getattr__(self, attr):
method = super(LockedProxyMixin, self).__getattr__(attr) # pytype: disable=attribute-error
if isinstance(method, collections_abc.Callable):
# xmlrpc doesn't support **kwargs, so only accept *args.
def _wrapper(*args):
with self._lock:
return method(*args)
# functools.wraps() doesn't work with _Method internal type within
# xmlrpclib. We only care about the name anyway, so manually set it.
_wrapper.__name__ = attr
return _wrapper
return method
class LockedTimeoutProxy(TimeoutProxyMixin, LockedProxyMixin, BaseServerProxy):
"""ServerProxy with additional features we use."""
class SimpleThreadedXmlRpcServer(socketserver.ThreadingMixIn,
SimpleXMLRPCServer):
"""Helper for handling multiple simultaneous RPCs in threads."""
daemon_threads = True
|
Kotaimen/stonemason
|
stonemason/formatbundle/exceptions.py
|
Python
|
mit
| 259 | 0 |
# -*- encoding: utf-8 -*-
__author__ = 'kotaimen'
__date__ = '2/19/15'
class FormatError(Exception):
pass
class InvalidMapType(FormatError):
pass
class InvalidTileFormat(FormatError):
pass
class NoMatch
|
ingMapWrit
|
er(FormatError):
pass
|
srottem/indy-sdk
|
docs/how-tos/negotiate-proof/python/step2.py
|
Python
|
apache-2.0
| 2,916 | 0.006859 |
# 1.
print_log('\n1. Creates Issuer wallet and opens it to get handle.\n')
await
wallet.create_wallet(pool_name, issuer_wallet_name, None, None, None)
issuer_wallet_handle = await
wallet.open_wallet(issuer_wallet_name, None, None)
# 2.
print_log('\n2. Creates Prover wallet and opens it to get handle.\n')
await
wallet.create_wallet(pool_name, prover_wallet_name, None, None, None)
prover_wallet_handle = await
wallet.open_wallet(prover_wallet_name, None, None)
# 3.
print_log('\n3. Issuer creates Claim Definition for Schema\n')
schema = {
'seqNo': seq_no,
'dest': issuer_did,
'data': {
|
'name': 'gvt',
'version': '1.0',
'attr_names': ['age', 'sex', 'height', 'name']
}
}
schema_json = json.dumps(schema)
schema_key = {
'name': schema['data']['name'],
'version': schema['data']['version'],
'did': schema['dest'],
}
claim_def_json = await
anoncreds.issuer_create_and_store_claim_def(issuer_wallet_handle, issuer_did, schema_json, 'CL', False)
|
print_log('Claim Definition: ')
pprint.pprint(json.loads(claim_def_json))
# 4.
print_log('\n4. Prover creates Link Secret\n')
link_secret_name = 'link_secret'
await
anoncreds.prover_create_master_secret(prover_wallet_handle, link_secret_name)
# 5.
print_log('\n5. Issuer create Cred Offer\n')
claim_offer_json = await
anoncreds.issuer_create_claim_offer(issuer_wallet_handle, schema_json, issuer_did, prover_did)
print_log('Claim Offer: ')
pprint.pprint(json.loads(claim_offer_json))
# 6.
print_log('\n6. Prover creates and stores Cred Request\n')
claim_req_json = await
anoncreds.prover_create_and_store_claim_req(prover_wallet_handle, prover_did, claim_offer_json,
claim_def_json, link_secret_name)
print_log('Claim Request: ')
pprint.pprint(json.loads(claim_req_json))
# 7.
print_log('\n7. Issuer creates Credential for received Cred Request\n')
claim_json = json.dumps({
'sex': ['male', '5944657099558967239210949258394887428692050081607692519917050011144233115103'],
'name': ['Alex', '1139481716457488690172217916278103335'],
'height': ['175', '175'],
'age': ['28', '28']
})
(_, claim_json) = await
anoncreds.issuer_create_claim(issuer_wallet_handle, claim_req_json, claim_json, -1)
# 8.
print_log('\n8. Prover processes and stores received Credential\n')
await
anoncreds.prover_store_claim(prover_wallet_handle, claim_json, None)
|
pwndbg/pwndbg
|
pwndbg/prompt.py
|
Python
|
mit
| 1,730 | 0.001734 |
import gdb
import pwndbg.decorators
import pwndbg.events
import pwndbg.gdbutils
import pwndbg.memoize
from pwndbg.color import disable_colors
from pwndbg.color import message
funcs_list_str = ', '.join(message.notice('$' + f.name) for f in pwndbg.gdbutils.functions.functions)
hint_lines = (
'loaded %i commands. Type %s for a list.' % (len(pwndbg.commands.commands), message.notice('pwndbg [filter]')),
'created %s gdb functions (can be used with print/break)' % funcs_list_str
)
for line in hint_lines:
print(message.prompt('pwndbg: ') + message.system(line))
cur = None
def prompt_hook(*a):
global cu
|
r
pwndbg.decorators.first_prompt = True
ne
|
w = (gdb.selected_inferior(), gdb.selected_thread())
if cur != new:
pwndbg.events.after_reload(start=cur is None)
cur = new
if pwndbg.proc.alive and pwndbg.proc.thread_is_stopped:
prompt_hook_on_stop(*a)
@pwndbg.memoize.reset_on_stop
def prompt_hook_on_stop(*a):
pwndbg.commands.context.context()
@pwndbg.config.Trigger([message.config_prompt_color, disable_colors])
def set_prompt():
prompt = "pwndbg> "
if not disable_colors:
prompt = "\x02" + prompt + "\x01" # STX + prompt + SOH
prompt = message.prompt(prompt)
prompt = "\x01" + prompt + "\x02" # SOH + prompt + STX
gdb.execute('set prompt %s' % prompt)
if pwndbg.events.before_prompt_event.is_real_event:
gdb.prompt_hook = prompt_hook
else:
# Old GDBs doesn't have gdb.events.before_prompt, so we will emulate it using gdb.prompt_hook
def extended_prompt_hook(*a):
pwndbg.events.before_prompt_event.invoke_callbacks()
return prompt_hook(*a)
gdb.prompt_hook = extended_prompt_hook
|
Azure/azure-sdk-for-python
|
sdk/cognitiveservices/azure-cognitiveservices-search-visualsearch/azure/cognitiveservices/search/visualsearch/models/structured_value.py
|
Python
|
mit
| 3,167 | 0.000316 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .intangible import Intangible
class StructuredValue(Intangible):
"""StructuredValue.
You probably want to use the sub-classes and not this class directly. Known
sub-classes are: Point2D, NormalizedQuadrilateral
Variables are only populated by the server, and will be ignored when
sending a request.
All required parameters must be populated in order to send to Azure.
:param _type: Required. Constant filled by server.
:type _type: str
:ivar id: A String identifier.
:vartype id: str
:ivar read_link: The URL that returns this resource. To use the URL,
append query parameters as appropriate and include the
Ocp-Apim-Subscription-Key header.
:vartype read_link: str
|
:ivar web_search_url: The URL to Bing's search result for this item.
:vartype web_search_url: str
:ivar name: The name of the thing represented by this object.
:vartype name: str
:ivar url: The URL to get more information about the thing represented by
this object.
:vartyp
|
e url: str
:ivar image: An image of the item.
:vartype image:
~azure.cognitiveservices.search.visualsearch.models.ImageObject
:ivar description: A short description of the item.
:vartype description: str
:ivar alternate_name: An alias for the item.
:vartype alternate_name: str
:ivar bing_id: An ID that uniquely identifies this item.
:vartype bing_id: str
"""
_validation = {
'_type': {'required': True},
'id': {'readonly': True},
'read_link': {'readonly': True},
'web_search_url': {'readonly': True},
'name': {'readonly': True},
'url': {'readonly': True},
'image': {'readonly': True},
'description': {'readonly': True},
'alternate_name': {'readonly': True},
'bing_id': {'readonly': True},
}
_attribute_map = {
'_type': {'key': '_type', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'read_link': {'key': 'readLink', 'type': 'str'},
'web_search_url': {'key': 'webSearchUrl', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'image': {'key': 'image', 'type': 'ImageObject'},
'description': {'key': 'description', 'type': 'str'},
'alternate_name': {'key': 'alternateName', 'type': 'str'},
'bing_id': {'key': 'bingId', 'type': 'str'},
}
_subtype_map = {
'_type': {'Point2D': 'Point2D', 'NormalizedQuadrilateral': 'NormalizedQuadrilateral'}
}
def __init__(self, **kwargs):
super(StructuredValue, self).__init__(**kwargs)
self._type = 'StructuredValue'
|
mschuurman/FMSpy
|
nomad/initconds/explicit.py
|
Python
|
lgpl-3.0
| 1,004 | 0.003984 |
"""
Sample a specific geometry or set of geometries.
"""
import numpy as np
import nomad.core.glbl as glbl
import nomad.core.trajectory as trajectory
import nomad.core.log as log
def set_initial_coords(wfn):
"""Takes initial position and momentum from geometry specified in input"""
coords = glbl.properties['init_coords']
ndim = coords.shape[-1]
log.print_message('string',[' Initial coordinates taken from input file(s).\n'])
for coord in coords:
itraj = trajectory
|
.Trajectory(glbl.properties['n_states'], ndim,
|
width=glbl.properties['crd_widths'],
mass=glbl.properties['crd_masses'],
parent=0, kecoef=glbl.modules['integrals'].kecoef)
# set position and momentum
itraj.update_x(np.array(coord[0]))
itraj.update_p(np.array(coord[1]))
# add a single trajectory specified by geometry.dat
wfn.add_trajectory(itraj)
|
gwind/YWeb
|
yweb/yweb/utils/i18n.py
|
Python
|
mit
| 142 | 0 |
# coding: utf-8
def
|
ugettext(message):
'''返回原字符串
为了使用 _('') 方式标记字符串
'''
ret
|
urn message
|
mgraffg/simplegp
|
SimpleGP/tests/test_gpmae.py
|
Python
|
apache-2.0
| 1,069 | 0.000935 |
# Copyright 2013 Mario Graff Guerrero
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the Lice
|
nse.
from SimpleGP import GPMAE
import numpy as np
def test_gpmae():
x = np.linspace(-10, 10, 100)
pol = np.array([0.2, -0.3, 0.2])
X = np.vstack((x**2, x, np.ones(x.shape[0])))
y = (X.T * pol).sum(axis=1)
x = x[:, np.newaxis]
gp = GPMAE.init_cl(verbose=True,
ge
|
nerations=30, seed=0,
max_length=1000).train(x, y)
gp.run()
fit = gp.fitness(gp.get_best())
print fit
assert fit >= -0.7906
|
phatblat/AbletonLiveMIDIRemoteScripts
|
Push2/session_recording.py
|
Python
|
mit
| 842 | 0.014252 |
# Source Generated with Decompyle
|
++
# File: session_recording.pyc (Python 2.5)
from __future__ import absolute_import
from pushbase.session_recording_component import FixedLengthSessionRecordingComponent
class SessionRecordingComponent(FixedLengthSessionRecordingCom
|
ponent):
def __init__(self, *a, **k):
super(SessionRecordingComponent, self).__init__(*a, **a)
self.set_trigger_recording_on_release(not (self._record_button.is_pressed))
def set_trigger_recording_on_release(self, trigger_recording):
self._should_trigger_recording = trigger_recording
def _on_record_button_pressed(self):
pass
def _on_record_button_released(self):
if self._should_trigger_recording:
self._trigger_recording()
self._should_trigger_recording = True
|
valsaven/md5hash
|
md5hash/__init__.py
|
Python
|
mit
| 103 | 0 |
__a
|
ll__ = ["md5", "size", "calculate", "scan"]
from md5hash.md5hash import md5, size, calcu
|
late, scan
|
sathnaga/avocado-vt
|
virttest/utils_libvirtd.py
|
Python
|
gpl-2.0
| 15,212 | 0.000197 |
"""
Module to control libvirtd service.
"""
import re
import logging
import aexpect
from avocado.utils import path
from avocado.utils import process
from avocado.utils import wait
from virttest import libvirt_version
from virttest import utils_split_daemons
from . import remote as remote_old
from . import utils_misc
from .staging import service
from .utils_gdb import GDB
try:
path.find_command("libvirtd")
LIBVIRTD = "libvirtd"
except path.CmdNotFoundError:
LIBVIRTD = None
class Libvirtd(object):
"""
Class to manage libvirtd service on host or guest.
"""
def __init__(self, service_name=None, session=None):
"""
Initialize an service object for libvirtd.
|
:params service_name: Service name such as virtqemud or libvirtd.
If service_name is None, all sub daemons will be operated when
modular daemon environment is enabled.
|
Otherwise,if service_name is
a single string, only the given daemon/service will be operated.
:params session: An session to guest or remote host.
"""
self.session = session
if self.session:
self.remote_runner = remote_old.RemoteRunner(session=self.session)
runner = self.remote_runner.run
else:
runner = process.run
self.daemons = []
self.service_list = []
if LIBVIRTD is None:
logging.warning("Libvirtd service is not available in host, "
"utils_libvirtd module will not function normally")
self.service_name = "libvirtd" if not service_name else service_name
if libvirt_version.version_compare(5, 6, 0, self.session):
if utils_split_daemons.is_modular_daemon(session=self.session):
if self.service_name in ["libvirtd", "libvirtd.service"]:
self.service_list = ['virtqemud', 'virtproxyd',
'virtnetworkd', 'virtinterfaced',
'virtnodedevd', 'virtsecretd',
'virtstoraged', 'virtnwfilterd']
elif self.service_name == "libvirtd.socket":
self.service_name = "virtqemud.socket"
elif self.service_name in ["libvirtd-tcp.socket", "libvirtd-tls.socket"]:
self.service_name = re.sub("libvirtd", "virtproxyd",
self.service_name)
else:
self.service_name = re.sub("^virt.*d", "libvirtd",
self.service_name)
else:
self.service_name = "libvirtd"
if not self.service_list:
self.service_list = [self.service_name]
for serv in self.service_list:
self.daemons.append(service.Factory.create_service(serv, run=runner))
def _wait_for_start(self, timeout=60):
"""
Wait n seconds for libvirt to start. Default is 10 seconds.
"""
def _check_start():
virsh_cmd = "virsh list"
try:
if self.session:
self.session.cmd(virsh_cmd, timeout=2)
else:
process.run(virsh_cmd, timeout=2)
return True
except Exception:
return False
return utils_misc.wait_for(_check_start, timeout=timeout)
def start(self, reset_failed=True):
result = []
for daem_item in self.daemons:
if reset_failed:
daem_item.reset_failed()
if not daem_item.start():
return False
result.append(self._wait_for_start())
return all(result)
def stop(self):
result = []
for daem_item in self.daemons:
result.append(daem_item.stop())
return all(result)
def restart(self, reset_failed=True):
result = []
for daem_item in self.daemons:
if reset_failed:
daem_item.reset_failed()
if not daem_item.restart():
return False
result.append(self._wait_for_start())
return all(result)
def is_running(self):
result = []
for daem_item in self.daemons:
result.append(daem_item.status())
return all(result)
class DaemonSocket(object):
"""
Class to manage libvirt/virtproxy tcp/tls socket on host or guest.
"""
def __init__(self, daemon_name, session=None):
"""
Initialize an service object for virt daemons.
:param daemon_name: daemon name such as virtproxyd-tls.socket,
libvirtd-tcp.socket,etc,.
:param session: An session to guest or remote host.
"""
self.session = session
if self.session:
self.remote_runner = remote_old.RemoteRunner(session=self.session)
self.runner = self.remote_runner.run
else:
self.runner = process.run
self.daemon_name = daemon_name
supported_daemon = ["libvirtd-tcp.socket", "libvirtd-tls.socket",
"virtproxyd-tls.socket", "virtproxyd-tcp.socket"]
if self.daemon_name not in supported_daemon:
raise ValueError("Invalid daemon: %s" % self.daemon_name)
self.daemon_service_inst = Libvirtd("virtproxyd", session=self.session)
self.daemon_inst = Libvirtd(self.daemon_name, session=self.session)
self.daemon_socket = Libvirtd("virtproxyd.socket", session=self.session)
def stop(self):
self.daemon_socket.stop()
self.daemon_service_inst.stop()
self.daemon_inst.stop()
self.runner("systemctl daemon-reload")
self.daemon_socket.start()
def start(self):
self.daemon_socket.stop()
self.daemon_service_inst.stop()
self.runner("systemctl daemon-reload")
self.daemon_inst.start()
self.daemon_service_inst.start()
def restart(self, reset_failed=True):
self.daemon_socket.stop()
self.daemon_service_inst.stop()
self.runner("systemctl daemon-reload")
self.daemon_inst.restart()
self.daemon_service_inst.start()
self.daemon_inst._wait_for_start()
class LibvirtdSession(object):
"""
Interaction daemon session by directly call the command.
With gdb debugging feature can be optionally started.
It is recommended to use the service in the modular daemons for
initialization, because Libvirtd() class will switch to the
corresponding service according to the environment,
eg. If the value of "service_name" is "virtqemud",
it will take "virtqemud" if the modular daemon is enabled
and "libvirtd" if it's disabled.
"""
def __init__(self, gdb=False,
logging_handler=None,
logging_params=(),
logging_pattern=r'.*',
service_name=None):
"""
:param gdb: Whether call the session with gdb debugging support
:param logging_handler: Callback function to handle logging
:param logging_pattern: Regex for filtering specific log lines
:param service_name: Service name such as virtqemud or libvirtd
"""
self.gdb = None
self.tail = None
self.running = False
self.pid = None
self.service_name = service_name
self.bundle = {"stop-info": None}
# Get an executable program to debug by GDB
self.service_exec = Libvirtd(
service_name=self.service_name).service_list[0]
self.libvirtd_service = Libvirtd(service_name=self.service_exec)
self.was_running = self.libvirtd_service.is_running()
if self.was_running:
logging.debug('Stopping %s service', self.service_exec)
self.libvirtd_service.stop()
self.logging_handler = logging_handler
self.logging_params = logging_params
self.logging_pattern = logging_pattern
if gdb:
self.gdb = GDB(self.service_exec)
self.gdb.set_callback('stop', self._stop_callback
|
xzturn/caffe2
|
caffe2/python/rnn/__init__.py
|
Python
|
apache-2.0
| 821 | 0.001218 |
# Copyright (c) 2016-present, Facebook, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##############################################################################
from __future__ import
|
absolute_import
from __f
|
uture__ import division
from __future__ import print_function
from __future__ import unicode_literals
|
Brutus5000/BiReUS
|
bireus/client/patch_tasks/base.py
|
Python
|
mit
| 3,803 | 0.003681 |
# coding=utf-8
import abc
import logging
import tempfile
from bireus.client.download_service import AbstractDownloadService
from bireus.client.notific
|
ation_service import NotificationService
from bireus.shared import *
from bireus.shared.diff_head import DiffHead
from bireus.shared.diff_item import DiffItem
from bireus.shared.repository import ProtocolException
logger = logging.getLogger(__name__)
class PatchTask(abc.ABC):
    """Base class for applying one downloaded patch archive to a local repo.

    Each concrete subclass implements one patch-protocol version.  Subclasses
    are discovered via ``__subclasses__()`` and instantiated through the
    factory returned by :meth:`get_factory`.
    """
    # Lazily built registry: protocol version -> subclass ``create`` factory.
    _patch_tasks = None
    def __init__(self, notification_service: NotificationService, download_service: AbstractDownloadService,
                 repository_url: str, repo_path: Path, patch_file: Path):
        self._notification_service = notification_service
        self._download_service = download_service
        self._url = repository_url
        self._repo_path = repo_path
        self._patch_file = patch_file
        self._target_version = None  # set by run() once the patch header is read
    def run(self) -> None:
        """Unpack the patch, verify its protocol version, and apply it.

        Raises a generic ``Exception`` when the archive's protocol does not
        match :meth:`get_version`.
        """
        # unpack the patch into a temp folder
        temp_root = self._repo_path.joinpath(".bireus").joinpath("__temp__")
        temp_root.mkdir(parents=True, exist_ok=True)
        tempdir = tempfile.TemporaryDirectory(dir=str(temp_root))
        unpack_archive(self._patch_file, tempdir.name)
        diff_head = DiffHead.load_json_file(Path(tempdir.name).joinpath('.bireus'))
        # refuse to apply a patch written for a different protocol version
        if diff_head.protocol != self.get_version():
            logger.error(".bireus protocol version %s doesn't match patcher task version %s", diff_head.protocol,
                         self.get_version())
            self._notification_service.error(".bireus protocol version %s doesn't match patcher task version %s" % (
                diff_head.protocol, self.get_version()))
            raise Exception(".bireus protocol version %s doesn't match patcher task version %s"
                            % (diff_head.protocol, self.get_version()))
        self._target_version = diff_head.target_version
        # begin the patching recursion
        # note: a DiffHead's first and only item is the top folder itself
        self.patch(diff_head.items[0], self._repo_path, Path(tempdir.name), False)
        # Swap the patched temp tree into place through an intermediate
        # ".patched" folder; remove_folder in `finally` cleans it up even on
        # a partially failed move.
        intermediate_folder = Path(self._repo_path.parent.joinpath(self._repo_path.name + ".patched"))
        relative_temp_folder = Path(tempdir.name).relative_to(self._repo_path)
        move_file(self._repo_path, intermediate_folder)
        try:
            move_file(intermediate_folder.joinpath(relative_temp_folder), self._repo_path)
            self._repo_path.joinpath(".bireus").unlink()  # remove the patch descriptor
            move_file(intermediate_folder.joinpath(".bireus"), self._repo_path.joinpath(".bireus"))
        finally:
            remove_folder(intermediate_folder)
    @classmethod
    def get_factory(cls, protocol: int):
        """Return the ``create`` factory of the subclass handling *protocol*.

        Builds the subclass registry on first use; raises ProtocolException
        for unknown protocol versions.
        """
        if cls._patch_tasks is None:
            cls._patch_tasks = dict()
            for patch_task_version in PatchTask.__subclasses__():
                cls._patch_tasks[patch_task_version.get_version()] = patch_task_version.create
        if protocol in cls._patch_tasks:
            return cls._patch_tasks[protocol]
        else:
            raise ProtocolException("Protocol version `%s` is not supported in this client version", protocol)
    # NOTE(review): abc.abstractclassmethod is deprecated since Python 3.3;
    # the modern spelling is @classmethod stacked over @abc.abstractmethod.
    @abc.abstractclassmethod
    def get_version(cls) -> int:
        # protocol version implemented by the subclass
        pass
    @abc.abstractclassmethod
    def create(cls, notification_service: NotificationService, download_service: AbstractDownloadService, repository_url: str, repo_path: Path,
               patch_file: Path) -> 'PatchTask':
        """
        Abstract factory function for dynamic patcher initialization
        same params as in constructor!
        """
        pass
    @abc.abstractmethod
    def patch(self, diff: DiffItem, base_path: Path, patch_path: Path, inside_zip: bool = False) -> None:
        # Recursively apply one DiffItem; implemented per protocol version.
        pass
|
virtuald/pynsq
|
nsq/writer.py
|
Python
|
mit
| 7,774 | 0.002059 |
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import logging
import time
import functools
import random
import inspect
from ._compat import string_types
from .client import Client
from nsq import protocol
from . import async
logger = logging.getLogger(__name__)
class Writer(Client):
    """
    A high-level producer class built on top of the `Tornado IOLoop <http://tornadoweb.org>`_
    supporting async publishing (``PUB`` & ``MPUB`` & ``DPUB``) of messages to ``nsqd`` over the TCP protocol.

    Example publishing a message repeatedly using a Tornado IOLoop periodic callback::

        import nsq
        import tornado.ioloop
        import time

        def pub_message():
            writer.pub('test', time.strftime('%H:%M:%S'), finish_pub)

        def finish_pub(conn, data):
            print(data)

        writer = nsq.Writer(['127.0.0.1:4150'])
        tornado.ioloop.PeriodicCallback(pub_message, 1000).start()
        nsq.run()

    Example publshing a message from a Tornado HTTP request handler::

        import functools
        import tornado.httpserver
        import tornado.ioloop
        import tornado.options
        import tornado.web
        from nsq import Writer, Error
        from tornado.options import define, options

        class MainHandler(tornado.web.RequestHandler):
            @property
            def nsq(self):
                return self.application.nsq

            def get(self):
                topic = 'log'
                msg = 'Hello world'
                msg_cn = 'Hello 世界'
                self.nsq.pub(topic, msg)  # pub
                self.nsq.mpub(topic, [msg, msg_cn])  # mpub
                self.nsq.dpub(topic, 60, msg)  # dpub
                # customize callback
                callback = functools.partial(self.finish_pub, topic=topic, msg=msg)
                self.nsq.pub(topic, msg, callback=callback)
                self.write(msg)

            def finish_pub(self, conn, data, topic, msg):
                if isinstance(data, Error):
                    # try to re-pub message again if pub failed
                    self.nsq.pub(topic, msg)

        class Application(tornado.web.Application):
            def __init__(self, handlers, **settings):
                self.nsq = Writer(['127.0.0.1:4150'])
                super(Application, self).__init__(handlers, **settings)

    :param nsqd_tcp_addresses: a sequence with elements of the form 'address:port' corresponding
        to the ``nsqd`` instances this writer should publish to

    :param name: a string that is used for logging messages (defaults to first nsqd address)

    :param \*\*kwargs: passed to :class:`nsq.AsyncConn` initialization
    """
    def __init__(self, nsqd_tcp_addresses, reconnect_interval=15.0, name=None, **kwargs):
        super(Writer, self).__init__(**kwargs)
        # accept a single "host:port" string as a convenience
        if not isinstance(nsqd_tcp_addresses, (list, set, tuple)):
            assert isinstance(nsqd_tcp_addresses, string_types)
            nsqd_tcp_addresses = [nsqd_tcp_addresses]
        assert nsqd_tcp_addresses
        self.name = name or nsqd_tcp_addresses[0]
        self.nsqd_tcp_addresses = nsqd_tcp_addresses
        self.conns = {}  # conn.id -> AsyncConn for every live nsqd connection
        # Verify keyword arguments
        # NOTE(review): the sibling module is literally named "async", which is
        # a reserved keyword on Python 3.7+ -- this import style is py2-only.
        valid_args = inspect.getargspec(async.AsyncConn.__init__)[0]
        diff = set(kwargs) - set(valid_args)
        assert len(diff) == 0, 'Invalid keyword argument(s): %s' % list(diff)
        self.conn_kwargs = kwargs
        assert isinstance(reconnect_interval, (int, float))
        self.reconnect_interval = reconnect_interval
        # defer connecting until the IOLoop is running
        self.io_loop.add_callback(self._run)
    def _run(self):
        logger.info('starting writer...')
        self.connect()
    def pub(self, topic, msg, callback=None):
        """Publish a single message to *topic* (``PUB``)."""
        self._pub('pub', topic, msg, callback=callback)
    def mpub(self, topic, msg, callback=None):
        """Publish multiple messages to *topic* atomically (``MPUB``).

        A single string is wrapped into a one-element list.
        """
        if isinstance(msg, string_types):
            msg = [msg]
        assert isinstance(msg, (list, set, tuple))
        self._pub('mpub', topic, msg, callback=callback)
    def dpub(self, topic, delay_ms, msg, callback=None):
        """Publish a message to *topic* deferred by *delay_ms* (``DPUB``)."""
        self._pub('dpub', topic, msg, delay_ms, callback=callback)
    def _pub(self, command, topic, msg, delay_ms=None, callback=None):
        """Send one publish command on a randomly chosen live connection.

        The callback is queued on the connection and fired by
        _on_connection_response/_on_connection_error in FIFO order.
        """
        if not callback:
            callback = functools.partial(self._finish_pub, command=command,
                                         topic=topic, msg=msg)
        if not self.conns:
            callback(None, protocol.SendError('no connections'))
            return
        conn = random.choice(list(self.conns.values()))
        conn.callback_queue.append(callback)
        # protocol.pub / protocol.mpub / protocol.dpub build the wire frames
        cmd = getattr(protocol, command)
        if command == 'dpub':
            args = (topic, delay_ms, msg)
        else:
            args = (topic, msg)
        try:
            conn.send(cmd(*args))
        except Exception:
            logger.exception('[%s] failed to send %s' % (conn.id, command))
            conn.close()
    def _on_connection_error(self, conn, error, **kwargs):
        # drain ALL pending callbacks with the error -- the connection is gone
        super(Writer, self)._on_connection_error(conn, error, **kwargs)
        while conn.callback_queue:
            callback = conn.callback_queue.pop(0)
            callback(conn, error)
    def _on_connection_response(self, conn, data=None, **kwargs):
        # one response completes exactly one queued publish callback
        if conn.callback_queue:
            callback = conn.callback_queue.pop(0)
            callback(conn, data)
    def connect(self):
        """Open a connection to every configured nsqd address."""
        for addr in self.nsqd_tcp_addresses:
            host, port = addr.split(':')
            self.connect_to_nsqd(host, int(port))
    def connect_to_nsqd(self, host, port):
        """Connect to a single nsqd, wiring up all event handlers (no-op if
        a connection with the same id already exists)."""
        assert isinstance(host, string_types)
        assert isinstance(port, int)
        conn = async.AsyncConn(host, port, **self.conn_kwargs)
        conn.on('identify', self._on_connection_identify)
        conn.on('identify_response', self._on_connection_identify_response)
        conn.on('auth', self._on_connection_auth)
        conn.on('auth_response', self._on_connection_auth_response)
        conn.on('error', self._on_connection_error)
        conn.on('response', self._on_connection_response)
        conn.on('close', self._on_connection_close)
        conn.on('ready', self._on_connection_ready)
        conn.on('heartbeat', self.heartbeat)
        if conn.id in self.conns:
            return
        logger.info('[%s] connecting to nsqd', conn.id)
        conn.connect()
        conn.callback_queue = []
    def _on_connection_ready(self, conn, **kwargs):
        # re-check to make sure another connection didn't beat this one
        if conn.id in self.conns:
            logger.warning(
                '[%s] connected but another matching connection already exists', conn.id)
            conn.close()
            return
        self.conns[conn.id] = conn
    def _on_connection_close(self, conn, **kwargs):
        """Fail any queued callbacks and schedule a reconnect attempt."""
        if conn.id in self.conns:
            del self.conns[conn.id]
        for callback in conn.callback_queue:
            try:
                callback(conn, protocol.ConnectionClosedError())
            except Exception:
                logger.exception('[%s] uncaught exception in callback', conn.id)
        logger.warning('[%s] connection closed', conn.id)
        logger.info('[%s] attempting to reconnect in %0.2fs', conn.id, self.reconnect_interval)
        reconnect_callback = functools.partial(self.connect_to_nsqd,
                                               host=conn.host, port=conn.port)
        self.io_loop.add_timeout(time.time() + self.reconnect_interval, reconnect_callback)
    def _finish_pub(self, conn, data, command, topic, msg):
        # default publish callback: just log failures
        if isinstance(data, protocol.Error):
            logger.error('[%s] failed to %s (%s, %s), data is %s',
                         conn.id if conn else 'NA', command, topic, msg, data)
|
pegasus-isi/pegasus
|
packages/pegasus-python/src/Pegasus/init-old.py
|
Python
|
apache-2.0
| 14,973 | 0.001069 |
import errno
import os
import pwd
import shutil
import sys
from jinja2 import Environment, FileSystemLoader
class TutorialEnv:
    """(display label, setup key) pairs for the supported tutorial environments.

    The setup key is what optionlist() returns and what setup_tutorial()
    branches on.
    """
    LOCAL_MACHINE = ("Local Machine Condor Pool", "submit-host")
    USC_HPCC_CLUSTER = ("USC HPCC Cluster", "usc-hpcc")
    OSG_FROM_ISI = ("OSG from ISI submit node", "osg")
    XSEDE_BOSCO = ("XSEDE, with Bosco", "xsede-bosco")
    BLUEWATERS_GLITE = ("Bluewaters, with Glite", "bw-glite")
    TACC_WRANGLER = ("TACC Wrangler with Glite", "wrangler-glite")
    OLCF_TITAN = ("OLCF TITAN with Glite", "titan-glite")
    OLCF_SUMMIT_KUBERNETES_BOSCO = (
        "OLCF Summit from Kubernetes using BOSCO",
        "summit-kub-bosco",
    )
class TutorialExample:
    """(display label, example key) pairs for the selectable tutorial workflows."""
    PROCESS = ("Process", "process")
    PIPELINE = ("Pipeline", "pipeline")
    SPLIT = ("Split", "split")
    MERGE = ("Merge", "merge")
    EPA = ("EPA (requires R)", "r-epa")
    DIAMOND = ("Diamond", "diamond")
    CONTAINER = ("Population Modeling using Containers", "population")
    MPI = ("MPI Hello World", "mpi-hw")
def choice(question, options, default):
    """Prompt until the user picks one of *options*; empty input -> *default*."""
    prompt = "{} ({}) [{}]: ".format(question, "/".join(options), default)
    while True:
        sys.stdout.write(prompt)
        reply = sys.stdin.readline().strip()
        if not reply:
            return default
        if reply in options:
            return reply
def yesno(question, default="y"):
    """Prompt for a yes/no answer; returns True for 'y', False for 'n'.

    Empty input falls back to *default*; anything else re-prompts.
    """
    prompt = "{} (y/n) [{}]: ".format(question, default)
    while True:
        sys.stdout.write(prompt)
        reply = sys.stdin.readline().strip().lower() or default
        if reply == "y":
            return True
        if reply == "n":
            return False
def query(question, default=None):
    """Prompt for a free-form answer; spaces are replaced with underscores.

    Empty input returns *default* when one is given, otherwise re-prompts.
    """
    if default:
        prompt = "{} [{}]: ".format(question, default)
    else:
        prompt = "%s: " % question
    while True:
        sys.stdout.write(prompt)
        reply = sys.stdin.readline().strip().replace(" ", "_")
        if reply:
            return reply
        if default:
            return default
def optionlist(question, options, default=0):
    """Show a numbered menu of (label, value) pairs and return the chosen value.

    Empty input selects *default* (a zero-based index); invalid input
    re-prompts.
    """
    for number, option in enumerate(options, start=1):
        print("%d: %s" % (number, option[0]))
    prompt = "%s (1-%d) [%d]: " % (question, len(options), default + 1)
    while True:
        sys.stdout.write(prompt)
        reply = sys.stdin.readline().strip()
        if not reply:
            return options[default][1]
        try:
            picked = int(reply)
        except Exception:
            continue
        if 1 <= picked <= len(options):
            return options[picked - 1][1]
class Workflow:
def __init__(self, workflowdir, sharedir):
self.jinja = Environment(loader=FileSystemLoader(sharedir), trim_blocks=True)
self.name = os.path.basename(workflowdir)
self.workflowdir = workflowdir
self.sharedir = sharedir
self.properties = {}
self.home = os.environ["HOME"]
self.user = pwd.getpwuid(os.getuid())[0]
self.tutorial = None
self.generate_tutorial = False
self.tutorial_setup = None
self.compute_queue = "default"
self.project = "MYPROJ123"
sysname, _, _, _, machine = os.uname()
if sysname == "Darwin":
self.os = "MACOSX"
else:
# Probably Linux
self.os = sysname.upper()
self.arch = machine
def copy_template(self, template, dest, mode=0o644):
"Copy template to dest in workflowdir with mode"
path = os.path.join(self.workflowdir, dest)
t = self.jinja.get_template(template)
t.stream(**self.__dict__).dump(path)
os.chmod(path, mode)
def copy_dir(self, src, dest):
# self.mkdir(dest)
if not src.startswith("/"):
src = os.path.join(self.sharedir, src)
try:
dest = os.path.join(self.workflowdir, dest)
shutil.copytree(src, dest)
except OSError as exc: # python >2.5
if exc.errno == errno.ENOTDIR:
shutil.copy(src, dest)
else:
raise
def mkdir(self, path):
"Make relative directory in workflowdir"
path = os.path.join(self.workflowdir, path)
if not os.path.exists(path):
os.makedirs(path)
def configure(self):
# The tutorial is a special case
if yesno("Do you want to generate a tutorial workflow?", "n"):
self.config = "tutorial"
self.daxgen = "tutorial"
self.generate_tutorial = True
# determine the environment to setup tutorial for
self.tutorial_setup = optionlist(
"What environment is tutorial to be setup for?",
[
TutorialEnv.LOCAL_MACHINE,
TutorialEnv.USC_HPCC_CLUSTER,
TutorialEnv.OSG_FROM_ISI,
TutorialEnv.XSEDE_BOSCO,
TutorialEnv.BLUEWATERS_GLITE,
TutorialEnv.TACC_WRANGLER,
TutorialEnv.OLCF_TITAN,
TutorialEnv.OLCF_SUMMIT_KUBERNETES_BOSCO,
],
)
# figure out what example options to provide
examples = [
TutorialExample.PROCESS,
TutorialExample.PIPELINE,
TutorialExample.SPLIT,
TutorialExample.MERGE,
TutorialExample.EPA,
TutorialExample.CONTAINER,
]
if self.tutorial_setup != "osg":
examples.append(TutorialExample.DIAMOND)
if self.tutorial_setup in [
"bw-glite",
"wrangler-glite",
"titan-glite",
"summit-kub-bosco",
]:
examples.append(TutorialExample.MPI)
self.project = query(
"What project your jobs should run under. For example on TACC there are like : TG-DDM160003 ?"
)
self.tutorial = optionlist("What tutorial workflow do you want?", examples)
self.setup_tutorial()
return
# Determine which DAX generator API to use
self.daxgen = choice(
"What DAX generator API do you want to use?",
["python", "perl", "java", "r"],
"python",
)
# Determine what kind of site catalog we need to generate
self.config = optionlist(
"What does your computing infrastructure look like?",
[
("Local Machine Condor Pool", "condorpool"),
("Remote Cluster using Globus GRAM", "globus"),
("Remote Cluster using CREAMCE", "creamce"),
("Local PBS Cluster with Glite", "glite"),
("Remote PBS Cluster with BOSCO and SSH", "bosco"),
],
)
# Find out some information about the site
self.sitename = query("What do you want to call your compute site?", "compute")
self.os = choice(
"What OS does your compute site have?", ["LINUX", "MACOSX"], self.os
)
self.arch = choice(
"What architecture does your compute site have?",
["x86_64", "x86"],
self.arch,
)
def setup_tutorial(self):
"""
Set up tutorial for pre-defined computing environments
:return:
"""
if self.tutorial_setup is None:
self.tutorial_setup = "submit-host"
if self.tutorial_setup == "submit-host":
self.sitename = "condorpool"
elif self.tutorial_setup == "usc-hpcc":
self.sitename = "usc-hpcc"
self.config = "glite"
self.compute_queue = "quick"
# for running the whole workflow as mpi job
self.properties["pegasus.job.aggregator"] = "mpiexec"
elif self.tutorial_setup == "osg":
|
grnet/mupy
|
muparse/migrations/0004_auto__chg_field_graph_slug__chg_field_graph_name.py
|
Python
|
gpl-3.0
| 4,828 | 0.007457 |
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration for the muparse app.

    Converts Graph.slug to an indexed SlugField(max_length=128) and
    Graph.name to an un-indexed CharField(max_length=255); backwards()
    restores the previous field types and indexes.
    """
    def forwards(self, orm):
        # Changing field 'Graph.slug'
        db.alter_column('muparse_graph', 'slug', self.gf('django.db.models.fields.SlugField')(max_length=128, null=True))
        # Adding index on 'Graph', fields ['slug']
        db.create_index('muparse_graph', ['slug'])
        # Changing field 'Graph.name'
        db.alter_column('muparse_graph', 'name', self.gf('django.db.models.fields.CharField')(max_length=255))
        # Removing index on 'Graph', fields ['name']
        db.delete_index('muparse_graph', ['name'])
    def backwards(self, orm):
        # Adding index on 'Graph', fields ['name']
        db.create_index('muparse_graph', ['name'])
        # Removing index on 'Graph', fields ['slug']
        db.delete_index('muparse_graph', ['slug'])
        # Changing field 'Graph.slug'
        db.alter_column('muparse_graph', 'slug', self.gf('django.db.models.fields.CharField')(max_length=128, null=True))
        # Changing field 'Graph.name'
        db.alter_column('muparse_graph', 'name', self.gf('django.db.models.fields.SlugField')(max_length=255))
    # Frozen ORM snapshot used by South to reconstruct model state at this
    # point in migration history -- auto-generated, do not edit by hand.
    models = {
        'muparse.graph': {
            'Meta': {'ordering': "['name']", 'object_name': 'Graph'},
            'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['muparse.GraphCategory']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'slug': ('django.db.models.fields.SlugField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
            'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        },
        'muparse.graphcategory': {
            'Meta': {'ordering': "['name']", 'object_name': 'GraphCategory'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.SlugField', [], {'max_length': '255'}),
            'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        },
        'muparse.node': {
            'Meta': {'ordering': "['name']", 'object_name': 'Node'},
            'graphs': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['muparse.Graph']", 'null': 'True', 'through': "orm['muparse.NodeGraphs']", 'blank': 'True'}),
            'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['muparse.NodeGroup']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.SlugField', [], {'max_length': '255'}),
            'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'url': ('django.db.models.fields.CharField', [], {'max_length': '512'})
        },
        'muparse.nodegraphs': {
            'Meta': {'object_name': 'NodeGraphs'},
            'baseurl': ('django.db.models.fields.CharField', [], {'max_length': '512'}),
            'graph': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['muparse.Graph']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'node': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['muparse.Node']"}),
            'pageurl': ('django.db.models.fields.CharField', [], {'max_length': '512'}),
            'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        },
        'muparse.nodegroup': {
            'Meta': {'ordering': "['name']", 'object_name': 'NodeGroup'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'url': ('django.db.models.fields.CharField', [], {'max_length': '512'})
        },
        'muparse.savedsearch': {
            'Meta': {'object_name': 'SavedSearch'},
            'description': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'display_type': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
            'graphs': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['muparse.NodeGraphs']", 'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
        }
    }
    complete_apps = ['muparse']
|
scripnichenko/glance
|
glance/db/sqlalchemy/models_artifacts.py
|
Python
|
apache-2.0
| 12,959 | 0 |
# Copyright (c) 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from oslo_db.sqlalchemy import models
from oslo_utils import timeutils
from sqlalchemy import BigInteger
from sqlalchemy import Boolean
from sqlalchemy import Column
from sqlalchemy import DateTime
from sqlalchemy.ext import declarative
from sqlalchemy import ForeignKey
from sqlalchemy import Index
from sqlalchemy import Integer
from sqlalchemy import Numeric
from sqlalchemy.orm import backref
from sqlalchemy.orm import composite
from sqlalchemy.orm import relationship
from sqlalchemy import String
from sqlalchemy import Text
import glance.artifacts as ga
from glance.common import semver_db
from glance import i18n
from oslo_log import log as os_logging
BASE = declarative.declarative_base()
LOG = os_logging.getLogger(__name__)
_LW = i18n._LW
class ArtifactBase(models.ModelBase, models.TimestampMixin):
    """Base class for Artifact Models.

    Provides timestamp columns, a default-session save(), and dict-like
    access over the instance's attributes.
    """
    __table_args__ = {'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8'}
    __table_initialized__ = False
    # attributes that callers may not set directly
    __protected_attributes__ = set([
        "created_at", "updated_at"])
    created_at = Column(DateTime, default=lambda: timeutils.utcnow(),
                        nullable=False)
    updated_at = Column(DateTime, default=lambda: timeutils.utcnow(),
                        nullable=False, onupdate=lambda: timeutils.utcnow())
    def save(self, session=None):
        """Persist the row, creating a session if none is supplied."""
        # imported lazily here -- presumably to avoid a circular import at
        # module load time; confirm before moving to the top of the file
        from glance.db.sqlalchemy import api as db_api
        super(ArtifactBase, self).save(session or db_api.get_session())
    def keys(self):
        return self.__dict__.keys()
    def values(self):
        return self.__dict__.values()
    def items(self):
        return self.__dict__.items()
    def to_dict(self):
        """Return a plain dict mapping column name -> value for this row."""
        d = {}
        for c in self.__table__.columns:
            d[c.name] = self[c.name]
        return d
def _parse_property_type_value(prop, show_text_properties=True):
columns = [
'int_value',
'string_value',
'bool_value',
'numeric_value']
if show_text_properties:
columns.append('text_value')
for prop_type in columns:
if getattr(prop, prop_type) is not None:
return prop_type.rpartition('_')[0], getattr(prop, prop_type)
return None, None
class Artifact(BASE, ArtifactBase):
    """SQLAlchemy model for a glance artifact row.

    Both the artifact's own version and its type version are stored as
    three physical columns (prefix/suffix/meta) combined into a single
    semver composite attribute.
    """
    __tablename__ = 'artifacts'
    __table_args__ = (
        Index('ix_artifact_name_and_version', 'name', 'version_prefix',
              'version_suffix'),
        Index('ix_artifact_type', 'type_name', 'type_version_prefix',
              'type_version_suffix'),
        Index('ix_artifact_state', 'state'),
        Index('ix_artifact_owner', 'owner'),
        Index('ix_artifact_visibility', 'visibility'),
        {'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8'})
    __protected_attributes__ = ArtifactBase.__protected_attributes__.union(
        set(['published_at', 'deleted_at']))
    id = Column(String(36), primary_key=True,
                default=lambda: str(uuid.uuid4()))
    name = Column(String(255), nullable=False)
    type_name = Column(String(255), nullable=False)
    type_version_prefix = Column(BigInteger, nullable=False)
    type_version_suffix = Column(String(255))
    type_version_meta = Column(String(255))
    # composite semver value assembled from the three columns above
    type_version = composite(semver_db.DBVersion, type_version_prefix,
                             type_version_suffix, type_version_meta)
    version_prefix = Column(BigInteger, nullable=False)
    version_suffix = Column(String(255))
    version_meta = Column(String(255))
    version = composite(semver_db.DBVersion, version_prefix,
                        version_suffix, version_meta)
    description = Column(Text)
    visibility = Column(String(32), nullable=False)
    state = Column(String(32), nullable=False)
    owner = Column(String(255), nullable=False)
    published_at = Column(DateTime)
    deleted_at = Column(DateTime)
    def to_dict(self, show_level=ga.Showlevel.BASIC,
                show_text_properties=True):
        """Serialize the artifact, replacing the raw version columns with
        string versions and (unless show_level is NONE) attaching tags,
        properties and blob metadata.
        """
        d = super(Artifact, self).to_dict()
        # collapse the physical version columns into the string composites
        d.pop('type_version_prefix')
        d.pop('type_version_suffix')
        d.pop('type_version_meta')
        d.pop('version_prefix')
        d.pop('version_suffix')
        d.pop('version_meta')
        d['type_version'] = str(self.type_version)
        d['version'] = str(self.version)
        tags = []
        for tag in self.tags:
            tags.append(tag.value)
        d['tags'] = tags
        if show_level == ga.Showlevel.NONE:
            return d
        properties = {}
        # sort properties
        self.properties.sort(key=lambda elem: (elem.name, elem.position))
        for prop in self.properties:
            proptype, propvalue = _parse_property_type_value(
                prop, show_text_properties)
            if proptype is None:
                continue
            # a non-None position marks one element of a list-valued property
            if prop.position is not None:
                # make array
                for p in properties.keys():
                    if p == prop.name:
                        # add value to array
                        properties[p]['value'].append(dict(type=proptype,
                                                           value=propvalue))
                        break
                else:
                    # create new array
                    p = dict(type='array',
                             value=[])
                    p['value'].append(dict(type=proptype,
                                           value=propvalue))
                    properties[prop.name] = p
            else:
                # make scalar
                properties[prop.name] = dict(type=proptype,
                                             value=propvalue)
        d['properties'] = properties
        blobs = {}
        # sort blobs
        self.blobs.sort(key=lambda elem: elem.position)
        for blob in self.blobs:
            locations = []
            # sort locations
            blob.locations.sort(key=lambda elem: elem.position)
            for loc in blob.locations:
                locations.append(dict(value=loc.value,
                                      status=loc.status))
            if blob.name in blobs:
                blobs[blob.name].append(dict(size=blob.size,
                                             checksum=blob.checksum,
                                             locations=locations,
                                             item_key=blob.item_key))
            else:
                blobs[blob.name] = []
                blobs[blob.name].append(dict(size=blob.size,
                                             checksum=blob.checksum,
                                             locations=locations,
                                             item_key=blob.item_key))
        d['blobs'] = blobs
        return d
class ArtifactDependency(BASE, ArtifactBase):
__tablename__ = 'artifact_dependencies'
__table_args__ = (Index('ix_artifact_dependencies_source_id',
'artifact_source'),
Index('ix_artifact_dependencies_origin_id',
'artifact_origin'),
Index('ix_artifact_dependencies_dest_id',
'artifact_dest'),
Index('ix_artifact_dependencies_direct_dependencies',
'artifact_source', 'is_direct'),
{'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8'})
id = Column(String(36), primary_key=True, nullable=False,
default=lambda: str(uuid.uuid4()))
artifact_source = Column(String(36), ForeignKey('artifacts.id'),
nullable=False
|
bardia-heydarinejad/Graph
|
home/views.py
|
Python
|
mit
| 852 | 0.038732 |
from django.shortcuts import render_to_response,RequestContext
from django.http import HttpResponse,HttpResponseRedirect
from urllib import urlencode
from time import sleep
from vertex.models import *
from django.core.exceptions import ObjectDoesNotExist
from login.views import authDetail
def home(request):
    """Render the dashboard for an authenticated vertex, or bounce to login.

    Fix: the original called ``authDetail(request)`` up to three times per
    request; the result is now computed once and reused.

    On failed auth the view sleeps 3 seconds before redirecting to
    ``/login/`` with the auth message as ``server_message`` (presumably a
    crude retry throttle -- confirm intent).
    """
    # auth[0] is the success flag; auth[1] is the error message on failure
    # and the client object on success
    auth = authDetail(request)
    if not auth[0]:
        sleep(3)
        query_str = urlencode({'server_message': auth[1]})
        return HttpResponseRedirect('/login/?' + query_str)
    client = auth[1]
    vertex = Vertex.objects.get(email=client.email)
    # five most recently forwarded flows for the dashboard
    flows = vertex.flow_set.order_by('-last_forward_date')[:5]
    return render_to_response(
        'home.html',
        {"USER_EMAIL": client.email, "login": True, 'VERTEX_DETAIL': client,
         'flows': flows, 'VERTEX_ID': client.id},
        context_instance=RequestContext(request))
# Create your views here.
|
sbergot/python
|
efront/test/testTarget.py
|
Python
|
bsd-3-clause
| 696 | 0.002874 |
import unittest
from efront import repo
from mock import Mock
class TestTarget(unittest.TestCase):
    """Checks the one-line string rendering of repo.Target."""
    def setUp(self):
        # a target that knows both root kinds; each test marks which are present
        self.target = repo.Target("myTarget")
        self.target.root_names = ["git", "svn"]
    def test_print_svn(self):
        self.target.add("svn")
        self.assertEqual(str(self.target), " svn myTarget")
    def test_print_git(self):
        self.target.add("git")
        self.assertEqual(str(self.target), "git myTarget")
    def test_print_both(self):
        self.target.add("git")
        self.target.add("svn")
        self.assertEqual(str(self.target), "git svn myTarget")
if __name__ == '__main__':
    unittest.main()
|
DarioGT/docker-carra
|
src/protoExt/views/protoActionAction.py
|
Python
|
mit
| 3,186 | 0.015066 |
# -*- coding: utf-8 -*-
from protoLib.getStuff import getDjangoModel
from protoExt.utils.utilsWeb import JsonError , doReturn
from . import validateRequest
import json
from django.contrib.admin.sites import site
from protoExt.utils.utilsBase import traceError
def protoExecuteAction(request):
    """Execute a named admin action over the client's selected records.

    Reads the action definition from POST, validates the selection against
    the action's ``selectionMode`` and dispatches to the matching handler.

    Fixes: the bare ``except:`` around model resolution is narrowed to
    ``except Exception`` (a bare except also swallows SystemExit /
    KeyboardInterrupt), and ``.__len__()`` calls are replaced with ``len()``.
    """
    cBase, message = validateRequest(request)
    if message:
        return None, message
    cBase.actionName = request.POST.get('actionName', '')
    cBase.selectedKeys = request.POST.get('selectedKeys', '')
    cBase.selectedKeys = json.loads(cBase.selectedKeys)
    cBase.parameters = request.POST.get('parameters', [])
    cBase.parameters = json.loads(cBase.parameters)
    cBase.actionDef = request.POST.get('actionDef', {})
    cBase.actionDef = json.loads(cBase.actionDef)
    # Validate the selected-record set against the action's selection mode
    selection_mode = cBase.actionDef.get('selectionMode', '')
    if selection_mode == 'optional':
        if len(cBase.selectedKeys) > 1:
            return JsonError('too many records selected')
    elif selection_mode != 'none' and len(cBase.selectedKeys) == 0:
        return JsonError('No record selected')
    # Resolve the model and its registered ModelAdmin
    try:
        cBase.model = getDjangoModel(cBase.viewEntity)
        cBase.modelAdmin = site._registry.get(cBase.model)
    except Exception:
        return JsonError('Model notFound')
    # Detail-mode actions carry master/detail key sets
    if selection_mode == 'details':
        cBase.detKeys = request.POST.get('detKeys', {})
        cBase.detKeys = json.loads(cBase.detKeys)
        return doAdminDetailAction(request, cBase)
    # elif cBase.actionDef.get('actionType', '') == 'wflow':
    #     return doWfAction( request, cBase )
    elif hasattr(cBase.modelAdmin, 'actions'):
        return doAdminAction(request, cBase)
    else:
        return JsonError('Action notFound')
def doAdminAction(request, cBase):
    """Run a classic Django admin action over the selected queryset.

    Resolves the action first from the admin site registry, then from the
    ModelAdmin's own ``actions`` list; returns a JSON error if not found.

    Fix: the bare ``except:`` around ``site.get_action`` is narrowed to
    ``except Exception`` so SystemExit/KeyboardInterrupt are not swallowed.
    """
    try:
        action = site.get_action(cBase.actionName)
        actionFound = True
    except Exception:
        action = None
        actionFound = False
    if not actionFound:
        # fall back to the ModelAdmin's locally declared actions
        for action in cBase.modelAdmin.actions:
            if action.__name__ == cBase.actionName:
                actionFound = True
                break
    if not actionFound:
        return JsonError('Action notFound')
    # queryset restricted to the client-side selection
    Qs = cBase.model.objects.select_related()
    Qs = Qs.filter(pk__in=cBase.selectedKeys)
    try:
        returnObj = action(cBase.modelAdmin, request, Qs, cBase.parameters)
        return doReturn(returnObj)
    except Exception as e:
        traceError()
        return JsonError(str(e))
def doAdminDetailAction(request, cBase):
    """Run a detail-mode admin action with the selected master/detail keys.

    Bug fix: the original for-loop had no ``else`` clause, so when no action
    name matched, ``action`` stayed bound to the *last* admin action (or was
    unbound when the list was empty, raising NameError) and the not-found
    guard never fired -- the wrong action could be executed.
    """
    action = next(
        (a for a in cBase.modelAdmin.actions if a.__name__ == cBase.actionName),
        None)
    if action is None:
        return JsonError('Action notFound')
    try:
        returnObj = action(cBase.modelAdmin, request, cBase.selectedKeys, cBase.detKeys, cBase.parameters)
        return doReturn(returnObj)
    except Exception as e:
        return JsonError(str(e))
#TODO: Wf from .prototypeWfActions import doWfAction
# ----------------------------------------
|
adrianogil/SublimeUnityIntel
|
unityparser/csharp/csharp_reference.py
|
Python
|
mit
| 147 | 0 |
class CSharpReference:
    """Location of a C# symbol reference: resolved object, file and line."""

    def __init__(self):
        # no referenced object resolved yet
        self.reference_object = None
        # -1 means "line unknown"
        self.line_in_file = -1
        self.file_name = ''
|
SohKai/ChronoLogger
|
web/flask/lib/python2.7/site-packages/openid/extensions/__init__.py
|
Python
|
mit
| 117 | 0 |
"""OpenID Extension modules."""
__al
|
l__ = ['ax', 'pape', 'sreg']
from openid.extension
|
s.draft import pape5 as pape
|
imbasimba/astroquery
|
astroquery/solarsystem/jpl/__init__.py
|
Python
|
bsd-3-clause
| 235 | 0 |
# Licensed under a 3-clause BS
|
D style license - see LICENSE.rst
"""
astroquery.solarsystem.jpl
--------------------------
a collection of data services provided by JPL
"""
from .sbdb import *
from .horizons import *
from
|
. import *
|
raybrshen/pattern_recognition
|
noise_detection/tools/mix_wav.py
|
Python
|
apache-2.0
| 864 | 0.005787 |
__author__ = 'ray'
import wave
import numpy as np
# Mix two WAV files sample-by-sample into one output file: the shorter
# signal is added onto the head of the longer one and the longer signal's
# tail is appended unchanged.  (Python 2 script -- note the print statement.)
wav_1_path = "origin.wav"
wav_2_path = "clap.wav"
wav_out_path = "mixed.wav"
wav_1 = wave.open(wav_1_path, 'rb')
wav_2 = wave.open(wav_2_path, 'rb')
wav_out = wave.open(wav_out_path, 'wb')
len_1 = wav_1.getnframes()
len_2 = wav_2.getnframes()
# output inherits the params (rate/width/channels) of the longer input
if len_1>len_2:
    wav_out.setparams(wav_1.getparams())
else:
    wav_out.setparams(wav_2.getparams())
# read both signals fully as 16-bit integer arrays
signal_1 = np.fromstring(wav_1.readframes(-1), 'Int16')
signal_2 = np.fromstring(wav_2.readframes(-1), 'Int16')
# NOTE(review): Int16 addition can wrap around on loud inputs (no clipping
# is applied) -- confirm this is acceptable for the use case.
if len_1>len_2:
    signal_out = np.append(signal_1[:len_2]+signal_2, signal_1[len_2:]).tostring()
elif len_2>len_1:
    signal_out = np.append(signal_1+signal_2[:len_1], signal_2[len_1:]).tostring()
else:
    signal_out = (signal_1+signal_2).tostring()
wav_out.writeframes(signal_out)
wav_1.close()
wav_2.close()
wav_out.close()
print 'done!'
|
zetaops/ulakbus
|
ulakbus/services/personel/hitap/hizmet_nufus_guncelle.py
|
Python
|
gpl-3.0
| 1,651 | 0.001821 |
# -*- coding: utf-8 -*-
# Copyright (C) 2015 ZetaOps Inc.
#
# This file is licensed under the GNU General Public License v3
# (GPLv3). See LICENSE.txt for details.
"""HITAP Nufus Guncelle
Hitap'a personelin Nufus bilgilerinin guncellenmesini yapar.
"""
from ulakbus.services.personel.hitap.hitap_service import ZatoHitapService
class HizmetNufusGuncelle(ZatoHitapService):
    """
    Civil-registry (Nufus) record update service for HITAP, derived from
    the HITAP base service class.
    """
    HAS_CHANNEL = True
    # Declarative description of the HITAP SOAP call consumed by the base class.
    service_dict = {
        'service_name': 'HizmetNufusUpdate',
        # HITAP SOAP field name -> local model field name.
        'fields': {
            'ad': 'ad',
            'cinsiyet': 'cinsiyet',
            'dogumTarihi': 'dogum_tarihi',
            'durum': 'durum',
            'emekliSicilNo': 'emekli_sicil_no',
            'ilkSoyad': 'ilk_soy_ad',
            'kurumSicili': 'kurum_sicil',
            'maluliyetKod': 'maluliyet_kod',
            'memuriyetBaslamaTarihi': 'memuriyet_baslama_tarihi',
            'sebep': 'sebep',
            'soyad': 'soyad',
            'tckn': 'tckn',
            'aciklama': 'aciklama',
            'yetkiSeviyesi': 'yetki_seviyesi',
            'kurumaBaslamaTarihi': 'kuruma_baslama_tarihi',
            'gorevTarihi6495': 'gorev_tarihi_6495',
            'emekliSicil6495': 'emekli_sicil_6495'
        },
        # Local fields that hold dates and need HITAP date conversion.
        'date_filter': ['dogum_tarihi', 'memuriyet_baslama_tarihi', 'kuruma_baslama_tarihi'],
        'required_fields': ['tckn', 'ad', 'soyad', 'dogumTarihi', 'cinsiyet', 'emekliSicilNo',
                            'memuriyetBaslamaTarihi', 'durum', 'kurumSicili', 'maluliyetKod',
                            'sebep', 'yetkiSeviyesi']
    }
|
bl4ckdu5t/registron
|
tests/interactive/test_pyqt4.py
|
Python
|
mit
| 1,490 | 0.00472 |
# -*- coding: utf-8 -*-
#-----------------------------------------------------------------------------
# Copyright (c) 2013, PyInstaller Development Team.
#
# Distributed under the terms of the GNU General Public License with exception
# for distributing bootloader.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
import sys
from PyQt4 import Qt
from PyQt4 import QtCore
from PyQt4 import QtGui
class MyDialog(QtGui.QDialog):
    """Minimal dialog used to smoke-test PyQt4 packaging by PyInstaller."""
    def __init__(self):
        super(MyDialog, self).__init__()
        # Non-ascii characters exercise unicode handling in the frozen build.
        self.label = Qt.QLabel(
            u"Press <ESC> to exit. Some non-ascii chars: řčšěíáŘ",
            self)
        self.setWindowTitle("Hello World from PyQt4")
        #self.resize(500, 300)
        self.show()
    def sizeHint(self):
        # Size the dialog to exactly fit the label.
        return self.label.sizeHint()
    def keyPressEvent(self, event):
        # Close the dialog on <ESC>.
        if event.key() == QtCore.Qt.Key_Escape:
            self.close()
def main():
    """Create the Qt application, print Qt diagnostics and show the dialog."""
    app = Qt.QApplication(sys.argv)
    # `unicode` here means this script targets Python 2.
    read_formats = ', '.join([unicode(format).lower() \
        for format in QtGui.QImageReader.supportedImageFormats()])
    print("Qt4 plugin paths: " + unicode(list(app.libraryPaths())))
    print("Qt4 image read support: " + read_formats)
    print('Qt4 Libraries path: ' + unicode(QtCore.QLibraryInfo.location(QtCore.QLibraryInfo.LibrariesPath)))
    ex = MyDialog()
    app.exec_()
if __name__ == "__main__":
main()
|
Python3Development/KodiAPI
|
api.py
|
Python
|
gpl-3.0
| 3,710 | 0 |
from functools import wraps
from flask import Flask, request, jsonify, g
import base64
import libs.db_connector as dbconn
import libs.db_query as dao
import libs.json_keys as jkey
import libs.json_builder as jparse
import config.api_config as apiconf
# region constants
DEBUG = True
LOCAL_NETWORK = "0.0.0.0"
V = apiconf.VERSION
# endregion
app = Flask(__name__)
# region getter
def db(autocommit=True):
    """Return the request-scoped DB connection, creating it on first use.

    Note: ``autocommit`` only takes effect on the call that actually opens
    the connection; later calls in the same request reuse it as-is.
    """
    if not hasattr(g, 'db'):
        g.db = dbconn.connect(autocommit)
    return g.db
# endregion
# region lifecycle
@app.teardown_request
def teardown_request(exception):
    """Close the per-request DB connection, if one was opened."""
    if hasattr(g, 'db'):
        dbconn.disconnect(g.db)
# endregion
# region decorator
def requires_authorization(f):
    """Decorator: require a base64-encoded device id in the ``authorization``
    header and check it against the configured whitelist; otherwise 401."""
    @wraps(f)
    def wrapper(*args, **kwargs):
        b64_device_auth_id = request.headers.get('authorization')
        if not b64_device_auth_id:
            return unauthorized()
        try:
            device_auth_id = base64.b64decode(b64_device_auth_id)
            if device_auth_id in apiconf.AUTHORIZED_DEVICE_IDS:
                return f(*args, **kwargs)
        except TypeError as e:
            # Malformed base64 -> fall through to unauthorized.
            # NOTE(review): TypeError is the Python 2 behaviour; Python 3
            # raises binascii.Error and returns bytes -- confirm target.
            pass
        return unauthorized()
    return wrapper
# endregion
# region routes
@app.route("/api/" + V + "/shows")
@requires_authorization
def get_all_shows():
search = request.args.get('search', '')
rows = dao.get_all_shows(db(), search)
return response(jparse.parse_shows(rows))
@app.route("/api/" + V + "/shows/<id_show>")
@requires_authorization
def get_show(id_show):
rows = dao.get_show_info(db(), id_show)
tv_json = jparse.parse_show_info(rows)
if tv_json[jkey.SEASON_COUNT] > 1:
rows = dao.get_show_seasons(db(), id_show)
jparse.add_parsed_seasons(tv_json, rows)
else:
rows = dao.get_show_episodes(db(), id_show)
jparse.add_parsed_episodes(tv_json, rows)
return response(tv_json)
@app.route("/api/" + V + "/seasons/<id_season>")
@requires_authorization
def get_season(id_season):
rows = dao.get_season(db(), id_season)
season_json = jparse.parse_season(rows)
e_rows = dao.get_season_episodes(db(), id_season)
jparse.add_parsed_episodes(season_json, e_rows)
return response(season_json)
@app.route("/api/" + V + "/episodes/<id_episode>")
@requires_authorization
def get_episode(id_episode):
rows = dao.get_episode(db(), id_episode)
return response(jparse.parse_episode(rows))
@app.route("/api/" + V + "/episodes/recent")
@requires_authorization
def get_recently_played():
|
offset = int(request.args.get('offset', 0))
amount = int(request.args.get('amount', 20))
rows = dao.get_recently_played(db(), offset, amount)
return response(jparse.parse_recently_played(rows))
@app.route("/api/" + V + "/shows/mark", methods=['POST'])
@requires_authorization
def mark_shows():
data = request.get_json()
success = dao.mark_shows(db(False), data)
return response({"success": success})
@app.route("/
|
api/" + V + "/seasons/mark", methods=['POST'])
@requires_authorization
def mark_seasons():
data = request.get_json()
success = dao.mark_seasons(db(False), data)
return response({"success": success})
@app.route("/api/" + V + "/episodes/mark", methods=['POST'])
@requires_authorization
def mark_episodes():
data = request.get_json()
success = dao.mark_episodes(db(False), data)
return response({"success": success})
# endregion
# region return type
def error(err='unknown error'):
    """HTTP 400 response with a JSON ``{"error": ...}`` body."""
    return jsonify(error=err), 400
def response(data):
    """HTTP 200 response serializing ``data`` as JSON."""
    return jsonify(data), 200
def unauthorized():
    """HTTP 401 plain-text response."""
    return 'You are not authorized to perform this action', 401
# endregion
if __name__ == "__main__":
app.run(debug=DEBUG, host=LOCAL_NETWORK)
|
prakashksinha/playground
|
bookmarks/models.py
|
Python
|
apache-2.0
| 1,100 | 0.012727 |
# import needed models
from django.db import models
from django.utils import timezone
from django.contrib.auth.models import User
# Create you
|
r models here.
# create user object
class Person(User):
    """Django auth user extended with internal tracking fields."""
    # Optional identifier in an internal/external system.
    internal_id = models.CharField(max_length=25, null=True, blank=True)
    # None = unknown, True/False = verification outcome.
    verified = models.NullBooleanField(default=False)
    # Unset until the account is approved.
    approval_date = models.DateTimeField(null=True, blank=True)
# create list object
class List(models.Model):
    """A named collection of bookmark links."""
    name = models.CharField('List Name', max_length=50)
    created_date = models.DateTimeField(auto_now_add=True)
    modified_date = models.DateTimeField(auto_now=True)
    links = models.ManyToManyField("Link")
    def __str__(self):
        """Display a list by its name."""
        return self.name
# create link object
class Link(models.Model):
    """A single bookmarked link with free-form tags."""
    name = models.CharField('Link Name', max_length=50)
    created_date = models.DateTimeField(auto_now_add=True)
    modified_date = models.DateTimeField(auto_now=True)
    # Free-form tag text; no structured tag model.
    tags = models.TextField(null=True, blank=True)
    def __str__(self):
        """Display a link by its name."""
        return self.name
|
mdmintz/SeleniumBase
|
seleniumbase/fixtures/words.py
|
Python
|
mit
| 4,708 | 0 |
# -*- coding: utf-8 -*-
''' Small Dictionary '''
class SD:
    """Small dictionary of UI keywords translated into ten languages.

    Each method maps a language name to the localized keyword and raises
    ``KeyError`` for an unsupported language (same contract as before).
    """

    def translate_in(language):
        translations = {
            "English": "in",
            "Chinese": "在",
            "Dutch": "in",
            "French": "dans",
            "Italian": "nel",
            "Japanese": "に",
            "Korean": "에",
            "Portuguese": "no",
            "Russian": "в",
            "Spanish": "en",
        }
        return translations[language]

    def translate_assert(language):
        translations = {
            "English": "ASSERT",
            "Chinese": "断言",
            "Dutch": "CONTROLEREN",
            "French": "VÉRIFIER",
            "Italian": "VERIFICARE",
            "Japanese": "検証",
            "Korean": "확인",
            "Portuguese": "VERIFICAR",
            "Russian": "ПОДТВЕРДИТЬ",
            "Spanish": "VERIFICAR",
        }
        return translations[language]

    def translate_assert_text(language):
        translations = {
            "English": "ASSERT TEXT",
            "Chinese": "断言文本",
            "Dutch": "CONTROLEREN TEKST",
            "French": "VÉRIFIER TEXTE",
            "Italian": "VERIFICARE TESTO",
            "Japanese": "テキストを確認する",
            "Korean": "텍스트 확인",
            "Portuguese": "VERIFICAR TEXTO",
            "Russian": "ПОДТВЕРДИТЬ ТЕКСТ",
            "Spanish": "VERIFICAR TEXTO",
        }
        return translations[language]

    def translate_assert_exact_text(language):
        translations = {
            "English": "ASSERT EXACT TEXT",
            "Chinese": "确切断言文本",
            "Dutch": "CONTROLEREN EXACTE TEKST",
            "French": "VÉRIFIER EXACTEMENT TEXTE",
            "Italian": "VERIFICARE TESTO ESATTO",
            "Japanese": "正確なテキストを確認する",
            "Korean": "정확한 텍스트를 확인하는",
            "Portuguese": "VERIFICAR TEXTO EXATO",
            "Russian": "ПОДТВЕРДИТЬ ТЕКСТ ТОЧНО",
            "Spanish": "VERIFICAR TEXTO EXACTO",
        }
        return translations[language]

    def translate_assert_link_text(language):
        translations = {
            "English": "ASSERT LINK TEXT",
            "Chinese": "断言链接文本",
            "Dutch": "CONTROLEREN LINKTEKST",
            "French": "VÉRIFIER TEXTE DU LIEN",
            "Italian": "VERIFICARE TESTO DEL COLLEGAMENTO",
            "Japanese": "リンクテキストを確認する",
            "Korean": "링크 텍스트 확인",
            "Portuguese": "VERIFICAR TEXTO DO LINK",
            "Russian": "ПОДТВЕРДИТЬ ССЫЛКУ",
            "Spanish": "VERIFICAR TEXTO DEL ENLACE",
        }
        return translations[language]

    def translate_assert_title(language):
        translations = {
            "English": "ASSERT TITLE",
            "Chinese": "断言标题",
            "Dutch": "CONTROLEREN TITEL",
            "French": "VÉRIFIER TITRE",
            "Italian": "VERIFICARE TITOLO",
            "Japanese": "タイトルを確認",
            "Korean": "제목 확인",
            "Portuguese": "VERIFICAR TÍTULO",
            "Russian": "ПОДТВЕРДИТЬ НАЗВАНИЕ",
            "Spanish": "VERIFICAR TÍTULO",
        }
        return translations[language]

    def translate_assert_no_404_errors(language):
        translations = {
            "English": "ASSERT NO 404 ERRORS",
            "Chinese": "检查断开的链接",
            "Dutch": "CONTROLEREN OP GEBROKEN LINKS",
            "French": "VÉRIFIER LES LIENS ROMPUS",
            "Italian": "VERIFICARE I COLLEGAMENTI",
            "Japanese": "リンク切れを確認する",
            "Korean": "끊어진 링크 확인",
            "Portuguese": "VERIFICAR SE HÁ LINKS QUEBRADOS",
            "Russian": "ПРОВЕРИТЬ ОШИБКИ 404",
            "Spanish": "VERIFICAR SI HAY ENLACES ROTOS",
        }
        return translations[language]

    def translate_assert_no_js_errors(language):
        translations = {
            "English": "ASSERT NO JS ERRORS",
            "Chinese": "检查JS错误",
            "Dutch": "CONTROLEREN OP JS FOUTEN",
            "French": "VÉRIFIER LES ERREURS JS",
            "Italian": "CONTROLLA ERRORI JS",
            "Japanese": "JSエラーを確認する",
            "Korean": "JS 오류 확인",
            "Portuguese": "VERIFICAR SE HÁ ERROS JS",
            "Russian": "ПРОВЕРИТЬ ОШИБКИ JS",
            "Spanish": "VERIFICAR SI HAY ERRORES JS",
        }
        return translations[language]
|
benoitc/couchdbkit
|
couchdbkit/schema/properties.py
|
Python
|
mit
| 35,449 | 0.003836 |
# -*- coding: utf-8 -
#
# This file is part of couchdbkit released under the MIT license.
# See the NOTICE for more information.
""" properties used by Document object """
import decimal
import datetime
import re
import time
try:
from collections import MutableSet, Iterable
def is_iterable(c):
return isinstance(c, Iterable)
support_setproperty = True
except ImportError:
support_setproperty = False
from couchdbkit.exceptions import BadValueError
__all__ = ['ALLOWED_PROPERTY_TYPES', 'Property', 'StringProperty',
'IntegerProperty', 'DecimalProperty', 'BooleanProperty',
'FloatProperty', 'DateTimeProperty', 'DateProperty',
'TimeProperty', 'DictProperty', 'StringDictProperty',
'ListProperty', 'StringListProperty',
'dict_to_json', 'list_to_json',
'value_to_json', 'MAP_TYPES_PROPERTIES', 'value_to_python',
'dict_to_python', 'list_to_python', 'convert_property',
'value_to_property', 'LazyDict', 'LazyList']
if support_setproperty:
__all__ += ['SetProperty', 'LazySet']
ALLOWED_PROPERTY_TYPES = set([
basestring,
str,
unicode,
bool,
int,
long,
float,
datetime.datetime,
datetime.date,
datetime.time,
decimal.Decimal,
dict,
list,
set,
type(None)
])
re_date = re.compile('^(\d{4})\D?(0[1-9]|1[0-2])\D?([12]\d|0[1-9]|3[01])$')
re_time = re.compile('^([01]\d|2[0-3])\D?([0-5]\d)\D?([0-5]\d)?\D?(\d{3})?$')
re_datetime = re.compile('^(\d{4})\D?(0[1-9]|1[0-2])\D?([12]\d|0[1-9]|3[01])(\D?([01]\d|2[0-3])\D?([0-5]\d)\D?([0-5]\d)?\D?(\d{3})?([zZ]|([\+-])([01]\d|2[0-3])\D?([0-5]\d)?)?)?$')
re_decimal = re.compile('^(\d+)\.(\d+)$')
class Property(object):
    """ Property base which all other properties
    inherit. Acts as a Python descriptor storing its JSON form in the
    owning document's ``_doc`` dict."""
    # Global declaration counter; preserves the order properties are declared.
    creation_counter = 0
    def __init__(self, verbose_name=None, name=None,
            default=None, required=False, validators=None,
            choices=None):
        """ Default constructor for a property.
        :param verbose_name: str, verbose name of field, could
        be use for description
        :param name: str, name of field
        :param default: default value
        :param required: True if field is required, default is False
        :param validators: list of callable or callable, field validators
        function that are executed when document is saved.
        :param choices: optional list/dict/tuple of allowed values
        """
        self.verbose_name = verbose_name
        self.name = name
        self.default = default
        self.required = required
        self.validators = validators
        self.choices = choices
        # Remember this property's position and bump the global counter.
        self.creation_counter = Property.creation_counter
        Property.creation_counter += 1
    def __property_config__(self, document_class, property_name):
        # Called by the document metaclass; defaults the stored name to the
        # attribute name it was declared under.
        self.document_class = document_class
        if self.name is None:
            self.name = property_name
    def __property_init__(self, document_instance, value):
        """ method used to set value of the property when
        we create the document. Don't check required. """
        if value is not None:
            value = self.to_json(self.validate(value, required=False))
        document_instance._doc[self.name] = value
    def __get__(self, document_instance, document_class):
        # Descriptor read: fetch the JSON value and convert it to Python.
        if document_instance is None:
            return self
        value = document_instance._doc.get(self.name)
        if value is not None:
            value = self._to_python(value)
        return value
    def __set__(self, document_instance, value):
        # Descriptor write: validate, then store the JSON form.
        value = self.validate(value, required=False)
        document_instance._doc[self.name] = self._to_json(value)
    def __delete__(self, document_instance):
        pass
    def default_value(self):
        """ return default value """
        default = self.default
        # Callable defaults are evaluated lazily (e.g. ``datetime.utcnow``).
        if callable(default):
            default = default()
        return default
    def validate(self, value, required=True):
        """ validate value """
        if required and self.empty(value):
            if self.required:
                raise BadValueError("Property %s is required." % self.name)
        else:
            if self.choices and value is not None:
                # Normalize choices: plain list, dict keys, or (key, name) pairs.
                if isinstance(self.choices, list): choice_list = self.choices
                if isinstance(self.choices, dict): choice_list = self.choices.keys()
                if isinstance(self.choices, tuple): choice_list = [key for (key, name) in self.choices]
                if value not in choice_list:
                    raise BadValueError('Property %s is %r; must be one of %r' % (
                        self.name, value, choice_list))
            if self.validators:
                # A single callable or a list/tuple of callables.
                if isinstance(self.validators, (list, tuple,)):
                    for validator in self.validators:
                        if callable(validator):
                            validator(value)
                elif callable(self.validators):
                    self.validators(value)
        return value
    def empty(self, value):
        """ test if value is empty (0 is NOT empty) """
        return (not value and value != 0) or value is None
    def _to_python(self, value):
        # None passes through unconverted.
        if value == None:
            return value
        return self.to_python(value)
    def _to_json(self, value):
        # None passes through unconverted.
        if value == None:
            return value
        return self.to_json(value)
    def to_python(self, value):
        """ convert to python type """
        return unicode(value)
    def to_json(self, value):
        """ convert to json, Converted value is saved in couchdb. """
        return self.to_python(value)
    # Python type this property maps to; overridden by subclasses.
    data_type = None
class StringProperty(Property):
    """ string property str or unicode property
    *Value type*: unicode
    """
    # Conversion from JSON simply coerces to unicode.
    to_python = unicode
    def validate(self, value, required=True):
        """Validate base constraints, then require a str/unicode value."""
        value = super(StringProperty, self).validate(value,
                required=required)
        if value is None:
            return value
        if not isinstance(value, basestring):
            raise BadValueError(
                'Property %s must be unicode or str instance, not a %s' % (self.name, type(value).__name__))
        return value
    data_type = unicode
class IntegerProperty(Property):
    """ Integer property. map to int
    *Value type*: int
    """
    to_python = int
    def empty(self, value):
        # 0 is a valid integer, so only None counts as empty.
        return value is None
    def validate(self, value, required=True):
        """Validate base constraints, then require an int/long value."""
        value = super(IntegerProperty, self).validate(value,
                required=required)
        if value is None:
            return value
        if value is not None and not isinstance(value, (int, long,)):
            raise BadValueError(
                'Property %s must be %s or long instance, not a %s'
                % (self.name, type(self.data_type).__name__,
                    type(value).__name__))
        return value
    data_type = int
# Alias kept for backward compatibility.
LongProperty = IntegerProperty
class FloatProperty(Property):
    """ Float property, map to python float
    *Value type*: float
    """
    to_python = float
    data_type = float
    def validate(self, value, required=True):
        """Validate base constraints, then require a float value."""
        value = super(FloatProperty, self).validate(value,
                required=required)
        if value is None:
            return value
        if not isinstance(value, float):
            raise BadValueError(
                'Property %s must be float instance, not a %s'
                % (self.name, type(value).__name__))
        return value
# Alias kept for backward compatibility.
Number = FloatProperty
class BooleanProperty(Property):
    """ Boolean property, map to python bool
    *ValueType*: bool
    """
    to_python = bool
    data_type = bool
    def validate(self, value, required=True):
        """Validate base constraints, then require a bool value."""
        value = super(BooleanProperty, self).validate(value,
                required=required)
        if value is None:
            return value
        if value is not None and not isinstance(value, bool):
            raise BadValueError(
                'Property %s must be bool instance, not a %s'
                % (self.name, type(value).__name__))
        return value
    def empty(self, value):
        """test if boolean is empty"""
        # False is a valid value, so only None counts as empty.
        return value is None
class DecimalProperty(Property):
"""
|
mr-niels-christensen/finna-be-octo-archer
|
briefme/src/main/rdflib/plugins/parsers/notation3.py
|
Python
|
gpl-2.0
| 61,070 | 0.002554 |
#!/usr/bin/env python
u"""
notation3.py - Standalone Notation3 Parser
Derived from CWM, the Closed World Machine
Authors of the original suite:
* Dan Connolly <@@>
* Tim Berners-Lee <@@>
* Yosi Scharf <@@>
* Joseph M. Reagle Jr. <reagle@w3.org>
* Rich Salz <rsalz@zolera.com
|
>
http://www.w3.org/2000/1
|
0/swap/notation3.py
Copyright 2000-2007, World Wide Web Consortium.
Copyright 2001, MIT.
Copyright 2001, Zolera Systems Inc.
License: W3C Software License
http://www.w3.org/Consortium/Legal/copyright-software
Modified by Sean B. Palmer
Copyright 2007, Sean B. Palmer.
Modified to work with rdflib by Gunnar Aastrand Grimnes
Copyright 2010, Gunnar A. Grimnes
"""
# Python standard libraries
import types
import sys
import os
import re
import codecs
import warnings
from decimal import Decimal
from uuid import uuid4
from rdflib.term import URIRef, BNode, Literal, Variable, _XSD_PFX, _unique_id
from rdflib.graph import QuotedGraph, ConjunctiveGraph, Graph
from rdflib import py3compat
b = py3compat.b
__all__ = ['BadSyntax', 'N3Parser', 'TurtleParser',
"splitFragP", "join", "base",
"runNamespace", "uniqueURI", "hexify"]
from rdflib.parser import Parser
def splitFragP(uriref, punct=0):
    """split a URI reference before the fragment
    Punctuation is kept.
    e.g.
    >>> splitFragP("abc#def")
    ('abc', '#def')
    >>> splitFragP("abcdef")
    ('abcdef', '')
    """
    # Split on the *last* '#'; rpartition yields an empty separator when
    # there is no fragment at all.
    head, hash_sign, fragment = uriref.rpartition("#")
    if hash_sign:
        return head, hash_sign + fragment
    return uriref, ''
@py3compat.format_doctest_out
def join(here, there):
    """join an absolute URI and URI reference
    (non-ascii characters are supported/doctested;
    haven't checked the details of the IRI spec though)
    ``here`` is assumed to be absolute.
    ``there`` is URI reference.
    >>> join('http://example/x/y/z', '../abc')
    'http://example/x/abc'
    Raise ValueError if there uses relative path
    syntax but here has no hierarchical path.
    >>> join('mid:foo@example', '../foo') # doctest: +NORMALIZE_WHITESPACE
    Traceback (most recent call last):
        raise ValueError(here)
    ValueError: Base <mid:foo@example> has no slash
    after colon - with relative '../foo'.
    >>> join('http://example/x/y/z', '')
    'http://example/x/y/z'
    >>> join('mid:foo@example', '#foo')
    'mid:foo@example#foo'
    We grok IRIs
    >>> len(%(u)s'Andr\\xe9')
    5
    >>> join('http://example.org/', %(u)s'#Andr\\xe9')
    %(u)s'http://example.org/#Andr\\xe9'
    """
    # assert(here.find("#") < 0), \
    # "Base may not contain hash: '%s'" % here # why must caller splitFrag?
    slashl = there.find('/')
    colonl = there.find(':')
    # join(base, 'foo:/') -- absolute
    # A scheme before any slash means `there` is already absolute.
    if colonl >= 0 and (slashl < 0 or colonl < slashl):
        return there
    bcolonl = here.find(':')
    assert(bcolonl >= 0), \
        "Base uri '%s' is not absolute" % here # else it's not absolute
    path, frag = splitFragP(there)
    if not path:
        return here + frag
    # join('mid:foo@example', '../foo') bzzt
    if here[bcolonl + 1:bcolonl + 2] != '/':
        raise ValueError(
            ("Base <%s> has no slash after "
             "colon - with relative '%s'.") % (here, there))
    # bpath = index where the base's hierarchical path begins.
    if here[bcolonl + 1:bcolonl + 3] == '//':
        bpath = here.find('/', bcolonl + 3)
    else:
        bpath = bcolonl + 1
    # join('http://xyz', 'foo')
    if bpath < 0:
        bpath = len(here)
        here = here + '/'
    # join('http://xyz/', '//abc') => 'http://abc'
    if there[:2] == '//':
        return here[:bcolonl + 1] + there
    # join('http://xyz/', '/abc') => 'http://xyz/abc'
    if there[:1] == '/':
        return here[:bpath] + there
    slashr = here.rfind('/')
    # Collapse leading './' and '../' segments against the base path.
    while 1:
        if path[:2] == './':
            path = path[2:]
        if path == '.':
            path = ''
        elif path[:3] == '../' or path == '..':
            path = path[3:]
            i = here.rfind('/', bpath, slashr)
            if i >= 0:
                here = here[:i + 1]
                slashr = i
        else:
            break
    return here[:slashr + 1] + path + frag
def base():
    """The base URI for this process - the Web equiv of cwd
    Relative or absolute unix-standard filenames parsed relative to
    this yield the URI of the file.
    If we had a reliable way of getting a computer name,
    we should put it in the hostname just to prevent ambiguity
    """
    # return "file://" + hostname + os.getcwd() + "/"
    return "file://" + _fixslash(os.getcwd()) + "/"
def _fixslash(s):
""" Fix windowslike filename to unixlike - (#ifdef WINDOWS)"""
s = s.replace("\\", "/")
if s[0] != "/" and s[1] == ":":
s = s[2:] # @@@ Hack when drive letter present
return s
CONTEXT = 0
PRED = 1
SUBJ = 2
OBJ = 3
PARTS = PRED, SUBJ, OBJ
ALL4 = CONTEXT, PRED, SUBJ, OBJ
SYMBOL = 0
FORMULA = 1
LITERAL = 2
LITERAL_DT = 21
LITERAL_LANG = 22
ANONYMOUS = 3
XMLLITERAL = 25
Logic_NS = "http://www.w3.org/2000/10/swap/log#"
NODE_MERGE_URI = Logic_NS + "is" # Pseudo-property indicating node merging
forSomeSym = Logic_NS + "forSome"
forAllSym = Logic_NS + "forAll"
RDF_type_URI = "http://www.w3.org/1999/02/22-rdf-syntax-ns#type"
RDF_NS_URI = "http://www.w3.org/1999/02/22-rdf-syntax-ns#"
OWL_NS = "http://www.w3.org/2002/07/owl#"
DAML_sameAs_URI = OWL_NS + "sameAs"
parsesTo_URI = Logic_NS + "parsesTo"
RDF_spec = "http://www.w3.org/TR/REC-rdf-syntax/"
List_NS = RDF_NS_URI # From 20030808
_Old_Logic_NS = "http://www.w3.org/2000/10/swap/log.n3#"
N3_first = (SYMBOL, List_NS + "first")
N3_rest = (SYMBOL, List_NS + "rest")
N3_li = (SYMBOL, List_NS + "li")
N3_nil = (SYMBOL, List_NS + "nil")
N3_List = (SYMBOL, List_NS + "List")
N3_Empty = (SYMBOL, List_NS + "Empty")
runNamespaceValue = None
def runNamespace():
    "Return a URI suitable as a namespace for run-local objects"
    # @@@ include hostname (privacy?) (hash it?)
    global runNamespaceValue
    # Computed once per process so all callers share the same namespace.
    if runNamespaceValue is None:
        runNamespaceValue = join(base(), _unique_id()) + '#'
    return runNamespaceValue
nextu = 0
def uniqueURI():
    "A unique URI"
    # Monotonic per-process counter appended to the run namespace.
    global nextu
    nextu += 1
    return "%su_%s" % (runNamespace(), nextu)
tracking = False
chatty_flag = 50
# from why import BecauseOfData, becauseSubexpression
def BecauseOfData(*args, **kargs):
    # No-op stand-in for cwm's provenance tracking (see `why` module).
    # print args, kargs
    pass
def becauseSubexpression(*args, **kargs):
    # No-op stand-in for cwm's provenance tracking (see `why` module).
    # print args, kargs
    pass
N3_forSome_URI = forSomeSym
N3_forAll_URI = forAllSym
# Magic resources we know about
ADDED_HASH = "#" # Stop where we use this in case we want to remove it!
# This is the hash on namespace URIs
RDF_type = (SYMBOL, RDF_type_URI)
DAML_sameAs = (SYMBOL, DAML_sameAs_URI)
LOG_implies_URI = "http://www.w3.org/2000/10/swap/log#implies"
BOOLEAN_DATATYPE = _XSD_PFX + "boolean"
DECIMAL_DATATYPE = _XSD_PFX + "decimal"
DOUBLE_DATATYPE = _XSD_PFX + "double"
FLOAT_DATATYPE = _XSD_PFX + "float"
INTEGER_DATATYPE = _XSD_PFX + "integer"
option_noregen = 0 # If set, do not regenerate genids on output
# @@ I18n - the notname chars need extending for well known unicode non-text
# characters. The XML spec switched to assuming unknown things were name
# characaters.
# _namechars = string.lowercase + string.uppercase + string.digits + '_-'
_notQNameChars = \
"\t\r\n !\"#$&'()*,+/;<=>?@[\\]^`{|}~" # else valid qname :-/
_notKeywordsChars = _notQNameChars + "."
_notNameChars = _notQNameChars + ":" # Assume anything else valid name :-/
_rdfns = 'http://www.w3.org/1999/02/22-rdf-syntax-ns#'
hexChars = 'ABCDEFabcdef0123456789'
escapeChars = "(_~.-!$&'()*+,;=/?#@%)" # valid for \ escapes in localnames
def unicodeExpand(m):
    # Expand a regex match holding a hex code point to the character.
    # NOTE: `unichr` is Python 2 only; a narrow-build variant may override
    # this definition below.
    try:
        return unichr(int(m.group(1), 16))
    except:
        raise Exception("Invalid unicode code point: " + m.group(1))
if py3compat.narrow_build:
def unicodeExpand(m):
try:
return unichr(int(m.group(1), 16))
except ValueError:
warnings.warn(
'Encountered a unicode char > 0xFFFF in a narrow python build. '
'Trying to degrade gracefu
|
jendap/tensorflow
|
tensorflow/python/data/experimental/kernel_tests/restructured_dataset_test.py
|
Python
|
apache-2.0
| 3,111 | 0.004179 |
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for the private `_RestructuredDataset` transformation."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.data.experimental.ops import batching
from tensorflow.python.data.kernel_tests import test_base
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.data.util import nest
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.platform import test
# TODO(b/117581999): Add eager specific test.
class RestructuredDatasetTest(test_base.DatasetTestBase):
  """Tests for the private `batching._RestructuredDataset` transformation."""

  @test_util.run_deprecated_v1
  def testRestructureDataset(self):
    """Accepts compatible type/shape specs; rejects incompatible ones."""
    components = (array_ops.placeholder(dtypes.int32),
                  (array_ops.placeholder(dtypes.int32, shape=[None]),
                   array_ops.placeholder(dtypes.int32, shape=[20, 30])))
    dataset = dataset_ops.Dataset.from_tensors(components)
    i32 = dtypes.int32
    # (new_types, new_shape_lists) pairs that must be accepted.
    test_cases = [((i32, i32, i32), None),
                  (((i32, i32), i32), None),
                  ((i32, i32, i32), (None, None, None)),
                  ((i32, i32, i32), ([17], [17], [20, 30]))]
    for new_types, new_shape_lists in test_cases:
      # pylint: disable=protected-access
      new = batching._RestructuredDataset(dataset, new_types, new_shape_lists)
      # pylint: enable=protected-access
      self.assertEqual(new_types, new.output_types)
      if new_shape_lists is not None:
        for expected_shape_list, shape in zip(
            nest.flatten(new_shape_lists), nest.flatten(new.output_shapes)):
          if expected_shape_list is None:
            self.assertIs(None, shape.ndims)
          else:
            self.assertEqual(expected_shape_list, shape.as_list())
    # Mismatched dtypes, arity, nesting structure or shapes must raise.
    fail_cases = [((i32, dtypes.int64, i32), None),
                  ((i32, i32, i32, i32), None),
                  ((i32, i32, i32), ((None, None), None)),
                  ((i32, i32, i32), (None, None, None, None)),
                  ((i32, i32, i32), (None, [None], [21, 30]))]
    for new_types, new_shape_lists in fail_cases:
      with self.assertRaises(ValueError):
        # pylint: disable=protected-access
        new = batching._RestructuredDataset(dataset, new_types, new_shape_lists)
        # pylint: enable=protected-access
# pylint: enable=protected-access
if __name__ == "__main__":
test.main()
|
zmetcalf/fusionbox-demo-project
|
adaptive/models.py
|
Python
|
gpl-3.0
| 558 | 0 |
import widgy
from widgy.models import Content
from widgy.utils import update_context, render_to_string
@widgy.register
class Adaptive(Content):
    """Widgy content that renders a device-specific template."""

    def render(self, context):
        # Device type -> dedicated template; anything else (desktop,
        # unknown) falls back to the generic render template.
        device_templates = {
            'tablet': 'widgy/adaptive/tablet.html',
            'phone': 'widgy/adaptive/phone.html',
        }
        size = context.get('device_info')
        template = device_templates.get(size['type'],
                                        'widgy/adaptive/render.html')
        with update_context(context, {'self': self}):
            return render_to_string(template, context)
|
tkaitchuck/nupic
|
external/linux64/lib/python2.6/site-packages/PIL/GifImagePlugin.py
|
Python
|
gpl-3.0
| 10,996 | 0.003183 |
#
# The Python Imaging Library.
# $Id: GifImagePlugin.py 2134 2004-10-06 08:55:20Z fredrik $
#
# GIF file handling
#
# History:
# 1995-09-01 fl Created
# 1996-12-14 fl Added interlace support
# 1996-12-30 fl Added animation support
# 1997-01-05 fl Added write support, fixed local colour map bug
# 1997-02-23 fl Make sure to load raster data in getdata()
# 1997-07-05 fl Support external decoder (0.4)
# 1998-07-09 fl Handle all modes when saving (0.5)
# 1998-07-15 fl Renamed offset attribute to avoid name clash
# 2001-04-16 fl Added rewind support (seek to frame 0) (0.6)
# 2001-04-17 fl Added palette optimization (0.7)
# 2002-06-06 fl Added transparency support for save (0.8)
# 2004-02-24 fl Disable interlacing for small images
#
# Copyright (c) 1997-2
|
004 by Secret Labs AB
# Copyright (c) 1995-2004 by Fredrik Lundh
#
# See the README file for information on usage and redistribution.
#
__version__ = "0.9"
import Image, ImageFile, ImagePalette
# --------------------------------------------------------------------
# Helpers
def i16(c):
    """Decode a little-endian unsigned 16-bit integer from *c*."""
    lo, hi = ord(c[0]), ord(c[1])
    return lo + (hi << 8)
def o16(i):
    """Encode *i* as a little-endian unsigned 16-bit string."""
    low_byte = chr(i & 255)
    high_byte = chr((i >> 8) & 255)
    return low_byte + high_byte
# --------------------------------------------------------------------
# Identify/read GIF files
def _accept(prefix):
return prefix[:6] in ["GIF87a", "GIF89a"]
##
# Image plugin for GIF images. This plugin supports both GIF87 and
# GIF89 images.
class GifImageFile(ImageFile.ImageFile):
    """Image plugin for GIF images (supports both GIF87a and GIF89a)."""
    format = "GIF"
    format_description = "Compuserve GIF"
    # Palette shared by all frames unless a frame carries a local one.
    global_palette = None
    def data(self):
        """Read one data sub-block; return None on a block terminator."""
        s = self.fp.read(1)
        if s and ord(s):
            return self.fp.read(ord(s))
        return None
    def _open(self):
        """Parse the screen descriptor/global palette and load frame 0."""
        # Screen
        s = self.fp.read(13)
        if s[:6] not in ["GIF87a", "GIF89a"]:
            raise SyntaxError, "not a GIF file"
        self.info["version"] = s[:6]
        self.size = i16(s[6:]), i16(s[8:])
        self.tile = []
        flags = ord(s[10])
        # Bits per pixel of the global colour table.
        bits = (flags & 7) + 1
        if flags & 128:
            # get global palette
            self.info["background"] = ord(s[11])
            # check if palette contains colour indices
            p = self.fp.read(3<<bits)
            for i in range(0, len(p), 3):
                # A pure greyscale ramp means the palette is just indices.
                if not (chr(i/3) == p[i] == p[i+1] == p[i+2]):
                    p = ImagePalette.raw("RGB", p)
                    self.global_palette = self.palette = p
                    break
        self.__fp = self.fp # FIXME: hack
        self.__rewind = self.fp.tell()
        self.seek(0) # get ready to read first frame
    def seek(self, frame):
        """Seek to *frame*; only frame 0 or the next frame are reachable."""
        if frame == 0:
            # rewind
            self.__offset = 0
            self.dispose = None
            self.__frame = -1
            self.__fp.seek(self.__rewind)
        if frame != self.__frame + 1:
            raise ValueError, "cannot seek to frame %d" % frame
        self.__frame = frame
        self.tile = []
        self.fp = self.__fp
        if self.__offset:
            # backup to last frame
            self.fp.seek(self.__offset)
            while self.data():
                pass
            self.__offset = 0
        # Apply the previous frame's disposal before decoding this frame.
        if self.dispose:
            self.im = self.dispose
            self.dispose = None
        self.palette = self.global_palette
        while 1:
            s = self.fp.read(1)
            if not s or s == ";":
                # End of stream / trailer.
                break
            elif s == "!":
                #
                # extensions
                #
                s = self.fp.read(1)
                block = self.data()
                if ord(s) == 249:
                    #
                    # graphic control extension
                    #
                    flags = ord(block[0])
                    if flags & 1:
                        self.info["transparency"] = ord(block[3])
                    # Duration is stored in 1/100 s; expose milliseconds.
                    self.info["duration"] = i16(block[1:3]) * 10
                    try:
                        # disposal methods
                        if flags & 8:
                            # replace with background colour
                            self.dispose = Image.core.fill("P", self.size,
                                self.info["background"])
                        elif flags & 16:
                            # replace with previous contents
                            self.dispose = self.im.copy()
                    except (AttributeError, KeyError):
                        pass
                elif ord(s) == 255:
                    #
                    # application extension
                    #
                    self.info["extension"] = block, self.fp.tell()
                    if block[:11] == "NETSCAPE2.0":
                        self.info["loop"] = 1 # FIXME
                while self.data():
                    pass
            elif s == ",":
                #
                # local image
                #
                s = self.fp.read(9)
                # extent
                x0, y0 = i16(s[0:]), i16(s[2:])
                x1, y1 = x0 + i16(s[4:]), y0 + i16(s[6:])
                flags = ord(s[8])
                interlace = (flags & 64) != 0
                if flags & 128:
                    # Local colour table overrides the global palette.
                    bits = (flags & 7) + 1
                    self.palette =\
                        ImagePalette.raw("RGB", self.fp.read(3<<bits))
                # image data
                bits = ord(self.fp.read(1))
                self.__offset = self.fp.tell()
                self.tile = [("gif",
                    (x0, y0, x1, y1),
                    self.__offset,
                    (bits, interlace))]
                break
            else:
                pass
                # raise IOError, "illegal GIF tag `%x`" % ord(s)
        if not self.tile:
            # self.__fp = None
            raise EOFError, "no more images in GIF file"
        self.mode = "L"
        if self.palette:
            self.mode = "P"
    def tell(self):
        """Return the index of the current frame."""
        return self.__frame
# --------------------------------------------------------------------
# Write GIF files
# Use the external C encoder when available; otherwise the pure Python
# writer in _save() below is used.
try:
    import _imaging_gif
except ImportError:
    _imaging_gif = None
# Source image mode -> raw mode accepted by the GIF encoder.
RAWMODE = {
    "1": "L",
    "L": "L",
    "P": "P",
}
def _save(im, fp, filename):
    """Write *im* to *fp* as an uncompressed GIF file.

    Tries the external C driver first; otherwise emits the header
    (via getheader()), an optional transparency extension, the local
    image descriptor and the raster data by hand.  Modes "1", "L" and
    "P" are written natively; anything else is converted on the fly.
    """
    if _imaging_gif:
        # call external driver
        try:
            _imaging_gif.save(im, fp, filename)
            return
        except IOError:
            pass # write uncompressed file
    try:
        rawmode = RAWMODE[im.mode]
        imOut = im
    except KeyError:
        # convert on the fly (EXPERIMENTAL -- I'm not sure PIL
        # should automatically convert images on save...)
        if Image.getmodebase(im.mode) == "RGB":
            imOut = im.convert("P")
            rawmode = "P"
        else:
            imOut = im.convert("L")
            rawmode = "L"
    # header (screen descriptor and global palette)
    for s in getheader(imOut, im.encoderinfo):
        fp.write(s)
    flags = 0
    # encoderinfo is a plain dict; .get replaces the old try/KeyError.
    interlace = im.encoderinfo.get("interlace", 1)
    # workaround for @PIL153: interlacing small images produces broken
    # output, so force it off below 16 pixels
    if min(im.size) < 16:
        interlace = 0
    if interlace:
        flags = flags | 64
    try:
        transparency = im.encoderinfo["transparency"]
    except KeyError:
        pass
    else:
        # transparency extension block (graphic control extension)
        fp.write("!" +
                 chr(249) +              # extension intro
                 chr(4) +                # length
                 chr(1) +                # transparency info present
                 o16(0) +                # duration
                 chr(int(transparency))  # transparency index
                 + chr(0))
    # local image header
    fp.write("," +
             o16(0) + o16(0) +        # bounding box
             o16(im.size[0]) +        # size
             o16(im.size[1]) +
             chr(flags) +             # flags
             chr(8))                  # bits
    imOut.encoderconfig = (8, interlace)
    ImageFile._save(imOut, fp, [("gif", (0,0)+im.size, 0, rawmode)])
    fp.write("\0") # end of image data
    fp.write(";") # end of file
    # Flush is best-effort: some file-like targets don't support it.
    # (Was a bare `except:`, which also swallowed KeyboardInterrupt.)
    try:
        fp.flush()
    except Exception:
        pass
def _save_netpbm(im, fp,
|
ravyg/algorithms
|
python/22_generateParentheses.py
|
Python
|
gpl-3.0
| 529 | 0.009452 |
class Solution(object):
    def generateParenthesis(self, n):
        """Return every well-formed string of n bracket pairs (LeetCode 22).

        Returns an empty list for n < 1, matching the original behavior.
        """
        if n < 1:
            return []
        results = []
        def backtrack(prefix, opens_left, closes_left):
            # Try '(' first while any remain, then ')' whenever it keeps
            # the prefix balanced; a finished string has no ')' left.
            if opens_left:
                backtrack(prefix + '(', opens_left - 1, closes_left)
            if closes_left > opens_left:
                backtrack(prefix + ')', opens_left, closes_left - 1)
            if not closes_left:
                results.append(prefix)
            return results
        return backtrack('', n, n)
# Ad-hoc smoke test: print all balanced bracket strings for n = 3.
n=3
obj = Solution()
output = obj.generateParenthesis(n)
print(output)
|
Redder/Weather-App-Python
|
Weather.py
|
Python
|
mit
| 4,583 | 0.031857 |
'''
Name: Weather Application
Author: Redder04
Extra Requirements: Unirest, Mashape Key
Unirest: http://unirest.io/
Mashape: https://www.mashape.com/
Description: This application will connect to a Mashape Weather API. The user will
supply a City or State (I might add GPS Capabilites later) and send the request. The
API will return JSON data with the weather data.
Github: https://github.com/Redder/Weather-App-Python
P.S: Any "clear" commands can be changed to "cls" for windows
'''
# Import all the libraries we need (Python 2 script: raw_input/input)
import unirest
import json
import os

# Welcome screen
os.system('clear')
print('================================')
print('Welcome to the Weather App!')
print('Press Enter to Continue!')
print('================================')
raw_input('')

running = True
while running:
    # City/state from the user; spaces become '+' for the query string.
    location = raw_input('Please enter a City or State: ').replace(' ', '+')
    # Call the Mashape weather API (JSON list of 7 day-forecast objects,
    # or a list with a single error object on failure).
    response = unirest.get(
        "https://george-vustrey-weather.p.mashape.com/api.php?location=" + location,
        headers={
            "X-Mashape-Key": "Mashape Key goes Here!!!",
            "Accept": "application/json"
        }
    )
    data = json.loads(response.raw_body)
    try:
        # Extract every field up front so a malformed payload raises
        # before anything is printed (same failure behavior as the
        # original all-at-once variable extraction).
        week = [(data[i]['day_of_week'], data[i]['condition'],
                 data[i]['high'], data[i]['low']) for i in range(7)]
        print('\n')
        for day_of_week, condition, high, low in week:
            print('================================')
            print(day_of_week)
            print('Condition: ' + condition)
            print('High: ' + high)
            print('Low: ' + low)
            print('================================')
            print('\n')
        raw_input('')
    except KeyError:
        # The forecast keys are missing: the API returned an error object.
        # (Was Python-2-only `except KeyError, e:`; `e` was unused.)
        os.system('clear')
        print('Error ' + str(data[0]['code']) + ':' + ' ' + data[0]['message'])
        raw_input('')
    # Search again or quit?
    os.system('clear')
    print('Would you like to search again? or Quit?')
    print('1: Search again')
    print('2: Quit')
    # NOTE(review): Python 2 input() evaluates what the user types --
    # kept for compatibility, but raw_input + int() would be safer.
    choice = input('')
    if choice == 2:
        running = False
|
adambrenecki/django
|
django/db/backends/oracle/base.py
|
Python
|
bsd-3-clause
| 40,557 | 0.001381 |
"""
Oracle database backend for Django.
Requires cx_Oracle: http://cx-oracle.sourceforge.net/
"""
from __future__ import unicode_literals
import decimal
import re
import platform
import sys
import warnings
def _setup_environment(environ):
# Cygwin requires some special voodoo to set the environment variables
# properly so that Oracle will see them.
if platform.system().upper().startswith('CYGWIN'):
try:
import ctypes
except ImportError as e:
from django.core.exceptions import ImproperlyConfigured
raise ImproperlyConfigured("Error loading ctypes: %s; "
"the Oracle backend requires ctypes to "
"operate correctly under Cygwin." % e)
kernel32 = ctypes.CDLL('kernel32')
for name, value in environ:
kernel32.SetEnvironmentVariableA(name, value)
else:
import os
os.environ.update(environ)
# These must be exported before cx_Oracle is imported below, because the
# Oracle client libraries read them at load time.
_setup_environment([
    # Oracle takes client-side character set encoding from the environment.
    ('NLS_LANG', '.UTF8'),
    # This prevents unicode from getting mangled by getting encoded into the
    # potentially non-unicode database character set.
    ('ORA_NCHAR_LITERAL_REPLACE', 'TRUE'),
])
# cx_Oracle is a hard requirement for this backend.
try:
    import cx_Oracle as Database
except ImportError as e:
    from django.core.exceptions import ImproperlyConfigured
    raise ImproperlyConfigured("Error loading cx_Oracle module: %s" % e)
# pytz is optional; without it only the zoneinfo-dependent features are off.
try:
    import pytz
except ImportError:
    pytz = None
from django.db import utils
from django.db.backends import *
from django.db.backends.oracle.client import DatabaseClient
from django.db.backends.oracle.creation import DatabaseCreation
from django.db.backends.oracle.introspection import DatabaseIntrospection
from django.db.backends.oracle.schema import DatabaseSchemaEditor
from django.utils.encoding import force_bytes, force_text
DatabaseError = Database.DatabaseError
IntegrityError = Database.IntegrityError
# Check whether cx_Oracle was compiled with the WITH_UNICODE option if cx_Oracle is pre-5.1. This will
# also be True for cx_Oracle 5.1 and in Python 3.0. See #19606
if int(Database.version.split('.', 1)[0]) >= 5 and \
        (int(Database.version.split('.', 2)[1]) >= 1 or
         not hasattr(Database, 'UNICODE')):
    convert_unicode = force_text
else:
    convert_unicode = force_bytes
class DatabaseFeatures(BaseDatabaseFeatures):
    """Oracle-specific feature flags consumed by the ORM.

    Each attribute overrides a BaseDatabaseFeatures default to describe
    what the Oracle backend can (or cannot) do.  (The original declared
    ``nulls_order_largest`` twice with the same value; the duplicate
    assignment has been removed.)
    """
    empty_fetchmany_value = ()
    needs_datetime_string_cast = False
    # Oracle stores '' as NULL, so the two must be treated as equivalent.
    interprets_empty_strings_as_nulls = True
    uses_savepoints = True
    has_select_for_update = True
    has_select_for_update_nowait = True
    can_return_id_from_insert = True
    allow_sliced_subqueries = False
    supports_subqueries_in_group_by = False
    supports_transactions = True
    supports_timezones = False
    has_zoneinfo_database = pytz is not None
    supports_bitwise_or = False
    can_defer_constraint_checks = True
    ignores_nulls_in_unique_constraints = False
    has_bulk_insert = True
    supports_tablespaces = True
    supports_sequence_reset = False
    supports_combined_alters = False
    max_index_name_length = 30
    nulls_order_largest = True
    requires_literal_defaults = True
    connection_persists_old_columns = True
class DatabaseOperations(BaseDatabaseOperations):
compiler_module = "django.db.backends.oracle.compiler"
    def autoinc_sql(self, table, column):
        """Return (sequence_sql, trigger_sql) emulating an auto-increment
        primary key: Oracle has none natively, so a sequence feeds the
        column through a BEFORE INSERT trigger.
        """
        # To simulate auto-incrementing primary keys in Oracle, we have to
        # create a sequence and a trigger.
        sq_name = self._get_sequence_name(table)
        tr_name = self._get_trigger_name(table)
        tbl_name = self.quote_name(table)
        col_name = self.quote_name(column)
        # NB: the PL/SQL templates below are filled in via `% locals()`.
        sequence_sql = """
DECLARE
    i INTEGER;
BEGIN
    SELECT COUNT(*) INTO i FROM USER_CATALOG
        WHERE TABLE_NAME = '%(sq_name)s' AND TABLE_TYPE = 'SEQUENCE';
    IF i = 0 THEN
        EXECUTE IMMEDIATE 'CREATE SEQUENCE "%(sq_name)s"';
    END IF;
END;
/""" % locals()
        trigger_sql = """
CREATE OR REPLACE TRIGGER "%(tr_name)s"
BEFORE INSERT ON %(tbl_name)s
FOR EACH ROW
WHEN (new.%(col_name)s IS NULL)
    BEGIN
        SELECT "%(sq_name)s".nextval
        INTO :new.%(col_name)s FROM dual;
    END;
/""" % locals()
        return sequence_sql, trigger_sql
    def cache_key_culling_sql(self):
        # Oracle has no LIMIT/OFFSET, so rank() over the ordered keys is
        # used to find the culling boundary key (%%s escapes the later
        # parameter placeholder).
        return """
            SELECT cache_key
            FROM (SELECT cache_key, rank() OVER (ORDER BY cache_key) AS rank FROM %s)
            WHERE rank = %%s + 1
        """
def date_extract_sql(self, lookup_type, field_name):
if lookup_type == 'week_day':
# TO_CHAR(field, 'D') returns an integer from 1-7, where 1=Sunday.
return "TO_CHAR(%s, 'D')" % field_name
else:
# http://docs.oracle.com/cd/B19306_01/server.102/b14200/functions050.htm
return "EXTRACT(%s FROM %s)" % (lookup_type.upper(), field_name)
def date_interval_sql(self, sql, connector, timedelta):
"""
Implements the interval functionality for expressions
format for Oracle:
(datefield + INTERVAL '3 00:03:20.000000' DAY(1) TO SECOND(6))
"""
minutes, seconds = divmod(timedelta.seconds, 60)
hours, minutes = divmod(minutes, 60)
days = str(timedelta.days)
day_precision = len(days
|
)
fmt = "(%s %s INTERVAL '%s %02d:%02d:%02d.%06d' DAY(%d) TO SECOND(6))"
return fmt % (sql, connector, days, hours, minutes, seconds,
timedelta.microseconds, day_precision)
def date_trunc_sql(self, lookup_type, field_name):
# http://docs.oracle.com/cd/B19306_01/server.102/b14200/functions230.htm#i1002084
|
if lookup_type in ('year', 'month'):
return "TRUNC(%s, '%s')" % (field_name, lookup_type.upper())
else:
return "TRUNC(%s)" % field_name
    # Oracle crashes with "ORA-03113: end-of-file on communication channel"
    # if the time zone name is passed in parameter. Use interpolation instead.
    # https://groups.google.com/forum/#!msg/django-developers/zwQju7hbG78/9l934yelwfsJ
    # This regexp matches all time zone names from the zoneinfo database.
    _tzname_re = re.compile(r'^[\w/:+-]+$')
    def _convert_field_to_tz(self, field_name, tzname):
        """Return SQL converting *field_name* from UTC to *tzname*.

        The zone name is interpolated into the SQL (see note above), so it
        is validated against _tzname_re first to prevent SQL injection.
        """
        if not self._tzname_re.match(tzname):
            raise ValueError("Invalid time zone name: %s" % tzname)
        # Convert from UTC to local time, returning TIMESTAMP WITH TIME ZONE.
        result = "(FROM_TZ(%s, '0:00') AT TIME ZONE '%s')" % (field_name, tzname)
        # Extracting from a TIMESTAMP WITH TIME ZONE ignore the time zone.
        # Convert to a DATETIME, which is called DATE by Oracle. There's no
        # built-in function to do that; the easiest is to go through a string.
        result = "TO_CHAR(%s, 'YYYY-MM-DD HH24:MI:SS')" % result
        result = "TO_DATE(%s, 'YYYY-MM-DD HH24:MI:SS')" % result
        # Re-convert to a TIMESTAMP because EXTRACT only handles the date part
        # on DATE values, even though they actually store the time part.
        return "CAST(%s AS TIMESTAMP)" % result
def datetime_extract_sql(self, lookup_type, field_name, tzname):
if settings.USE_TZ:
field_name = self._convert_field_to_tz(field_name, tzname)
if lookup_type == 'week_day':
# TO_CHAR(field, 'D') returns an integer from 1-7, where 1=Sunday.
sql = "TO_CHAR(%s, 'D')" % field_name
else:
# http://docs.oracle.com/cd/B19306_01/server.102/b14200/functions050.htm
sql = "EXTRACT(%s FROM %s)" % (lookup_type.upper(), field_name)
return sql, []
def datetime_trunc_sql(self, lookup_type, field_name, tzname):
if settings.USE_TZ:
field_name = self._convert_field_to_tz(field_name, tzname)
# http://docs.oracle.com/cd/B19306_01/server.102/b14200/functions230.htm#i1002084
if lookup_type in ('year', 'month'):
sql = "TRUNC(%s, '%s')" % (field_name, lookup_type.upper())
elif lookup_type == 'day':
sql = "TRUNC(%s)" % field_name
elif lookup_type == 'hour'
|
alexaverill/make-snake
|
snake/game.py
|
Python
|
gpl-2.0
| 3,912 | 0.000256 |
#!/usr/bin/env python
# game.py
#
# Copyright (C) 2013, 2014 Kano Computing Ltd.
# License: http://www.gnu.org/licenses/gpl-2.0.txt GNU General Public License v2
#
import stage
import gameloop
import math
import random
import config
import gamestate as gs
# --- module-level game state ---
direction = (0, 0)  # current movement vector (dx, dy)
lastPos = (0, 0)    # cell vacated by the tail on the last move
snake = []          # list of (x, y) body segments, head first
speed = 1           # cells moved per tick
apples = []         # list of (x, y) apple positions
grow = config.initial_size - 1  # segments still pending to be appended
score = 0
lives = 1
livesMax = 1
def update():
    # One game tick: advance the snake, resolve apple pickups, then
    # check for wall/self collisions.
    moveSnake()
    checkCatch()
    checkPositionAllowed()
def checkCatch():
    """Eat any apple occupying the same cell as the snake's head."""
    if not snake or not apples:
        return
    head_x = snake[0][0]
    head_y = snake[0][1]
    for idx, fruit in enumerate(apples):
        if head_x == fruit[0] and head_y == fruit[1]:
            eatApple(idx)
def eatApple(i):
    """Consume apple *i*: respawn it, queue growth, and update scores."""
    global grow, score
    apples.pop(i)
    spawnApple()
    grow += config.food_values['apple']
    score_value = 1
    score += score_value
    # The gs.state updates below are best-effort persistent statistics:
    # failures are deliberately swallowed so gameplay never breaks.
    # adjust total score
    try:
        gs.state['total_score'] += score_value
    except Exception:
        pass
    # adjust highest score
    try:
        if score > gs.state['highest_score']:
            gs.state['highest_score'] = score
    except Exception:
        pass
    # adjust total number of apples
    try:
        gs.state['total_number_of_apples'] += 1
    except Exception:
        pass
def moveSnake():
    """Advance every segment one step; append a segment while growing."""
    global grow, lastPos
    last_unchanged = None
    # Remember the tail's cell before the move (consumed via lastPos).
    lastPos = (snake[len(snake) - 1][0], snake[len(snake) - 1][1])
    for i, part in enumerate(snake):
        if i == 0:
            # Head moves along the current direction vector.
            x = part[0] + speed * direction[0]
            y = part[1] + speed * direction[1]
        else:
            # Each body segment steps into its predecessor's old cell.
            x = last_unchanged[0]
            y = last_unchanged[1]
        last_unchanged = (snake[i][0], snake[i][1])
        snake[i] = (x, y)
    if grow:
        # Growth: re-attach the vacated tail cell as a new segment.
        snake.append(last_unchanged)
        grow -= 1
    # Best-effort persistent statistics; failures are ignored on purpose.
    # adjust longest snake
    try:
        if len(snake) > gs.state['longest_snake']:
            gs.state['longest_snake'] = len(snake)
    except Exception:
        pass
    # adjust total length
    try:
        gs.state['total_length'] += 1
    except Exception:
        pass
def getGameArea():
    """Playable cell count: width * height of the stage boundaries."""
    span_x = math.fabs(stage.boundaries['right'] - stage.boundaries['left'])
    span_y = math.fabs(stage.boundaries['top'] - stage.boundaries['bottom'])
    return int(math.floor(span_x * span_y))
def reset():
    """Start a new round: reset snake and direction, respawn the apples."""
    # NOTE(review): `score` is declared global here but never reset --
    # confirm whether carrying the score across rounds is intentional.
    global direction, snake, apples_count, apples, score, grow
    direction = (1, 0)
    snake = [(0, 0)]
    gameloop.frame = 1
    apples_count = 1
    apples = []
    grow = config.initial_size - 1
    # One extra apple per `apple_domain` cells of playing area.
    apples_count += int(math.floor(getGameArea() / config.apple_domain))
    for i in range(0, apples_count):
        spawnApple()
def spawnApple():
    """Place one new apple on a random free cell inside the stage."""
    # Don't overfill the board.
    if len(apples) >= getGameArea():
        return
    x = random.randrange(stage.boundaries['left'], stage.boundaries['right'])
    y = random.randrange(stage.boundaries['top'], stage.boundaries['bottom'])
    position_free = True
    for apple in apples:
        if apple[0] == x and apple[1] == y:
            position_free = False
    for part in snake:
        if part[0] == x and part[1] == y:
            position_free = False
    if position_free and not isOutOfBoundaries(x, y):
        apples.append((x, y))
    else:
        # Cell taken: retry recursively until a free cell is found (the
        # guard above ensures the board is never completely full).
        spawnApple()
def isOutOfBoundaries(x, y):
    """Return True when cell (x, y) lies outside the playable stage area."""
    bounds = stage.boundaries
    if x < bounds['left'] or x > bounds['right'] - 1:
        return True
    if y < bounds['top'] or y > bounds['bottom'] - 1:
        return True
    return False
def checkPositionAllowed():
    """Detect death: the head hitting the body or leaving the stage."""
    global lives
    collides_with_body = False
    x = snake[0][0]
    y = snake[0][1]
    # NOTE(review): range(1, len(snake) - 1) skips the final tail segment
    # -- presumably because it moves away this tick; confirm intentional.
    for i in range(1, len(snake) - 1):
        if x == snake[i][0] and y == snake[i][1]:
            collides_with_body = True
            break
    if (collides_with_body or isOutOfBoundaries(x, y)):
        gameloop.reset()
        lives -= 1
        if lives == 0:
            # Out of lives: restock and switch to the game-over state.
            lives = livesMax
            gameloop.state = 2
|
marwahaha/python-fundamentals
|
challenges/04-Functions/B_script_conventions.py
|
Python
|
apache-2.0
| 610 | 0 |
# NOTE(review): this is a teaching exercise file -- the misplaced
# module_var below is the intentional bug the learner must fix, so it is
# deliberately left unrepaired here.  (Python 2 print statements.)
# A funny, but common thing you'll see in python scripts is that if __name__ ...
# block below
# To start off, just run this script and see what happens.
# Then run the test and note that it fails in a curious way!
print "I was run - maybe by a test?"
if __name__ == '__main__':
    # The problem is that this variable needs to be defined OUTSIDE the if
    # __name__ block. Can you move it above where it will be picked up by the
    # test?
    # Don't forget to fix the indentation!
    module_var = "I am totally defined"
    print "I'm being run directly"
    print "And module_var is:", module_var
|
tomcounsell/Cobra
|
apps/public/views/custom_order.py
|
Python
|
gpl-2.0
| 4,294 | 0.015137 |
import json
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt
from apps.admin.utils.exception_handling import ExceptionHandler
from apps.commission.models import Commission
from apps.seller.models.product import Product
from settings.people import support_team
import re
from apps.communication.views.email_class import Email
def estimate(request):
    """AJAX price estimate for a custom-sized/quantity order of a product.

    Scales the catalogue product's price and weight by the volume ratio
    between the requested dimensions and the stored ones (times the
    requested quantity) and returns {'display_price_estimate': ...} as
    JSON.  The product instance is never saved.
    """
    try:
        product = Product.objects.get(id=request.GET['product_id'])
        if product and product.weight and product.price:
            # Treat missing dimensions as 1 so the volume math still works.
            product.length = product.length if product.length else 1
            product.height = product.height if product.height else 1
            product.width = product.width if product.width else 1
            old_volume = product.length * product.width * product.height
            # Sort dimensions biggest-first, then override the two biggest
            # with the requested length/width when supplied.
            dimensions = [product.length, product.width, product.height]
            dimensions.sort(reverse=True)
            if request.GET.get('length') and int(request.GET['length']) > 0:
                dimensions[0] = int(request.GET['length'])
            if request.GET.get('width') and int(request.GET['width']) > 0:
                dimensions[1] = int(request.GET['width'])
            # The volume ratio drives both price and weight scaling.
            new_volume = dimensions[0] * dimensions[1] * dimensions[2]
            ratio = float(new_volume) / old_volume
            # Scale ratio with quantity.  (Was `request.GET['quantity'] > 1`,
            # a str-vs-int comparison: always True on Python 2 and a
            # TypeError on Python 3.)
            if request.GET.get('quantity') and int(request.GET['quantity']) > 1:
                ratio = ratio * int(request.GET['quantity'])
            # Use ratio to scale price and weight.
            product.price = int(round(product.price * ratio))
            product.weight = int(round(product.weight * ratio))
            # Increase weight a bit to bump the estimate to the next
            # shipping price tier if close: add 5% + 100 grams.
            product.weight = int(round((product.weight * 1.05) + 100))
            response = {'display_price_estimate': product.display_price}
            product.pk = None  # DO NOT SAVE!!!
            return HttpResponse(json.dumps(response), content_type='application/json')
        else:
            return HttpResponse(status=500)
    except Exception as e:
        ExceptionHandler(e, "error in product.custom_order_estimate")
        # NOTE(review): echoing str(e) leaks internal details to the
        # client -- consider returning a generic error message instead.
        return HttpResponse(str(e), status=500)
@csrf_exempt
def request(request): #todo: change function name
    """Handle a POSTed custom-order request form.

    Creates a Commission for the base product, notifies the artisan via
    SMS, and emails a confirmation to the requester plus the support
    team.  Returns 200 on success, 400 for a bad request, 500 on error.
    """
    if request.method == 'POST' and request.POST.get('email'):
        try:
            product = Product.objects.get(id=request.POST['product_id'])
            # Payload passed to the confirmation email template.
            data = {
                'product': product,
                'country': request.POST['country'],
                'email': request.POST['email'],
                'size_imperial': request.POST.get('size_imperial', ""),
                'size_metric': request.POST.get('size_metric', ""),
                'quantity': request.POST.get('quantity', ""),
                'description': request.POST.get('description', ""),
                'estimate': request.POST.get('estimate', ""),
            }
            # Commission creation is best-effort: a failure here is
            # logged but the confirmation email is still sent (without
            # a commission_id).
            try:
                commission = Commission.objects.create()
                commission.base_product = product
                # size_string = request.POST.get('size_metric', "")
                # pattern = re.compile('\D*(\d{1,6})\D*(\d{1,6})\D*')
                # dimensions = commission.length = pattern.match(size_string).groups()
                # commission.length = dimensions[0]
                # if len(dimensions) > 1:
                #   commission.width = dimensions[1]
                commission.length = int(float(request.POST.get('length', 0))) or None
                commission.width = int(float(request.POST.get('width', 0))) or None
                commission.quantity = request.POST.get('quantity', 1) or 1
                commission.createProduct(save=False)#calculates estimated artisan price
                commission.save()
                print "commission saved. go sms artisan..."
                commission.askArtisan()
            except Exception as e:
                ExceptionHandler(e, "in custom_order.request Commission")
            else:
                data['commission_id'] = commission.id
            recipient_email_list = [data['email'],] + [person.email for person in support_team]
            Email('custom_order/request', data).sendTo(recipient_email_list)
            return HttpResponse(status=200)
        except Exception as e:
            ExceptionHandler(e, "error in custom_order.request")
            return HttpResponse(status=500)
    else:
        return HttpResponse(status=400)
|
wheelcms/wheelcms_axle
|
wheelcms_axle/actions.py
|
Python
|
bsd-2-clause
| 3,462 | 0.003177 |
from .registry import Registry
from drole.types import Permission
def action(f):
    """
    Mark a callable as an action.

    Usable bare (@action) or parameterized with a Permission
    (@action(perm)); either way the target gains ``action`` and
    ``permission`` attributes.
    """
    if not isinstance(f, Permission):
        # Bare usage: tag the callable directly.
        f.action = True
        f.permission = None
        return f
    # Called with a Permission: return a decorator that tags its target.
    def mark(decorated):
        decorated.action = True
        decorated.permission = f
        return decorated
    return mark
class ActionRegistry(dict):
    """Maps an action id to a list of (handler, path, spoke) registrations."""

    def register(self, handler, action, path=None, spoke=None):
        """Register *handler* for *action*, optionally restricted to a
        specific path and/or spoke."""
        self.setdefault(action, []).append((handler, path, spoke))

    def get(self, action, path=None, spoke=None):
        """Resolve *action* to a handler.

        A handler may be registered globally, for a path, for a spoke, or
        for both; any constraint recorded at registration time must match
        the arguments given here.  Registered spokes are first matched
        against the spoke instance, then against its class.  When nothing
        matches, fall back to a method named *action* on the spoke itself,
        provided it is marked as an action.
        """
        registrations = super(ActionRegistry, self).get(action)
        if registrations:
            # First pass: match the spoke constraint against the instance.
            for handler, req_path, req_spoke in registrations:
                if req_path and req_path != path:
                    continue
                if req_spoke and req_spoke != spoke:
                    continue
                return handler
            # Second pass: match it against the spoke's class instead.
            for handler, req_path, req_spoke in registrations:
                if req_path and req_path != path:
                    continue
                if req_spoke and req_spoke != spoke.__class__:
                    continue
                return handler
        # Nothing registered matched; without an explicit spoke there is
        # nowhere else to look.
        if not spoke:
            return None
        candidate = getattr(spoke, action, None)
        # A lookup by action id is actually preferable XXX
        if candidate and getattr(candidate, 'action', False):
            return candidate
        return None
class tab(object):
    """Decorator factory marking a handler method as a tab action."""
    def __init__(self, permission=None, id=None, label=None, condition=None):
        self.permission = permission
        self.id = id
        self.label = label
        self.condition = condition
    def __call__(self, f):
        # Plain pass-through wrapper around the handler method.
        def wrapped(self, *a, **b):
            return f(self, *a, **b)
        name = f.func_name  # Python 2 attribute (same as __name__)
        # Tag as an action, carrying the permission when one was given.
        decorate = action(self.permission) if self.permission else action
        wrapped = decorate(wrapped)
        wrapped.tab = True
        wrapped.tab_id = self.id or name
        wrapped.tab_label = self.label or wrapped.tab_id
        wrapped.condition = self.condition
        return wrapped
def tabaction(handler):
    """Return *handler*'s tab id if it is marked as a tab action, else None."""
    is_tab = getattr(handler, 'action', False) and getattr(handler, 'tab', False)
    return handler.tab_id if is_tab else None
action_registry = Registry(ActionRegistry())
|
dvklopfenstein/PrincetonAlgorithms
|
py/AlgsSedgewickWayne/testcode/order.py
|
Python
|
gpl-2.0
| 768 | 0.041667 |
#!/usr/bin/env python
import sys
def run_277853s():
    """Sweep run_277853 over N = 1, 2, 4, ..., 8192 (doubling below 16384)."""
    for exponent in range(14):
        run_277853(2 ** exponent)
def run_277853(N):
    """Trace the loop `for (i = 1; i*i <= N; i = i*4)`, printing each step.

    Equivalent C-style loop; the step count grows ~ log_4(sqrt(N)).
    """
    step = 1
    count = 0
    while step * step <= N:
        count += 1
        print(N, step, count)
        step *= 4
def run_605062s():
    """Sweep run_605062 over N = 1, 2, 4, ..., 2048 (doubling below 4096)."""
    for exponent in range(12):
        run_605062(2 ** exponent)
def run_605062(N):
    """N^(1/2).

    The inner loop body executes 1 + 2 + 4 + ... + <largest power of two
    <= N> times in total (~ 2N); prints the final count for this N.
    """
    total = 0
    width = 1
    while width <= N:
        # Inner loop: one increment per j in range(width).
        for _ in range(width):
            total += 1
        width *= 2
    print("{:>5}=N {:>5}=cnt".format(N, total))
if __name__ == '__main__':
run_277853s()
|
prestona/qpid-proton
|
tests/java/shim/curl.py
|
Python
|
apache-2.0
| 1,678 | 0.004768 |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License
#
from org.apache.qpid.proton.messenger.impl import Address
# Jython shims mapping the proton-c pn_url_* C API onto the Java
# Address class (org.apache.qpid.proton.messenger.impl).
def pn_url():
    return Address()
def pn_url_parse(urlstr):
    return Address(urlstr)
def pn_url_free(url): pass  # no-op: Java objects are garbage collected
def pn_url_clear(url):
    url.clear();
def pn_url_str(url): return url.toString()
# Accessors delegate straight to the Address bean methods; getHost maps
# an empty host to None to match the C API.
def pn_url_get_scheme(url): return url.getScheme()
def pn_url_get_username(url): return url.getUser()
def pn_url_get_password(url): return url.getPass()
def pn_url_get_host(url): return url.getHost() or None
def pn_url_get_port(url): return url.getPort()
def pn_url_get_path(url): return url.getName()
def pn_url_set_scheme(url, value): url.setScheme(value)
def pn_url_set_username(url, value): url.setUser(value)
def pn_url_set_password(url, value): url.setPass(value)
def pn_url_set_host(url, value): url.setHost(value)
def pn_url_set_port(url, value): url.setPort(value)
def pn_url_set_path(url, value): url.setName(value)
Azure/azure-sdk-for-python
|
sdk/iothub/azure-mgmt-iothub/azure/mgmt/iothub/v2016_02_03/aio/operations/_iot_hub_resource_operations.py
|
Python
|
mit
| 67,149 | 0.004914 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._iot_hub_resource_operations import build_check_name_availability_request, build_create_event_hub_consumer_group_request, build_create_or_update_request_initial, build_delete_event_hub_consumer_group_request, build_delete_request_initial, build_export_devices_request, build_get_event_hub_consumer_group_request, build_get_job_request, build_get_keys_for_key_name_request, build_get_quota_metrics_request, build_get_request, build_get_stats_request, build_get_valid_skus_request, build_import_devices_request, build_list_by_resource_group_request, build_list_by_subscription_request, build_list_event_hub_consumer_groups_request, build_list_jobs_request, build_list_keys_request
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class IotHubResourceOperations:
"""IotHubResourceOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.iothub.v2016_02_03.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
    def __init__(self, client, config, serializer, deserializer) -> None:
        # Thin container wiring the shared pipeline client, the model
        # (de)serializers and the client configuration into this
        # operation group; instantiated by the service client.
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
    @distributed_trace_async
    async def get(
        self,
        resource_group_name: str,
        resource_name: str,
        **kwargs: Any
    ) -> "_models.IotHubDescription":
        """Get the non-security related metadata of an IoT hub.

        Get the non-security related metadata of an IoT hub.

        :param resource_group_name: The name of the resource group that contains the IoT hub.
        :type resource_group_name: str
        :param resource_name: The name of the IoT hub.
        :type resource_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: IotHubDescription, or the result of cls(response)
        :rtype: ~azure.mgmt.iothub.v2016_02_03.models.IotHubDescription
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.IotHubDescription"]
        # Map auth/404/409 status codes onto the standard azure-core
        # exception types; callers may extend via the error_map kwarg.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        # Build and dispatch the ARM GET request through the pipeline.
        request = build_get_request(
            subscription_id=self._config.subscription_id,
            resource_group_name=resource_group_name,
            resource_name=resource_name,
            template_url=self.get.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, pipeline_response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('IotHubDescription', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs/{resourceName}'}  # type: ignore
async def _create_or_update_initial(
    self,
    resource_group_name: str,
    resource_name: str,
    iot_hub_description: "_models.IotHubDescription",
    **kwargs: Any
) -> "_models.IotHubDescription":
    """Issue the initial PUT of the create-or-update long-running operation
    and return the deserialized hub description from the 200/201 response."""
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.IotHubDescription"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))

    content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
    body_content = self._serialize.body(iot_hub_description, 'IotHubDescription')

    request = build_create_or_update_request_initial(
        subscription_id=self._config.subscription_id,
        resource_group_name=resource_group_name,
        resource_name=resource_name,
        content_type=content_type,
        json=body_content,
        template_url=self._create_or_update_initial.metadata['url'],
    )
    request = _convert_request(request)
    request.url = self._client.format_url(request.url)

    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in (200, 201):
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    # Both 200 and 201 carry an IotHubDescription payload, so a single
    # deserialize call covers the two accepted status codes.
    deserialized = self._deserialize('IotHubDescription', pipeline_response)

    if cls:
        return cls(pipeline_response, deserialized, {})

    return deserialized

_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs/{resourceName}'}  # type: ignore
@distributed_trace_async
async def begin_create_or_update(
self,
resource_group_name: str,
resource_name: str,
iot_hub_description: "_models.IotHubDescription",
**kwargs: Any
) -> AsyncLROPoller["_models.IotHubDescription"]:
"""Create or update the metadata of an IoT hub.
Create or update the metadata of an Iot hub. The usual pattern to modify a property is to
retrieve the IoT hub metadata and security metadata, and then combine them with the modified
values in a new body to update the IoT hub. If certain properties are missing in the JSON,
updating IoT Hub may cause these values to fallback to default, which may lead to unexpected
behavior.
:param resource_group_name: The name of the resource group
|
weka-io/easypy
|
easypy/properties.py
|
Python
|
bsd-3-clause
| 1,878 | 0.002662 |
"""
This module is about 'property' descriptors.
"""
import sys
from functools import wraps
from easypy.caching import cached_property # public import, for back-compat
# Preserve a handle on the builtin so the wrapper below is unaffected if the
# name `property` is ever shadowed at module scope.
_builtin_property = property


def safe_property(fget=None, fset=None, fdel=None, doc=None):
    """
    A pythonic property which raises a RuntimeError when an attribute error is raised within it.

    This fixes an issue in python where AttributeErrors that occur anywhere _within_ 'property' functions
    are effectively suppressed, and converted to AttributeErrors for the property itself. This is confusing
    for the debugger, and also leads to unintended fallback calls to a __getattr__ if defined

    >>> def i_raise_an_exception():
    ...     raise AttributeError("blap")
    >>> class Test(object):
    ...     def some_prop(self):
    ...         return i_raise_an_exception()
    ...     def __getattr__(self, attr):
    ...         assert False
    ...     prop = property(some_prop)
    ...     safe_prop = safe_property(some_prop)
    >>> t = Test()
    >>> t.prop
    Traceback (most recent call last):
        ...
    AssertionError
    >>> t.safe_prop
    Traceback (most recent call last):
        ...
    RuntimeError: Attribute error within a property (blap)
    """
    if fget is not None:
        # Renamed from `callable` -- the original name shadowed the builtin.
        @wraps(fget)
        def fget_wrapper(*args, **kwargs):
            # Re-raise AttributeError as RuntimeError (keeping the original
            # traceback) so the property machinery cannot swallow it.
            try:
                return fget(*args, **kwargs)
            except AttributeError:
                _, exc, tb = sys.exc_info()
                raise RuntimeError("Attribute error within a property (%s)" % exc).with_traceback(tb)
        return _builtin_property(fget_wrapper, fset, fdel, doc)
    return _builtin_property(fget, fset, fdel, doc)
|
talbrecht/pism_pik06
|
test/regression/test_29.py
|
Python
|
gpl-3.0
| 4,068 | 0.009095 |
#!/usr/bin/env python
import subprocess, shutil, shlex, os
from sys import exit, argv
from netCDF4 import Dataset as NC
import numpy as np
def process_arguments():
    """Parse the three required positional arguments: the PISM binary
    directory, the mpiexec command, and the PISM source tree root."""
    from argparse import ArgumentParser

    parser = ArgumentParser()
    for argument_name in ("PISM_PATH", "MPIEXEC", "PISM_SOURCE_DIR"):
        parser.add_argument(argument_name)
    return parser.parse_args()
def copy_input(opts):
    # Stage the regression input file from the PISM source tree into the
    # current working directory, where the model run expects it.
    shutil.copy(os.path.join(opts.PISM_SOURCE_DIR, "test/test_hydrology/inputforP_regression.nc"), ".")
def generate_config():
"""Generates the config file with custom ice softness and hydraulic conductivity."""
print "generating testPconfig.nc ..."
nc = NC("testPconfig.nc", 'w')
pism_overrides = nc.createVariable("pism_overrides", 'b')
pism_overrides.standard_gravity = 9.81;
pism_overrides.standard_gravity_doc = "m s-2; = g; acceleration due to gravity on Earth geoid";
pism_overrides.fresh_water_density = 1000.0;
pism_overrides.fresh_water_density_doc = "kg m-3; = rhow";
pism_overrides.ice_softness = 3.1689e-24
pism_overrides.ice_softness_doc = "Pa-3 s-1; ice softness; NOT DEFAULT"
pism_overrides.hydrology_hydraulic_conductivity = 1.0e-2 / (1000.0 * 9.81)
pism_overrides.hydrology_hydraulic_conductivity_doc = "= k; NOT DEFAULT"
pism_overrides.hydrology_tillwat_max = 0.0;
pism_overrides.hydrology_tillwat_max_doc = "m; turn off till water mechanism";
pism_overrides.hydrology_thickness_power_in_flux = 1.0;
pism_overrides.hydrology_thickness_power_in_flux_doc = "; = alpha in notes";
pism_overrides.hydrology_gradient_power_in_flux = 2.0;
pism_overrides.hydrology_gradient_power_in_flux_doc = "; = beta in notes";
pism_overrides.hydrology_roughness_scale = 1.0;
pism_overrides.hydrology_roughness_scale_doc = "m; W_r in notes; roughness scale";
pism_overrides.hydrology_regularizing_porosity = 0.01;
pism_overrides.hydrology_regularizing_porosity_doc = "[pure]; phi_0 in notes";
pism_overrides.yield_stress_model = "constant";
pism_overrides.yield_stress_model_doc = "o
|
nly the constant yield stress model works without till";
pism_overrides.default_tauc = 1e6;
pism_overrides.default_tauc_doc = "set default to 'high tauc'";
nc.close()
def run_pism(opts):
cmd = "%s %s/pismr -config_override testPconfig.nc -boot_file inputforP_regression.nc -Mx %d -My %d -Mz 11 -Lz 4000 -hydrology distribute
|
d -report_mass_accounting -y 0.08333333333333 -max_dt 0.01 -no_mass -energy none -stress_balance ssa+sia -ssa_dirichlet_bc -o end.nc" % (opts.MPIEXEC, opts.PISM_PATH, 21, 21)
print cmd
subprocess.call(shlex.split(cmd))
def check_drift(file1, file2):
    # Compare the bwat and bwp fields of two NetCDF files and check the
    # max/average absolute differences ("drift") against stored reference
    # values; exits with status 1 if any relative deviation exceeds 0.1%.
    nc1 = NC(file1)
    nc2 = NC(file2)

    # Reference drift values recorded from a known-good run.
    stored_drift = {'bwat_max': 0.024263951766380631,
                    'bwp_max': 81658.173074602877,
                    'bwp_avg': 7152.4179414459632,
                    'bwat_avg': 0.004056179416920525}

    drift = {}
    for name in ("bwat", "bwp"):
        var1 = nc1.variables[name]
        var2 = nc2.variables[name]
        diff = np.abs(np.squeeze(var1[:]) - np.squeeze(var2[:]))
        drift["%s_max" % name] = np.max(diff)
        drift["%s_avg" % name] = np.average(diff)

    print "drift = ", drift
    print "stored_drift = ", stored_drift

    for name in drift.keys():
        # Relative difference against the stored reference value.
        rel_diff = np.abs(stored_drift[name] - drift[name]) / stored_drift[name]
        if rel_diff > 1e-3:
            print "Stored and computed drifts in %s differ: %f != %f" % (name, stored_drift[name], drift[name])
            exit(1)
def cleanup():
    """Remove the staged input file, the generated override config and the
    model output from the working directory."""
    for scratch_file in ("inputforP_regression.nc", "testPconfig.nc", "end.nc"):
        os.remove(scratch_file)
if __name__ == "__main__":
opts = process_arguments()
print "Copying input files..."
copy_input(opts)
print "Generating the -config_override file..."
generate_config()
print "Running PISM..."
run_pism(opts)
print "Checking the drift..."
check_drift("inputforP_regression.nc", "end.nc")
print "Cleaning up..."
cleanup()
|
andbof/plantdb
|
qr/functions.py
|
Python
|
gpl-2.0
| 4,614 | 0.007152 |
from PyQRNative import *
from PIL.Image import BILINEAR, BICUBIC, ANTIALIAS, NEAREST
from reportlab.pdfgen import canvas
from reportlab.lib.pagesizes import portrait, A4
from reportlab.lib.units import cm, mm
from StringIO import StringIO
from plant.tag import create_tag
import time
from datetime import datetime
# QR code parameters passed to QRCode(): symbol version and error-correction
# level (H = highest redundancy).
QR_TYPE = 4
QR_ECC = QRErrorCorrectLevel.H
# Font for the human-readable tag string printed under each QR code.
TAG_FONT = 'Courier-Bold'
TAG_FONT_PT = 8
# Font for the "Created on ..." page footer.
FOOT_FONT = 'Helvetica'
FOOT_FONT_PT = 8
# Vertical margin left above the first row of codes on each page.
TOP_YMARGIN = 0.75*cm
# Page layouts: QR size, left/right/vertical margins, whether to print a
# "Created on" footer, and whether codes are paired (only every other column
# gets a fresh tag -- see generate_qr_from_layout).
# Fixed: the 'qr_lxmargin' key of 'Long sticks' was split mid-word across
# stray separator lines in the corrupted source; rejoined here.
LAYOUTS = {
    'Long sticks':
        {'qr_size': 2*cm, 'qr_lxmargin': 1*cm, 'qr_rxmargin': 1*cm, 'qr_ymargin': 5.0*cm, 'created': True, 'paired': False},
    'Sticky labels 70x37mm':
        {'qr_size': 2.5*cm, 'qr_lxmargin': 0.50*cm, 'qr_rxmargin': 0.50*cm, 'qr_ymargin': 1.2*cm, 'created': False, 'paired': False},
    'Sticky labels 70x37mm (paired)':
        {'qr_size': 2.5*cm, 'qr_lxmargin': 0.50*cm, 'qr_rxmargin': 0.50*cm, 'qr_ymargin': 1.2*cm, 'created': False, 'paired': True},
#    'Verbose labels ?x?mm':
#        {'qr_size': 0, 'qr_lxmargin': 0, 'qr_ymargin': 0},
}
# Choices offered to the caller; validate_params checks against these.
LAYOUT_LIST = LAYOUTS.keys()
DUPLEX_LIST = ['No', 'Short side']
# About three cm seems to be a reasonable size, both considering what
# fits in practice and the analyses done at
# http://www.qrstuff.com/blog/2011/01/18/what-size-should-a-qr-code-be
# A suitable font size for the tags seems to be 8pt Helvetica
def validate_params(layout, duplex):
    """Return True when both *layout* and *duplex* are known choices.

    Raises NotImplementedError for the verbose-label layout, which is
    declared but not implemented. (None is never a member of either
    choice list, so no separate None check is needed.)
    """
    if layout not in LAYOUT_LIST:
        return False
    if duplex not in DUPLEX_LIST:
        return False
    if layout == 'Verbose labels ?x?mm':
        raise NotImplementedError
    return True
def generate_new_qrimage():
    # Allocate a fresh tag in the database, then encode its URL as a QR code.
    tag = create_tag()
    qr = QRCode(QR_TYPE, QR_ECC)
    # NOTE(review): 'YOUR_DOMAIN' looks like a placeholder -- confirm it is
    # replaced with the real domain at deployment time.
    qr.addData('https://YOUR_DOMAIN/' + str(tag.tag))
    qr.make()
    # Returns (PIL image of the code, tag string).
    return (qr.makeImage(), tag.tag)
def generate_qr_from_layout(layout, duplex, pagesize=A4):
    """Render one PDF page of freshly allocated QR tags and return it as a
    StringIO buffer.

    layout   -- key into LAYOUTS selecting size, margins, footer and pairing
    duplex   -- 'No' or 'Short side'; 'Short side' adds a mirrored back page
    pagesize -- reportlab page size tuple (default A4)
    """
    if duplex == 'Long side':
        raise NotImplementedError('only short page duplex implemented')

    now = datetime.now()

    # Unpack the layout parameters.
    qr_size = LAYOUTS[layout]['qr_size']
    qr_lxmargin = LAYOUTS[layout]['qr_lxmargin']
    qr_rxmargin = LAYOUTS[layout]['qr_rxmargin']
    qr_ymargin = LAYOUTS[layout]['qr_ymargin']
    created = LAYOUTS[layout]['created']
    paired = LAYOUTS[layout]['paired']

    # Start at the top-right cell; the loops below walk right-to-left,
    # top-to-bottom.
    x = pagesize[0] - (qr_size + qr_lxmargin)
    y = pagesize[1] - (qr_size + TOP_YMARGIN)

    # Validate parameters; this is mostly for debugging
    if (qr_size < 1) or (qr_lxmargin < 1) or (qr_rxmargin < 1) or (qr_ymargin < 1):
        raise ValueError(u'Internal error: One of qr size, qr x margin or qr y margin is zero.')

    # Generate QR codes with positions
    qrimgs = []
    while y >= 0:
        xnum = 0;
        while x > 0:
            xnum += 1
            # In paired mode only every other column gets a fresh tag.
            if (not paired) or (xnum % 2 != 0):
                (qrimg, tag) = generate_new_qrimage()
                qrimgs.append({'image': qrimg, 'tag': tag, 'x': x, 'y': y})
            x -= (qr_size + qr_rxmargin + qr_lxmargin)
        x = pagesize[0] - (qr_size + qr_lxmargin)
        y -= (qr_size + qr_ymargin)

    f = StringIO();
    pdf = canvas.Canvas(f, pagesize=portrait(pagesize), pageCompression=0)

    # Plot QR codes on first side
    pdf.setFont(TAG_FONT, TAG_FONT_PT)
    for qrimg in qrimgs:
        x = qrimg['x']
        y = qrimg['y']
        # drawImage() seems to crash on PIL objects so we use drawInlineImage() instead, even though it's deprecated.
        # PyQRNative draws a white margin around the QR code, making it about one eigth smaller than the required size.
        pdf.drawInlineImage(qrimg['image'], x, y+(qr_size*0.0625), width=qr_size, height=qr_size, preserveAspectRatio=True)
        pdf.drawCentredString(x + (qr_size/2), y + 0.05*cm, qrimg['tag'])
    if created:
        pdf.setFont(FOOT_FONT, FOOT_FONT_PT)
        pdf.drawString(cm, cm, 'Created on %s' % str(now))
    pdf.showPage()

    # Back side: mirror x positions and rotate the page so the codes line up
    # after short-edge duplex printing.
    if duplex != 'No':
        pdf.setFont(TAG_FONT, TAG_FONT_PT)
        pdf.setPageRotation(180)
        for qrimg in qrimgs:
            x = portrait(pagesize)[0] - qrimg['x'] - qr_size
            y = qrimg['y']
            pdf.drawInlineImage(qrimg['image'], x, y+(qr_size*0.0625), width=qr_size, height=qr_size, preserveAspectRatio=True)
            pdf.drawCentredString(x + (qr_size/2), y + 0.05*cm, qrimg['tag'])
        if created:
            pdf.setFont(FOOT_FONT, FOOT_FONT_PT)
            pdf.drawRightString(portrait(pagesize)[0] - cm, cm, 'Created on %s' % str(now))
        pdf.showPage()

    pdf.save()
    return f
|
stonekyx/binary
|
vendor/scons-local-2.3.4/SCons/Tool/m4.py
|
Python
|
gpl-3.0
| 2,309 | 0.003898 |
"""SCons.Tool.m4
Tool-specific initialization for m4.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# Copyright (c) 2001 - 2014 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/m4.py 2014/09/27 12:51:43 garyo"
import SCons.Action
import SCons.Builder
import SCons.Util
def generate(env):
    """Add Builders and construction variables for m4 to an Environment."""
    m4_action = SCons.Action.Action('$M4COM', '$M4COMSTR')
    m4_builder = SCons.Builder.Builder(action=m4_action, src_suffix='.m4')
    env['BUILDERS']['M4'] = m4_builder

    # .m4 files might include other files, and it would be pretty hard
    # to write a scanner for it, so let's just cd to the dir of the m4
    # file and run from there.
    # The src_suffix setup is like so: file.c.m4 -> file.c,
    # file.cpp.m4 -> file.cpp etc.
    env['M4'] = 'm4'
    env['M4FLAGS'] = SCons.Util.CLVar('-E')
    env['M4COM'] = 'cd ${SOURCE.rsrcdir} && $M4 $M4FLAGS < ${SOURCE.file} > ${TARGET.abspath}'
def exists(env):
    """Report whether an m4 executable can be detected by *env*."""
    return env.Detect('m4')
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
atlas0fd00m/CanCat
|
cancat/vstruct/defs/windows/win_6_3_i386/ntdll.py
|
Python
|
bsd-2-clause
| 239,574 | 0.002162 |
# Version: 6.3
# Architecture: i386
# CompanyName: Microsoft Corporation
# FileDescription: NT Layer DLL
# FileVersion: 6.3.9600.17031 (winblue_gdr.140221-1952)
# InternalName: ntdll.dll
# LegalCopyright: Microsoft Corporation. All rights reserved.
# OriginalFilename: ntdll.dll
# ProductName: Microsoft Windows Operating System
# ProductVersion: 6.3.9600.17031
# Translation: 78644233
from past.builtins import xrange
import vstruct
from vstruct.primitives import *
# Executive worker-queue pool indexes.
EXQUEUEINDEX = v_enum()
EXQUEUEINDEX.ExPoolUntrusted = 0
EXQUEUEINDEX.ExPoolTrusted = 1
EXQUEUEINDEX.ExPoolMax = 8
# Process memory-residency / swap states.
KPROCESS_STATE = v_enum()
KPROCESS_STATE.ProcessInMemory = 0
KPROCESS_STATE.ProcessOutOfMemory = 1
KPROCESS_STATE.ProcessInTransition = 2
KPROCESS_STATE.ProcessOutTransition = 3
KPROCESS_STATE.ProcessInSwap = 4
KPROCESS_STATE.ProcessOutSwap = 5
KPROCESS_STATE.ProcessAllSwapStates = 6
# Domains for the kernel random-number generator.
EX_GEN_RANDOM_DOMAIN = v_enum()
EX_GEN_RANDOM_DOMAIN.ExGenRandomDomainKernel = 0
EX_GEN_RANDOM_DOMAIN.ExGenRandomDomainFirst = 0
EX_GEN_RANDOM_DOMAIN.ExGenRandomDomainUserVisible = 1
EX_GEN_RANDOM_DOMAIN.ExGenRandomDomainMax = 2
# WHEA hardware-error severities.
WHEA_ERROR_SEVERITY = v_enum()
WHEA_ERROR_SEVERITY.WheaErrSevRecoverable = 0
WHEA_ERROR_SEVERITY.WheaErrSevFatal = 1
WHEA_ERROR_SEVERITY.WheaErrSevCorrected = 2
WHEA_ERROR_SEVERITY.WheaErrSevInformational = 3
# Deepest device power state from which a device can still wake the system.
DEVICE_WAKE_DEPTH = v_enum()
DEVICE_WAKE_DEPTH.DeviceWakeDepthNotWakeable = 0
DEVICE_WAKE_DEPTH.DeviceWakeDepthD0 = 1
DEVICE_WAKE_DEPTH.DeviceWakeDepthD1 = 2
DEVICE_WAKE_DEPTH.DeviceWakeDepthD2 = 3
DEVICE_WAKE_DEPTH.DeviceWakeDepthD3hot = 4
DEVICE_WAKE_DEPTH.DeviceWakeDepthD3cold = 5
DEVICE_WAKE_DEPTH.DeviceWakeDepthMaximum = 6
# Entries of the WOW64 shared-information table.
WOW64_SHARED_INFORMATION = v_enum()
WOW64_SHARED_INFORMATION.SharedNtdll32LdrInitializeThunk = 0
WOW64_SHARED_INFORMATION.SharedNtdll32KiUserExceptionDispatcher = 1
WOW64_SHARED_INFORMATION.SharedNtdll32KiUserApcDispatcher = 2
WOW64_SHARED_INFORMATION.SharedNtdll32KiUserCallbackDispatcher = 3
WOW64_SHARED_INFORMATION.SharedNtdll32ExpInterlockedPopEntrySListFault = 4
WOW64_SHARED_INFORMATION.SharedNtdll32ExpInterlockedPopEntrySListResume = 5
WOW64_SHARED_INFORMATION.SharedNtdll32ExpInterlockedPopEntrySListEnd = 6
WOW64_SHARED_INFORMATION.SharedNtdll32RtlUserThreadStart = 7
WOW64_SHARED_INFORMATION.SharedNtdll32pQueryProcessDebugInformationRemote = 8
WOW64_SHARED_INFORMATION.SharedNtdll32BaseAddress = 9
WOW64_SHARED_INFORMATION.SharedNtdll32LdrSystemDllInitBlock = 10
WOW64_SHARED_INFORMATION.Wow64SharedPageEntriesCount = 11
# Registry callback notification classes. Note that several pre/post pairs
# deliberately share a numeric value, as in the original header.
# Fixed: two assignments (RegNtPostCreateKey = 11 and
# RegNtPostEnumerateValueKey = 21) were split mid-token across stray
# separator lines in the corrupted source; rejoined here.
REG_NOTIFY_CLASS = v_enum()
REG_NOTIFY_CLASS.RegNtDeleteKey = 0
REG_NOTIFY_CLASS.RegNtPreDeleteKey = 0
REG_NOTIFY_CLASS.RegNtSetValueKey = 1
REG_NOTIFY_CLASS.RegNtPreSetValueKey = 1
REG_NOTIFY_CLASS.RegNtDeleteValueKey = 2
REG_NOTIFY_CLASS.RegNtPreDeleteValueKey = 2
REG_NOTIFY_CLASS.RegNtSetInformationKey = 3
REG_NOTIFY_CLASS.RegNtPreSetInformationKey = 3
REG_NOTIFY_CLASS.RegNtRenameKey = 4
REG_NOTIFY_CLASS.RegNtPreRenameKey = 4
REG_NOTIFY_CLASS.RegNtEnumerateKey = 5
REG_NOTIFY_CLASS.RegNtPreEnumerateKey = 5
REG_NOTIFY_CLASS.RegNtEnumerateValueKey = 6
REG_NOTIFY_CLASS.RegNtPreEnumerateValueKey = 6
REG_NOTIFY_CLASS.RegNtQueryKey = 7
REG_NOTIFY_CLASS.RegNtPreQueryKey = 7
REG_NOTIFY_CLASS.RegNtQueryValueKey = 8
REG_NOTIFY_CLASS.RegNtPreQueryValueKey = 8
REG_NOTIFY_CLASS.RegNtQueryMultipleValueKey = 9
REG_NOTIFY_CLASS.RegNtPreQueryMultipleValueKey = 9
REG_NOTIFY_CLASS.RegNtPreCreateKey = 10
REG_NOTIFY_CLASS.RegNtPostCreateKey = 11
REG_NOTIFY_CLASS.RegNtPreOpenKey = 12
REG_NOTIFY_CLASS.RegNtPostOpenKey = 13
REG_NOTIFY_CLASS.RegNtKeyHandleClose = 14
REG_NOTIFY_CLASS.RegNtPreKeyHandleClose = 14
REG_NOTIFY_CLASS.RegNtPostDeleteKey = 15
REG_NOTIFY_CLASS.RegNtPostSetValueKey = 16
REG_NOTIFY_CLASS.RegNtPostDeleteValueKey = 17
REG_NOTIFY_CLASS.RegNtPostSetInformationKey = 18
REG_NOTIFY_CLASS.RegNtPostRenameKey = 19
REG_NOTIFY_CLASS.RegNtPostEnumerateKey = 20
REG_NOTIFY_CLASS.RegNtPostEnumerateValueKey = 21
REG_NOTIFY_CLASS.RegNtPostQueryKey = 22
REG_NOTIFY_CLASS.RegNtPostQueryValueKey = 23
REG_NOTIFY_CLASS.RegNtPostQueryMultipleValueKey = 24
REG_NOTIFY_CLASS.RegNtPostKeyHandleClose = 25
REG_NOTIFY_CLASS.RegNtPreCreateKeyEx = 26
REG_NOTIFY_CLASS.RegNtPostCreateKeyEx = 27
REG_NOTIFY_CLASS.RegNtPreOpenKeyEx = 28
REG_NOTIFY_CLASS.RegNtPostOpenKeyEx = 29
REG_NOTIFY_CLASS.RegNtPreFlushKey = 30
REG_NOTIFY_CLASS.RegNtPostFlushKey = 31
REG_NOTIFY_CLASS.RegNtPreLoadKey = 32
REG_NOTIFY_CLASS.RegNtPostLoadKey = 33
REG_NOTIFY_CLASS.RegNtPreUnLoadKey = 34
REG_NOTIFY_CLASS.RegNtPostUnLoadKey = 35
REG_NOTIFY_CLASS.RegNtPreQueryKeySecurity = 36
REG_NOTIFY_CLASS.RegNtPostQueryKeySecurity = 37
REG_NOTIFY_CLASS.RegNtPreSetKeySecurity = 38
REG_NOTIFY_CLASS.RegNtPostSetKeySecurity = 39
REG_NOTIFY_CLASS.RegNtCallbackObjectContextCleanup = 40
REG_NOTIFY_CLASS.RegNtPreRestoreKey = 41
REG_NOTIFY_CLASS.RegNtPostRestoreKey = 42
REG_NOTIFY_CLASS.RegNtPreSaveKey = 43
REG_NOTIFY_CLASS.RegNtPostSaveKey = 44
REG_NOTIFY_CLASS.RegNtPreReplaceKey = 45
REG_NOTIFY_CLASS.RegNtPostReplaceKey = 46
REG_NOTIFY_CLASS.MaxRegNtNotifyClass = 47
# PnP device-relation query types.
DEVICE_RELATION_TYPE = v_enum()
DEVICE_RELATION_TYPE.BusRelations = 0
DEVICE_RELATION_TYPE.EjectionRelations = 1
DEVICE_RELATION_TYPE.PowerRelations = 2
DEVICE_RELATION_TYPE.RemovalRelations = 3
DEVICE_RELATION_TYPE.TargetDeviceRelation = 4
DEVICE_RELATION_TYPE.SingleBusRelations = 5
DEVICE_RELATION_TYPE.TransportRelations = 6
# Origin of an APPX package signature.
SE_WS_APPX_SIGNATURE_ORIGIN = v_enum()
SE_WS_APPX_SIGNATURE_ORIGIN.SE_WS_APPX_SIGNATURE_ORIGIN_NOT_VALIDATED = 0
SE_WS_APPX_SIGNATURE_ORIGIN.SE_WS_APPX_SIGNATURE_ORIGIN_UNKNOWN = 1
SE_WS_APPX_SIGNATURE_ORIGIN.SE_WS_APPX_SIGNATURE_ORIGIN_APPSTORE = 2
SE_WS_APPX_SIGNATURE_ORIGIN.SE_WS_APPX_SIGNATURE_ORIGIN_WINDOWS = 3
FILE_INFORMATION_CLASS = v_enum()
FILE_INFORMATION_CLASS.FileDirectoryInformation = 1
FILE_INFORMATION_CLASS.FileFullDirectoryInformation = 2
FILE_INFORMATION_CLASS.FileBothDirectoryInformation = 3
FILE_INFORMATION_CLASS.FileBasicInformation = 4
FILE_INFORMATION_CLASS.FileStandardInformation = 5
FILE_INFORMATION_CLASS.FileInternalInformation = 6
FILE_INFORMATION_CLASS.FileEaInformation = 7
FILE_INFORMATION_CLASS.FileAccessInformation = 8
FILE_INFORMATION_CLASS.FileNameInformation = 9
FILE_INFORMATION_CLASS.FileRenameInformation = 10
FILE_INFORMATION_CLASS.FileLinkInformation = 11
FILE_INFORMATION_CLASS.FileNamesInformation = 12
FILE_INFORMATION_CLASS.FileDispositionInformation = 13
FILE_INFORMATION_CLASS.FilePositionInformation = 14
FILE_INFORMATION_CLASS.FileFullEaInformation = 15
FILE_INFORMATION_CLASS.FileModeInformation = 16
FILE_INFORMATION_CLASS.FileAlignmentInformation = 17
FILE_INFORMATION_CLASS.FileAllInformation = 18
FILE_INFORMATION_CLASS.FileAllocationInformation = 19
FILE_INFORMATION_CLASS.FileEndOfFileInformation = 20
FILE_INFORMATION_CLASS.FileAlternateNameInformation = 21
FILE_INFORMATION_CLASS.FileStreamInformation = 22
FILE_INFORMATION_CLASS.FilePipeInformation = 23
FILE_INFORMATION_CLASS.FilePipeLocalInformation = 24
FILE_INFORMATION_CLASS.FilePipeRemoteInformation = 25
FILE_INFORMATION_CLASS.FileMailslotQueryInformation = 26
FILE_INFORMATION_CLASS.FileMailslotSetInformation = 27
FILE_INFORMATION_CLASS.FileCompressionInformation = 28
FILE_INFORMATION_CLASS.FileObjectIdInformation = 29
FILE_INFORMATION_CLASS.FileCompletionInformation = 30
FILE_INFORMATION_CLASS.FileMoveClusterInformation = 31
FILE_INFORMATION_CLASS.FileQuotaInformation = 32
FILE_INFORMATION_CLASS.FileReparsePointInformation = 33
FILE_INFORMATION_CLASS.FileNetworkOpenInformation = 34
FILE_INFORMATION_CLASS.FileAttributeTagInformation = 35
FILE_INFORMATION_CLASS.FileTrackingInformation = 36
FILE_INFORMATION_CLASS.FileIdBothDirectoryInformation = 37
FILE_INFORMATION_CLASS.FileIdFullDirectoryInformation = 38
FILE_INFORMATION_CLASS.FileValidDataLengthInformation = 39
FILE_INFORMATION_CLASS.FileShortNameInformation = 40
FILE_INFORMATION_CLASS.FileIoCompletionNotificationInformation = 41
FILE_INFORMATION_CLASS.FileIoStatusBlockRangeInformation = 42
FILE_INFORMATION_CLASS.FileIoPriorityHintInformation = 43
FILE_INFORMATION_CLASS.FileSfioReserveInformation = 44
FILE_INFORMATION_CLASS.FileSfioVolumeInformation = 45
FILE_INFORMATION_CLASS.FileHardLinkInformation = 46
FILE_INFORMATION_CLASS.Fil
|
nrgaway/qubes-tools
|
builder-tools/libs/say-1.4.2/test/test_styling.py
|
Python
|
gpl-2.0
| 4,257 | 0.005168 |
from say import *
from say.styling import StyleDef
import six
def test_basic_styling():
    """Combined style names map to the expected ANSI escape sequences."""
    fmt = Fmt()
    assert fmt('this', style='green+underline') == six.u('\x1b[32;4mthis\x1b[0m')
    assert fmt('this', style='bold+red') == six.u('\x1b[31;1mthis\x1b[0m')
def test_readme_example():
    """A named composite style (callable) wraps text as shown in the README."""
    fmt = Fmt()
    fmt.style(stars=lambda x: fmt('*** ', style='red') + \
                              fmt(x, style='black') + \
                              fmt(' ***', style='red'))
    message = 'Warning, problem!'
    assert fmt(message, style='stars') == six.u('\x1b[31m*** \x1b[0m\x1b[30mWarning, problem!\x1b[0m\x1b[31m ***\x1b[0m')
def test_readme_example2():
    """style= may appear in-call or inline in the template, quoted or not."""
    fmt = Fmt()
    name = 'Joe'
    assert six.u('His name is ') + fmt(name, style='blue+underline') == six.u('His name is \x1b[34;4mJoe\x1b[0m')
    assert fmt('His name is {name:style=blue+underline}') == six.u('His name is \x1b[34;4mJoe\x1b[0m')
    assert fmt('His name is {name:style="blue+underline"}') == six.u('His name is \x1b[34;4mJoe\x1b[0m')
    assert fmt("His name is {name:style='blue+underline'}") == six.u('His name is \x1b[34;4mJoe\x1b[0m')
def test_wrapping_example():
    """Numbered prefixes survive wrapping; per-call style wraps each line."""
    fmt = Fmt()
    text = "Move over, Coke. It looks like Apple is the real thing. The tech giant has ended Coca-Cola's 13-year run as the world's most valuable brand on a highly regarded annual list."
    fmt = Fmt()
    fmt.set(prefix=numberer(template=color('{n:>3}: ', fg='blue')), \
            wrap=40)
    assert fmt(text) == six.u("\x1b[34m  1: \x1b[0mMove over, Coke. It looks\n\x1b[34m  2: \x1b[0mlike Apple is the real\n\x1b[34m  3: \x1b[0mthing. The tech giant has\n\x1b[34m  4: \x1b[0mended Coca-Cola's 13-year\n\x1b[34m  5: \x1b[0mrun as the world's most\n\x1b[34m  6: \x1b[0mvaluable brand on a highly\n\x1b[34m  7: \x1b[0mregarded annual list.")

    # now reset so numbering starts back at 1
    fmt = Fmt()
    fmt.set(prefix=numberer(template=color('{n:>3}: ', fg='blue')), \
            wrap=40)
    assert fmt(text, style='red') == six.u("\x1b[34m  1: \x1b[0m\x1b[31mMove over, Coke. It looks\x1b[0m\n\x1b[34m  2: \x1b[0m\x1b[31mlike Apple is the real\x1b[0m\n\x1b[34m  3: \x1b[0m\x1b[31mthing. The tech giant has\x1b[0m\n\x1b[34m  4: \x1b[0m\x1b[31mended Coca-Cola's 13-year\x1b[0m\n\x1b[34m  5: \x1b[0m\x1b[31mrun as the world's most\x1b[0m\n\x1b[34m  6: \x1b[0m\x1b[31mvaluable brand on a highly\x1b[0m\n\x1b[34m  7: \x1b[0m\x1b[31mregarded annual list.\x1b[0m")
def test_color():
    """color() applies foreground/background codes and resets afterwards."""
    assert color('text', fg='green') == '\x1b[32mtext\x1b[0m'
    assert color('more', fg='blue', bg='yellow') == '\x1b[34;43mmore\x1b[0m'
def test_styled():
    """styled() equals color() regardless of component order or separator."""
    assert color('test', fg='green', style='bold') == styled('test', 'green+bold')
    assert color('test', fg='green', style='bold') == styled('test', 'bold+green')
    assert color('test', fg='green', bg='red', style='bold') == styled('test', 'green+red+bold')
    assert color('test', fg='green', bg='red', style='bold') == styled('test', 'bold+green+red')
    assert color('test', fg='green', bg='red', style='bold') == styled('test', 'bold|green|red')
    assert color('test', fg='green', bg='red', style='bold') == styled('test', 'bold,green,red')
def test_in_or_out():
    """Styles given in-call, inline, via autostyle() or via a plain string
    alias all produce identical output.

    Fixed: a stray separator line had been injected between the two
    fmt.style(...) calls in the corrupted source; removed.
    """
    fmt = Fmt()
    x = 12
    assert fmt(x, style='blue+white+underline') == fmt("{x:style=blue+white+underline}")
    fmt.style(bwu=autostyle('blue+white+underline'))
    fmt.style(bwu2='blue+white+underline')
    assert fmt(x, style='bwu') == fmt(x, style='blue+white+underline')
    assert fmt(x, style='bwu') == fmt(x, style='bwu2')
def test_Relative():
    """Relative values compare, add and subtract like plain ints and repr
    with an explicit sign.

    Fixed: one assertion was split mid-word ("as" / "sert Relative(0) == 0")
    in the corrupted source; rejoined.
    """
    assert Relative(4) == 4
    assert Relative(+4) == Relative(4)
    assert Relative(-5) == -5
    assert Relative(0) == 0
    assert Relative(1) + Relative(2) == Relative(3)
    assert Relative(1) + 2 == 3
    assert 1 - Relative(1) == 0
    assert Relative(2) - Relative(-1) == Relative(3)
    assert Relative(5) - Relative(2) == Relative(3)
    assert 1 + Relative(2) == 3
    assert repr(Relative(4)) == 'Relative(+4)'
    assert repr(Relative(-5)) == 'Relative(-5)'
    assert repr(Relative(0)) == 'Relative(0)'
def test_StyleDef_basic():
    """
    Minimal test of evovling StyleDef object
    """
    # With join=False a StyleDef passes text through unchanged.
    s = StyleDef(name='test', join=False)
    assert s("this") == "this"
edsu/twarc
|
utils/media2warc.py
|
Python
|
mit
| 8,273 | 0.003143 |
#!/usr/bin/env python
"""
This utility extracts media urls from tweet jsonl.gz and save them as warc records.
Warcio (https://github.com/webrecorder/warcio) is a dependency and before you can use it you need to:
% pip install warcio
You run it like this:
% python media2warc.py /mnt/tweets/ferguson/tweets-0001.jsonl.gz /mnt/tweets/ferguson/tweets-0001.warc.gz
The input file will be checked for duplicate urls to avoid duplicates within the input file. Subsequent runs
will be deduplicated using a sqlite db. If an identical-payload-digest is found a revist record is created.
The script is able to fetch media resources in multiple threads (maximum 2) by passing --threads <int> (default to a single thread).
Please be careful modifying this script to use more than two threads since it can be interpreted as a DoS-attack.
"""
import os
import gzip
import json
import time
import queue
import hashlib
import logging
import sqlite3
import argparse
import requests
import threading
from datetime import timedelta
from warcio.warcwriter import WARCWriter
from warcio.statusandheaders import StatusAndHeaders
# Work queues shared between fetcher threads (q: URLs to download) and the
# single writer thread (out_queue: fetched responses to archive).
q = queue.Queue()
out_queue = queue.Queue()
# Number of payload bytes hashed when computing the dedup digest.
BLOCK_SIZE = 25600
class GetResource(threading.Thread):
    """Worker thread that downloads media URLs taken from the shared queue.

    Each successful response is pushed onto out_queue as
    [header_items, raw_stream, url, status_code, reason] for the writer
    thread to archive.
    """

    def __init__(self, q):
        threading.Thread.__init__(self)
        self.q = q
        self.rlock = threading.Lock()
        self.out_queue = out_queue
        self.d = Dedup()

    def run(self):
        while True:
            host = self.q.get()
            try:
                r = requests.get(host, headers={'Accept-Encoding': 'identity'}, stream=True)
                data = [r.raw.headers.items(), r.raw, host, r.status_code, r.reason]
                print(data[2])
                self.out_queue.put(data)
                self.q.task_done()
            except requests.exceptions.RequestException as e:
                # BUG FIX: the original logged data[2] here, but 'data' is
                # unbound when requests.get() itself raises, so a network
                # error became a NameError that killed the thread. Log the
                # URL we were fetching instead.
                logging.error('%s for %s', e, host)
                print(e)
                self.q.task_done()
                continue
class WriteWarc(threading.Thread):
    # Single writer thread: drains out_queue and appends WARC records to one
    # file. Payloads whose (truncated) digest was already recorded become
    # 'revisit' records instead of full 'response' records.
    def __init__(self, out_queue, warcfile):
        threading.Thread.__init__(self)
        self.out_queue = out_queue
        self.lock = threading.Lock()
        self.warcfile = warcfile
        self.dedup = Dedup()
    def run(self):
        with open(self.warcfile, 'ab') as output:
            while True:
                self.lock.acquire()
                data = self.out_queue.get()
                writer = WARCWriter(output, gzip=False)
                headers_list = data[0]
                # data = [header_items, raw_stream, url, status_code, reason]
                http_headers = StatusAndHeaders('{} {}'.format(data[3], data[4]), headers_list, protocol='HTTP/1.0')
                record = writer.create_warc_record(data[2], 'response', payload=data[1], http_headers=http_headers)
                # Digest only the first BLOCK_SIZE bytes of the payload to
                # decide whether this content was archived before.
                h = hashlib.sha1()
                h.update(record.raw_stream.read(BLOCK_SIZE))
                if self.dedup.lookup(h.hexdigest()):
                    # Seen before: write a payload-less revisit record.
                    record = writer.create_warc_record(data[2], 'revisit',
                                                       http_headers=http_headers)
                    writer.write_record(record)
                    self.out_queue.task_done()
                    self.lock.release()
                else:
                    self.dedup.save(h.hexdigest(), data[2])
                    # Rewind past the bytes consumed while hashing so the
                    # full payload is written.
                    record.raw_stream.seek(0)
                    writer.write_record(record)
                    self.out_queue.task_done()
                    self.lock.release()
class Dedup:
    """sqlite-backed lookup table mapping payload digests to URLs, used to
    decide when a revisit record should be written instead of a duplicate
    response record.

    Stolen from warcprox
    https://github.com/internetarchive/warcprox/blob/master/warcprox/dedup.py
    (Fixed: the URL in this docstring was split mid-word by stray separator
    lines in the corrupted source.)
    """
    def __init__(self):
        # `args` is the module-level argparse namespace set in __main__.
        self.file = os.path.join(args.archive_dir, 'dedup.db')

    def start(self):
        """Create the dedup table if it does not already exist."""
        conn = sqlite3.connect(self.file)
        conn.execute(
            'create table if not exists dedup ('
            '  key varchar(300) primary key,'
            '  value varchar(4000)'
            ');')
        conn.commit()
        conn.close()

    def save(self, digest_key, url):
        """Record that *digest_key* was archived for *url* (upsert)."""
        conn = sqlite3.connect(self.file)
        conn.execute(
            'insert or replace into dedup (key, value) values (?, ?)',
            (digest_key, url))
        conn.commit()
        conn.close()

    def lookup(self, digest_key, url=None):
        """Return True when *digest_key* has been recorded before."""
        result = False
        conn = sqlite3.connect(self.file)
        cursor = conn.execute('select value from dedup where key = ?', (digest_key,))
        result_tuple = cursor.fetchone()
        conn.close()
        if result_tuple:
            result = True
        return result
def parse_extended_entities(extended_entities_dict):
    """Parse media file URL:s form tweet data

    :extended_entities_dict: the "extended_entities" dict of a tweet
    :returns: list of media file urls
    """
    urls = []
    for media_item in extended_entities_dict.get("media", []):
        # The static image is always present.
        urls.append(media_item["media_url_https"])
        if "video_info" not in media_item:
            continue
        # Pick the best (highest bitrate) mp4 variant; twitter sometimes
        # reports bitrate=0, hence the -1 sentinel.
        best_bitrate = -1
        best_url = None
        for variant in media_item["video_info"]["variants"]:
            if "bitrate" not in variant or "content_type" not in variant:
                continue
            if variant["content_type"] != "video/mp4":
                continue
            bitrate = int(variant["bitrate"])
            if bitrate > best_bitrate:
                best_bitrate = bitrate
                best_url = variant["url"]
        if not best_url:
            print("Error: No bitrate / content_type")
            print(media_item["video_info"])
        else:
            urls.append(best_url)
    return urls
def parse_binlinks_from_tweet(tweetdict):
    """Parse binary file url:s from a single tweet.

    :tweetdict: json data dict for tweet
    :returns: list of urls for media files
    """
    urls = []
    # Profile images for the tweeting user.
    if "user" in tweetdict:
        profile = tweetdict["user"]
        urls.append(profile["profile_image_url_https"])
        urls.append(profile["profile_background_image_url_https"])
    # Attached media (photos / videos).
    if "extended_entities" in tweetdict:
        urls.extend(parse_extended_entities(tweetdict["extended_entities"]))
    return urls
def main():
    """Harvest media urls from a tweet jsonl(.gz) file into a WARC archive.

    Reads args.tweet_file, queues each unique media URL for the fetcher
    threads, and starts a single WARC-writer thread; blocks until both
    queues are fully processed.
    """
    start = time.time()
    if not os.path.isdir(args.archive_dir):
        os.mkdir(args.archive_dir)
    logging.basicConfig(
        filename=os.path.join(args.archive_dir, "media_harvest.log"),
        level=logging.INFO,
        format="%(asctime)s %(levelname)s %(message)s"
    )
    logging.getLogger(__name__)
    logging.info("Logging media harvest for %s", args.tweet_file)
    d = Dedup()
    d.start()
    # FIX: use a set — the original tested `url in urls` on a list, which is
    # O(n) per url and quadratic over the whole harvest.
    seen_urls = set()
    uniqueUrlCount = 0
    duplicateUrlCount = 0
    opener = gzip.open if args.tweet_file.endswith('.gz') else open
    logging.info("Checking for duplicate urls")
    # FIX: the original never closed the input file; `with` guarantees it.
    with opener(args.tweet_file, 'r') as tweetfile:
        for line in tweetfile:
            tweet = json.loads(line)
            for url in parse_binlinks_from_tweet(tweet):
                if url not in seen_urls:
                    seen_urls.add(url)
                    q.put(url)
                    uniqueUrlCount += 1
                else:
                    duplicateUrlCount += 1
    logging.info("Found %s total media urls %s unique and %s duplicates", uniqueUrlCount+duplicateUrlCount, uniqueUrlCount, duplicateUrlCount)
    threads = int(args.threads)
    # Cap fetcher threads at 2 to stay polite to the media hosts.
    if threads > 2:
        threads = 2
    for i in range(threads):
        t = GetResource(q)
        t.setDaemon(True)
        t.start()
    wt = WriteWarc(out_queue, os.path.join(args.archive_dir, 'warc.warc'))
    wt.setDaemon(True)
    wt.start()
    q.join()
    out_queue.join()
    logging.info("Finished media harvest in %s", str(timedelta(seconds=(time.time() - start))))
if __name__ == '__main__':
parser = argparse.ArgumentParser("archive")
parser.add_argument("tweet_file", action="store", help="a twitter jsonl.gz input file")
parser.add_argument("archive_dir", action="store", help="a directory where the resulting warc is stored")
parser.add_argument("--threads", action="store", default=1, hel
|
tedlee/markov
|
markovgen.py
|
Python
|
mit
| 2,709 | 0.032484 |
"""
What
-----
A Markov Chain is a sequence of values where the next value
depends only on the current value (and not past values). It's
basically a really simple state machine, where given the present
state, the future state is conditionally independent of the past.
Thus we can ask the question: Given the present word, how likely
is it that this word I've chosen would be the next?
How
-----
1) The last two words are the current state.
2) Next word depends on last two words only, or on present state only.
I've simplified this example down to the core elements of a Markov text generator.
Run the following to generate your own nonsensical strings:
$ python run.py
"""
# FIX: this import statement was garbled in the source ("import" and
# "random" split across lines by extraction junk).
import random
# Class for generating markov chain
class Markov(object):
    """Markov-chain text generator built from a whitespace-separated corpus.

    State is the last two words; the next word is drawn uniformly from the
    words observed to follow that pair in the corpus.
    """

    def __init__(self, open_file):
        # Simple dict cache: (word1, word2) -> list of observed next words.
        self.cache = {}
        self.open_file = open_file
        # Grabs all the words from the file
        self.words = self.file_to_words()
        # Verifys number of words in corpus
        self.word_size = len(self.words)
        # FIX: this call was garbled in the source ("s" / "elf.database()"
        # split across lines), which made the class a syntax error.
        self.database()

    # Function that grabs all the words from the given file
    def file_to_words(self):
        """Return every whitespace-separated word of the input file, in order."""
        self.open_file.seek(0)
        data = self.open_file.read()
        words = data.split()
        return words

    def triples(self):
        """ Generates triples from the given data string. So if our string were
            "What a lovely day", we'd generate (What, a, lovely) and then
            (a, lovely, day).
        """
        if len(self.words) < 3:
            # NEED MOAR WORDS
            return

        for i in range(len(self.words) - 2):
            yield (self.words[i], self.words[i+1], self.words[i+2])

    def database(self):
        """Populate self.cache from every (w1, w2, w3) triple in the corpus."""
        for w1, w2, w3 in self.triples():
            # The first two words are the key.
            key = (w1, w2)
            if key in self.cache:
                self.cache[key].append(w3)
            else:
                # First time this bigram is seen.
                self.cache[key] = [w3]

    # Size denotes the length of the sentence to be outputted
    def generate_markov_text(self, size=20):
        """Print a randomly generated sentence of roughly *size* words."""
        # Random starting bigram drawn from the corpus.
        seed = random.randint(0, self.word_size-3)
        seed_word, next_word = self.words[seed], self.words[seed+1]
        w1, w2 = seed_word, next_word
        # Holds the generated sentence.
        gen_words = []
        # FIX: range() instead of Python-2-only xrange(); identical here.
        for i in range(size):
            gen_words.append(w1)
            # Shift the window: pick a random observed successor of (w1, w2).
            w1, w2 = w2, random.choice(self.cache[(w1, w2)])
        gen_words.append(w2)
        # FIX: print as a function call — same output on Python 2 and 3
        # for a single argument (the old "print '...'" is a py3 syntax error).
        print(' '.join(gen_words))
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.